diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000..2799f6e6
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,4 @@
+# .coveragerc to control coverage.py
+
+[report]
+include = bamboo_engine/*.py
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..df60084c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,102 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+.cover/
+.hypothesis/
+
+# Translations
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# dotenv
+.env
+
+# virtualenv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+
+# Rope project settings
+.ropeproject
+
+# PyCharm project settings
+.idea
+
+
+# Frontend project lib file
+static/pipeline/node_modules/
+node_modules/
+webpack_cache
+
+# OS generate file
+.DS_Store
+
+
+# Editor setting file
+.vscode/
+
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..7d797129
--- /dev/null
+++ b/README.md
@@ -0,0 +1,200 @@
+bamboo-engine 是一个通用的流程引擎,它可以解析、执行、调度由用户创建的流程任务,并提供了暂停、撤销、跳过、强制失败、重试和重入等灵活的控制能力,以及并行、子流程等进阶特性,还可以通过水平扩展来进一步提升任务的并发处理能力。
+
+> bamboo_engine 与 bamboo_pipeline 的性能对比: [性能对比](./docs/upgrade/bamboo_pipeline_vs_bamboo_engine.md)
+
+
+
+- [整体设计](#整体设计)
+- [Quick start](#quick-start)
+  - [1. 安装依赖](#1-安装依赖)
+  - [2. 项目初始化](#2-项目初始化)
+  - [3. 执行流程](#3-执行流程)
+- [benchmark](#benchmark)
+
+
+- 使用文档
+  - [核心概念](./docs/user_guide/basic_concept.md)
+  - [流程编排](./docs/user_guide/flow_orchestration.md)
+  - [流程构造器](./docs/user_guide/flow_builder.md)
+  - [SPLICE 变量](./docs/user_guide/splice_var.md)
+  - [Engine API](./docs/user_guide/engine_api.md)
+  - [监控](./docs/user_guide/monitor.md)
+- 运行时文档
+  - bamboo-pipeline
+    - [自定义组件](./docs/user_guide/custom_component.md)
+    - [运行自定义组件](./docs/user_guide/run_your_component.md)
+    - [组件单元测试](./docs/user_guide/component_unit_test.md)
+    - [Worker 配置](./docs/user_guide/workers.md)
+
+## 整体设计
+
+bamboo-engine 是流程引擎核心模块、调度逻辑的定义和实现,并没有内置特定的引擎运行时,需要搭配实现了 `bamboo_engine.eri.interfaces.EngineRuntimeInterface` 接口的引擎运行时使用,目前提供了以下运行时可供使用:
+
+- 基于 Django,Celery 的运行时:[bamboo-pipeline](./runtime/bamboo-pipeline)
+
+引擎整体架构:
+
+![](./docs/assets/img/code_arch.png)
+
+## Quick start
+
+### 1. 安装依赖
+
+```
+$ pip install bamboo-pipeline
+```
+
+### 2. 项目初始化
+
+由于 `bamboo-pipeline` 运行时基于 Django 实现,所以需要新建一个 Django 项目:
+
+```
+$ django-admin startproject bamboo_engine_playground
+$ cd bamboo_engine_playground
+```
+
+在 `bamboo_engine_playground/settings.py` 下添加如下配置:
+
+```python
+from pipeline.eri.celery.queues import *
+from celery import Celery
+
+app = Celery("proj")
+
+app.config_from_object("django.conf:settings")
+
+INSTALLED_APPS = [
+    ...
+ "pipeline", + "pipeline.engine", + "pipeline.component_framework", + "pipeline.eri", + ... +] +``` + +在 `bamboo_engine_playground` 目录下初始化数据库: + +``` +$ python manage.py migrate +``` + +### 3. 执行流程 + +首先在 `bamboo_engine_playground` 目录下启动 celery worker: + +``` +$ python manage.py celery worker -Q er_execute,er_schedule -l info +``` + +创建并执行一个简单的流程: + +![](./docs/assets/img/simple_example.png) + +```python +import time + +from bamboo_engine import api +from bamboo_engine.builder import * +from pipeline.eri.runtime import BambooDjangoRuntime + +# 使用 builder 构造出流程描述结构 +start = EmptyStartEvent() +# 这里先使用 bamboo-pipeline 自带的示例组件,我们会在后续的章节中学习如何自定义组件 +act = ServiceActivity(component_code="example_component") +end = EmptyEndEvent() + +start.extend(act).extend(end) + +pipeline = builder.build_tree(start) + +# 执行流程对象 +runtime = BambooDjangoRuntime() + +api.run_pipeline(runtime=runtime, pipeline=pipeline) + +# 等待 1s 后获取流程执行结果 +time.sleep(1) + +result = api.get_pipeline_states(runtime=runtime, root_id=pipeline["id"]) + +print(result.data) +``` + +随后我们就能够看到流程的状态信息,如下所示,流程中的所有节点已经执行成功: + +```python +{'pc31c89e6b85a4e2c8c5db477978c1a57': {'id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 'vaf47e56f2f31401e979c3c47b2a0c285', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 688664, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 688423, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 775165, tzinfo=), + 'children': {'e42035b3f98374062921a191115fc602e': {'id': 'e42035b3f98374062921a191115fc602e', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 've2d0fa10d7d842a1bcac25984620232a', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 744490, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 744308, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 746690, tzinfo=)}, + 'e327f83de42df4ebfab375c271bf63d29': {'id': 'e327f83de42df4ebfab375c271bf63d29', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 'v893cdc14150d4df5b20f2db32ba142b3', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 753321, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 753122, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 758697, tzinfo=)}, + 'e6c7d7a3721ca4b19a5a7f3b34d8387bf': {'id': 'e6c7d7a3721ca4b19a5a7f3b34d8387bf', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 'v0c661ee6994d4eb4bdbfe5260f9a9f22', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 767563, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 767384, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 773341, tzinfo=)}}}} +``` + +恭喜你,你已经成功的创建了一个流程并把它运行起来了! 
+ +## benchmark + + +测试环境: + +- MacBook Pro(16 英寸,2019) +- 处理器:2.6 GHz 六核Intel Core i7 +- 内存:32 GB 2667 MHz DDR4 +- OS:macOS Big Sur 11.2.1 +- Broker:RabbitMQ 3.8.2 +- MySQL:5.7.22 +- worker 启动命令(单个 worker 进程 -c 参数不变,通过增加进程来提高并发处理能力) + - python manage.py celery worker -c 100 -P gevent -l info -Q er_execute -n execute_%(process_num)02d + - python manage.py celery worker -c 100 -P gevent -l info -Q er_schedule -n schedule_%(process_num)02d + +| 测试场景 | worker concurrency | 流程执行耗时(s) | +| --------------------------------- | ------------------ | --------------- | +| 100个流程(单流程17个节点)并发执行 | 100 | 25.98 | +| 100个流程(单流程17个节点)并发执行 | 200 | 14.75 | +| 100个流程(单流程17个节点)并发执行 | 500 | 8.29 | +| 100个流程(单流程17个节点)并发执行 | 1000 | 6.78 | +| 1000节点大流程 | 100 | 19.33 | +| 1000节点大流程 | 200 | 12.5 | +| 1000节点大流程 | 500 | 11 | +| 1000节点大流程 | 1000 | 7.5 | + +![](./benchmark/EXECUTION%20水平扩展/Line-20210309.png) diff --git a/bamboo_engine/__init__.py b/bamboo_engine/__init__.py new file mode 100644 index 00000000..14355651 --- /dev/null +++ b/bamboo_engine/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .engine import Engine # noqa diff --git a/bamboo_engine/__version__.py b/bamboo_engine/__version__.py new file mode 100644 index 00000000..bd13acc7 --- /dev/null +++ b/bamboo_engine/__version__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +__version__ = "1.3.2" diff --git a/bamboo_engine/api.py b/bamboo_engine/api.py new file mode 100644 index 00000000..e3a008f8 --- /dev/null +++ b/bamboo_engine/api.py @@ -0,0 +1,627 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+"""
+API 模块用于向外暴露接口,bamboo-engine 的使用者应该永远只用这个模块与 bamboo-engine 进行交互
+"""
+
+import logging
+import functools
+import traceback
+from typing import Optional, Any, List
+from contextlib import contextmanager
+
+from .utils.object import Representable
+from .eri import EngineRuntimeInterface, ContextValue
+from .engine import Engine
+from .template import Template
+from .context import Context
+from .utils.constants import VAR_CONTEXT_MAPPING
+
+logger = logging.getLogger("bamboo_engine")
+
+
+class EngineAPIResult(Representable):
+    """
+    api 统一返回结果
+    """
+
+    def __init__(
+        self,
+        result: bool,
+        message: str,
+        exc: Optional[Exception] = None,
+        data: Optional[Any] = None,
+        exc_trace: Optional[str] = None,
+    ):
+        """
+        :param result: 是否执行成功
+        :type result: bool
+        :param message: 附加消息,result 为 False 时关注
+        :type message: str
+        :param exc: 异常对象
+        :type exc: Exception
+        :param data: 数据
+        :type data: Any
+        """
+        self.result = result
+        self.message = message
+        self.exc = exc
+        self.data = data
+        self.exc_trace = exc_trace
+
+
+def _ensure_return_api_result(func):
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            data = func(*args, **kwargs)
+        except Exception as e:
+            logger.exception("{} raise error.".format(func.__name__))
+            trace = traceback.format_exc()
+            return EngineAPIResult(result=False, message="fail", exc=e, data=None, exc_trace=trace)
+
+        if isinstance(data, EngineAPIResult):
+            return data
+        return EngineAPIResult(result=True, message="success", exc=None, data=data, exc_trace=None)
+
+    return wrapper
+
+
+@_ensure_return_api_result
+def run_pipeline(runtime: EngineRuntimeInterface, pipeline: dict, **options) -> EngineAPIResult:
+    """
+    执行 pipeline
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param pipeline: pipeline 描述对象
+    :type pipeline: dict
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+
+    Engine(runtime).run_pipeline(pipeline, **options)
+
+
+@_ensure_return_api_result
+def pause_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
+    """
+    暂停 pipeline 的执行
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param pipeline_id: pipeline id
+    :type pipeline_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+
+    Engine(runtime).pause_pipeline(pipeline_id)
+
+
+@_ensure_return_api_result
+def revoke_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
+    """
+    撤销 pipeline,使其无法继续执行
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param pipeline_id: pipeline id
+    :type pipeline_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).revoke_pipeline(pipeline_id)
+
+
+@_ensure_return_api_result
+def resume_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
+    """
+    继续被 pause_pipeline 接口暂停的 pipeline 的执行
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param pipeline_id: pipeline id
+    :type pipeline_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).resume_pipeline(pipeline_id)
+
+
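+# 使用示意:所有 API 都经过 _ensure_return_api_result 包装,统一返回 EngineAPIResult,
+# 调用方应先检查 result 字段,失败时可通过 exc_trace 获取异常堆栈
+# (runtime 为任一 EngineRuntimeInterface 实现):
+#
+#     result = run_pipeline(runtime=runtime, pipeline=pipeline)
+#     if not result.result:
+#         logger.error("run_pipeline fail: %s\n%s", result.message, result.exc_trace)
+
+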
+@_ensure_return_api_result
+def pause_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    预约暂停某个节点的执行
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 id
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).pause_node_appoint(node_id)
+
+
+@_ensure_return_api_result
+def resume_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    继续由于某个节点而暂停的 pipeline 的执行
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 id
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).resume_node_appoint(node_id)
+
+
+@_ensure_return_api_result
+def retry_node(runtime: EngineRuntimeInterface, node_id: str, data: Optional[dict] = None) -> EngineAPIResult:
+    """
+    重试某个执行失败的节点
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 失败的节点 id
+    :type node_id: str
+    :param data: 重试时使用的节点执行输入
+    :type data: dict
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).retry_node(node_id, data)
+
+
+@_ensure_return_api_result
+def retry_subprocess(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    重试进入失败的子流程节点
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 子流程节点 id
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).retry_subprocess(node_id)
+
+
+@_ensure_return_api_result
+def skip_node(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    跳过某个执行失败的节点(仅限 event,activity)
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 失败的节点 id
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).skip_node(node_id)
+
+
+@_ensure_return_api_result
+def skip_exclusive_gateway(runtime: EngineRuntimeInterface, node_id: str, flow_id: str) -> EngineAPIResult:
+    """
+    跳过某个执行失败的分支网关
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 失败的分支网关 id
+    :type node_id: str
+    :param flow_id: 需要往下执行的 flow id
+    :type flow_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).skip_exclusive_gateway(node_id, flow_id)
+
+
+@_ensure_return_api_result
+def forced_fail_activity(runtime: EngineRuntimeInterface, node_id: str, ex_data: str) -> EngineAPIResult:
+    """
+    强制失败某个 activity 节点
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 ID
+    :type node_id: str
+    :param ex_data: 异常信息
+    :type ex_data: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).forced_fail_activity(node_id, ex_data)
+
+
+@_ensure_return_api_result
+def callback(runtime: EngineRuntimeInterface, node_id: str, version: str, data: dict) -> EngineAPIResult:
+    """
+    回调某个节点
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 ID
+    :type node_id: str
+    :param version: 节点执行版本
+    :type version: str
+    :param data: 回调数据
+    :type data: dict
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    Engine(runtime).callback(node_id, version, data)
+
+
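+# 回调示意:version 为节点的执行版本,可以从 get_children_states 返回数据中的
+# version 字段获取;data 的内容由具体组件约定,此处仅为示意:
+#
+#     state = get_children_states(runtime=runtime, node_id=node_id).data[node_id]
+#     callback(runtime=runtime, node_id=node_id, version=state["version"], data={"status": "ok"})
+
+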
+@_ensure_return_api_result
+def get_pipeline_states(runtime: EngineRuntimeInterface, root_id: str, flat_children=True) -> EngineAPIResult:
+    """
+    返回某个任务的状态树
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param root_id: 根节点 ID
+    :type root_id: str
+    :param flat_children: 是否将所有子节点展开
+    :type flat_children: bool
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    states = runtime.get_state_by_root(root_id)
+    if not states:
+        return {}
+
+    root_state = None
+    children = {}
+    for s in states:
+        if s.node_id != root_id:
+            children[s.node_id] = {
+                "id": s.node_id,
+                "state": s.name,
+                "root_id": s.root_id,
+                "parent_id": s.parent_id,
+                "version": s.version,
+                "loop": s.loop,
+                "retry": s.retry,
+                "skip": s.skip,
+                "error_ignorable": s.error_ignored,
+                "error_ignored": s.error_ignored,
+                "created_time": s.created_time,
+                "started_time": s.started_time,
+                "archived_time": s.archived_time,
+                "children": {},
+            }
+        else:
+            root_state = s
+
+    if not flat_children:
+        # set node children
+        for node_id, state in children.items():
+            if state["parent_id"] in children:
+                children[state["parent_id"]]["children"][node_id] = state
+
+        # pop sub child
+        for node_id in list(children.keys()):
+            if children[node_id]["parent_id"] != root_state.node_id:
+                children.pop(node_id)
+
+    state_tree = {}
+    state_tree[root_state.node_id] = {
+        "id": root_state.node_id,
+        "state": root_state.name,
+        "root_id": root_state.root_id,
+        "parent_id": root_state.root_id,
+        "version": root_state.version,
+        "loop": root_state.loop,
+        "retry": root_state.retry,
+        "skip": root_state.skip,
+        "error_ignorable": root_state.error_ignored,
+        "error_ignored": root_state.error_ignored,
+        "created_time": root_state.created_time,
+        "started_time": root_state.started_time,
+        "archived_time": root_state.archived_time,
+        "children": children,
+    }
+    return state_tree
+
+
+@_ensure_return_api_result
+def get_children_states(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    返回某个节点及其所有子节点的状态
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 父流程 ID
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    parent_state = runtime.get_state_or_none(node_id)
+    if not parent_state:
+        return {}
+
+    states = runtime.get_state_by_parent(node_id)
+    children = {}
+    for s in states:
+        children[s.node_id] = {
+            "id": s.node_id,
+            "state": s.name,
+            "root_id": s.root_id,
+            "parent_id": s.parent_id,
+            "version": s.version,
+            "loop": s.loop,
+            "retry": s.retry,
+            "skip": s.skip,
+            "error_ignorable": s.error_ignored,
+            "error_ignored": s.error_ignored,
+            "created_time": s.created_time,
+            "started_time": s.started_time,
+            "archived_time": s.archived_time,
+            "children": {},
+        }
+
+    state_tree = {}
+    state_tree[parent_state.node_id] = {
+        "id": parent_state.node_id,
+        "state": parent_state.name,
+        "root_id": parent_state.root_id,
+        "parent_id": parent_state.root_id,
+        "version": parent_state.version,
+        "loop": parent_state.loop,
+        "retry": parent_state.retry,
+        "skip": parent_state.skip,
+        "error_ignorable": parent_state.error_ignored,
+        "error_ignored": parent_state.error_ignored,
+        "created_time": parent_state.created_time,
+        "started_time": parent_state.started_time,
+        "archived_time": parent_state.archived_time,
+        "children": children,
+    }
+    return state_tree
+
+
+@_ensure_return_api_result
+def get_execution_data_inputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    获取某个节点执行数据的输入数据
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 ID
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    return runtime.get_execution_data_inputs(node_id)
+
+
+@_ensure_return_api_result
+def get_execution_data_outputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
+    """
+    获取某个节点的执行数据输出
+
+    :param runtime: 引擎运行时实例
+    :type runtime: EngineRuntimeInterface
+    :param node_id: 节点 ID
+    :type node_id: str
+    :return: 执行结果
+    :rtype: EngineAPIResult
+    """
+    return runtime.get_execution_data_outputs(node_id)
+
+
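+# 状态查询示意:get_pipeline_states 默认(flat_children=True)会把所有后代节点平铺在
+# 根节点的 children 中;传入 flat_children=False 时则按父子关系嵌套,只保留根节点的直接子树:
+#
+#     states = get_pipeline_states(runtime=runtime, root_id=pipeline_id).data
+#     root = states[pipeline_id]
+#     finished = [nid for nid, s in root["children"].items() if s["state"] == "FINISHED"]
+
+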
+@_ensure_return_api_result +def get_execution_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: + """ + 获取某个节点的执行数据 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ + data = runtime.get_execution_data(node_id) + return {"inputs": data.inputs, "outputs": data.outputs} + + +@_ensure_return_api_result +def get_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: + """ + 获取某个节点的原始输入数据 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ + data = runtime.get_data(node_id) + return { + "inputs": {k: {"need_render": v.need_render, "value": v.value} for k, v in data.inputs.items()}, + "outputs": data.outputs, + } + + +@_ensure_return_api_result +def get_node_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult: + """ + 获取某个节点的历史记录概览 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 执行结果 + :rtype: EngineAPIResult + """ + return [ + { + "id": h.id, + "node_id": h.node_id, + "started_time": h.started_time, + "archived_time": h.archived_time, + "loop": h.loop, + "skip": h.skip, + "version": h.version, + "inputs": h.inputs, + "outputs": h.outputs, + } + for h in runtime.get_histories(node_id, loop) + ] + + +@_ensure_return_api_result +def get_node_short_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult: + """ + 获取某个节点的简要历史记录 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 执行结果 + :rtype: EngineAPIResult + """ + return [ + { + "id": h.id, + "node_id": h.node_id, + "started_time": h.started_time, + "archived_time": h.archived_time, + "loop": h.loop, + "skip": h.skip, + "version": h.version, + } + for h in runtime.get_short_histories(node_id, loop) + ] + + +@_ensure_return_api_result +def get_pipeline_debug_info(runtime: EngineRuntimeInterface, pipeline_id: str): + """ + 获取某个流程的调试信息 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param pipeline_id: 流程 ID + :type pipeline_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ + + return { + "contex_values": runtime.get_context(pipeline_id), + "processes": runtime.get_process_info_with_root_pipeline(pipeline_id), + } + + +@_ensure_return_api_result +def get_node_debug_info(runtime: EngineRuntimeInterface, node_id: str): + """ + 获取某个节点的调试信息 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ + + data = None + state = None + err = [] + + try: + data = runtime.get_data(node_id) + except Exception as e: + err.append(str(e)) + + try: + state = runtime.get_state(node_id) + except Exception as e: + err.append(str(e)) + + return { + "node": runtime.get_node(node_id), + "data": data, + "state": state, + "err": err, + } + + +@_ensure_return_api_result +def preview_node_inputs( + runtime: EngineRuntimeInterface, + pipeline: dict, + node_id: str, + subprocess_stack: List[str] = [], + root_pipeline_data: dict = {}, + parent_params: dict = {}, +): + """ + 预览某个节点的输入结果 + + :param pipeline: 预处理后的流程树数据 + :type pipeline: dict + :param 
node_id: 节点 ID + :type node_id: str + :param subprocess_stack: 子流程,需保证顺序 + :type subprocess_stack: List[str] + :param root_pipeline_data: root流程数据 + :param parent_params: 父流程传入参数 + :return: 执行结果 + :rtype: EngineAPIResult + """ + context_values = [ + ContextValue(key=key, type=VAR_CONTEXT_MAPPING[info["type"]], value=info["value"], code=info.get("custom_type")) + for key, info in list(pipeline["data"].get("inputs", {}).items()) + list(parent_params.items()) + ] + context = Context(runtime, context_values, root_pipeline_data) + + if subprocess_stack: + subprocess = subprocess_stack[0] + child_pipeline = pipeline["activities"][subprocess]["pipeline"] + param_data = { + key: info["value"] + for key, info in pipeline["activities"][subprocess]["params"].items() + } + hydrated_context = context.hydrate(deformat=True) + hydrated_param_data = Template(param_data).render(hydrated_context) + formatted_param_data = {key: {"value": value, "type": "plain"} for key, value in hydrated_param_data.items()} + return preview_node_inputs( + runtime=runtime, + pipeline=child_pipeline, + node_id=node_id, + subprocess_stack=subprocess_stack[1:], + root_pipeline_data=root_pipeline_data, + parent_params=formatted_param_data, + ) + raw_inputs = pipeline["activities"][node_id]["component"]["inputs"] + raw_inputs = {key: info["value"] for key, info in raw_inputs.items()} + hydrated_context = context.hydrate(deformat=True) + inputs = Template(raw_inputs).render(hydrated_context) + return inputs diff --git a/bamboo_engine/builder/__init__.py b/bamboo_engine/builder/__init__.py new file mode 100644 index 00000000..01852d18 --- /dev/null +++ b/bamboo_engine/builder/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .builder import * # noqa +from .flow import * # noqa diff --git a/bamboo_engine/builder/builder.py b/bamboo_engine/builder/builder.py new file mode 100644 index 00000000..95d4d071 --- /dev/null +++ b/bamboo_engine/builder/builder.py @@ -0,0 +1,228 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import copy +import queue + +from bamboo_engine.utils.string import unique_id + +from .flow.data import Data, Params +from .flow.event import ExecutableEndEvent + + +__all__ = ["build_tree"] + +__skeleton = { + "id": None, + "start_event": None, + "end_event": None, + "activities": {}, + "gateways": {}, + "flows": {}, + "data": {"inputs": {}, "outputs": []}, +} + +__node_type = { + "ServiceActivity": "activities", + "SubProcess": "activities", + "EmptyEndEvent": "end_event", + "EmptyStartEvent": "start_event", + "ParallelGateway": "gateways", + "ConditionalParallelGateway": "gateways", + "ExclusiveGateway": "gateways", + "ConvergeGateway": "gateways", +} + +__start_elem = {"EmptyStartEvent"} + +__end_elem = {"EmptyEndEvent"} + +__multiple_incoming_type = { + "ServiceActivity", + "ConvergeGateway", + "EmptyEndEvent", + "ParallelGateway", + "ConditionalParallelGateway", + "ExclusiveGateway", + "SubProcess", +} + +__incoming = "__incoming" + + +def build_tree(start_elem, id=None, data=None): + tree = copy.deepcopy(__skeleton) + elem_queue = queue.Queue() + processed_elem = set() + + tree[__incoming] = {} + elem_queue.put(start_elem) + + while not elem_queue.empty(): + # get elem + elem = elem_queue.get() + + # update node when we meet again + if elem.id in processed_elem: + __update(tree, elem) + continue + + # add to queue + for e in elem.outgoing: + elem_queue.put(e) + + # mark as processed + processed_elem.add(elem.id) + + # tree grow + __grow(tree, elem) + + del tree[__incoming] + tree["id"] = id or unique_id("p") + user_data = data.to_dict() if isinstance(data, Data) else data + tree["data"] = user_data or tree["data"] + return tree + + +def __update(tree, elem): + node_type = __node_type[elem.type()] + node = tree[node_type] if node_type == "end_event" else tree[node_type][elem.id] + node["incoming"] = tree[__incoming][elem.id] + + +def __grow(tree, elem): + if elem.type() in __start_elem: + outgoing = unique_id("f") + tree["start_event"] = { + "incoming": "", + "outgoing": outgoing, + "type": elem.type(), + "id": elem.id, + "name": elem.name, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() in __end_elem or isinstance(elem, ExecutableEndEvent): + tree["end_event"] = { + "incoming": tree[__incoming][elem.id], + "outgoing": "", + "type": elem.type(), + "id": elem.id, + "name": elem.name, + } + + elif elem.type() == "ServiceActivity": + outgoing = unique_id("f") + + tree["activities"][elem.id] = { + "incoming": tree[__incoming][elem.id], + "outgoing": outgoing, + "type": elem.type(), + "id": elem.id, + "name": elem.name, + "error_ignorable": elem.error_ignorable, + "timeout": elem.timeout, + "skippable": elem.skippable, + "retryable": elem.retryable, + "component": elem.component_dict(), + "optional": False, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() == "SubProcess": + outgoing = unique_id("f") + + subprocess_param = ( + elem.params.to_dict() if isinstance(elem.params, Params) else elem.params + ) + + subprocess = { + "id": elem.id, + "incoming": tree[__incoming][elem.id], + "name": elem.name, + "outgoing": outgoing, + "type": elem.type(), + "params": subprocess_param, + } + + subprocess["pipeline"] = build_tree( + start_elem=elem.start, id=elem.id, data=elem.data + ) + + tree["activities"][elem.id] = subprocess + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() == "ParallelGateway": + outgoing = [unique_id("f") for 
_ in range(len(elem.outgoing))] + + tree["gateways"][elem.id] = { + "id": elem.id, + "incoming": tree[__incoming][elem.id], + "outgoing": outgoing, + "type": elem.type(), + "name": elem.name, + } + + for i, next_elem in enumerate(elem.outgoing): + __grow_flow(tree, outgoing[i], elem, next_elem) + + elif elem.type() in {"ExclusiveGateway", "ConditionalParallelGateway"}: + outgoing = [unique_id("f") for _ in range(len(elem.outgoing))] + + tree["gateways"][elem.id] = { + "id": elem.id, + "incoming": tree[__incoming][elem.id], + "outgoing": outgoing, + "type": elem.type(), + "name": elem.name, + "conditions": elem.link_conditions_with(outgoing), + } + + for i, next_elem in enumerate(elem.outgoing): + __grow_flow(tree, outgoing[i], elem, next_elem) + + elif elem.type() == "ConvergeGateway": + outgoing = unique_id("f") + + tree["gateways"][elem.id] = { + "id": elem.id, + "incoming": tree[__incoming][elem.id], + "outgoing": outgoing, + "type": elem.type(), + "name": elem.name, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + else: + raise Exception() + + +def __grow_flow(tree, outgoing, elem, next_element): + tree["flows"][outgoing] = { + "is_default": False, + "source": elem.id, + "target": next_element.id, + "id": outgoing, + } + if next_element.type() in __multiple_incoming_type: + tree[__incoming].setdefault(next_element.id, []).append(outgoing) + else: + tree[__incoming][next_element.id] = outgoing diff --git a/bamboo_engine/builder/flow/__init__.py b/bamboo_engine/builder/flow/__init__.py new file mode 100644 index 00000000..3d636925 --- /dev/null +++ b/bamboo_engine/builder/flow/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .activity import * # noqa +from .event import * # noqa +from .gateway import * # noqa +from .data import * # noqa diff --git a/bamboo_engine/builder/flow/activity.py b/bamboo_engine/builder/flow/activity.py new file mode 100644 index 00000000..e524c290 --- /dev/null +++ b/bamboo_engine/builder/flow/activity.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from .base import * # noqa +from bamboo_engine.eri import NodeType +from bamboo_engine.utils.collections import FancyDict + +__all__ = ["ServiceActivity", "SubProcess"] + + +class ServiceActivity(Element): + def __init__( + self, + component_code=None, + error_ignorable=False, + timeout=None, + skippable=True, + retryable=True, + *args, + **kwargs + ): + self.component = FancyDict({"code": component_code, "inputs": FancyDict({})}) + self.error_ignorable = error_ignorable + self.timeout = timeout + self.skippable = skippable + self.retryable = retryable + super(ServiceActivity, self).__init__(*args, **kwargs) + + def type(self): + return NodeType.ServiceActivity.value + + def component_dict(self): + return { + "code": self.component.code, + "inputs": { + key: var.to_dict() for key, var in list(self.component.inputs.items()) + }, + } + + +class SubProcess(Element): + def __init__( + self, start=None, data=None, params=None, global_outputs=None, *args, **kwargs + ): + self.start = start + self.data = data + self.params = params or {} + self.global_outputs = FancyDict(global_outputs or {}) + super(SubProcess, self).__init__(*args, **kwargs) + + def type(self): + return NodeType.SubProcess.value diff --git a/bamboo_engine/builder/flow/base.py b/bamboo_engine/builder/flow/base.py new file mode 100644 index 00000000..176ae35e --- /dev/null +++ b/bamboo_engine/builder/flow/base.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine.utils.string import unique_id + +__all__ = ["Element"] + + +class Element(object): + def __init__(self, id=None, name=None, outgoing=None): + self.id = id or unique_id("e") + self.name = name + self.outgoing = outgoing or [] + + def extend(self, element): + """ + build a connection from self to element and return element + :param element: target + :rtype: Element + """ + self.outgoing.append(element) + return element + + def connect(self, *args): + """ + build connections from self to elements in args and return self + :param args: target elements + :rtype: Element + """ + for e in args: + self.outgoing.append(e) + return self + + def converge(self, element): + """ + converge all connection those diverge from self to element and return element + :param element: target + :rtype: Element + """ + for e in self.outgoing: + e.tail().connect(element) + return element + + def to(self, element): + return element + + def tail(self): + """ + get tail element for self + :rtype: Element + """ + is_tail = len(self.outgoing) == 0 + e = self + + while not is_tail: + e = e.outgoing[0] + is_tail = len(e.outgoing) == 0 + + return e + + def type(self): + raise NotImplementedError() + + def __eq__(self, other): + return self.id == other.id + + def __repr__(self): + return "<{cls} {name}:{id}>".format( + cls=type(self).__name__, name=self.name, id=self.id + ) diff --git a/bamboo_engine/builder/flow/data.py b/bamboo_engine/builder/flow/data.py new file mode 100644 index 00000000..b5a9e799 --- /dev/null +++ b/bamboo_engine/builder/flow/data.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine.utils.collections import FancyDict + + +class Data(object): + def __init__(self, inputs=None, outputs=None, pre_render_keys=None): + self.inputs = FancyDict(inputs or {}) + self.outputs = outputs or [] + self.pre_render_keys = pre_render_keys or [] + + def to_dict(self): + base = {"inputs": {}, "outputs": self.outputs, "pre_render_keys": self.pre_render_keys} + + for key, value in list(self.inputs.items()): + base["inputs"][key] = value.to_dict() if isinstance(value, Var) else value + + return base + + +class Params(object): + def __init__(self, params=None): + self.params = FancyDict(params or {}) + + def to_dict(self): + base = {} + + for key, value in list(self.params.items()): + base[key] = value.to_dict() if isinstance(value, Var) else value + + return base + + +class Var(object): + PLAIN = "plain" + SPLICE = "splice" + LAZY = "lazy" + + def __init__(self, type, value, custom_type=None): + self.type = type + self.value = value + self.custom_type = custom_type + + def to_dict(self): + base = {"type": self.type, "value": self.value} + if self.type == self.LAZY: + base["custom_type"] = self.custom_type + + return base + + +class DataInput(Var): + def __init__(self, *args, **kwargs): + super(DataInput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(DataInput, self).to_dict() + base["is_param"] = True + return base + + +class NodeOutput(Var): + def __init__(self, source_act, source_key, *args, **kwargs): + self.source_act = source_act + self.source_key = source_key + kwargs["value"] = None + super(NodeOutput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(NodeOutput, self).to_dict() + base["source_act"] = self.source_act + base["source_key"] = self.source_key + return base + + +class RewritableNodeOutput(Var): + def __init__(self, source_act, *args, **kwargs): + self.source_act = source_act + kwargs["value"] = None + super(RewritableNodeOutput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(RewritableNodeOutput, self).to_dict() + base["source_act"] = self.source_act + return base diff --git a/bamboo_engine/builder/flow/event.py b/bamboo_engine/builder/flow/event.py new file mode 100644 index 00000000..bc2ae6d9 --- /dev/null +++ b/bamboo_engine/builder/flow/event.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +from bamboo_engine.eri import NodeType +from .base import * # noqa + +__all__ = ["EmptyEndEvent", "EmptyStartEvent", "ExecutableEndEvent"] + + +class EmptyStartEvent(Element): + def type(self): + return NodeType.EmptyStartEvent.value + + +class EmptyEndEvent(Element): + def type(self): + return NodeType.EmptyEndEvent.value + + +class ExecutableEndEvent(Element): + def __init__(self, type, **kwargs): + self._type = type + super(ExecutableEndEvent, self).__init__(**kwargs) + + def type(self): + return self._type diff --git a/bamboo_engine/builder/flow/gateway.py b/bamboo_engine/builder/flow/gateway.py new file mode 100644 index 00000000..1fc9c3d0 --- /dev/null +++ b/bamboo_engine/builder/flow/gateway.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from bamboo_engine.eri import NodeType + +from .base import * # noqa + +__all__ = [ + "ParallelGateway", + "ExclusiveGateway", + "ConvergeGateway", + "ConditionalParallelGateway", +] + + +class ParallelGateway(Element): + def type(self): + return NodeType.ParallelGateway.value + + +class ConditionGateway(Element): + def __init__(self, conditions=None, *args, **kwargs): + self.conditions = conditions or {} + super(ConditionGateway, self).__init__(*args, **kwargs) + + def add_condition(self, index, evaluate): + self.conditions[index] = evaluate + + def link_conditions_with(self, outgoing): + conditions = {} + for i, out in enumerate(outgoing): + conditions[out] = {"evaluate": self.conditions[i]} + + return conditions + + +class ConditionalParallelGateway(ConditionGateway): + def type(self): + return NodeType.ConditionalParallelGateway.value + + +class ExclusiveGateway(ConditionGateway): + def type(self): + return NodeType.ExclusiveGateway.value + + +class ConvergeGateway(Element): + def type(self): + return NodeType.ConvergeGateway.value diff --git a/bamboo_engine/config.py b/bamboo_engine/config.py new file mode 100644 index 00000000..fc88e3ec --- /dev/null +++ b/bamboo_engine/config.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +""" +引擎内部配置模块 +""" + + +class Settings: + """ + 引擎全局配置对象 + """ + + MAKO_SANDBOX_SHIELD_WORDS = [ + "ascii", + "bytearray", + "bytes", + "callable", + "chr", + "classmethod", + "compile", + "delattr", + "dir", + "divmod", + "exec", + "eval", + "filter", + "frozenset", + "getattr", + "globals", + "hasattr", + "hash", + "help", + "id", + "input", + "isinstance", + "issubclass", + "iter", + "locals", + "map", + "memoryview", + "next", + "object", + "open", + "print", + "property", + "repr", + "setattr", + "staticmethod", + "super", + "type", + "vars", + "__import__", + ] + + MAKO_SANDBOX_IMPORT_MODULES = {} + + RERUN_INDEX_OFFSET = 0 diff --git a/bamboo_engine/context.py b/bamboo_engine/context.py new file mode 100644 index 00000000..2db5b13f --- /dev/null +++ b/bamboo_engine/context.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +流程上下文相关逻辑封装模块 +""" + +import logging +from weakref import WeakValueDictionary +from typing import List, Dict, Any + +from bamboo_engine.eri import ( + ContextValue, + ExecutionData, + EngineRuntimeInterface, + Variable, + ContextValueType, +) +from .template.template import Template +from .utils.string import format_var_key, deformat_var_key + +logger = logging.getLogger("bamboo_engine") + + +class PlainVariable(Variable): + """ + 普通变量 + """ + + def __init__(self, key: str, value: Any): + self.key = key + self.value = value + + def get(self): + return self.value + + +class SpliceVariable(Variable): + """ + 模板类型变量,会尝试在流程上下文中解析变量中定义的模板 + """ + + def __init__(self, key: str, value: Any, pool: WeakValueDictionary): + self.key = key + self.value = value + self.pool = pool + self.refs = [k for k in Template(value).get_reference()] + import pdb + + def get(self): + context = {} + for r in self.refs: + if r not in self.pool: + continue + + var = self.pool[r] + if issubclass(var.__class__, Variable): + var = var.get() + context[deformat_var_key(r)] = var + + return Template(self.value).render(context=context) + + +def _raw_key(key: str) -> str: + return key + + +class Context: + """ + 流程执行上下文,封装引擎在执行流程的过程中对上下文进行的操作和逻辑 + """ + + def __init__( + self, + runtime: EngineRuntimeInterface, + values: List[ContextValue], + additional_data: dict, + ): + """ + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param values: 上下文数据列表 + :type values: List[ContextValue] + :param additional_data: 额外数据字典 + :type additional_data: dict + """ + self.values = values + self.runtime = runtime + self.pool = WeakValueDictionary() + self.variables = {} + self.additional_data = additional_data + + # 将上下文数据转换成变量,变量内封装了自身解析的逻辑,且实现了 Variable 接口 + for v in self.values: + if v.type is ContextValueType.PLAIN: + self.variables[v.key] = PlainVariable(key=v.key, value=v.value) + elif v.type is ContextValueType.SPLICE: + self.variables[v.key] = SpliceVariable(key=v.key, 
value=v.value, pool=self.pool) + elif v.type is ContextValueType.COMPUTE: + self.variables[v.key] = self.runtime.get_compute_variable( + code=v.code, + key=v.key, + value=SpliceVariable(key=v.key, value=v.value, pool=self.pool), + additional_data=self.additional_data, + ) + + for k, var in self.variables.items(): + self.pool[k] = var + + def hydrate(self, deformat=False, mute_error=False) -> Dict[str, Any]: + """ + 将当前上下文中的数据清洗成 Dict[str, Any] 类型的朴素数据,过程中会进行变量引用的分析和替换 + + :param deformat: 是否将返回字典中的 key 值从 ${%s} 替换为 %s + :type deformat: bool, optional + :return: 上下文数据朴素值字典 + :rtype: Dict[str, Any] + """ + key_formatter = deformat_var_key if deformat else _raw_key + hydrated = {} + + for key, var in self.pool.items(): + try: + hydrated[key_formatter(key)] = var.get() + except Exception as e: + if not mute_error: + raise e + logger.exception("%s get error." % key) + hydrated[key_formatter(key)] = str(e) + + return hydrated + + def extract_outputs( + self, + pipeline_id: str, + data_outputs: Dict[str, str], + execution_data_outputs: Dict[str, Any], + ): + """ + 将某个节点的输出提取到流程上下文中 + + :param pipeline_id: 上下文对应的流程/子流程 ID + :type pipeline_id: str + :param data_outputs: 节点输出键映射 + :type data_outputs: Dict[str, str] + :param execution_data_outputs: 节点执行数据输出 + :type execution_data_outputs: Dict[str, Any] + """ + update = {} + for origin_key, target_key in data_outputs.items(): + if origin_key not in execution_data_outputs: + continue + + update[target_key] = ContextValue( + key=target_key, + type=ContextValueType.PLAIN, + value=execution_data_outputs[origin_key], + ) + + self.runtime.upsert_plain_context_values(pipeline_id=pipeline_id, update=update) diff --git a/bamboo_engine/engine.py b/bamboo_engine/engine.py new file mode 100644 index 00000000..deea1471 --- /dev/null +++ b/bamboo_engine/engine.py @@ -0,0 +1,861 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +# 引擎核心模块 + +import time +import random +import logging +import traceback +from typing import Optional +from contextlib import contextmanager + + +from . import states +from . 
import validator
+from .local import set_node_info, CurrentNodeInfo
+from .exceptions import InvalidOperationError, NotFoundError, StateVersionNotMatchError
+from .handler import HandlerFactory
+from .metrics import (
+    ENGINE_RUNNING_PROCESSES,
+    ENGINE_RUNNING_SCHEDULES,
+    ENGINE_PROCESS_RUNNING_TIME,
+    ENGINE_SCHEDULE_RUNNING_TIME,
+    ENGINE_NODE_EXECUTE_TIME,
+    ENGINE_NODE_SCHEDULE_TIME,
+    setup_gauge,
+    setup_histogram,
+)
+from .eri import (
+    EngineRuntimeInterface,
+    ScheduleType,
+    NodeType,
+    State,
+    ExecutionData,
+    DataInput,
+    Node,
+)
+from .utils.string import get_lower_case_name
+
+logger = logging.getLogger("bamboo_engine")
+
+
+class Engine:
+    """
+    流程引擎,封装流程核心调度逻辑
+    """
+
+    PURE_SKIP_ENABLE_NODE_TYPE = {NodeType.ServiceActivity, NodeType.EmptyStartEvent}
+
+    def __init__(self, runtime: EngineRuntimeInterface):
+        self.runtime = runtime
+
+    # api
+    def run_pipeline(
+        self,
+        pipeline: dict,
+        root_pipeline_data: Optional[dict] = None,
+        root_pipeline_context: Optional[dict] = None,
+        subprocess_context: Optional[dict] = None,
+        **options
+    ):
+        """
+        运行流程
+
+        :param pipeline: 流程数据
+        :type pipeline: dict
+        :param root_pipeline_data: 根流程数据
+        :type root_pipeline_data: dict
+        :param root_pipeline_context: 根流程上下文
+        :type root_pipeline_context: dict
+        :param subprocess_context: 子流程预置流程上下文
+        :type subprocess_context: dict
+        """
+
+        root_pipeline_data = {} if root_pipeline_data is None else root_pipeline_data
+        root_pipeline_context = {} if root_pipeline_context is None else root_pipeline_context
+        subprocess_context = {} if subprocess_context is None else subprocess_context
+        cycle_tolerate = options.get("cycle_tolerate", False)
+        validator.validate_and_process_pipeline(pipeline, cycle_tolerate)
+
+        self.runtime.pre_prepare_run_pipeline(
+            pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options
+        )
+
+        process_id = self.runtime.prepare_run_pipeline(
+            pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options
+        )
+        # execute from start event
+        self.runtime.execute(process_id, pipeline["start_event"]["id"])
+
+        self.runtime.post_prepare_run_pipeline(
+            pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options
+        )
+
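+    # 使用示意(runtime 为任一 EngineRuntimeInterface 实现;cycle_tolerate 用于控制
+    # 是否允许流程中存在环,默认为 False;root_pipeline_data 的取值仅为示意):
+    #
+    #     Engine(runtime).run_pipeline(
+    #         pipeline,
+    #         root_pipeline_data={"operator": "admin"},
+    #         cycle_tolerate=True,
+    #     )
+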
+    def pause_pipeline(self, pipeline_id: str):
+        """
+        暂停流程
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+        if not self.runtime.has_state(pipeline_id):
+            raise NotFoundError("node({}) does not exist".format(pipeline_id))
+
+        self.runtime.pre_pause_pipeline(pipeline_id)
+
+        self.runtime.set_state(node_id=pipeline_id, to_state=states.SUSPENDED)
+
+        self.runtime.post_pause_pipeline(pipeline_id)
+
+    def revoke_pipeline(self, pipeline_id: str):
+        """
+        撤销流程
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+        if not self.runtime.has_state(pipeline_id):
+            raise NotFoundError("node({}) does not exist".format(pipeline_id))
+
+        self.runtime.pre_revoke_pipeline(pipeline_id)
+
+        self.runtime.set_state(node_id=pipeline_id, to_state=states.REVOKED)
+
+        self.runtime.post_revoke_pipeline(pipeline_id)
+
+    def resume_pipeline(self, pipeline_id: str):
+        """
+        继续流程
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+        state = self.runtime.get_state(pipeline_id)
+
+        if state.name != states.SUSPENDED:
+            raise InvalidOperationError("pipeline({}) state is: {}".format(pipeline_id, state.name))
+
+        info_list = self.runtime.get_suspended_process_info(pipeline_id)
+
+        self.runtime.pre_resume_pipeline(pipeline_id)
+
+        self.runtime.set_state(node_id=pipeline_id, to_state=states.RUNNING)
+
+        if info_list:
+            self.runtime.batch_resume(process_id_list=[i.process_id for i in info_list])
+            for info in info_list:
+                self.runtime.execute(info.process_id, info.current_node)
+
+        self.runtime.post_resume_pipeline(pipeline_id)
+
+    def pause_node_appoint(self, node_id: str):
+        """
+        预约暂停节点
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+        node = self.runtime.get_node(node_id)
+
+        if node.type == NodeType.SubProcess:
+            raise InvalidOperationError("can not use pause_node_appoint api for {}".format(node.type))
+
+        self.runtime.pre_pause_node(node_id)
+
+        self.runtime.set_state(node_id=node_id, to_state=states.SUSPENDED)
+
+        self.runtime.post_pause_node(node_id)
+
+    def resume_node_appoint(self, node_id: str):
+        """
+        继续由于节点暂停被阻塞的流程的执行
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+        node = self.runtime.get_node(node_id)
+
+        if node.type == NodeType.SubProcess:
+            raise InvalidOperationError("can not use resume_node_appoint api for {}".format(node.type))
+
+        self.runtime.pre_resume_node(node_id)
+
+        info_list = self.runtime.get_suspended_process_info(node_id)
+
+        self.runtime.set_state(node_id=node_id, to_state=states.READY)
+
+        # found process suspended by node suspend
+        for info in info_list:
+            self.runtime.resume(process_id=info.process_id)
+            self.runtime.execute(info.process_id, info.current_node)
+
+        self.runtime.post_resume_node(node_id)
+
+    def retry_node(self, node_id: str, data: Optional[dict] = None):
+        """
+        重试节点
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 重试时使用的输入数据, defaults to None
+        :type data: Optional[dict], optional
+        """
+        node = self.runtime.get_node(node_id)
+
+        if not node.can_retry:
+            raise InvalidOperationError("can not retry node({}) with can_retry({})".format(node_id, node.can_retry))
+
+        state = self.runtime.get_state(node_id)
+
+        process_id = self._ensure_state_is_fail_and_return_process_id(state)
+
+        self.runtime.pre_retry_node(node_id, data)
+
+        if data is not None:
+            self.runtime.set_data_inputs(
+                node_id, {k: DataInput(need_render=True, value=v) for k, v in data.items()},
+            )
+
+        self._add_history(node_id, state)
+
+        self.runtime.set_state(
+            node_id=node_id,
+            to_state=states.READY,
+            is_retry=True,
+            refresh_version=True,
+            clear_started_time=True,
+            clear_archived_time=True,
+        )
+
+        self.runtime.execute(process_id, node_id)
+
+        self.runtime.post_retry_node(node_id, data)
+
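+    # 重试示意:retry_node 传入的 data 会以 need_render=True 写入节点数据,
+    # 重试时会重新在上下文中渲染这些输入(变量引用的写法仅为示意):
+    #
+    #     Engine(runtime).retry_node(node_id, data={"param": "${new_var}"})
+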
+    def skip_node(self, node_id: str):
+        """
+        跳过失败的节点继续执行
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :raises InvalidOperationError: 当节点不允许跳过时抛出
+        :raises InvalidOperationError: 当节点类型不支持纯跳过时抛出
+        """
+        node = self.runtime.get_node(node_id)
+
+        if not node.can_skip:
+            raise InvalidOperationError("can not skip this node")
+
+        if node.type not in self.PURE_SKIP_ENABLE_NODE_TYPE:
+            raise InvalidOperationError("can not use skip_node api for {}".format(node.type))
+
+        state = self.runtime.get_state(node_id)
+
+        process_id = self._ensure_state_is_fail_and_return_process_id(state)
+
+        self.runtime.pre_skip_node(node_id)
+
+        # pure skip node type only has 1 next node
+        next_node_id = node.target_nodes[0]
+
+        self._add_history(node_id, state)
+
+        self.runtime.set_state(
+            node_id=node_id, to_state=states.FINISHED, is_skip=True, refresh_version=True, set_archive_time=True,
+        )
+
+        # 跳过节点时不再做节点输出提取到上下文的操作
+        # 因为节点失败的位置未知,提取出来的变量可能是无法预知的,会导致不可预知的行为
+        self.runtime.execute(process_id, next_node_id)
+
+        self.runtime.post_skip_node(node_id)
+
+    def skip_exclusive_gateway(self, node_id: str, flow_id: str):
+        """
+        跳过执行失败的分支网关继续执行
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param flow_id: 需要继续执行的流 ID
+        :type flow_id: str
+        :raises InvalidOperationError: 当节点类型不是分支网关或状态不为 FAILED 时抛出
+        """
+        node = self.runtime.get_node(node_id)
+
+        if node.type != NodeType.ExclusiveGateway:
+            raise InvalidOperationError("{} is not exclusive gateway, actual: {}".format(node_id, node.type.value))
+
+        next_node_id = node.targets[flow_id]
+
+        state = self.runtime.get_state(node_id)
+
+        process_id = self._ensure_state_is_fail_and_return_process_id(state)
+
+        self.runtime.pre_skip_exclusive_gateway(node_id, flow_id)
+
+        self._add_history(node_id, state)
+
+        self.runtime.set_state(
+            node_id=node_id, to_state=states.FINISHED, is_skip=True, refresh_version=True, set_archive_time=True,
+        )
+
+        self.runtime.execute(process_id, next_node_id)
+
+        self.runtime.post_skip_exclusive_gateway(node_id, flow_id)
+
+    def forced_fail_activity(self, node_id: str, ex_data: str):
+        """
+        强制失败某个 Activity
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param ex_data: 强制失败时写入节点执行数据的信息
+        :type ex_data: str
+        :raises InvalidOperationError: 当节点类型不是 ServiceActivity 时抛出
+        :raises InvalidOperationError: 当节点状态不为 RUNNING 时抛出
+        """
+        node = self.runtime.get_node(node_id)
+
+        if node.type != NodeType.ServiceActivity:
+            raise InvalidOperationError("{} is not activity, actual: {}".format(node_id, node.type.value))
+
+        state = self.runtime.get_state(node_id)
+
+        if state.name != states.RUNNING:
+            raise InvalidOperationError("{} state is not RUNNING, actual: {}".format(node_id, state.name))
+
+        process_id = self.runtime.get_process_id_with_current_node_id(node_id)
+
+        if not process_id:
+            raise InvalidOperationError("can not find process with current node id: {}".format(node_id))
+
+        self.runtime.pre_forced_fail_activity(node_id, ex_data)
+
+        outputs = self.runtime.get_execution_data_outputs(node_id)
+
+        outputs["ex_data"] = ex_data
+        outputs["_forced_failed"] = True
+
+        old_ver = state.version
+        new_ver = self.runtime.set_state(
+            node_id=node_id, to_state=states.FAILED, refresh_version=True, set_archive_time=True,
+        )
+
+        self.runtime.set_execution_data_outputs(node_id, outputs)
+
+        self.runtime.kill(process_id)
+
+        self.runtime.post_forced_fail_activity(node_id, ex_data, old_ver, new_ver)
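
(编辑补充)强制失败会把失败原因和 _forced_failed 标记写入节点执行数据输出,并杀死对应进程。下面是一段示意代码,沿用上例的 engine 实例,节点 ID 为假设值:

```python
# 示意:强制失败一个 RUNNING 状态的 ServiceActivity,并读取写入的输出
node_id = "e6c7d7a3721ca4b19a5a7f3b34d8387bf"  # 假设的节点 ID

engine.forced_fail_activity(node_id, ex_data="manually terminated by operator")

outputs = engine.runtime.get_execution_data_outputs(node_id)
assert outputs["_forced_failed"] is True
assert outputs["ex_data"] == "manually terminated by operator"
```
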
+    def callback(self, node_id: str, version: str, data: dict):
+        """
+        回调某个节点
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 回调执行版本
+        :type version: str
+        :param data: 回调数据
+        :type data: dict
+        :raises InvalidOperationError: 当找不到对应的睡眠进程时抛出
+        :raises InvalidOperationError: 当回调版本与节点当前版本不一致时抛出
+        :raises InvalidOperationError: 当调度已完成时抛出
+        :raises InvalidOperationError: 当调度已过期时抛出
+        """
+
+        process_id = self.runtime.get_sleep_process_with_current_node_id(node_id)
+
+        if not process_id:
+            raise InvalidOperationError("can not find process with current node id: {}".format(node_id))
+
+        state = self.runtime.get_state(node_id)
+
+        schedule = self.runtime.get_schedule_with_node_and_version(node_id, version)
+
+        if state.version != version:
+            self.runtime.expire_schedule(schedule.id)
+            raise InvalidOperationError("node version {} not exist".format(version))
+
+        if schedule.finished:
+            raise InvalidOperationError("schedule is already finished")
+
+        if schedule.expired:
+            raise InvalidOperationError("schedule is already expired")
+
+        self.runtime.pre_callback(node_id, version, data)
+
+        data_id = self.runtime.set_callback_data(node_id, state.version, data)
+
+        self.runtime.schedule(process_id, node_id, schedule.id, data_id)
+
+        self.runtime.post_callback(node_id, version, data)
+
+    # engine event
+    @setup_gauge(ENGINE_RUNNING_PROCESSES)
+    @setup_histogram(ENGINE_PROCESS_RUNNING_TIME)
+    def execute(self, process_id: int, node_id: str):
+        """
+        在某个进程上从某个节点开始进入推进循环
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+        process_info = self.runtime.get_process_info(process_id)
+        self.runtime.wake_up(process_id)
+
+        current_node_id = node_id
+
+        # 推进循环
+        while True:
+            # 进程心跳
+            self.runtime.beat(process_id)
+
+            # 遇到推进终点后需要尝试唤醒父进程
+            if current_node_id == process_info.destination_id:
+                self.runtime.die(process_id)
+                wake_up_success = self.runtime.child_process_finish(process_info.parent_id, process_id)
+
+                if wake_up_success:
+                    self.runtime.execute(process_info.parent_id, process_info.destination_id)
+
+                return
+
+            try:
+
+                self.runtime.set_current_node(process_id, current_node_id)
+
+                # 冻结检测
+                if self.runtime.is_frozen(process_id):
+                    self.runtime.freeze(process_id)
+                    return
+
+                node_state_map = self.runtime.batch_get_state_name(process_info.pipeline_stack)
+
+                # 检测根流程是否被撤销
+                if node_state_map[process_info.root_pipeline_id] == states.REVOKED:
+                    self.runtime.die(process_id)
+                    logger.info(
+                        "[%s] root pipeline revoked checked at node %s", process_info.root_pipeline_id, current_node_id,
+                    )
+                    return
+
+                # 检测流程栈中是否有被暂停的流程
+                for pid in process_info.pipeline_stack:
+                    if node_state_map[pid] == states.SUSPENDED:
+                        logger.info(
+                            "[%s] process %s suspended by subprocess %s",
+                            process_info.root_pipeline_id,
+                            process_id,
+                            pid,
+                        )
+                        self.runtime.suspend(process_id, pid)
+                        return
+
+                node = self.runtime.get_node(current_node_id)
+                node_state = self.runtime.get_state_or_none(current_node_id)
+                loop = 1
+                inner_loop = 1
+                reset_mark_bit = False
+
+                if node_state:
+                    rerun_limit = self.runtime.node_rerun_limit(process_info.root_pipeline_id, current_node_id)
+                    # 重入次数超过限制
+                    if (
+                        node_state.name == states.FINISHED
+                        and node.type != NodeType.SubProcess
+                        and node_state.loop > rerun_limit
+                    ):
+                        exec_outputs = self.runtime.get_execution_data_outputs(current_node_id)
+                        exec_outputs["ex_data"] = "node execution exceed rerun limit {}".format(rerun_limit)
+
+                        self.runtime.set_execution_data_outputs(current_node_id, exec_outputs)
+                        self.runtime.set_state(
+                            node_id=current_node_id, to_state=states.FAILED, set_archive_time=True,
+                        )
+                        self.runtime.sleep(process_id)
+
+                        return
+
+                    # 检测节点是否被预约暂停
+                    if node_state.name == states.SUSPENDED:
+                        # 预约暂停的节点在预约时获取不到 root_id 和 parent_id,故在此进行设置
+                        self.runtime.set_state_root_and_parent(
+                            node_id=current_node_id,
+                            root_id=process_info.root_pipeline_id,
+
parent_id=process_info.top_pipeline_id, + ) + self.runtime.suspend(process_id, current_node_id) + logger.info( + "[%s] process %s suspended by node %s", + process_info.root_pipeline_id, + process_id, + current_node_id, + ) + return + + # 设置状态前检测 + if node_state.name not in states.INVERTED_TRANSITION[states.RUNNING]: + self.runtime.sleep(process_id) + return + + if node_state.name == states.FINISHED: + loop = node_state.loop + 1 + inner_loop = node_state.inner_loop + 1 + reset_mark_bit = True + + # 重入前记录历史 + if ( + node.type in {NodeType.SubProcess, NodeType.ServiceActivity} + and node_state.name == states.FINISHED + ): + self._add_history(node_id=current_node_id, state=node_state) + + version = self.runtime.set_state( + node_id=current_node_id, + to_state=states.RUNNING, + loop=loop, + inner_loop=inner_loop, + root_id=process_info.root_pipeline_id, + parent_id=process_info.top_pipeline_id, + set_started_time=True, + reset_skip=reset_mark_bit, + reset_retry=reset_mark_bit, + reset_error_ignored=reset_mark_bit, + refresh_version=reset_mark_bit, + ) + + logger.info( + "[%s] before execute %s(%s) state: %s", + process_info.root_pipeline_id, + node.__class__.__name__, + current_node_id, + node_state, + ) + handler = HandlerFactory.get_handler(node, self.runtime) + set_node_info(CurrentNodeInfo(node_id=current_node_id, version=version, loop=loop)) + type_label = self._get_metrics_node_type(node) + execute_start = time.time() + execute_result = handler.execute(process_info, loop, inner_loop, version) + ENGINE_NODE_EXECUTE_TIME.labels(type_label).observe(time.time() - execute_start) + + # 进程是否要进入睡眠 + if execute_result.should_sleep: + self.runtime.sleep(process_id) + + # 节点是否准备好进入调度 + if execute_result.schedule_ready: + schedule = self.runtime.set_schedule( + process_id=process_id, + node_id=current_node_id, + version=version, + schedule_type=execute_result.schedule_type, + ) + if execute_result.schedule_type == ScheduleType.POLL: + self.runtime.schedule(process_id, current_node_id, schedule.id) + # 是否有待调度的子进程 + elif execute_result.dispatch_processes: + children = [d.process_id for d in execute_result.dispatch_processes] + logger.info( + "[%s] %s dispatch %s children: %s", + process_info.root_pipeline_id, + process_info.top_pipeline_id, + len(execute_result.dispatch_processes), + execute_result.dispatch_processes, + ) + self.runtime.join(process_id, children) + for d in execute_result.dispatch_processes: + self.runtime.execute(d.process_id, d.node_id) + + if execute_result.should_die: + self.runtime.die(process_id) + + if execute_result.should_sleep or execute_result.should_die: + return + + current_node_id = execute_result.next_node_id + except Exception as e: + ex_data = traceback.format_exc() + logger.warning( + "[%s]execute exception catch at node(%s): %s", + process_info.root_pipeline_id, + current_node_id, + ex_data, + ) + + # state version already changed, so give up this execute + if isinstance(e, StateVersionNotMatchError): + logger.warning( + "[%s]execute exception catch StateVersionNotMatchError at node(%s): %s", + process_info.root_pipeline_id, + current_node_id, + ex_data, + ) + return + + # make sure sleep call at first, because remain operations may have been completed in execute + self.runtime.sleep(process_info.process_id) + + outputs = self.runtime.get_execution_data_outputs(current_node_id) + outputs["ex_data"] = ex_data + self.runtime.set_execution_data_outputs(current_node_id, outputs) + + self.runtime.set_state( + node_id=current_node_id, + to_state=states.FAILED, + 
root_id=process_info.root_pipeline_id,
+                    parent_id=process_info.top_pipeline_id,
+                    set_started_time=True,
+                    set_archive_time=True,
+                )
+
+                return
+
+    @setup_gauge(ENGINE_RUNNING_SCHEDULES)
+    @setup_histogram(ENGINE_SCHEDULE_RUNNING_TIME)
+    def schedule(
+        self, process_id: int, node_id: str, schedule_id: str, callback_data_id: Optional[int] = None,
+    ):
+        """
+        在某个进程上开始某个节点的调度
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param schedule_id: 调度对象 ID
+        :type schedule_id: str
+        :param callback_data_id: 回调数据 ID, defaults to None
+        :type callback_data_id: Optional[int], optional
+        """
+        root_pipeline_id = ""
+        try:
+            process_info = self.runtime.get_process_info(process_id)
+            root_pipeline_id = process_info.root_pipeline_id
+
+            state = self.runtime.get_state(node_id)
+            schedule = self.runtime.get_schedule(schedule_id)
+
+            # engine context prepare
+            set_node_info(CurrentNodeInfo(node_id=node_id, version=state.version, loop=state.loop))
+
+            # schedule already finished
+            if schedule.finished:
+                logger.warning(
+                    "[%s] schedule(%s) %s with version %s already finished",
+                    root_pipeline_id,
+                    schedule_id,
+                    node_id,
+                    schedule.version,
+                )
+                return
+
+            # 检查 schedule 是否过期
+            if state.version != schedule.version:
+                logger.info(
+                    "[%s] schedule(%s) %s with version %s expired, current version: %s",
+                    root_pipeline_id,
+                    schedule_id,
+                    node_id,
+                    schedule.version,
+                    state.version,
+                )
+                self.runtime.expire_schedule(schedule_id)
+                return
+
+            # 检查节点状态是否合法
+            if state.name != states.RUNNING:
+                logger.info(
+                    "[%s] schedule(%s) %s with version %s state is not running: %s",
+                    root_pipeline_id,
+                    schedule_id,
+                    node_id,
+                    schedule.version,
+                    state.name,
+                )
+                self.runtime.expire_schedule(schedule_id)
+                return
+
+            # try to get lock
+            lock_get = self.runtime.apply_schedule_lock(schedule_id)
+
+            if not lock_get:
+                # only retry at multiple callback type
+                if schedule.type is not ScheduleType.MULTIPLE_CALLBACK:
+                    logger.info(
+                        "[%s] schedule(%s) %s with version %s is not multiple callback type, will not retry to get lock",  # noqa
+                        root_pipeline_id,
+                        schedule_id,
+                        node_id,
+                        schedule.version,
+                    )
+                    return
+
+                try_after = random.randint(1, 5)
+                logger.info(
+                    "[%s] schedule(%s) lock %s with data %s fetch fail, try after %s",
+                    root_pipeline_id,
+                    node_id,
+                    schedule_id,
+                    callback_data_id,
+                    try_after,
+                )
+                self.runtime.set_next_schedule(
+                    process_id=process_id,
+                    node_id=node_id,
+                    schedule_id=schedule_id,
+                    callback_data_id=callback_data_id,
+                    schedule_after=try_after,
+                )
+                return
+
+            with self._schedule_lock_keeper(schedule_id):
+                # 进程心跳
+                self.runtime.beat(process_id)
+
+                # fetch callback data
+                callback_data = None
+                if callback_data_id:
+                    callback_data = self.runtime.get_callback_data(callback_data_id)
+
+                # fetch node info and start schedule
+                node = self.runtime.get_node(node_id)
+                handler = HandlerFactory.get_handler(node, self.runtime)
+                type_label = self._get_metrics_node_type(node)
+
+                logger.info(
+                    "[%s] before schedule node %s with data %s", root_pipeline_id, node, callback_data,
+                )
+                schedule_start = time.time()
+                schedule_result = handler.schedule(process_info, state.loop, state.inner_loop, schedule, callback_data)
+                ENGINE_NODE_SCHEDULE_TIME.labels(type_label).observe(time.time() - schedule_start)
+
+                if schedule_result.has_next_schedule:
+                    self.runtime.set_next_schedule(
+                        process_info.process_id, node_id, schedule_id, schedule_result.schedule_after,
+                    )
+
+                if schedule_result.schedule_done:
+                    self.runtime.finish_schedule(schedule_id)
+
self.runtime.execute(process_id, schedule_result.next_node_id) + except Exception as e: + ex_data = traceback.format_exc() + logger.warning( + "[%s]schedule exception catch at node(%s): %s", root_pipeline_id, node_id, ex_data, + ) + + # state version already changed, so give up this schedule + if isinstance(e, StateVersionNotMatchError): + logger.exception( + "[%s]schedule exception catch StateVersionNotMatchError at node(%s): %s", + root_pipeline_id, + node_id, + ex_data, + ) + return + + # make sure release_schedule_lock call at first, + # because remain operations may have been completed in execute + self.runtime.release_schedule_lock(schedule_id) + + outputs = self.runtime.get_execution_data_outputs(node_id) + outputs["ex_data"] = ex_data + self.runtime.set_execution_data_outputs(node_id, outputs) + + self.runtime.set_state(node_id=node_id, to_state=states.FAILED, set_archive_time=True) + + # help method + @contextmanager + def _schedule_lock_keeper(self, schedule_id: int): + yield + self.runtime.release_schedule_lock(schedule_id) + + def _add_history( + self, node_id: str, state: Optional[State] = None, exec_data: Optional[ExecutionData] = None, + ) -> int: + if not state: + state = self.runtime.get_state(node_id) + + if not exec_data: + exec_data = self.runtime.get_execution_data(node_id) + + return self.runtime.add_history( + node_id=node_id, + started_time=state.started_time, + archived_time=state.archived_time, + loop=state.loop, + skip=state.skip, + retry=state.retry, + version=state.version, + inputs=exec_data.inputs, + outputs=exec_data.outputs, + ) + + def _ensure_state_is_fail_and_return_process_id(self, state: State) -> str: + if state.name != states.FAILED: + raise InvalidOperationError("{} state is not FAILED, actual {}".format(state.node_id, state.name)) + + process_id = self.runtime.get_sleep_process_with_current_node_id(state.node_id) + + if not process_id: + raise InvalidOperationError("can not find sleep process with current node id: {}".format(state.node_id)) + + return process_id + + def _get_metrics_node_type(self, node: Node) -> str: + if node.type != NodeType.ServiceActivity: + return get_lower_case_name(node.type.value) + + return "{}_{}".format(node.code, node.version) diff --git a/bamboo_engine/eri/__init__.py b/bamboo_engine/eri/__init__.py new file mode 100644 index 00000000..685aed2e --- /dev/null +++ b/bamboo_engine/eri/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +""" +引擎运行时接口定义模块 +""" + +from .interfaces import * # noqa +from .models import * # noqa diff --git a/bamboo_engine/eri/interfaces.py b/bamboo_engine/eri/interfaces.py new file mode 100644 index 00000000..dda08e05 --- /dev/null +++ b/bamboo_engine/eri/interfaces.py @@ -0,0 +1,1431 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from weakref import WeakValueDictionary +from datetime import datetime +from abc import ABCMeta, abstractmethod +from typing import List, Optional, Dict, Set, Any + +from .models import ( + State, + Node, + Schedule, + ScheduleType, + Data, + DataInput, + ExecutionData, + ExecutionHistory, + ExecutionShortHistory, + CallbackData, + ProcessInfo, + SuspendedProcessInfo, + DispatchProcess, + ContextValue, +) + +# plugin interface + +__version__ = "3.0.0" + + +def version(): + return __version__ + + +class Service(metaclass=ABCMeta): + """ + 服务对象接口 + """ + + @abstractmethod + def pre_execute(self, data: ExecutionData, root_pipeline_data: ExecutionData): + """ + execute 执行前执行的逻辑 + + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + """ + + @abstractmethod + def execute(self, data: ExecutionData, root_pipeline_data: ExecutionData) -> bool: + """ + execute 逻辑 + + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + :return: 是否执行成功 + :rtype: bool + """ + + @abstractmethod + def schedule( + self, + schedule: Schedule, + data: ExecutionData, + root_pipeline_data: ExecutionData, + callback_data: Optional[CallbackData] = None, + ) -> bool: + """ + schedule 逻辑 + + :param schedule: Schedule 对象 + :type schedule: Schedule + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + :param callback_data: 回调数据, defaults to None + :type callback_data: Optional[CallbackData], optional + :return: [description] + :rtype: bool + """ + + @abstractmethod + def need_schedule(self) -> bool: + """ + 服务是否需要调度 + + :return: 是否需要调度 + :rtype: bool + """ + + @abstractmethod + def schedule_type(self) -> Optional[ScheduleType]: + """ + 服务调度类型 + + :return: 调度类型 + :rtype: Optional[ScheduleType] + """ + + @abstractmethod + def is_schedule_done(self) -> bool: + """ + 调度是否完成 + + :return: 调度是否完成 + :rtype: bool + """ + + @abstractmethod + def schedule_after( + self, + schedule: Optional[Schedule], + data: ExecutionData, + root_pipeline_data: ExecutionData, + ) -> int: + """ + 计算下一次调度间隔 + + :param schedule: 调度对象,未进行调度时传入为空 + :type schedule: Optional[Schedule] + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + :return: 调度间隔,单位为秒 + :rtype: int + """ + + @abstractmethod + def setup_runtime_attributes(self, **attrs): + 
""" + 装载运行时属性 + + :param attrs: 运行时属性 + :type attrs: Dict[str, Any] + """ + + +class ExecutableEvent(metaclass=ABCMeta): + """ + 可执行结束节点接口 + """ + + @abstractmethod + def execute(pipeline_stack: List[str], root_pipeline_id: str): + """ + execute 逻辑 + + :param pipeline_stack: 流程栈 + :type pipeline_stack: List[str] + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + """ + + +class Variable(metaclass=ABCMeta): + """ + 变量接口 + """ + + @abstractmethod + def get(self) -> Any: + """ + 获取变量值 + + :return: 变量值 + :rtype: Any + """ + + +# runtime interface + + +class PluginManagerMixin: + """ + 插件管理接口,声明了插件(服务,可执行结束节点,变量)管理相关的接口 + """ + + @abstractmethod + def get_service(self, code: str, version: str) -> Service: + """ + 根据代号与版本获取特定服务对象实例 + + :param code: 服务唯一代号 + :type code: str + :param version: 服务版本 + :type version: str + :return: 服务对象实例 + :rtype: Service + """ + + @abstractmethod + def get_executable_end_event(self, code: str) -> ExecutableEvent: + """ + 根据代号获取特定可执行结束事件实例 + + :param code: 可执行结束事件唯一代号 + :type code: str + :return: 可执行结束事件实例 + :rtype: ExecutableEvent: + """ + + @abstractmethod + def get_compute_variable( + self, + code: str, + key: str, + value: Variable, + additional_data: dict, + ) -> Variable: + """ + 根据代号获取变量实例 + + :param code: 唯一代号 + :type code: str + :param key: 变量 key + :type key: str + :param value: 变量配置 + :type value: Any + :param additional_data: 额外数据字典 + :type additional_data: dict + :return: 变量实例 + :rtype: Variable + """ + + +class EngineAPIHooksMixin: + """ + 引擎 API 执行时调用的钩子相关接口声明 + """ + + @abstractmethod + def pre_prepare_run_pipeline( + self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options + ): + """ + 调用 pre_prepare_run_pipeline 前执行的钩子 + + :param pipeline: 流程描述对象 + :type pipeline: dict + :param root_pipeline_data 根流程数据 + :type root_pipeline_data: dict + :param root_pipeline_context 根流程上下文 + :type root_pipeline_context: dict + :param subprocess_context 子流程预置流程上下文 + :type subprocess_context: dict + """ + + @abstractmethod + def post_prepare_run_pipeline( + self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options + ): + """ + 调用 pre_prepare_run_pipeline 后执行的钩子 + + :param pipeline: 流程描述对象 + :type pipeline: dict + :param root_pipeline_data 根流程数据 + :type root_pipeline_data: dict + :param root_pipeline_context 根流程上下文 + :type root_pipeline_context: dict + :param subprocess_context 子流程预置流程上下文 + :type subprocess_context: dict + """ + + @abstractmethod + def pre_pause_pipeline(self, pipeline_id: str): + """ + 暂停 pipeline 前执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def post_pause_pipeline(self, pipeline_id: str): + """ + 暂停 pipeline 后执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def pre_revoke_pipeline(self, pipeline_id: str): + """ + 撤销 pipeline 前执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def post_revoke_pipeline(self, pipeline_id: str): + """ + 撤销 pipeline 后执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def pre_resume_pipeline(self, pipeline_id: str): + """ + 继续 pipeline 前执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def post_resume_pipeline(self, pipeline_id: str): + """ + 继续 pipeline 后执行的钩子 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + """ + + @abstractmethod + def pre_resume_node(self, node_id: str): + """ + 继续节点后执行的钩子 + 
+    @abstractmethod
+    def pre_resume_node(self, node_id: str):
+        """
+        继续节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def post_resume_node(self, node_id: str):
+        """
+        继续节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def pre_pause_node(self, node_id: str):
+        """
+        暂停节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def post_pause_node(self, node_id: str):
+        """
+        暂停节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def pre_retry_node(self, node_id: str, data: Optional[dict]):
+        """
+        重试节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 重试时使用的节点执行输入
+        :type data: Optional[dict]
+        """
+
+    @abstractmethod
+    def post_retry_node(self, node_id: str, data: Optional[dict]):
+        """
+        重试节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 重试时使用的节点执行输入
+        :type data: Optional[dict]
+        """
+
+    @abstractmethod
+    def pre_skip_node(self, node_id: str):
+        """
+        跳过节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def post_skip_node(self, node_id: str):
+        """
+        跳过节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def pre_skip_exclusive_gateway(self, node_id: str, flow_id: str):
+        """
+        跳过分支网关前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param flow_id: 跳过后选择的目标流 ID
+        :type flow_id: str
+        """
+
+    @abstractmethod
+    def post_skip_exclusive_gateway(self, node_id: str, flow_id: str):
+        """
+        跳过分支网关后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param flow_id: 跳过后选择的目标流 ID
+        :type flow_id: str
+        """
+
+    @abstractmethod
+    def pre_forced_fail_activity(self, node_id: str, ex_data: str):
+        """
+        强制失败节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param ex_data: 写入节点执行数据的失败信息
+        :type ex_data: str
+        """
+
+    @abstractmethod
+    def post_forced_fail_activity(self, node_id: str, ex_data: str, old_version: str, new_version: str):
+        """
+        强制失败节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param ex_data: 写入节点执行数据的失败信息
+        :type ex_data: str
+        :param old_version: 强制失败前的节点版本
+        :type old_version: str
+        :param new_version: 强制失败后的节点版本
+        :type new_version: str
+        """
+
+    @abstractmethod
+    def pre_callback(self, node_id: str, version: str, data: str):
+        """
+        回调节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 节点执行版本
+        :type version: str
+        :param data: 回调数据
+        :type data: str
+        """
+
+    @abstractmethod
+    def post_callback(self, node_id: str, version: str, data: str):
+        """
+        回调节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 节点执行版本
+        :type version: str
+        :param data: 回调数据
+        :type data: str
+        """
+
+    @abstractmethod
+    def pre_retry_subprocess(self, node_id: str):
+        """
+        子流程重试前执行的钩子
+
+        :param node_id: 子流程节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def post_retry_subprocess(self, node_id: str):
+        """
+        子流程重试后执行的钩子
+
+        :param node_id: 子流程节点 ID
+        :type node_id: str
+        """
+
+
+class TaskMixin:
+    """
+    引擎任务派发相关接口
+    """
+
+    @abstractmethod
+    def execute(self, process_id: int, node_id: str):
+        """
+        派发执行任务,执行任务被拉起执行时应该调用 Engine 实例的 execute 方法
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    @abstractmethod
+    def schedule(
+        self,
+        process_id: int,
+        node_id: str,
+        schedule_id: str,
+        callback_data_id: Optional[int] = None,
+    ):
+        """
+        派发调度任务,调度任务被拉起执行时应该调用 Engine 实例的 schedule 方法
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param schedule_id: 调度
ID + :type schedule_id: str + :param callback_data_id: 回调数据, defaults to None + :type callback_data_id: Optional[int], optional + """ + + @abstractmethod + def set_next_schedule( + self, + process_id: int, + node_id: str, + schedule_id: str, + schedule_after: int, + callback_data_id: Optional[int] = None, + ): + """ + 设置下次调度时间,调度倒数归零后应该执行 Engine 实例的 schedule 方法 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param schedule_id: 调度 ID + :type schedule_id: str + :param schedule_after: 调度倒数 + :type schedule_after: int + :param callback_data_id: 回调数据, defaults to None + :type callback_data_id: Optional[int], optional + """ + + @abstractmethod + def start_timeout_monitor(self, process_id: int, node_id: str, version: str, timeout: int): + """ + 开始对某个节点执行的超时监控,若超时时间归零后节点未进入归档状态,则强制失败该节点 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + :param timeout: 超时时间,单位为秒 + :type timeout: int + """ + + @abstractmethod + def stop_timeout_monitor( + self, + process_id: int, + node_id: str, + version: str, + ): + """ + 停止对某个节点的超时监控 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + """ + + +class ProcessMixin: + """ + 进程相关接口 + """ + + @abstractmethod + def beat(self, process_id: int): + """ + 进程心跳 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def wake_up(self, process_id: int): + """ + 将当前进程标记为唤醒状态 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def sleep(self, process_id: int): + """ + 将当前进程标记为睡眠状态 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def suspend(self, process_id: int, by: str): + """ + 将当前进程标记为阻塞状态 + + :param process_id: 进程 ID + :type process_id: int + :param by: 造成阻塞的节点信息 + :type by: str + """ + + @abstractmethod + def resume(self, process_id: int): + """ + 将进程标记为非阻塞状态 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def batch_resume(self, process_id_list: List[int]): + """ + 批量将进程标记为非阻塞状态 + + :param process_id_list: 进程 ID 列表 + :type process_id_list: List[int] + """ + + @abstractmethod + def die(self, process_id: int): + """ + 将当前进程标记为非存活状态 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def get_process_info(self, process_id: int) -> ProcessInfo: + """ + 获取某个进程的基本信息 + + :param process_id: 进程 ID + :type process_id: int + :return: 进程基本信息 + :rtype: ProcessInfo + """ + + @abstractmethod + def get_process_info_with_root_pipeline(self, pipeline_id: str) -> List[ProcessInfo]: + """ + 根据根流程 ID 获取一批进程的信息 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :return: 进程基本信息 + :rtype: List[ProcessInfo] + """ + + @abstractmethod + def kill(self, process_id: int): + """ + 强制结束某个进程正在进行的活动,并将其标志为睡眠状态 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def get_suspended_process_info(self, suspended_by: str) -> List[SuspendedProcessInfo]: + """ + 获取由于 pipeline 暂停而被暂停执行的进程信息 + + : param suspended_by: 进程 ID + : type suspended_by: str + : return: 暂停的进程信息 + : rtype: SuspendedProcessInfo + """ + + @abstractmethod + def get_sleep_process_with_current_node_id(self, node_id: str) -> Optional[int]: + """ + 获取由于处于睡眠状态且当前节点 ID 为 node_id 的进程 ID + + : param node_id: 节点 ID + : type node_id: str + : return: 进程 ID + : rtype: str + """ + + @abstractmethod + def 
get_process_id_with_current_node_id(self, node_id: str) -> Optional[int]: + """ + 获取当前节点 ID 为 node_id 且存活的进程 ID + + : param node_id: 节点 ID + : type node_id: str + : return: 进程 ID + : rtype: str + """ + + @abstractmethod + def set_current_node(self, process_id: int, node_id: str): + """ + 将进程当前处理节点标记为 node + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + """ + + @abstractmethod + def child_process_finish(self, parent_id: int, process_id: int) -> bool: + """ + 标记某个进程的子进程执行完成,并返回是否能够唤醒父进程继续执行的标志位 + + :param parent_id: 父进程 ID + :type parent_id: int + :param process_id: 子进程 ID + :type process_id: int + :return: 是否能够唤醒父进程继续执行 + :rtype: bool + """ + + @abstractmethod + def is_frozen(self, process_id: int) -> bool: + """ + 检测当前进程是否需要被冻结 + + :param process_id: 进程 ID + :type process_id: int + :return: 是否需要被冻结 + :rtype: bool + """ + + @abstractmethod + def freeze(self, process_id: int): + """ + 冻结当前进程 + + :param process_id: 进程 ID + :type process_id: int + """ + + @abstractmethod + def fork( + self, + parent_id: str, + root_pipeline_id: str, + pipeline_stack: List[str], + from_to: Dict[str, str], + ) -> List[DispatchProcess]: + """ + 根据当前进程 fork 出多个子进程 + + :param parent_id: 父进程 ID + :type parent_id: str + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + :param pipeline_stack: 子流程栈 + :type pipeline_stack: List[str] + :param from_to: 子进程的执行开始节点和目标节点 + :type from_to: Dict[str, str] + :return: 待调度进程信息列表 + :rtype: List[DispatchProcess] + """ + + @abstractmethod + def join(self, process_id: int, children_id: List[str]): + """ + 让父进程等待子进程 + + :param process_id: 父进程 ID + :type process_id: int + :param children_id: 子进程 ID 列表 + :type children_id: List[str] + """ + + @abstractmethod + def set_pipeline_stack(self, process_id: int, stack: List[str]): + """ + 设置进程的流程栈 + + :param process_id: 进程 ID + :type process_id: int + :param stack: 流程栈 + :type stack: List[str] + """ + + +class StateMixin: + """ + 状态相关接口 + """ + + @abstractmethod + def get_state(self, node_id: str) -> State: + """ + 获取某个节点的状态对象 + + : param node_id: 节点 ID + : type node_id: str + : return: State 实例 + : rtype: State + """ + + @abstractmethod + def get_state_or_none(self, node_id: str) -> Optional[State]: + """ + 获取某个节点的状态对象,如果不存在则返回 None + + : param node_id: 节点 ID + : type node_id: str + : return: State 实例 + : rtype: State + """ + + @abstractmethod + def get_state_by_root(self, root_id: str) -> List[State]: + """ + 根据根节点 ID 获取一批节点状态 + + :param root_id: 根节点 ID + :type root_id: str + :return: 节点状态列表 + :rtype: List[State] + """ + + @abstractmethod + def get_state_by_parent(self, parent_id: str) -> List[State]: + """ + 根据父节点 ID 获取一批节点状态 + + :param parent_id: 父节点 ID + :type parent_id: str + :return: 节点状态列表 + :rtype: List[State] + """ + + @abstractmethod + def batch_get_state_name(self, node_id_list: List[str]) -> Dict[str, str]: + """ + 批量获取一批节点的状态 + + :param node_id_list: 节点 ID 列表 + :type node_id_list: List[str] + :return: 节点ID -> 状态名称 + :rtype: Dict[str, str] + """ + + @abstractmethod + def has_state(self, node_id: str) -> bool: + """ + 是否存在某个节点的的状态 + + :param node_id: 节点 ID + :type node_id: str + :return: 该节点状态是否存在 + :rtype: bool + """ + + @abstractmethod + def reset_state_inner_loop(self, node_id: str) -> str: + """ + 设置节点的当前流程重入次数 + + :param node_id: 节点 ID + :type node_id: str + """ + + @abstractmethod + def reset_children_state_inner_loop(self, node_id: str): + """ + 批量设置子流程节点的所有子节点inner_loop次数 + + :param node_id: 子流程节点 ID + :type node_id: str + """ + + @abstractmethod + def 
set_state( + self, + node_id: str, + to_state: str, + loop: int = -1, + inner_loop: int = -1, + version: str = None, + root_id: Optional[str] = None, + parent_id: Optional[str] = None, + is_retry: bool = False, + is_skip: bool = False, + reset_retry: bool = False, + reset_skip: bool = False, + error_ignored: bool = False, + reset_error_ignored: bool = False, + refresh_version: bool = False, + clear_started_time: bool = False, + set_started_time: bool = False, + clear_archived_time: bool = False, + set_archive_time: bool = False, + ) -> str: + """ + 设置节点的状态,如果节点存在,进行状态转换时需要满足状态转换状态机 + + :param node_id: 节点 ID + :type node_id: str + :param to_state: 目标状态 + :type to_state: str + :param loop: 循环次数, 为 -1 时表示不设置 + :type loop: int, optional + :param inner_loop: 当前流程循环次数, 为 -1 时表示不设置 + :type inner_loop: int, optional + :param version: 目标状态版本,为空时表示不做版本校验 + :type version: Optional[str], optional + :param root_id: 根节点 ID,为空时表示不设置 + :type root_id: Optional[str], optional + :param parent_id: 父节点 ID,为空时表示不设置 + :type parent_id: Optional[str], optional + :param is_retry: 是否增加重试次数 + :type is_retry: bool, optional + :param is_skip: 是否将跳过设置为 True + :type is_skip: bool, optional + :param reset_retry: 是否重置重试次数 + :type reset_retry: bool, optional + :param reset_skip: 是否重置跳过标志 + :type reset_skip: bool, optional + :param error_ignored: 是否为忽略错误跳过 + :type error_ignored: bool, optional + :param reset_error_ignored: 是否重置忽略错误标志 + :type reset_error_ignored: bool, optional + :param refresh_version: 是否刷新版本号 + :type refresh_version: bool, optional + :param clear_started_time: 是否清空开始时间 + :type clear_started_time: bool, optional + :param set_started_time: 是否设置开始时间 + :type set_started_time: bool, optional + :param clear_archived_time: 是否清空归档时间 + :type clear_archived_time: bool, optional + :param set_archive_time: 是否设置归档时间 + :type set_archive_time: bool, optional + :return: 该节点最新版本 + :rtype: str + """ + + @abstractmethod + def set_state_root_and_parent(self, node_id: str, root_id: str, parent_id: str): + """ + 设置节点的根流程和父流程 ID + + :param node_id: 节点 ID + :type node_id: str + :param root_id: 根流程 ID + :type root_id: str + :param parent_id: 父流程 ID + :type parent_id: str + """ + + +class NodeMixin: + """ + 节点相关接口 + """ + + @abstractmethod + def get_node(self, node_id: str) -> Node: + """ + 获取某个节点的详细信息 + + :param node_id: 节点 ID + :type node_id: str + :return: Node 实例 + :rtype: Node + """ + + +class ScheduleMixin: + """ + 调度实例相关接口 + """ + + @abstractmethod + def set_schedule( + self, + process_id: int, + node_id: str, + version: str, + schedule_type: ScheduleType, + ) -> Schedule: + """ + 设置 schedule 对象 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + :param schedule_type: 调度类型 + :type schedule_type: ScheduleType + :return: 调度对象实例 + :rtype: Schedule + """ + + @abstractmethod + def get_schedule(self, schedule_id: str) -> Schedule: + """ + 获取 Schedule 对象 + + :param schedule_id: 调度实例 ID + :type schedule_id: str + :return: Schedule 对象实例 + :rtype: Schedule + """ + + @abstractmethod + def get_schedule_with_node_and_version(self, node_id: str, version: str) -> Schedule: + """ + 通过节点 ID 和执行版本来获取 Scheudle 对象 + + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + :return: Schedule 对象 + :rtype: Schedule + """ + + @abstractmethod + def apply_schedule_lock(self, schedule_id: str) -> bool: + """ + 获取 Schedule 对象的调度锁,返回是否成功获取锁 + + :param schedule_id: 调度实例 ID + :type schedule_id: str + :return: 是否成功获取锁 + :rtype: bool + 
""" + + @abstractmethod + def release_schedule_lock(self, schedule_id: int): + """ + 释放指定 Schedule 的调度锁 + + :param schedule_id: Schedule ID + :type schedule_id: int + """ + + @abstractmethod + def expire_schedule(self, schedule_id: int): + """ + 将某个 Schedule 对象标记为已过期 + + :param schedule_id: 调度实例 ID + :type schedule_id: int + """ + + @abstractmethod + def finish_schedule(self, schedule_id: int): + """ + 将某个 Schedule 对象标记为已完成 + + :param schedule_id: 调度实例 ID + :type schedule_id: int + """ + + @abstractmethod + def add_schedule_times(self, schedule_id: int): + """ + 将某个 Schedule 对象的调度次数 +1 + + :param schedule_id: 调度实例 ID + :type schedule_id: int + """ + + +class ContextMixin: + """ + 流程上下文相关接口 + """ + + @abstractmethod + def get_context_values(self, pipeline_id: str, keys: set) -> List[ContextValue]: + """ + 获取某个流程上下文中的 keys 所指定的键对应变量的值 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :param keys: 变量键 + :type keys: set + :return: 变量值信息 + :rtype: List[ContextValue] + """ + + @abstractmethod + def get_context_key_references(self, pipeline_id: str, keys: set) -> set: + """ + 获取某个流程上下文中 keys 所指定的变量直接和间接引用的其他所有变量的键 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :param keys: 变量 key 列表 + :type keys: set + :return: keys 所指定的变量直接和简介引用的其他所有变量的键 + :rtype: set + """ + + @abstractmethod + def upsert_plain_context_values(self, pipeline_id: str, update: Dict[str, ContextValue]): + """ + 更新或创建新的普通上下文数据 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :param update: 更新数据 + :type update: Dict[str, ContextValue] + """ + + def get_context(self, pipeline_id: str) -> List[ContextValue]: + """ + 获取某个流程的所有上下文数据 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :return: [description] + :rtype: List[ContextValue] + """ + + def get_context_outputs(self, pipeline_id: str) -> Set[str]: + """ + 获取流程上下文需要输出的数据 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :return: 输出数据 key + :rtype: Set[str] + """ + + +class DataMixin: + """ + 节点数据,执行数据,回调数据相关接口 + """ + + @abstractmethod + def get_data(self, node_id: str) -> Data: + """ + 获取某个节点的数据对象 + + :param node_id: 节点 ID + :type node_id: str + :return: 数据对象实例 + :rtype: Data + """ + + @abstractmethod + def get_data_inputs(self, node_id: str) -> Dict[str, DataInput]: + """ + 获取某个节点的输入数据 + + :param node_id: 节点 ID + :type node_id: str + :return: 输入数据字典 + :rtype: dict + """ + + @abstractmethod + def get_data_outputs(self, node_id: str) -> dict: + """ + 获取某个节点的输出数据 + + :param node_id: 节点 ID + :type node_id: str + :return: 输入数据字典 + :rtype: dict + """ + + @abstractmethod + def set_data_inputs(self, node_id: str, data: Dict[str, DataInput]): + """ + 将节点数据对象的 inputs 设置为 data + + : param node_id: 节点 ID + : type node_id: str + : param data: 目标数据 + : type data: dict + """ + + # execution data relate + @abstractmethod + def get_execution_data(self, node_id: str) -> ExecutionData: + """ + 获取某个节点的执行数据 + + : param node_id: 节点 ID + : type node_id: str + : return: 执行数据实例 + : rtype: ExecutionData + """ + + @abstractmethod + def get_execution_data_inputs(self, node_id: str) -> dict: + """ + 获取某个节点的执行数据输入 + + :param node_id: 节点 ID + :type node_id: str + :return: 执行数据输入 + :rtype: dict + """ + + @abstractmethod + def get_execution_data_outputs(self, node_id: str) -> dict: + """ + 获取某个节点的执行数据输出 + + :param node_id: 节点 ID + :type node_id: str + :return: 执行数据输出 + :rtype: dict + """ + + @abstractmethod + def set_execution_data(self, node_id: str, data: ExecutionData): + """ + 设置某个节点的执行数据 + + :param node_id: 节点 ID + :type node_id: str + :param data: 执行数据实例 + :type data: 
ExecutionData + """ + + @abstractmethod + def set_execution_data_inputs(self, node_id: str, inputs: dict): + """ + 设置某个节点的执行数据输入 + + :param node_id: 节点 ID + :type node_id: str + :param outputs: 输出数据 + :type outputs: dict + """ + + @abstractmethod + def set_execution_data_outputs(self, node_id: str, outputs: dict): + """ + 设置某个节点的执行数据输出 + + :param node_id: 节点 ID + :type node_id: str + :param outputs: 输出数据 + :type outputs: dict + """ + + # callback data relate + @abstractmethod + def set_callback_data(self, node_id: str, version: str, data: dict) -> int: + """ + 设置某个节点执行数据的回调数据 + + :param node_id: 节点 ID + :type node_id: str + :param version: 节点执行版本 + :type version: str + :param data: 回调数据 + :type data: dict + :return: 回调数据 ID + :rtype: int + """ + + @abstractmethod + def get_callback_data(self, data_id: int) -> CallbackData: + """ + 获取回调数据 + + :param data_id: Data ID + :type data_id: int + :return: 回调数据实例 + :rtype: CallbackData + """ + + +class ExecutionHistoryMixin: + """ + 执行历史相关接口 + """ + + @abstractmethod + def add_history( + self, + node_id: str, + started_time: datetime, + archived_time: datetime, + loop: int, + skip: bool, + retry: int, + version: str, + inputs: dict, + outputs: dict, + ) -> int: + """ + 为某个节点记录一次执行历史 + + : param node_id: 节点 ID + : type node_id: str + : param started_time: 开始时间 + : type started_time: datetime + : param archived_time: 归档时间 + : type archived_time: datetime + : param loop: 重入计数 + : type loop: int + : param skip: 是否跳过 + : type skip: bool + : param retry: 重试次数 + : type retry: int + : param version: 节点执行版本号 + : type version: str + : param inputs: 输入数据 + : type inputs: dict + : param outputs: 输出数据 + : type outputs: dict + """ + + @abstractmethod + def get_histories(self, node_id: str, loop: int = -1) -> List[ExecutionHistory]: + """ + 返回某个节点的历史记录 + + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 历史记录列表 + :rtype: List[History] + """ + + @abstractmethod + def get_short_histories(self, node_id: str, loop: int = -1) -> List[ExecutionShortHistory]: + """ + 返回某个节点的简要历史记录 + + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 历史记录列表 + :rtype: List[ExecutionShortHistory] + """ + + +class EngineRuntimeInterface( + PluginManagerMixin, + EngineAPIHooksMixin, + TaskMixin, + ProcessMixin, + StateMixin, + NodeMixin, + ScheduleMixin, + ContextMixin, + DataMixin, + ExecutionHistoryMixin, + metaclass=ABCMeta, +): + @abstractmethod + def prepare_run_pipeline( + self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options + ) -> int: + """ + 进行 pipeline 执行前的准备工作,并返回 进程 ID,该函数执行完成后即代表 + pipeline 是随时可以通过 execute(process_id, start_event_id) 启动执行的 + 一般来说,应该完成以下工作: + - 准备好进程模型 + - 准备好流程中每个节点的信息 + - 准备好流程中每个节点数据对象的信息 + + :param pipeline: pipeline 描述对象 + :type pipeline: dict + :param root_pipeline_data 根流程数据 + :type root_pipeline_data: dict + :param root_pipeline_context 根流程上下文 + :type root_pipeline_context: dict + :param subprocess_context 子流程预置流程上下文 + :type subprocess_context: dict + :return: 进程 ID + :rtype: str + """ + + @abstractmethod + def node_rerun_limit(self, root_pipeline_id: str, node_id: str) -> int: + """ + 返回节点最大重入次数 + + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + :param node_id: 节点 ID + :type node_id: str + :return: 节点最大重入次数 + :rtype: int + """ diff --git a/bamboo_engine/eri/models.py b/bamboo_engine/eri/models.py new file mode 100644 index 00000000..3df6da9f --- 
/dev/null +++ b/bamboo_engine/eri/models.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +ERI 中相关的模型对象 +""" + +from enum import Enum +from datetime import datetime +from typing import List, Dict, Any, Optional +from abc import ABCMeta, abstractmethod + +from bamboo_engine.utils.object import Representable +from bamboo_engine.utils.collections import FancyDict +from bamboo_engine.exceptions import ValueError + + +# node relate models +class NodeType(Enum): + """ + 节点类型枚举 + """ + + ServiceActivity = "ServiceActivity" + SubProcess = "SubProcess" + ExclusiveGateway = "ExclusiveGateway" + ParallelGateway = "ParallelGateway" + ConditionalParallelGateway = "ConditionalParallelGateway" + ConvergeGateway = "ConvergeGateway" + EmptyStartEvent = "EmptyStartEvent" + EmptyEndEvent = "EmptyEndEvent" + ExecutableEndEvent = "ExecutableEndEvent" + + +class Node(Representable): + """ + 节点信息描述类 + """ + + def __init__( + self, + id: str, + type: NodeType, + target_flows: List[str], + target_nodes: List[str], + targets: Dict[str, str], + root_pipeline_id: str, + parent_pipeline_id: str, + can_skip: bool = True, + can_retry: bool = True, + ): + """ + + :param id: 节点 ID + :type id: str + :param type: 节点类型 + :type type: NodeType + :param target_flows: 节点目标流 ID 列表 + :type target_flows: List[str] + :param target_nodes: 目标节点 ID 列表 + :type target_nodes: List[str] + :param targets: 节点目标流,目标节点 ID 映射 + :type targets: Dict[str, str] + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + :param parent_pipeline_id: 父流程 ID + :type parent_pipeline_id: str + :param can_skip: 节点是否能够跳过 + :type can_skip: bool + :param can_retry: 节点是否能够重试 + :type can_retry: bool + """ + self.id = id + self.type = type + self.targets = targets + self.target_flows = target_flows + self.target_nodes = target_nodes + self.root_pipeline_id = root_pipeline_id + self.parent_pipeline_id = parent_pipeline_id + self.can_skip = can_skip + self.can_retry = can_retry + + +class EmptyStartEvent(Node): + pass + + +class ConvergeGateway(Node): + pass + + +class EmptyEndEvent(Node): + pass + + +class Condition(Representable): + """ + 分支条件 + """ + + def __init__(self, name: str, evaluation: str, target_id: str, flow_id: str): + """ + + :param name: 条件名 + :type name: str + :param evaluation: 条件表达式 + :type evaluation: str + :param target_id: 目标节点 ID + :type target_id: str + :param flow_id: 目标流 ID + :type flow_id: str + """ + self.name = name + self.evaluation = evaluation + self.target_id = target_id + self.flow_id = flow_id + + +class ParallelGateway(Node): + """ + 并行网关 + """ + + def __init__(self, converge_gateway_id: str, *args, **kwargs): + """ + + :param converge_gateway_id: 汇聚网关 ID + :type converge_gateway_id: str + """ + super().__init__(*args, **kwargs) + self.converge_gateway_id = converge_gateway_id + + +class 
ConditionalParallelGateway(Node):
+    """
+    条件并行网关
+    """
+
+    def __init__(self, conditions: List[Condition], converge_gateway_id: str, *args, **kwargs):
+        """
+
+        :param conditions: 分支条件
+        :type conditions: List[Condition]
+        :param converge_gateway_id: 汇聚网关 ID
+        :type converge_gateway_id: str
+        """
+        super().__init__(*args, **kwargs)
+        self.conditions = conditions
+        self.converge_gateway_id = converge_gateway_id
+
+
+class ExclusiveGateway(Node):
+    """
+    分支网关
+    """
+
+    def __init__(self, conditions: List[Condition], *args, **kwargs):
+        """
+
+        :param conditions: 分支条件
+        :type conditions: List[Condition]
+        """
+        super().__init__(*args, **kwargs)
+        self.conditions = conditions
+
+
+class ServiceActivity(Node):
+    """
+    服务节点
+    """
+
+    def __init__(self, code: str, version: str, timeout: Optional[int], error_ignorable: bool, *args, **kwargs):
+        """
+
+        :param code: Service Code
+        :type code: str
+        :param version: 版本
+        :type version: str
+        :param timeout: 超时限制
+        :type timeout: Optional[int]
+        :param error_ignorable: 是否忽略错误
+        :type error_ignorable: bool
+        """
+
+        super().__init__(*args, **kwargs)
+        self.code = code
+        self.version = version
+        self.timeout = timeout
+        self.error_ignorable = error_ignorable
+
+
+class SubProcess(Node):
+    """
+    子流程
+    """
+
+    def __init__(self, start_event_id: str, *args, **kwargs):
+        """
+
+        :param start_event_id: 子流程开始节点 ID
+        :type start_event_id: str
+        """
+        super().__init__(*args, **kwargs)
+        self.start_event_id = start_event_id
+
+
+class ExecutableEndEvent(Node):
+    """
+    可执行结束节点
+    """
+
+    def __init__(self, code: str, *args, **kwargs):
+        """
+
+        :param code: 可执行结束节点代号
+        :type code: str
+        """
+        super().__init__(*args, **kwargs)
+        self.code = code
+
+
+# runtime relate models
+class ScheduleType(Enum):
+    """
+    调度类型
+    """
+
+    CALLBACK = 1
+    MULTIPLE_CALLBACK = 2
+    POLL = 3
+
+
+class Schedule(Representable):
+    """
+    调度对象
+    """
+
+    def __init__(
+        self,
+        id: int,
+        type: ScheduleType,
+        process_id: int,
+        node_id: str,
+        finished: bool,
+        expired: bool,
+        version: str,
+        times: int,
+    ):
+        """
+
+        :param id: ID
+        :type id: int
+        :param type: 类型
+        :type type: ScheduleType
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param finished: 是否已完成
+        :type finished: bool
+        :param expired: 是否已过期
+        :type expired: bool
+        :param version: 绑定版本
+        :type version: str
+        :param times: 调度次数
+        :type times: int
+        """
+        self.id = id
+        self.type = type
+        self.process_id = process_id
+        self.node_id = node_id
+        self.finished = finished
+        self.expired = expired
+        self.version = version
+        self.times = times
+
+
+class State(Representable):
+    """
+    节点状态对象
+    """
+
+    def __init__(
+        self,
+        node_id: str,
+        root_id: str,
+        parent_id: str,
+        name: str,
+        version: str,
+        loop: int,
+        inner_loop: int,
+        retry: int,
+        skip: bool,
+        error_ignored: bool,
+        created_time: datetime,
+        started_time: datetime,
+        archived_time: datetime,
+    ):
+        """
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param root_id: 根流程 ID
+        :type root_id: str
+        :param parent_id: 父流程 ID
+        :type parent_id: str
+        :param name: 状态名
+        :type name: str
+        :param version: 版本
+        :type version: str
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 子流程重入次数
+        :type inner_loop: int
+        :param retry: 重试次数
+        :type retry: int
+        :param skip: 是否跳过
+        :type skip: bool
+        :param error_ignored: 是否出错后自动忽略
+        :type error_ignored: bool
+        :param created_time: 创建时间
+        :type created_time: datetime
+        :param started_time: 开始时间
+        :type started_time: datetime
+        :param archived_time: 归档时间
+        :type archived_time: datetime
+        """
+        self.node_id = node_id
+        self.root_id = root_id
+        self.parent_id = parent_id
+        self.name = name
+        self.version = version
+        self.loop = loop
+        self.inner_loop = inner_loop
+        self.retry = retry
+        self.skip = skip
+        self.error_ignored = error_ignored
+        self.created_time = created_time
+        self.started_time = started_time
+        self.archived_time = archived_time
+
+
+class DataInput(Representable):
+    """
+    节点数据输入项
+    """
+
+    def __init__(self, need_render: bool, value: Any):
+        """
+        :param need_render: 是否需要进行模板解析
+        :type need_render: bool
+        :param value: 数据
+        :type value: Any
+        """
+        self.need_render = need_render
+        self.value = value
+
+
+class Data(Representable):
+    """
+    节点数据对象
+    """
+
+    def __init__(self, inputs: Dict[str, DataInput], outputs: Dict[str, str]):
+        """
+
+        :param inputs: 输入数据
+        :type inputs: Dict[str, DataInput]
+        :param outputs: 节点输出配置
+        :type outputs: Dict[str, str]
+        """
+        self.inputs = inputs
+        self.outputs = outputs
+
+    def plain_inputs(self) -> Dict[str, Any]:
+        """
+        获取不带输入项类型的输入字典
+        """
+        return {key: di.value for key, di in self.inputs.items()}
+
+    def need_render_inputs(self) -> Dict[str, Any]:
+        """
+        获取需要进行渲染的输入项字典
+        """
+        return {key: di.value for key, di in self.inputs.items() if di.need_render}
+
+    def render_escape_inputs(self) -> Dict[str, Any]:
+        """
+        获取不需要进行渲染的输入项字典
+        """
+        return {key: di.value for key, di in self.inputs.items() if not di.need_render}
+
+
+class ExecutionData(Representable):
+    """
+    节点执行数据
+    """
+
+    def __init__(self, inputs: Optional[dict], outputs: Optional[dict]):
+        """
+
+        :param inputs: 输入数据
+        :type inputs: Optional[dict]
+        :param outputs: 输出数据
+        :type outputs: Optional[dict]
+        """
+        self.inputs = FancyDict(inputs)
+        self.outputs = FancyDict(outputs)
+
+
+class ExecutionHistory(Representable):
+    """
+    节点执行历史
+    """
+
+    def __init__(
+        self,
+        id: str,
+        node_id: str,
+        started_time: datetime,
+        archived_time: datetime,
+        loop: int,
+        skip: bool,
+        retry: int,
+        version: str,
+        inputs: dict,
+        outputs: dict,
+    ):
+        """
+
+        :param id: ID
+        :type id: str
+        :param node_id: Node ID
+        :type node_id: str
+        :param started_time: 开始时间
+        :type started_time: datetime
+        :param archived_time: 归档时间
+        :type archived_time: datetime
+        :param loop: 重入计数
+        :type loop: int
+        :param skip: 是否跳过
+        :type skip: bool
+        :param retry: 重试次数
+        :type retry: int
+        :param version: 版本号
+        :type version: str
+        :param inputs: 输入数据
+        :type inputs: dict
+        :param outputs: 输出数据
+        :type outputs: dict
+        """
+        self.id = id
+        self.node_id = node_id
+        self.started_time = started_time
+        self.archived_time = archived_time
+        self.loop = loop
+        self.skip = skip
+        self.retry = retry
+        self.version = version
+        self.inputs = inputs
+        self.outputs = outputs
+
+
+class ExecutionShortHistory(Representable):
+    """
+    简短节点执行历史
+    """
+
+    def __init__(
+        self,
+        id: str,
+        node_id: str,
+        started_time: datetime,
+        archived_time: datetime,
+        loop: int,
+        skip: bool,
+        retry: int,
+        version: str,
+    ):
+        """
+
+        :param id: ID
+        :type id: str
+        :param node_id: Node ID
+        :type node_id: str
+        :param started_time: 开始时间
+        :type started_time: datetime
+        :param archived_time: 归档时间
+        :type archived_time: datetime
+        :param loop: 重入计数
+        :type loop: int
+        :param skip: 是否跳过
+        :type skip: bool
+        :param retry: 重试次数
+        :type retry: int
+        :param version: 版本号
+        :type version: str
+        """
+        self.id = id
+        self.node_id = node_id
+        self.started_time = started_time
+        self.archived_time = archived_time
+        self.loop = loop
+        self.skip = skip
+        self.retry = retry
+        self.version = version
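
(编辑补充)Data 对象的三个辅助方法按 need_render 标记对输入项做拆分,下面的示意代码演示其返回值,键名与取值均为假设:

```python
# 示意:Data / DataInput 辅助方法的行为
data = Data(
    inputs={
        "host": DataInput(need_render=True, value="${host_var}"),
        "port": DataInput(need_render=False, value=22),
    },
    outputs={"result": "job_result"},
)

data.plain_inputs()          # {"host": "${host_var}", "port": 22}
data.need_render_inputs()    # {"host": "${host_var}"}
data.render_escape_inputs()  # {"port": 22}
```
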
+
+
+class CallbackData(Representable):
+    """
+    节点回调数据
+    """
+
+    def __init__(self, id: int, node_id: str, version: str, data: dict):
+        """
+
+        :param id: 数据 ID
+        :type id: int
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 版本
+        :type version: str
+        :param data: 数据
+        :type data: dict
+        """
+        self.id = id
+        self.node_id = node_id
+        self.version = version
+        self.data = data
+
+
+class SuspendedProcessInfo(Representable):
+    """
+    挂起进程信息
+    """
+
+    def __init__(self, process_id: int, current_node: str):
+        """
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param current_node: 当前节点 ID
+        :type current_node: str
+        """
+        self.process_id = process_id
+        self.current_node = current_node
+
+
+class ProcessInfo(Representable):
+    """
+    进程信息
+    """
+
+    def __init__(
+        self,
+        process_id: int,
+        destination_id: str,
+        root_pipeline_id: str,
+        pipeline_stack: List[str],
+        parent_id: int,
+    ):
+        """
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param destination_id: 进程目标节点 ID
+        :type destination_id: str
+        :param root_pipeline_id: 根流程 ID
+        :type root_pipeline_id: str
+        :param pipeline_stack: 流程栈
+        :type pipeline_stack: List[str]
+        :param parent_id: 父进程 ID
+        :type parent_id: int
+        """
+        self.process_id = process_id
+        self.destination_id = destination_id
+        self.parent_id = parent_id
+        self.root_pipeline_id = root_pipeline_id
+        self.pipeline_stack = pipeline_stack
+
+    @property
+    def top_pipeline_id(self):
+        return self.pipeline_stack[-1]
+
+
+class DispatchProcess(Representable):
+    """
+    待调度进程信息
+    """
+
+    def __init__(self, process_id: int, node_id: str):
+        """
+
+        :param process_id: 进程 ID
+        :type process_id: int
+        :param node_id: 调度开始节点 ID
+        :type node_id: str
+        """
+        self.process_id = process_id
+        self.node_id = node_id
+
+
+class ContextValueType(Enum):
+    """
+    上下文变量类型枚举
+    """
+
+    PLAIN = 1
+    SPLICE = 2
+    COMPUTE = 3
+
+
+class ContextValue(Representable):
+    def __init__(self, key: str, type: ContextValueType, value: Any, code: Optional[str] = None):
+        if type is ContextValueType.COMPUTE and code is None:
+            raise ValueError("code can't be none when type is COMPUTE")
+
+        self.key = key
+        self.type = type
+        self.value = value
+        self.code = code
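
(编辑补充)三种上下文变量类型的构造方式如下,COMPUTE 类型必须提供 code,否则构造时会抛出 ValueError。示意代码中 key 沿用 SPLICE 变量的 ${} 约定,取值与 code 均为假设:

```python
# 示意:构造三种类型的上下文变量
plain = ContextValue(key="${a}", type=ContextValueType.PLAIN, value="1")
splice = ContextValue(key="${b}", type=ContextValueType.SPLICE, value="${a}_suffix")
compute = ContextValue(
    key="${c}",
    type=ContextValueType.COMPUTE,
    value={"expr": "${a}"},  # 假设的变量配置
    code="my_compute_var",   # 假设的变量实现代号
)
```
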
+""" + +""" +异常定义模块 +""" + + +class EngineException(Exception): + pass + + +class InvalidOperationError(EngineException): + pass + + +class NotFoundError(EngineException): + pass + + +class ValueError(EngineException): + pass + + +class StateVersionNotMatchError(EngineException): + pass + + +class TreeInvalidException(EngineException): + pass + + +class ConnectionValidateError(TreeInvalidException): + def __init__(self, failed_nodes, detail, *args): + self.failed_nodes = failed_nodes + self.detail = detail + super(ConnectionValidateError, self).__init__(*args) + + +class ConvergeMatchError(TreeInvalidException): + def __init__(self, gateway_id, *args): + self.gateway_id = gateway_id + super(ConvergeMatchError, self).__init__(*args) + + +class StreamValidateError(TreeInvalidException): + def __init__(self, node_id, *args): + self.node_id = node_id + super(StreamValidateError, self).__init__(*args) + + +class IsolateNodeError(TreeInvalidException): + pass diff --git a/bamboo_engine/handler.py b/bamboo_engine/handler.py new file mode 100644 index 00000000..58744e30 --- /dev/null +++ b/bamboo_engine/handler.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+"""
+
+"""
+节点处理器逻辑封装模块
+"""
+
+from typing import Optional, List
+from abc import ABCMeta, abstractmethod
+
+from bamboo_engine import states
+
+from .eri import (
+    EngineRuntimeInterface,
+    Node,
+    Schedule,
+    CallbackData,
+    ScheduleType,
+    DispatchProcess,
+    ProcessInfo,
+    NodeType,
+)
+from .exceptions import InvalidOperationError, NotFoundError
+
+
+def register_handler(type: NodeType):
+    """
+    节点 Handler 注册函数
+
+    :param type: 节点类型
+    :type type: NodeType
+    """
+
+    def register(cls):
+        HandlerFactory.add_handler(type, cls)
+        return cls
+
+    return register
+
+
+class ExecuteResult:
+    """
+    Handler execute 方法返回的结果
+    """
+
+    def __init__(
+        self,
+        should_sleep: bool,
+        schedule_ready: bool,
+        schedule_type: Optional[ScheduleType],
+        schedule_after: int,
+        dispatch_processes: List[DispatchProcess],
+        next_node_id: Optional[str],
+        should_die: bool = False,
+    ):
+        """
+
+        :param should_sleep: 当前进程是否应该进入休眠
+        :type should_sleep: bool
+        :param schedule_ready: 被处理的节点是否准备好进入调度
+        :type schedule_ready: bool
+        :param schedule_type: 被处理的节点的调度类型
+        :type schedule_type: Optional[ScheduleType]
+        :param schedule_after: 在 schedule_after 秒后开始执行调度
+        :type schedule_after: int
+        :param dispatch_processes: 需要派发的子进程信息列表
+        :type dispatch_processes: List[DispatchProcess]
+        :param next_node_id: 推进循环中下一个要处理的节点的 ID
+        :type next_node_id: Optional[str]
+        :param should_die: 当前进程是否需要进入死亡状态, defaults to False
+        :type should_die: bool, optional
+        """
+        self.should_sleep = should_sleep
+        self.schedule_ready = schedule_ready
+        self.schedule_type = schedule_type
+        self.schedule_after = schedule_after
+        self.dispatch_processes = dispatch_processes
+        self.next_node_id = next_node_id
+        self.should_die = should_die
+
+
+class ScheduleResult:
+    """
+    Handler schedule 方法返回的结果
+    """
+
+    def __init__(
+        self,
+        has_next_schedule: bool,
+        schedule_after: int,
+        schedule_done: bool,
+        next_node_id: Optional[str],
+    ):
+        """
+
+        :param has_next_schedule: 是否还有下次调度
+        :type has_next_schedule: bool
+        :param schedule_after: 在 schedule_after 秒后开始下次调度
+        :type schedule_after: int
+        :param schedule_done: 调度是否完成
+        :type schedule_done: bool
+        :param next_node_id: 调度完成后下一个需要执行的节点的 ID
+        :type next_node_id: Optional[str]
+        """
+        self.has_next_schedule = has_next_schedule
+        self.schedule_after = schedule_after
+        self.schedule_done = schedule_done
+        self.next_node_id = next_node_id
+
+
+class NodeHandler(metaclass=ABCMeta):
+    """
+    节点处理器,负责封装不同类型节点的 execute 和 schedule 逻辑
+    """
+
+    LOOP_KEY = "_loop"
+    INNER_LOOP_KEY = "_inner_loop"
+
+    def __init__(self, node: Node, runtime: EngineRuntimeInterface):
+        """
+
+        :param node: 节点实例
+        :type node: Node
+        :param runtime: 引擎运行时实例
+        :type runtime: EngineRuntimeInterface
+        """
+        self.node = node
+        self.runtime = runtime
+
+    @abstractmethod
+    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
+        """
+        节点的 execute 处理逻辑
+
+        :param process_info: 进程信息
+        :type process_info: ProcessInfo
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 当前流程重入次数
+        :type inner_loop: int
+        :param version: 执行版本
+        :type version: str
+        :return: 执行结果
+        :rtype: ExecuteResult
+        """
+
+    def schedule(
+        self,
+        process_info: ProcessInfo,
+        loop: int,
+        inner_loop: int,
+        schedule: Schedule,
+        callback_data: Optional[CallbackData] = None,
+    ) -> ScheduleResult:
+        """
+        节点的 schedule 处理逻辑,不支持 schedule 的节点可以不实现该方法
+
+        :param process_info: 进程信息
+        :type process_info: ProcessInfo
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 当前流程重入次数
+        :type inner_loop: int
+        :param schedule: Schedule 实例
+        :type schedule: Schedule
+        :param callback_data: 回调数据, defaults to None
+        :type callback_data: Optional[CallbackData], optional
+        :return: 调度结果
+        :rtype: ScheduleResult
+        """
+        raise NotImplementedError()
+
+    def _execute_fail(self, ex_data: str) -> ExecuteResult:
+        exec_outputs = self.runtime.get_execution_data_outputs(self.node.id)
+
+        self.runtime.set_state(
+            node_id=self.node.id, to_state=states.FAILED, set_archive_time=True
+        )
+
+        exec_outputs["ex_data"] = ex_data
+
+        self.runtime.set_execution_data_outputs(self.node.id, exec_outputs)
+
+        return ExecuteResult(
+            should_sleep=True,
+            schedule_ready=False,
+            schedule_type=None,
+            schedule_after=-1,
+            dispatch_processes=[],
+            next_node_id=None,
+        )
+
+    def _get_plain_inputs(self, node_id: str):
+        return {
+            key: di.value for key, di in self.runtime.get_data_inputs(node_id).items()
+        }
+
+
+class HandlerFactory:
+    """
+    节点处理器工厂
+    """
+
+    _handlers = {}
+
+    @classmethod
+    def add_handler(cls, type: NodeType, handler_cls):
+        """
+        向工厂中注册某个类型节点的处理器
+
+        :param type: 节点类型
+        :type type: NodeType
+        :param handler_cls: 节点处理器类,必须是 NodeHandler 的子类
+        :type handler_cls: Type[NodeHandler]
+        :raises InvalidOperationError: 当 handler_cls 不是 NodeHandler 的子类时抛出
+        """
+        if not issubclass(handler_cls, NodeHandler):
+            raise InvalidOperationError(
+                "register handler err: {} is not subclass of {}".format(
+                    handler_cls, "NodeHandler"
+                )
+            )
+        cls._handlers[type.value] = handler_cls
+
+    @classmethod
+    def get_handler(cls, node: Node, runtime: EngineRuntimeInterface) -> NodeHandler:
+        """
+        获取某个节点的处理器实例
+
+        :param node: 节点实例
+        :type node: Node
+        :param runtime: 引擎运行时实例
+        :type runtime: EngineRuntimeInterface
+        :raises NotFoundError: 当找不到节点类型对应的处理器时抛出
+        :return: 节点处理器实例
+        :rtype: NodeHandler
+        """
+        if node.type.value not in cls._handlers:
+            raise NotFoundError(
+                "can not find handler for {} type node".format(node.type.value)
+            )
+
+        return cls._handlers[node.type.value](node, runtime)
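+
+
+# NOTE (editorial): a hedged sketch of how this module is meant to be used,
+# illustrative only -- NodeType.MyNode is a hypothetical member:
+#
+#     @register_handler(NodeType.MyNode)
+#     class MyNodeHandler(NodeHandler):
+#         def execute(self, process_info, loop, inner_loop, version):
+#             self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)
+#             return ExecuteResult(
+#                 should_sleep=False,
+#                 schedule_ready=False,
+#                 schedule_type=None,
+#                 schedule_after=-1,
+#                 dispatch_processes=[],
+#                 next_node_id=self.node.target_nodes[0],
+#             )
+#
+#     # the engine then resolves and drives the handler via the factory:
+#     handler = HandlerFactory.get_handler(node, runtime)
+#     result = handler.execute(process_info, loop, inner_loop, version)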
diff --git a/bamboo_engine/handlers/__init__.py b/bamboo_engine/handlers/__init__.py
new file mode 100644
index 00000000..0b7bf783
--- /dev/null
+++ b/bamboo_engine/handlers/__init__.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+"""
+节点处理逻辑存放模块
+"""
+
+
+def register():
+    from .conditional_parallel_gateway import ConditionalParallelGatewayHandler
+    from .converge_gateway import ConvergeGatewayHandler
+    from .empty_end_event import EmptyEndEventHandler
+    from .empty_start_event import EmptyStartEventHandler
+    from .exclusive_gateway import ExclusiveGatewayHandler
+    from .executable_end_event import ExecutableEndEventHandler
+    from .parallel_gateway import ParallelGatewayHandler
+    from .service_activity import ServiceActivityHandler
+    from .subprocess import SubProcessHandler
diff --git a/bamboo_engine/handlers/conditional_parallel_gateway.py b/bamboo_engine/handlers/conditional_parallel_gateway.py
new file mode 100644
index 00000000..03b2a300
--- /dev/null
+++ b/bamboo_engine/handlers/conditional_parallel_gateway.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+import json
+import logging
+
+from bamboo_engine.utils.boolrule import BoolRule
+from bamboo_engine.template.template import Template
+
+from bamboo_engine import states
+from bamboo_engine.eri import NodeType, ProcessInfo
+from bamboo_engine.context import Context
+from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
+from bamboo_engine.utils.string import transform_escape_char
+
+logger = logging.getLogger("bamboo_engine")
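+
+
+# NOTE (editorial): a hedged sketch of how a branch condition is evaluated by the
+# handler below, illustrative only -- the key "${a}" and its value are made up:
+#
+#     # context.hydrate(deformat=True) yields plain keys, e.g. {"a": 1}
+#     resolved = Template("${a} == 1").render({"a": 1})  # -> "1 == 1"
+#     BoolRule(resolved).test()                          # -> True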
+
+
+@register_handler(NodeType.ConditionalParallelGateway)
+class ConditionalParallelGatewayHandler(NodeHandler):
+    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
+        """
+        节点的 execute 处理逻辑
+
+        :param process_info: 进程信息
+        :type process_info: ProcessInfo
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 当前流程重入次数
+        :type inner_loop: int
+        :param version: 执行版本
+        :type version: str
+        :return: 执行结果
+        :rtype: ExecuteResult
+        """
+        evaluations = [c.evaluation for c in self.node.conditions]
+        top_pipeline_id = process_info.top_pipeline_id
+        root_pipeline_id = process_info.root_pipeline_id
+
+        root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id)
+
+        # resolve conditions references
+        evaluation_refs = set()
+        for e in evaluations:
+            refs = Template(e).get_reference()
+            evaluation_refs = evaluation_refs.union(refs)
+
+        logger.info(
+            "[%s] %s evaluation original refs: %s",
+            root_pipeline_id,
+            self.node.id,
+            evaluation_refs,
+        )
+        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs)
+        evaluation_refs = evaluation_refs.union(additional_refs)
+
+        logger.info(
+            "[%s] %s evaluation final refs: %s",
+            root_pipeline_id,
+            self.node.id,
+            evaluation_refs,
+        )
+        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs)
+        context = Context(self.runtime, context_values, root_pipeline_inputs)
+        try:
+            hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()}
+        except Exception as e:
+            logger.exception(
+                "[%s] %s context hydrate error",
+                root_pipeline_id,
+                self.node.id,
+            )
+            return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." % e)
+
+        # check conditions
+        fork_targets = []
+        for c in self.node.conditions:
+            resolved_evaluate = Template(c.evaluation).render(hydrated_context)
+            logger.info(
+                "[%s] %s render evaluation %s: %s with %s",
+                root_pipeline_id,
+                self.node.id,
+                c.evaluation,
+                resolved_evaluate,
+                hydrated_context,
+            )
+            try:
+                result = BoolRule(resolved_evaluate).test()
+                logger.info("[%s] %s %s test result: %s", root_pipeline_id, self.node.id, resolved_evaluate, result)
+            except Exception as e:
+                # test failed
+                return self._execute_fail(
+                    "evaluate[{}] fail with data[{}] message: {}".format(
+                        resolved_evaluate, json.dumps(hydrated_context), e
+                    )
+                )
+            else:
+                if result:
+                    fork_targets.append(c.target_id)
+
+        # all miss
+        if not fork_targets:
+            return self._execute_fail("all conditions of branches are not met")
+
+        # fork
+        from_to = {}
+        for target in fork_targets:
+            from_to[target] = self.node.converge_gateway_id
+
+        dispatch_processes = self.runtime.fork(
+            parent_id=process_info.process_id,
+            root_pipeline_id=process_info.root_pipeline_id,
+            pipeline_stack=process_info.pipeline_stack,
+            from_to=from_to,
+        )
+
+        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)
+
+        return ExecuteResult(
+            should_sleep=True,
+            schedule_ready=False,
+            schedule_type=None,
+            schedule_after=-1,
+            dispatch_processes=dispatch_processes,
+            next_node_id=None,
+        )
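+
+
+# NOTE (editorial): a hedged sketch of the from_to mapping built by the handler
+# above when it forks, illustrative only -- node IDs are made up. Every hit
+# branch target becomes a child process that runs until the converge gateway:
+#
+#     from_to = {
+#         "branch_node_a": "converge_gateway_id",
+#         "branch_node_b": "converge_gateway_id",
+#     }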
diff --git a/bamboo_engine/handlers/converge_gateway.py b/bamboo_engine/handlers/converge_gateway.py
new file mode 100644
index 00000000..82da73f0
--- /dev/null
+++ b/bamboo_engine/handlers/converge_gateway.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from bamboo_engine import states
+from bamboo_engine.eri import ProcessInfo, NodeType
+from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
+
+
+@register_handler(NodeType.ConvergeGateway)
+class ConvergeGatewayHandler(NodeHandler):
+    def execute(
+        self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str
+    ) -> ExecuteResult:
+        """
+        节点的 execute 处理逻辑
+
+        :param process_info: 进程信息
+        :type process_info: ProcessInfo
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 当前流程重入次数
+        :type inner_loop: int
+        :param version: 执行版本
+        :type version: str
+        :return: 执行结果
+        :rtype: ExecuteResult
+        """
+
+        self.runtime.set_state(
+            node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True
+        )
+
+        return ExecuteResult(
+            should_sleep=False,
+            schedule_ready=False,
+            schedule_type=None,
+            schedule_after=-1,
+            dispatch_processes=[],
+            next_node_id=self.node.target_nodes[0],
+        )
diff --git a/bamboo_engine/handlers/empty_end_event.py b/bamboo_engine/handlers/empty_end_event.py
new file mode 100644
index 00000000..7d5abf8e
--- /dev/null
+++ b/bamboo_engine/handlers/empty_end_event.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +import logging + +from bamboo_engine import states +from bamboo_engine.config import Settings +from bamboo_engine.eri import ProcessInfo, ContextValue, ContextValueType, NodeType +from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult +from bamboo_engine.context import Context + +logger = logging.getLogger("bamboo_engine") + + +@register_handler(NodeType.EmptyEndEvent) +class EmptyEndEventHandler(NodeHandler): + def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + root_pipeline_id = process_info.root_pipeline_id + pipeline_id = process_info.pipeline_stack.pop() + root_pipeline_finished = len(process_info.pipeline_stack) == 0 + + root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) + if not root_pipeline_finished: + subproc_state = self.runtime.get_state(pipeline_id) + + # write pipeline data + context_outputs = self.runtime.get_context_outputs(pipeline_id) + logger.info( + "[%s] %s context outputs: %s", + root_pipeline_id, + pipeline_id, + context_outputs, + ) + + context_values = self.runtime.get_context_values(pipeline_id=pipeline_id, keys=context_outputs) + logger.info( + "[%s] %s context values: %s", + root_pipeline_id, + pipeline_id, + context_values, + ) + + context = Context(self.runtime, context_values, root_pipeline_inputs) + hydrated_context = context.hydrate(deformat=False) + logger.info( + "[%s] %s hydrated context: %s", + root_pipeline_id, + pipeline_id, + hydrated_context, + ) + + outputs = {} + for key in context_outputs: + outputs[key] = hydrated_context.get(key, key) + if not root_pipeline_finished: + outputs[self.LOOP_KEY] = subproc_state.loop + Settings.RERUN_INDEX_OFFSET + outputs[self.INNER_LOOP_KEY] = subproc_state.inner_loop + Settings.RERUN_INDEX_OFFSET + self.runtime.set_execution_data_outputs(node_id=pipeline_id, outputs=outputs) + + self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) + + self.runtime.set_state(node_id=pipeline_id, to_state=states.FINISHED, set_archive_time=True) + + # root pipeline finish + if root_pipeline_finished: + return ExecuteResult( + should_sleep=False, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=None, + should_die=True, + ) + + # subprocess finish + subprocess = self.runtime.get_node(pipeline_id) + self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack) + + # extract subprocess outputs to parent context + subprocess_outputs = self.runtime.get_data_outputs(pipeline_id) + context.extract_outputs( + pipeline_id=process_info.pipeline_stack[-1], + data_outputs=subprocess_outputs, + execution_data_outputs=outputs, + ) + + return ExecuteResult( + should_sleep=False, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=subprocess.target_nodes[0], + ) diff --git a/bamboo_engine/handlers/empty_start_event.py b/bamboo_engine/handlers/empty_start_event.py new file mode 100644 index 00000000..90141c84 --- /dev/null +++ b/bamboo_engine/handlers/empty_start_event.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import logging + +from bamboo_engine import states +from bamboo_engine.context import Context +from bamboo_engine.eri import ProcessInfo, NodeType, ContextValue, ContextValueType, Data +from bamboo_engine.exceptions import NotFoundError +from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult + +logger = logging.getLogger("bamboo_engine") + + +@register_handler(NodeType.EmptyStartEvent) +class EmptyStartEventHandler(NodeHandler): + def execute( + self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str + ) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + + try: + data = self.runtime.get_data(self.node.id) + except NotFoundError: + need_pre_render = False + else: + need_pre_render = True + + if need_pre_render: + top_pipeline_id = process_info.top_pipeline_id + root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) + upsert_context_dict = dict() + pre_render_keys = data.inputs["pre_render_keys"].value + + logger.info("{} pre_render_keys are: {}".format(top_pipeline_id, ",".join(pre_render_keys))) + + refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=set(pre_render_keys)) + + context_values = self.runtime.get_context_values( + pipeline_id=top_pipeline_id, keys=set(pre_render_keys).union(refs) + ) + context = Context(self.runtime, context_values, root_pipeline_inputs) + hydrated_context = context.hydrate(deformat=False) + for context_value in context_values: + context_key = context_value.key + if context_key in pre_render_keys: + upsert_context_dict[context_key] = ContextValue( + key=context_key, + type=ContextValueType.PLAIN, + value=hydrated_context[context_key], + ) + + logger.info(f"{top_pipeline_id} pre_render_keys results are: {upsert_context_dict}") + self.runtime.upsert_plain_context_values(top_pipeline_id, upsert_context_dict) + + self.runtime.set_state( + node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True + ) + + return ExecuteResult( + should_sleep=False, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=self.node.target_nodes[0], + ) diff --git a/bamboo_engine/handlers/exclusive_gateway.py b/bamboo_engine/handlers/exclusive_gateway.py new file mode 100644 index 00000000..c3d040bd --- /dev/null +++ b/bamboo_engine/handlers/exclusive_gateway.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import json +import logging + +from bamboo_engine import states +from bamboo_engine.context import Context +from bamboo_engine.template import Template +from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult +from bamboo_engine.utils.boolrule import BoolRule +from bamboo_engine.eri import NodeType, ProcessInfo + +from bamboo_engine.utils.string import transform_escape_char + +logger = logging.getLogger("bamboo_engine") + + +@register_handler(NodeType.ExclusiveGateway) +class ExclusiveGatewayHandler(NodeHandler): + def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + evaluations = [c.evaluation for c in self.node.conditions] + top_pipeline_id = process_info.top_pipeline_id + root_pipeline_id = process_info.root_pipeline_id + + root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) + + # resolve conditions references + evaluation_refs = set() + for e in evaluations: + refs = Template(e).get_reference() + evaluation_refs = evaluation_refs.union(refs) + + logger.info( + "[%s] %s evaluation original refs: %s", + root_pipeline_id, + self.node.id, + evaluation_refs, + ) + additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs) + evaluation_refs = evaluation_refs.union(additional_refs) + + logger.info( + "[%s] %s evaluation final refs: %s", + root_pipeline_id, + self.node.id, + evaluation_refs, + ) + context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs) + logger.info( + "[%s] %s evaluation context values: %s", + root_pipeline_id, + self.node.id, + context_values, + ) + + context = Context(self.runtime, context_values, root_pipeline_inputs) + try: + hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()} + except Exception as e: + logger.exception( + "[%s] %s context hydrate error", + root_pipeline_id, + self.node.id, + ) + return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." 
% e)
+
+        # check conditions
+        meet_targets = []
+        meet_conditions = []
+        for c in self.node.conditions:
+            resolved_evaluate = Template(c.evaluation).render(hydrated_context)
+            logger.info(
+                "[%s] %s render evaluation %s: %s with %s",
+                root_pipeline_id,
+                self.node.id,
+                c.evaluation,
+                resolved_evaluate,
+                hydrated_context,
+            )
+            try:
+                result = BoolRule(resolved_evaluate).test()
+                logger.info("[%s] %s %s test result: %s", root_pipeline_id, self.node.id, resolved_evaluate, result)
+            except Exception as e:
+                # test failed
+                return self._execute_fail(
+                    "evaluate[{}] fail with data[{}] message: {}".format(
+                        resolved_evaluate, json.dumps(hydrated_context), e
+                    )
+                )
+            else:
+                if result:
+                    meet_conditions.append(c.name)
+                    meet_targets.append(c.target_id)
+
+        # all miss
+        if not meet_targets:
+            return self._execute_fail("all conditions of branches are not met")
+
+        # multiple branches hit
+        if len(meet_targets) != 1:
+            return self._execute_fail("multiple conditions met: {}".format(meet_conditions))
+
+        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)
+
+        return ExecuteResult(
+            should_sleep=False,
+            schedule_ready=False,
+            schedule_type=None,
+            schedule_after=-1,
+            dispatch_processes=[],
+            next_node_id=meet_targets[0],
+        )
diff --git a/bamboo_engine/handlers/executable_end_event.py b/bamboo_engine/handlers/executable_end_event.py
new file mode 100644
index 00000000..93c99781
--- /dev/null
+++ b/bamboo_engine/handlers/executable_end_event.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" +import copy +import logging +import traceback + +from bamboo_engine import states +from bamboo_engine.eri import ProcessInfo, NodeType +from bamboo_engine.handler import register_handler, ExecuteResult + +from .empty_end_event import EmptyEndEventHandler + +logger = logging.getLogger("bamboo_engine") + + +@register_handler(NodeType.ExecutableEndEvent) +class ExecutableEndEventHandler(EmptyEndEventHandler): + def execute( + self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str + ) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + + logger.info( + "[%s] %s executable end event: %s", + process_info.root_pipeline_id, + self.node.id, + self.node, + ) + event = self.runtime.get_executable_end_event(code=self.node.code) + + try: + event.execute( + pipeline_stack=copy.copy(process_info.pipeline_stack), + root_pipeline_id=process_info.root_pipeline_id, + ) + except Exception: + ex_data = traceback.format_exc() + logger.warning( + "[%s] %s executable end event execute raise: %s", + process_info.root_pipeline_id, + self.node.id, + ex_data, + ) + + self.runtime.set_execution_data_outputs( + node_id=self.node.id, outputs={"ex_data": ex_data} + ) + + self.runtime.set_state( + node_id=self.node.id, to_state=states.FAILED, set_archive_time=True + ) + + return ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=None, + ) + + return super().execute(process_info=process_info, loop=loop, inner_loop=inner_loop, version=version) diff --git a/bamboo_engine/handlers/parallel_gateway.py b/bamboo_engine/handlers/parallel_gateway.py new file mode 100644 index 00000000..d719cb83 --- /dev/null +++ b/bamboo_engine/handlers/parallel_gateway.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine import states +from bamboo_engine.eri import ProcessInfo, NodeType +from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult + + +@register_handler(NodeType.ParallelGateway) +class ParallelGatewayHandler(NodeHandler): + def execute( + self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str + ) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + + from_to = {} + for target in self.node.target_nodes: + from_to[target] = self.node.converge_gateway_id + + dispatch_processes = self.runtime.fork( + parent_id=process_info.process_id, + root_pipeline_id=process_info.root_pipeline_id, + pipeline_stack=process_info.pipeline_stack, + from_to=from_to, + ) + + self.runtime.set_state( + node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True + ) + + return ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=dispatch_processes, + next_node_id=None, + ) diff --git a/bamboo_engine/handlers/service_activity.py b/bamboo_engine/handlers/service_activity.py new file mode 100644 index 00000000..e2f74f0a --- /dev/null +++ b/bamboo_engine/handlers/service_activity.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import traceback +from typing import Optional + +from bamboo_engine import states +from bamboo_engine.config import Settings + +from bamboo_engine.context import Context +from bamboo_engine.template import Template +from bamboo_engine.eri import ( + ProcessInfo, + ContextValue, + ContextValueType, + ExecutionData, + CallbackData, + ScheduleType, + NodeType, + Schedule, +) +from bamboo_engine.handler import ( + register_handler, + NodeHandler, + ExecuteResult, + ScheduleResult, +) + +logger = logging.getLogger("bamboo_engine") + + +@register_handler(NodeType.ServiceActivity) +class ServiceActivityHandler(NodeHandler): + """ + 其中所有 set_state 调用都会传入 state version 来确保能够在用户强制失败节点后放弃后续无效的任务执行 + """ + + def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: + """ + 节点的 execute 处理逻辑 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param process_info: 进程信息 + :type process_id: ProcessInfo + :return: 执行结果 + :rtype: ExecuteResult + """ + top_pipeline_id = process_info.top_pipeline_id + root_pipeline_id = process_info.root_pipeline_id + + data = self.runtime.get_data(self.node.id) + root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) + need_render_inputs = data.need_render_inputs() + render_escape_inputs = data.render_escape_inputs() + + logger.info( + "[%s] %s activity execute data: %s, root inputs: %s", + root_pipeline_id, + self.node.id, + data, + root_pipeline_inputs, + ) + + # resolve inputs context references + inputs_refs = set(Template(need_render_inputs).get_reference()) + logger.info( + "[%s] %s activity original refs: %s", + root_pipeline_id, + self.node.id, + inputs_refs, + ) + + additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs) + inputs_refs = inputs_refs.union(additional_refs) + logger.info( + "[%s] %s activity final refs: %s", + root_pipeline_id, + self.node.id, + inputs_refs, + ) + + # prepare context + context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs) + + # pre extract loop outputs + loop_value = loop + Settings.RERUN_INDEX_OFFSET + need_render_inputs[self.LOOP_KEY] = loop_value + if self.LOOP_KEY in data.outputs: + loop_output_key = data.outputs[self.LOOP_KEY] + context_values.append(ContextValue(key=loop_output_key, type=ContextValueType.PLAIN, value=loop_value)) + + # pre extract inner_loop outputs + inner_loop_value = inner_loop + Settings.RERUN_INDEX_OFFSET + need_render_inputs[self.INNER_LOOP_KEY] = inner_loop_value + if self.INNER_LOOP_KEY in data.outputs: + inner_loop_output_key = data.outputs[self.INNER_LOOP_KEY] + context_values.append( + ContextValue( + key=inner_loop_output_key, + type=ContextValueType.PLAIN, + value=inner_loop_value, + ) + ) + + logger.info( + "[%s] %s activity context values: %s", + root_pipeline_id, + self.node.id, + context_values, + ) + + context = Context(self.runtime, context_values, root_pipeline_inputs) + # hydrate will call user code, use try to catch unexpected error + try: + hydrated_context = context.hydrate(deformat=True) + except Exception as e: + logger.exception( + "[%s] %s activity context hydrate error", + root_pipeline_id, + self.node.id, + ) + service_data = ExecutionData(inputs=data.plain_inputs(), outputs={}) + service_data.outputs.ex_data = "inputs hydrate failed(%s), check node log for details" % e + service_data.outputs._result = False + service_data.outputs._loop = loop + service_data.outputs._inner_loop = 
inner_loop
+
+            self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
+            self.runtime.set_state(
+                node_id=self.node.id,
+                version=version,
+                to_state=states.FAILED,
+                set_archive_time=True,
+            )
+            return ExecuteResult(
+                should_sleep=True,
+                schedule_ready=False,
+                schedule_type=None,
+                schedule_after=-1,
+                dispatch_processes=[],
+                next_node_id=None,
+            )
+
+        logger.info(
+            "[%s] %s activity hydrated context: %s",
+            root_pipeline_id,
+            self.node.id,
+            hydrated_context,
+        )
+
+        # resolve inputs
+        execute_inputs = Template(need_render_inputs).render(hydrated_context)
+        execute_inputs.update(render_escape_inputs)
+
+        # data prepare
+        service_data = ExecutionData(inputs=execute_inputs, outputs={})
+        root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={})
+
+        # execute
+        service = self.runtime.get_service(code=self.node.code, version=self.node.version)
+        service.setup_runtime_attributes(
+            id=self.node.id,
+            version=version,
+            top_pipeline_id=top_pipeline_id,
+            root_pipeline_id=root_pipeline_id,
+            loop=loop,
+            inner_loop=inner_loop,
+        )
+
+        # start monitor
+        monitoring = False
+        if self.node.timeout is not None:
+            monitoring = True
+            self.runtime.start_timeout_monitor(
+                process_id=process_info.process_id,
+                node_id=self.node.id,
+                version=version,
+                timeout=self.node.timeout,
+            )
+
+        # pre_execute and execute
+        logger.debug(
+            "[%s] %s service data before execute: %s",
+            root_pipeline_id,
+            self.node.id,
+            service_data,
+        )
+        logger.debug(
+            "[%s] %s root pipeline data before execute: %s",
+            root_pipeline_id,
+            self.node.id,
+            root_pipeline_data,
+        )
+        execute_success = False
+        try:
+            service.pre_execute(data=service_data, root_pipeline_data=root_pipeline_data)
+            execute_success = service.execute(data=service_data, root_pipeline_data=root_pipeline_data)
+        except Exception:
+            ex_data = traceback.format_exc()
+            service_data.outputs.ex_data = ex_data
+            logger.warning("[%s] service execute fail: %s", process_info.root_pipeline_id, ex_data)
+        logger.debug("[%s] service data after execute: %s", root_pipeline_id, service_data)
+        service_data.outputs._result = execute_success
+        service_data.outputs._loop = loop
+        service_data.outputs._inner_loop = inner_loop
+
+        # execute success
+        if execute_success:
+
+            need_schedule = service.need_schedule()
+            next_node_id = None
+
+            if not need_schedule:
+                if monitoring:
+                    self.runtime.stop_timeout_monitor(
+                        process_id=process_info.process_id,
+                        node_id=self.node.id,
+                        version=version,
+                        timeout=self.node.timeout,
+                    )
+
+                self.runtime.set_state(
+                    node_id=self.node.id,
+                    version=version,
+                    to_state=states.FINISHED,
+                    set_archive_time=True,
+                )
+
+                context.extract_outputs(
+                    pipeline_id=top_pipeline_id,
+                    data_outputs=data.outputs,
+                    execution_data_outputs=service_data.outputs,
+                )
+                next_node_id = self.node.target_nodes[0]
+
+            self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
+
+            return ExecuteResult(
+                should_sleep=need_schedule,
+                schedule_ready=need_schedule,
+                schedule_type=service.schedule_type(),
+                schedule_after=service.schedule_after(
+                    schedule=None,
+                    data=service_data,
+                    root_pipeline_data=root_pipeline_data,
+                ),
+                dispatch_processes=[],
+                next_node_id=next_node_id,
+            )
+
+        # pre_execute failed or execute failed
+        if monitoring:
+            self.runtime.stop_timeout_monitor(
+                process_id=process_info.process_id,
+                node_id=self.node.id,
+                version=version,
+                timeout=self.node.timeout,
+            )
+
+        if not self.node.error_ignorable:
+            self.runtime.set_state(
+                node_id=self.node.id,
+                version=version,
to_state=states.FAILED, + set_archive_time=True, + ) + + self.runtime.set_execution_data(node_id=self.node.id, data=service_data) + + context.extract_outputs( + pipeline_id=top_pipeline_id, + data_outputs=data.outputs, + execution_data_outputs=service_data.outputs, + ) + + return ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=None, + ) + + # pre_execute failed or execute failed and error ignore + self.runtime.set_state( + node_id=self.node.id, + version=version, + to_state=states.FINISHED, + set_archive_time=True, + error_ignored=True, + ) + + self.runtime.set_execution_data(node_id=self.node.id, data=service_data) + + context.extract_outputs( + pipeline_id=top_pipeline_id, + data_outputs=data.outputs, + execution_data_outputs=service_data.outputs, + ) + + return ExecuteResult( + should_sleep=False, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=self.node.target_nodes[0], + ) + + def _finish_schedule( + self, + process_info: ProcessInfo, + schedule: Schedule, + data_outputs: dict, + execution_data: ExecutionData, + error_ignored: bool, + root_pipeline_inputs: dict, + ) -> ScheduleResult: + if self.node.timeout is not None: + self.runtime.stop_timeout_monitor( + process_id=process_info.process_id, + node_id=self.node.id, + version=schedule.version, + timeout=self.node.timeout, + ) + + self.runtime.set_state( + node_id=self.node.id, + version=schedule.version, + to_state=states.FINISHED, + set_archive_time=True, + error_ignored=error_ignored, + ) + + context = Context(self.runtime, [], root_pipeline_inputs) + context.extract_outputs( + pipeline_id=process_info.top_pipeline_id, + data_outputs=data_outputs, + execution_data_outputs=execution_data.outputs, + ) + + return ScheduleResult( + has_next_schedule=False, + schedule_after=-1, + schedule_done=True, + next_node_id=self.node.target_nodes[0], + ) + + def schedule( + self, + process_info: ProcessInfo, + loop: int, + inner_loop: int, + schedule: Schedule, + callback_data: Optional[CallbackData] = None, + ) -> ScheduleResult: + """ + 节点的 schedule 处理逻辑 + + :param process_id: 进程 ID + :type process_id: int + :param schedule: Schedule 实例 + :type schedule: Schedule + :param callback_data: 回调数据, defaults to None + :type callback_data: Optional[CallbackData], optional + :return: 调度结果 + :rtype: ScheduleResult + """ + # data prepare + top_pipeline_id = process_info.top_pipeline_id + root_pipeline_id = process_info.root_pipeline_id + + data_outputs = self.runtime.get_data_outputs(self.node.id) + service_data = self.runtime.get_execution_data(self.node.id) + + root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id) + root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={}) + logger.info( + "[%s] %s activity schedule data: %s, root inputs: %s", + root_pipeline_id, + self.node.id, + service_data, + root_pipeline_inputs, + ) + + # schedule + service = self.runtime.get_service(code=self.node.code, version=self.node.version) + service.setup_runtime_attributes( + id=self.node.id, + version=schedule.version, + top_pipeline_id=top_pipeline_id, + root_pipeline_id=root_pipeline_id, + loop=loop, + inner_loop=inner_loop, + ) + + schedule_success = False + schedule.times += 1 + try: + schedule_success = service.schedule( + schedule=schedule, + data=service_data, + root_pipeline_data=root_pipeline_data, + callback_data=callback_data, + ) + except Exception: + service_data.outputs.ex_data 
= traceback.format_exc() + + service_data.outputs._result = schedule_success + service_data.outputs._loop = loop + service_data.outputs._inner_loop = inner_loop + + self.runtime.add_schedule_times(schedule.id) + self.runtime.set_execution_data(node_id=self.node.id, data=service_data) + + monitoring = self.node.timeout is not None + schedule_type = service.schedule_type() + + # schedule success + if schedule_success: + if schedule_type == ScheduleType.CALLBACK: + return self._finish_schedule( + process_info=process_info, + schedule=schedule, + data_outputs=data_outputs, + execution_data=service_data, + error_ignored=False, + root_pipeline_inputs=root_pipeline_inputs, + ) + else: + is_schedule_done = service.is_schedule_done() + + # poll or multi-callback finished + if is_schedule_done: + return self._finish_schedule( + process_info=process_info, + schedule=schedule, + data_outputs=data_outputs, + execution_data=service_data, + error_ignored=False, + root_pipeline_inputs=root_pipeline_inputs, + ) + + has_next_schedule = schedule_type == ScheduleType.POLL + return ScheduleResult( + has_next_schedule=has_next_schedule, + schedule_after=service.schedule_after( + schedule=schedule, + data=service_data, + root_pipeline_data=root_pipeline_data, + ), + schedule_done=False, + next_node_id=None, + ) + + if monitoring: + self.runtime.stop_timeout_monitor( + process_id=process_info.process_id, + node_id=self.node.id, + version=schedule.version, + timeout=self.node.timeout, + ) + + # schedule fail + if not self.node.error_ignorable: + self.runtime.set_state( + node_id=self.node.id, + version=schedule.version, + to_state=states.FAILED, + set_archive_time=True, + ) + + context = Context(self.runtime, [], root_pipeline_inputs) + context.extract_outputs( + pipeline_id=process_info.top_pipeline_id, + data_outputs=data_outputs, + execution_data_outputs=service_data.outputs, + ) + + return ScheduleResult( + has_next_schedule=False, + schedule_after=-1, + schedule_done=False, + next_node_id=None, + ) + + # schedule fail and error ignore + return self._finish_schedule( + process_info=process_info, + schedule=schedule, + data_outputs=data_outputs, + execution_data=service_data, + error_ignored=True, + root_pipeline_inputs=root_pipeline_inputs, + ) diff --git a/bamboo_engine/handlers/subprocess.py b/bamboo_engine/handlers/subprocess.py new file mode 100644 index 00000000..a4a40077 --- /dev/null +++ b/bamboo_engine/handlers/subprocess.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+"""
+
+import logging
+
+from bamboo_engine.context import Context
+from bamboo_engine.config import Settings
+from bamboo_engine.template import Template
+from bamboo_engine.eri import ProcessInfo, ContextValue, ContextValueType, NodeType
+from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
+
+logger = logging.getLogger("bamboo_engine")
+
+
+@register_handler(NodeType.SubProcess)
+class SubProcessHandler(NodeHandler):
+    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
+        """
+        节点的 execute 处理逻辑
+
+        :param process_info: 进程信息
+        :type process_info: ProcessInfo
+        :param loop: 重入次数
+        :type loop: int
+        :param inner_loop: 当前流程重入次数
+        :type inner_loop: int
+        :param version: 执行版本
+        :type version: str
+        :return: 执行结果
+        :rtype: ExecuteResult
+        """
+        data = self.runtime.get_data(self.node.id)
+        root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
+        need_render_inputs = data.need_render_inputs()
+        render_escape_inputs = data.render_escape_inputs()
+        top_pipeline_id = process_info.top_pipeline_id
+        root_pipeline_id = process_info.root_pipeline_id
+
+        logger.info(
+            "[%s] %s subprocess data: %s",
+            root_pipeline_id,
+            self.node.id,
+            data,
+        )
+
+        # reset inner_loop of nodes in subprocess
+        self.runtime.reset_children_state_inner_loop(self.node.id)
+
+        # resolve inputs context references
+        inputs_refs = set(Template(need_render_inputs).get_reference())
+        logger.info(
+            "[%s] %s subprocess original refs: %s",
+            root_pipeline_id,
+            self.node.id,
+            inputs_refs,
+        )
+
+        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs)
+        inputs_refs = inputs_refs.union(additional_refs)
+        logger.info(
+            "[%s] %s subprocess final refs: %s",
+            root_pipeline_id,
+            self.node.id,
+            inputs_refs,
+        )
+
+        # prepare context
+        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs)
+
+        # pre extract loop outputs
+        loop_value = loop + Settings.RERUN_INDEX_OFFSET
+        if self.LOOP_KEY in data.outputs:
+            loop_output_key = data.outputs[self.LOOP_KEY]
+            context_values.append(
+                ContextValue(
+                    key=loop_output_key,
+                    type=ContextValueType.PLAIN,
+                    value=loop_value,
+                )
+            )
+        logger.info(
+            "[%s] %s subprocess parent context values: %s",
+            root_pipeline_id,
+            self.node.id,
+            context_values,
+        )
+
+        context = Context(self.runtime, context_values, root_pipeline_inputs)
+        hydrated_context = context.hydrate(deformat=True)
+        logger.info(
+            "[%s] %s subprocess parent hydrated context: %s",
+            root_pipeline_id,
+            self.node.id,
+            hydrated_context,
+        )
+
+        # resolve inputs
+        subprocess_inputs = Template(need_render_inputs).render(hydrated_context)
+        subprocess_inputs.update(render_escape_inputs)
+        sub_context_values = {
+            key: ContextValue(key=key, type=ContextValueType.PLAIN, value=value)
+            for key, value in subprocess_inputs.items()
+        }
+        logger.info(
+            "[%s] %s subprocess inject context: %s",
+            root_pipeline_id,
+            self.node.id,
+            sub_context_values,
+        )
+
+        # update subprocess context, inject subprocess data
+        self.runtime.upsert_plain_context_values(self.node.id, sub_context_values)
+        process_info.pipeline_stack.append(self.node.id)
+        self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack)
+
+        return ExecuteResult(
+            should_sleep=False,
+            schedule_ready=False,
+            schedule_type=None,
+            schedule_after=-1,
+            dispatch_processes=[],
+            next_node_id=self.node.start_event_id,
+        )
diff --git a/bamboo_engine/local.py b/bamboo_engine/local.py
new file mode 100644
index 00000000..0d2a362d
--- /dev/null
+++ b/bamboo_engine/local.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+"""
+引擎执行 local
+"""
+
+from typing import Optional
+
+from werkzeug.local import Local
+
+from .utils.object import Representable
+
+_local = Local()
+
+
+class CurrentNodeInfo(Representable):
+    def __init__(self, node_id: str, version: str, loop: int):
+        self.node_id = node_id
+        self.version = version
+        self.loop = loop
+
+
+def set_node_info(node_info: CurrentNodeInfo):
+    """
+    设置当前进程/线程/协程 Local 中的当前节点信息
+
+    :param node_info: 当前节点信息
+    :type node_info: CurrentNodeInfo
+    """
+    _local.current_node_info = node_info
+
+
+def get_node_info() -> Optional[CurrentNodeInfo]:
+    """
+    获取当前进程/线程/协程正在处理的节点信息(节点 ID,版本及重入次数)
+
+    :return: 当前节点信息,不存在时返回 None
+    :rtype: Optional[CurrentNodeInfo]
+    """
+    return getattr(_local, "current_node_info", None)
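+
+
+# NOTE (editorial): a hedged usage sketch, illustrative only -- values are made up:
+#
+#     set_node_info(CurrentNodeInfo(node_id="node_1", version="v1", loop=1))
+#     info = get_node_info()
+#     info.node_id  # -> "node_1", visible only in the current process/thread/coroutine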
diff --git a/bamboo_engine/metrics.py b/bamboo_engine/metrics.py
new file mode 100644
index 00000000..b4b1ea97
--- /dev/null
+++ b/bamboo_engine/metrics.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import os
+import time
+
+from functools import wraps
+
+from prometheus_client import Gauge, Histogram, Counter
+
+
+def decode_buckets(buckets_list):
+    return [float(x) for x in buckets_list.split(",")]
+
+
+def get_histogram_buckets_from_env(env_name):
+    if env_name in os.environ:
+        buckets = decode_buckets(os.environ.get(env_name))
+    else:
+        if hasattr(Histogram, "DEFAULT_BUCKETS"):  # pragma: no cover
+            buckets = Histogram.DEFAULT_BUCKETS
+        else:  # pragma: no cover
+            # For prometheus-client < 0.3.0 we cannot easily access
+            # the default buckets:
+            buckets = (
+                0.005,
+                0.01,
+                0.025,
+                0.05,
+                0.075,
+                0.1,
+                0.25,
+                0.5,
+                0.75,
+                1.0,
+                2.5,
+                5.0,
+                7.5,
+                10.0,
+                float("inf"),
+            )
+    return buckets
+
+
+def setup_gauge(*gauges):
+    def wrapper(func):
+        @wraps(func)
+        def _wrapper(*args, **kwargs):
+            for g in gauges:
+                g.inc(1)
+            try:
+                return func(*args, **kwargs)
+            finally:
+                for g in gauges:
+                    g.dec(1)
+
+        return _wrapper
+
+    return wrapper
+
+
+def setup_histogram(*histograms):
+    def wrapper(func):
+        @wraps(func)
+        def _wrapper(*args, **kwargs):
+            start = time.time()
+            try:
+                return func(*args, **kwargs)
+            finally:
+                for h in histograms:
+                    h.observe(time.time() - start)
+
+        return _wrapper
+
+    return wrapper
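+
+
+# NOTE (editorial): a hedged sketch of the decorator helpers above, illustrative
+# only -- the metric names and the decorated function are made up:
+#
+#     IN_FLIGHT = Gauge("demo_in_flight", "in-flight call count")
+#     CALL_TIME = Histogram("demo_call_time", "time spent in call")
+#
+#     @setup_gauge(IN_FLIGHT)
+#     @setup_histogram(CALL_TIME)
+#     def handle():
+#         ...  # the gauge is incremented on entry and decremented on exit,
+#              # the histogram observes the elapsed wall-clock time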
+
+
+# engine metrics
+ENGINE_RUNNING_PROCESSES = Gauge(
+    "engine_running_processes", "count running state processes"
+)
+ENGINE_RUNNING_SCHEDULES = Gauge(
+    "engine_running_schedules", "count running state schedules"
+)
+ENGINE_PROCESS_RUNNING_TIME = Histogram(
+    "engine_process_running_time",
+    "time spent running process",
+    buckets=get_histogram_buckets_from_env("ENGINE_PROCESS_RUNNING_TIME_BUCKETS"),
+)
+ENGINE_SCHEDULE_RUNNING_TIME = Histogram(
+    "engine_schedule_running_time",
+    "time spent running schedule",
+    buckets=get_histogram_buckets_from_env("ENGINE_SCHEDULE_RUNNING_TIME_BUCKETS"),
+)
+ENGINE_NODE_EXECUTE_TIME = Histogram(
+    "engine_node_execute_time",
+    "time spent executing node",
+    buckets=get_histogram_buckets_from_env("ENGINE_NODE_EXECUTE_TIME_BUCKETS"),
+    labelnames=["type"],
+)
+ENGINE_NODE_SCHEDULE_TIME = Histogram(
+    "engine_node_schedule_time",
+    "time spent scheduling node",
+    buckets=get_histogram_buckets_from_env("ENGINE_NODE_SCHEDULE_TIME_BUCKETS"),
+    labelnames=["type"],
+)
+
+# runtime metrics
+ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME = Histogram(
+    "engine_runtime_context_value_read_time", "time spent reading context value"
+)
+ENGINE_RUNTIME_CONTEXT_REF_READ_TIME = Histogram(
+    "engine_runtime_context_ref_read_time", "time spent reading context value reference"
+)
+ENGINE_RUNTIME_CONTEXT_VALUE_UPSERT_TIME = Histogram(
+    "engine_runtime_context_value_upsert_time", "time spent upserting context value"
+)
+
+ENGINE_RUNTIME_DATA_INPUTS_READ_TIME = Histogram(
+    "engine_runtime_data_inputs_read_time", "time spent reading node data inputs"
+)
+ENGINE_RUNTIME_DATA_OUTPUTS_READ_TIME = Histogram(
+    "engine_runtime_data_outputs_read_time", "time spent reading node data outputs"
+)
+ENGINE_RUNTIME_DATA_READ_TIME = Histogram(
+    "engine_runtime_data_read_time", "time spent reading node data inputs and outputs"
+)
+
+ENGINE_RUNTIME_EXEC_DATA_INPUTS_READ_TIME = Histogram(
+    "engine_runtime_exec_data_inputs_read_time",
+    "time spent reading node execution data inputs",
+)
+ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME = Histogram(
+    "engine_runtime_exec_data_outputs_read_time",
+    "time spent reading node execution data outputs",
+)
+ENGINE_RUNTIME_EXEC_DATA_READ_TIME = Histogram(
+    "engine_runtime_exec_data_read_time",
+    "time spent reading node execution
data inputs and outputs", +) +ENGINE_RUNTIME_EXEC_DATA_INPUTS_WRITE_TIME = Histogram( + "engine_runtime_exec_data_inputs_write_time", + "time spent writing node execution data inputs", +) +ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_WRITE_TIME = Histogram( + "engine_runtime_exec_data_outputs_write_time", + "time spent writing node execution data outputs", +) +ENGINE_RUNTIME_EXEC_DATA_WRITE_TIME = Histogram( + "engine_runtime_exec_data_write_time", + "time spent writing node execution data inputs and outputs", +) +ENGINE_RUNTIME_CALLBACK_DATA_READ_TIME = Histogram( + "engine_runtime_callback_data_read_time", + "time spent reading node callback data", +) + +ENGINE_RUNTIME_SCHEDULE_READ_TIME = Histogram( + "engine_runtime_schedule_read_time", "time spent reading schedule" +) +ENGINE_RUNTIME_SCHEDULE_WRITE_TIME = Histogram( + "engine_runtime_schedule_write_time", "time spent writing schedule" +) + +ENGINE_RUNTIME_STATE_READ_TIME = Histogram( + "engine_runtime_state_read_time", "time spent reading state" +) +ENGINE_RUNTIME_STATE_WRITE_TIME = Histogram( + "engine_runtime_state_write_time", "time spent writing state" +) + +ENGINE_RUNTIME_NODE_READ_TIME = Histogram( + "engine_runtime_node_read_time", "time spent reading node" +) + +ENGINE_RUNTIME_PROCESS_READ_TIME = Histogram( + "engine_runtime_process_read_time", "time spent reading process" +) diff --git a/bamboo_engine/states.py b/bamboo_engine/states.py new file mode 100644 index 00000000..2cc16319 --- /dev/null +++ b/bamboo_engine/states.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +""" +引擎内部状态及状态相关数据定义模块 +""" + +from enum import Enum + +from .utils.collections import ConstantDict + + +class StateType(Enum): + CREATED = "CREATED" + READY = "READY" + RUNNING = "RUNNING" + SUSPENDED = "SUSPENDED" + BLOCKED = "BLOCKED" + FINISHED = "FINISHED" + FAILED = "FAILED" + REVOKED = "REVOKED" + + +CREATED = StateType.CREATED.value +READY = StateType.READY.value +RUNNING = StateType.RUNNING.value +SUSPENDED = StateType.SUSPENDED.value +BLOCKED = StateType.BLOCKED.value +FINISHED = StateType.FINISHED.value +FAILED = StateType.FAILED.value +REVOKED = StateType.REVOKED.value + +ALL_STATES = frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED]) + +ARCHIVED_STATES = frozenset([FINISHED, FAILED, REVOKED]) +SLEEP_STATES = frozenset([SUSPENDED, REVOKED]) +CHILDREN_IGNORE_STATES = frozenset([BLOCKED]) + +INVERTED_TRANSITION = ConstantDict({RUNNING: frozenset([READY, FINISHED])}) + +TRANSITION = ConstantDict( + { + READY: frozenset([RUNNING, SUSPENDED]), + RUNNING: frozenset([FINISHED, FAILED, REVOKED, SUSPENDED]), + SUSPENDED: frozenset([READY, REVOKED, RUNNING]), + BLOCKED: frozenset([]), + FINISHED: frozenset([RUNNING, FAILED]), + FAILED: frozenset([READY, FINISHED]), + REVOKED: frozenset([]), + } +) + + +def can_transit(from_state, to_state): + + if from_state in TRANSITION: + if to_state in TRANSITION[from_state]: + return True + return False diff --git a/bamboo_engine/template/__init__.py b/bamboo_engine/template/__init__.py new file mode 100644 index 00000000..d455fb8d --- /dev/null +++ b/bamboo_engine/template/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +模板相关逻辑存放模块 +""" + +from .template import Template # noqa diff --git a/bamboo_engine/template/sandbox.py b/bamboo_engine/template/sandbox.py new file mode 100644 index 00000000..f9ad0455 --- /dev/null +++ b/bamboo_engine/template/sandbox.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +""" +模板渲染沙箱 +""" + +from typing import List, Dict + +import importlib + +from bamboo_engine.config import Settings + + +def _shield_words(sandbox: dict, words: List[str]): + for shield_word in words: + sandbox[shield_word] = None + + +class ModuleObject: + def __init__(self, sub_paths, module): + if len(sub_paths) == 1: + setattr(self, sub_paths[0], module) + return + setattr(self, sub_paths[0], ModuleObject(sub_paths[1:], module)) + + +def _import_modules(sandbox: dict, modules: Dict[str, str]): + for mod_path, alias in modules.items(): + mod = importlib.import_module(mod_path) + sub_paths = alias.split(".") + if len(sub_paths) == 1: + sandbox[alias] = mod + else: + sandbox[sub_paths[0]] = ModuleObject(sub_paths[1:], mod) + + +def get() -> dict: + sandbox = {} + + _shield_words(sandbox, Settings.MAKO_SANDBOX_SHIELD_WORDS) + _import_modules(sandbox, Settings.MAKO_SANDBOX_IMPORT_MODULES) + + return sandbox diff --git a/bamboo_engine/template/template.py b/bamboo_engine/template/template.py new file mode 100644 index 00000000..c89ee9d3 --- /dev/null +++ b/bamboo_engine/template/template.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +封装模板处理,渲染逻辑的相关模块 +""" + +import copy +import re +import logging + +from typing import Any, List, Set + +from mako.template import Template as MakoTemplate +from mako import lexer, codegen +from mako.exceptions import MakoException + +from bamboo_engine.utils.mako_utils.checker import check_mako_template_safety +from bamboo_engine.utils.mako_utils.exceptions import ForbiddenMakoTemplateException +from bamboo_engine.utils import mako_safety +from bamboo_engine.utils.string import deformat_var_key + +from . 
import sandbox
+
+
+logger = logging.getLogger("root")
+# find mako templates of the form ${xxx}, where xxx contains none of "${}#" (# may raise memory error)
+TEMPLATE_PATTERN = re.compile(r"\${[^${}#]+}")
+
+
+class Template:
+    def __init__(self, data: Any):
+        self.data = data
+
+    def get_reference(self, deformat=False) -> Set[str]:
+        """
+        Get all identifiers referenced by templates in the current data
+
+        :return: set of referenced identifiers
+        :rtype: Set[str]
+        """
+
+        reference = []
+        templates = self.get_templates()
+        for tpl in templates:
+            reference += self._get_template_reference(tpl)
+        reference = set(reference)
+        if not deformat:
+            reference = {"${%s}" % r for r in reference}
+
+        return reference
+
+    def get_templates(self) -> List[str]:
+        """
+        Get all template fragments contained in the current data
+
+        :return: list of template fragments
+        :rtype: List[str]
+        """
+        templates = []
+        data = self.data
+        if isinstance(data, str):
+            templates += self._get_string_templates(data)
+        if isinstance(data, (list, tuple)):
+            for item in data:
+                templates += Template(item).get_templates()
+        if isinstance(data, dict):
+            for value in list(data.values()):
+                templates += Template(value).get_templates()
+        return list(set(templates))
+
+    def render(self, context: dict) -> Any:
+        """
+        Render the current template data with the given context
+
+        :param context: template rendering context
+        :type context: dict
+        :return: the rendered data
+        :rtype: Any
+        """
+        data = self.data
+        if isinstance(data, str):
+            return self._render_string(data, context)
+        if isinstance(data, list):
+            ldata = [""] * len(data)
+            for index, item in enumerate(data):
+                ldata[index] = Template(copy.deepcopy(item)).render(context)
+            return ldata
+        if isinstance(data, tuple):
+            ldata = [""] * len(data)
+            for index, item in enumerate(data):
+                ldata[index] = Template(copy.deepcopy(item)).render(context)
+            return tuple(ldata)
+        if isinstance(data, dict):
+            for key, value in list(data.items()):
+                data[key] = Template(copy.deepcopy(value)).render(context)
+            return data
+        return data
+
+    def _get_string_templates(self, string) -> List[str]:
+        return list(set(TEMPLATE_PATTERN.findall(string)))
+
+    def _get_template_reference(self, template: str) -> List[str]:
+        lex = lexer.Lexer(template)
+
+        try:
+            node = lex.parse()
+        except MakoException as e:
+            logger.warning(
+                "pipeline get template[{}] reference error[{}]".format(template, e)
+            )
+            return []
+
+        # Dummy compiler. _Identifiers class requires one
+        # but is only interested in the reserved_names field
+        def compiler():
+            return None
+
+        compiler.reserved_names = set()
+        identifiers = codegen._Identifiers(compiler, node)
+
+        return list(identifiers.undeclared)
+
+    def _render_string(self, string: str, context: dict) -> str:
+        """
+        Render a template string with the given context
+
+        :param string: the template string
+        :type string: str
+        :param context: rendering context
+        :type context: dict
+        :return: the rendered string
+        :rtype: str
+        """
+        if not isinstance(string, str):
+            return string
+        templates = self._get_string_templates(string)
+
+        # TODO keep render return object, here only process simple situation
+        if (
+            len(templates) == 1
+            and templates[0] == string
+            and deformat_var_key(string) in context
+        ):
+            return context[deformat_var_key(string)]
+
+        for tpl in templates:
+            try:
+                check_mako_template_safety(
+                    tpl,
+                    mako_safety.SingleLineNodeVisitor(),
+                    mako_safety.SingleLinCodeExtractor(),
+                )
+            except ForbiddenMakoTemplateException as e:
+                logger.warning("forbidden template: {}, exception: {}".format(tpl, e))
+                continue
+            except Exception:
+                logger.exception("{} safety check error.".format(tpl))
+                continue
+            resolved = Template._render_template(tpl, context)
+            string = string.replace(tpl, resolved)
+        return string
+
+    @staticmethod
+    def _render_template(template: str, context: dict) -> Any:
+        """
+        Render a single template fragment with the given context
+
+        :param template: the template fragment
+        :type template: str
+        :param context: rendering context
+        :type context: dict
+        :raises TypeError: when template is not a string
+        :return: the rendered result, or the raw template on failure
+        :rtype: Any
+        """
+        data = {}
+        data.update(sandbox.get())
+        data.update(context)
+        if not isinstance(template, str):
+            raise TypeError(
+                "constant resolve error, template[%s] is not a string" % template
+            )
+        try:
+            tm = MakoTemplate(template)
+        except (MakoException, SyntaxError) as e:
+            logger.error("pipeline resolve template[{}] error[{}]".format(template, e))
+            return template
+        try:
+            resolved = tm.render_unicode(**data)
+        except Exception as e:
+            logger.warning(
+                "constant content({}) is invalid, data({}), error: {}".format(
+                    template, data, e
+                )
+            )
+            return template
+        else:
+            return resolved
diff --git a/bamboo_engine/utils/__init__.py b/bamboo_engine/utils/__init__.py
new file mode 100644
index 00000000..1714047d
--- /dev/null
+++ b/bamboo_engine/utils/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+"""
+Tools used internally by the engine
+"""
diff --git a/bamboo_engine/utils/boolrule/__init__.py b/bamboo_engine/utils/boolrule/__init__.py
new file mode 100644
index 00000000..e7c06d34
--- /dev/null
+++ b/bamboo_engine/utils/boolrule/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +bool 表达式解析工具模块 +""" + +from .boolrule import ( + BoolRule, + MissingVariableException, + UnknownOperatorException, +) # noqa diff --git a/bamboo_engine/utils/boolrule/boolrule.py b/bamboo_engine/utils/boolrule/boolrule.py new file mode 100644 index 00000000..02bf770a --- /dev/null +++ b/bamboo_engine/utils/boolrule/boolrule.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pyparsing import ( + CaselessLiteral, + Combine, + Forward, + Group, + Keyword, + Optional, + ParseException, + ParseResults, + QuotedString, + Suppress, + Word, + ZeroOrMore, + alphanums, + alphas, + delimitedList, + nums, + oneOf, +) + + +class SubstituteVal: + """ + Represents a token that will later be replaced by a context value. + """ + + def __init__(self, t): + self._path = t[0] + + def get_val(self, context): + if not context: + # raise MissingVariableException( + # 'context missing or empty' + # ) + return self._path + + val = context + + try: + for part in self._path.split(pathDelimiter): + val = getattr(val, part) if hasattr(val, part) else val[part] + + except KeyError: + raise MissingVariableException( + "no value supplied for {}".format(self._path) + ) + + return val + + def __repr__(self): + return "SubstituteVal(%s)" % self._path + + +# Grammar definition +pathDelimiter = "." +# match gcloud's variable +identifier = Combine( + Optional("${") + Optional("_") + Word(alphas, alphanums + "_") + Optional("}") +) +# identifier = Word(alphas, alphanums + "_") +propertyPath = delimitedList(identifier, pathDelimiter, combine=True) + +and_ = Keyword("and", caseless=True) +or_ = Keyword("or", caseless=True) +in_ = Keyword("in", caseless=True) + +lparen = Suppress("(") +rparen = Suppress(")") + +binaryOp = oneOf("== != < > >= <= in notin issuperset notissuperset", caseless=True)( + "operator" +) + +E = CaselessLiteral("E") +numberSign = Word("+-", exact=1) +realNumber = Combine( + Optional(numberSign) + + (Word(nums) + "." + Optional(Word(nums)) | ("." 
+ Word(nums)))
+    + Optional(E + Optional(numberSign) + Word(nums))
+)
+
+integer = Combine(
+    Optional(numberSign) + Word(nums) + Optional(E + Optional("+") + Word(nums))
+)
+
+# str_ = quotedString.addParseAction(removeQuotes)
+str_ = QuotedString('"') | QuotedString("'")
+bool_ = oneOf("true false", caseless=True)
+
+simpleVals = (
+    realNumber.setParseAction(lambda toks: float(toks[0]))
+    | integer.setParseAction(lambda toks: int(toks[0]))
+    | str_
+    | bool_.setParseAction(lambda toks: toks[0] == "true")
+    | propertyPath.setParseAction(lambda toks: SubstituteVal(toks))
+)  # need to add support for alg expressions
+
+propertyVal = simpleVals | (lparen + Group(delimitedList(simpleVals)) + rparen)
+
+boolExpression = Forward()
+boolCondition = Group(
+    (Group(propertyVal)("lval") + binaryOp + Group(propertyVal)("rval"))
+    | (lparen + boolExpression + rparen)
+)
+boolExpression << boolCondition + ZeroOrMore((and_ | or_) + boolExpression)
+
+
+def double_equals_trans(lval, rval, operator):
+    # coerce operand types so comparisons behave like a loose "double equals"
+    if operator in ["in", "notin"]:
+        if isinstance(rval, list) and len(rval):
+            transed_rval = []
+            if isinstance(lval, int):
+                for item in rval:
+                    try:
+                        transed_rval.append(int(item))
+                    except Exception:
+                        pass
+            elif isinstance(lval, str):
+                for item in rval:
+                    try:
+                        transed_rval.append(str(item))
+                    except Exception:
+                        pass
+            rval += transed_rval
+
+    elif operator in ["issuperset", "notissuperset"]:
+        # avoid converting set('abc') to {'a', 'b', 'c'}; keep {'abc'}
+        if isinstance(lval, str):
+            lval = [lval]
+        if isinstance(rval, str):
+            rval = [rval]
+
+    else:
+        try:
+            if isinstance(lval, int):
+                rval = int(rval)
+            elif isinstance(rval, int):
+                lval = int(lval)
+            if isinstance(lval, str):
+                rval = str(rval)
+            elif isinstance(rval, str):
+                lval = str(lval)
+        except Exception:
+            pass
+
+    return lval, rval
+
+
+class BoolRule:
+    """
+    Represents a boolean expression and provides a `test` method to evaluate
+    the expression and determine its truthiness.
+
+    :param query: A string containing the query to be evaluated
+    :param lazy: If ``True``, parse the query the first time it's tested rather
+                 than immediately. This can help with performance if you
+                 instantiate a lot of rules and only end up evaluating a
+                 small handful.
+    """
+
+    _compiled = False
+    _tokens = None
+    _query = None
+
+    def __init__(self, query, lazy=False, strict=True):
+        self._query = query
+        self.strict = strict
+        if not lazy:
+            self._compile()
+
+    def test(self, context=None):
+        """
+        Test the expression against the given context and return the result.
+
+        :param context: A dict context to evaluate the expression against.
+        :return: True if the expression successfully evaluated against the
+                 context, or False otherwise.
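+
+        A minimal usage sketch (note: substitution keys keep their ``${}``
+        wrapper in the context dict, since the grammar above parses ``${x}``
+        as a single identifier)::
+
+            BoolRule("5 > 3").test()                # True, no context needed
+            BoolRule("${a} > 2").test({"${a}": 3})  # True
+            BoolRule("*").test()                    # match-all query, always True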
+        """
+        if self._is_match_all():
+            return True
+
+        self._compile()
+        return self._test_tokens(self._tokens, context)
+
+    def _is_match_all(self):
+        return self._query == "*"
+
+    def _compile(self):
+        if not self._compiled:
+
+            # special case match-all query
+            if self._is_match_all():
+                return
+
+            try:
+                self._tokens = boolExpression.parseString(
+                    self._query, parseAll=self.strict
+                )
+            except ParseException:
+                raise
+
+            self._compiled = True
+
+    def _expand_val(self, val, context):
+        if isinstance(val, list):
+            val = [self._expand_val(v, context) for v in val]
+
+        if isinstance(val, SubstituteVal):
+            ret = val.get_val(context)
+            return ret
+
+        if isinstance(val, ParseResults):
+            return [self._expand_val(x, context) for x in val.asList()]
+
+        return val
+
+    def _test_tokens(self, tokens, context):
+        passed = False
+
+        for token in tokens:
+
+            if not isinstance(token, ParseResults):
+                if token == "or" and passed:
+                    return True
+                elif token == "and" and not passed:
+                    return False
+                continue
+
+            if not token.getName():
+                passed = self._test_tokens(token, context)
+                continue
+
+            items = token.asDict()
+
+            operator = items["operator"]
+            lval = self._expand_val(items["lval"][0], context)
+            rval = self._expand_val(items["rval"][0], context)
+            lval, rval = double_equals_trans(lval, rval, operator)
+
+            if operator in ("=", "==", "eq"):
+                passed = lval == rval
+            elif operator in ("!=", "ne"):
+                passed = lval != rval
+            elif operator in (">", "gt"):
+                passed = lval > rval
+            elif operator in (">=", "ge"):
+                passed = lval >= rval
+            elif operator in ("<", "lt"):
+                passed = lval < rval
+            elif operator in ("<=", "le"):
+                passed = lval <= rval
+            elif operator == "in":
+                passed = lval in rval
+            elif operator == "notin":
+                passed = lval not in rval
+            elif operator == "issuperset":
+                passed = set(lval).issuperset(set(rval))
+            elif operator == "notissuperset":
+                passed = not set(lval).issuperset(set(rval))
+            else:
+                raise UnknownOperatorException("Unknown operator '{}'".format(operator))
+
+        return passed
+
+
+class MissingVariableException(Exception):
+    """
+    Raised when an expression contains a property path that's not supplied in
+    the context.
+    """
+
+    pass
+
+
+class UnknownOperatorException(Exception):
+    """
+    Raised when an expression uses an unknown operator.
+
+    This should never be raised since the operator won't be correctly parsed as
+    a token by pyparsing, but it's useful to have this hanging around for when
+    additional operators are being added.
+    """
+
+    pass
diff --git a/bamboo_engine/utils/collections.py b/bamboo_engine/utils/collections.py
new file mode 100644
index 00000000..04eaa0f9
--- /dev/null
+++ b/bamboo_engine/utils/collections.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +""" +集合类工具 +""" + + +from typing import Any +from collections import UserDict + + +class FancyDict(dict): + def __getattr__(self, key: str) -> Any: + try: + return self[key] + except KeyError as k: + raise AttributeError(k) + + def __setattr__(self, key: str, value: Any): + # 内建属性不放入 key 中 + if key.startswith("__") and key.endswith("__"): + super().__setattr__(key, value) + else: + self[key] = value + + def __delattr__(self, key: str): + try: + del self[key] + except KeyError as k: + raise AttributeError(k) + + +class ConstantDict(dict): + """ConstantDict is a subclass of :class:`dict`, implementing __setitem__ + method to avoid item assignment:: + + >>> d = ConstantDict({'key': 'value'}) + >>> d['key'] = 'value' + Traceback (most recent call last): + ... + TypeError: 'ConstantDict' object does not support item assignment + """ + + def __setitem__(self, key: str, value: Any): + raise TypeError( + "'%s' object does not support item assignment" % self.__class__.__name__ + ) diff --git a/bamboo_engine/utils/constants.py b/bamboo_engine/utils/constants.py new file mode 100644 index 00000000..1f810d23 --- /dev/null +++ b/bamboo_engine/utils/constants.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +from bamboo_engine.eri import ContextValueType + + +VAR_CONTEXT_MAPPING = { + "plain": ContextValueType.PLAIN, + "splice": ContextValueType.SPLICE, + "lazy": ContextValueType.COMPUTE, +} diff --git a/bamboo_engine/utils/graph.py b/bamboo_engine/utils/graph.py new file mode 100644 index 00000000..fad1c14a --- /dev/null +++ b/bamboo_engine/utils/graph.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +class Graph(object): + def __init__(self, nodes, flows): + self.nodes = nodes + self.flows = flows + self.path = [] + self.last_visited_node = "" + self.graph = {node: [] for node in self.nodes} + for flow in self.flows: + self.graph[flow[0]].append(flow[1]) + + def has_cycle(self): + self.path = [] + visited = {node: False for node in self.nodes} + visit_stack = {node: False for node in self.nodes} + + for node in self.nodes: + if self._has_cycle(node, visited, visit_stack): + return True + return False + + def _has_cycle(self, node, visited, visit_stack): + self.last_visited_node = node + self.path.append(node) + visited[node] = True + visit_stack[node] = True + + for neighbor in self.graph[node]: + if not visited[neighbor]: + if self._has_cycle(neighbor, visited, visit_stack): + return True + elif visit_stack[neighbor]: + self.path.append(neighbor) + return True + + self.path.remove(node) + visit_stack[node] = False + return False + + def get_cycle(self): + if self.has_cycle(): + cross_node = self.path[-1] + if self.path.count(cross_node) > 1: + return self.path[self.path.index(cross_node) :] + else: + return self.path + return [] + + +if __name__ == "__main__": + graph1 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]]) + assert not graph1.has_cycle() + assert graph1.get_cycle() == [] + graph2 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 1]]) + assert graph2.has_cycle() + assert graph2.get_cycle() == [1, 2, 3, 4, 1] + graph3 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 2]]) + assert graph3.has_cycle() + assert graph3.get_cycle() == [2, 3, 4, 2] + graph4 = Graph( + [ + "n20c4a0601193f268bfa168f1192eacd", + "nef42d10350b3961b53df7af67e16d9b", + "n0ada7b4abe63771a43052eaf188dc4b", + "n0cd3b95c714388bacdf1a486ab432fc", + "n1430047af8537f88710c4bbf3cbfb0f", + "n383748fe27434d582f0ca17af9d968a", + "n51426abd4be3a4691c80a73c3f93b3c", + "n854753a77933562ae72ec87c365f23d", + "n89f083892a731d7b9d7edb0f372006d", + "n8d4568db0ad364692b0387e86a2f1e0", + "n8daedbb02273a0fbc94cc118c90649f", + "n90b7ef55fe839b181879e036b4f8ffe", + "n99817348b4a36a6931854c93eed8c5f", + "na02956eba6f3a36ab9b0af2f2350213", + "nc3d0d49adf530bbaffe53630c184c0a", + "nca50848d1aa340f8c2b4776ce81868d", + "ncab9a48e79d357195dcee68dad3a31f", + "ncb4e013a6a8348bab087cc8500a3876", + "ne1f86f902a23e7fa4a67192e8b38a05", + "ne26def77df1385caa206c64e7e3ea53", + "nf3ebee137c53da28091ad7d140ce00c", + "nfc1dcdd7476393b9a81a988c113e1cf", + "n0197f8f210b3a1b8a7fc2f90e94744e", + "n01fb40259ad3cf285bb11a8bbbe59f2", + "n03f39191e8a32629145ba6a677ed040", + "n03ffc3b9e12316d8be63261cb9dec71", + "n07982b8985139249bca3a046f3a4379", + "n0b9e36e6b633ddb906d2044f658f110", + "n136c4fedebe3eb0ba932495aff6a945", + "n17cdc62c5d43976a413bda8f35634eb", + "n1d48483d8023439ad98d61d156c85fb", + "n26725bdcc0931fab0bc73e7244545ca", + "n2890db24f6c3cd1bbcd6b7d8cf2c045", + "n2ad9caac5b737bd897d4c8844c85f12", + "n2c88d1c1d8b35aebf883cbf259fb6bc", + "n302d25dfc9c369ab13104d5208e7119", + "n31688b7ab44338e9e6cb8dcaf259eef", + "n374443fbdc1313d98ebbe19d535fec2", + "n38c3dd0344a3f86bc7511c454bcdf4c", + "n3934eef90463940a6a9cf4ba2e63b1c", + "n40d5f0ca4bc3dd99c0b264cb186f00f", + "n476ddcb6dd33e2abac43596b08c2bc1", + "n4790f8aa48e335aa712e2af757e180b", + "n48bbfdc912334fc89c4f48c05e8969e", + "n5bef4f4532a382eaf79a0af70b2396b", + "n5ced56bcc863060ac4977755f35a5f5", + "n66a0562670e37648a3e05c243335bff", + "n6dc118cd3f7341d9ef8c97c63e2e9d9", + "n6e9d52e1ea53958a93e5b34022e7037", + "n786694b5ed33295a885b5bcd8c7c1ce", + "n7dccd56c80233469a4609f684ebe457", 
+ "n8492d92ab6a3da48c2b49d6fcb8a479", + "n86a8b1a56f9399f90c4c227594a9d03", + "n8a805c0cd02307bad9f7828880b53dc", + "n8c7e35b0457300d9d6a96a6b1d18329", + "n91fdaed36403d06a07f4afe85e2892c", + "n9335d0718a937f9a39ec5b36d5637fe", + "n9372fb07ad936cba31f3d4e440f395a", + "n9ab96f926d83a93a5d3ebe2888fd343", + "na2a8a54e68033d0a276eb88dbff91c3", + "na493a7b5d5b3cc29f4070a6c4589cb7", + "nadfa68cb2503a39aac6626d6c72484a", + "nae1218ddd2e3448b562bc79dc084401", + "nc012287be793377b975b0230b35d713", + "ncb2e01f0c5336fe82b0e0e496f2612b", + "ncb5843900903b4c8a0a8302474d8c51", + "ncbf4db2c48f3348b2c7081f9e3b363a", + "nd4ee6c3248935ce9239e4bb20a81ab8", + "ndb1cf7af0e2319c9868530d0df8fd93", + "ne36a6858a733430bffa4fec053dc1ab", + "ne7af4a7c3613b3d81fe9e6046425a36", + "ne8035dd8de732758c1cc623f80f2fc8", + "ned91fdb914c35f3a21f320f62d72ffd", + "nf5448b3c66430f4a299d08208d313a6", + "nfaa0756a06f300495fb2e2e45e05ed3", + ], + [ + ["n8d4568db0ad364692b0387e86a2f1e0", "n5bef4f4532a382eaf79a0af70b2396b"], + ["n8daedbb02273a0fbc94cc118c90649f", "nf5448b3c66430f4a299d08208d313a6"], + ["n01fb40259ad3cf285bb11a8bbbe59f2", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["ncab9a48e79d357195dcee68dad3a31f", "n0197f8f210b3a1b8a7fc2f90e94744e"], + ["na493a7b5d5b3cc29f4070a6c4589cb7", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n89f083892a731d7b9d7edb0f372006d", "n136c4fedebe3eb0ba932495aff6a945"], + ["n51426abd4be3a4691c80a73c3f93b3c", "n9ab96f926d83a93a5d3ebe2888fd343"], + ["n89f083892a731d7b9d7edb0f372006d", "n8492d92ab6a3da48c2b49d6fcb8a479"], + ["n17cdc62c5d43976a413bda8f35634eb", "n6e9d52e1ea53958a93e5b34022e7037"], + ["n476ddcb6dd33e2abac43596b08c2bc1", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n6dc118cd3f7341d9ef8c97c63e2e9d9", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n91fdaed36403d06a07f4afe85e2892c", "ncb4e013a6a8348bab087cc8500a3876"], + ["n8a805c0cd02307bad9f7828880b53dc", "n3934eef90463940a6a9cf4ba2e63b1c"], + ["n2890db24f6c3cd1bbcd6b7d8cf2c045", "n0ada7b4abe63771a43052eaf188dc4b"], + ["ned91fdb914c35f3a21f320f62d72ffd", "n383748fe27434d582f0ca17af9d968a"], + ["n89f083892a731d7b9d7edb0f372006d", "n0b9e36e6b633ddb906d2044f658f110"], + ["nc3d0d49adf530bbaffe53630c184c0a", "na493a7b5d5b3cc29f4070a6c4589cb7"], + ["ncb2e01f0c5336fe82b0e0e496f2612b", "nc012287be793377b975b0230b35d713"], + ["n86a8b1a56f9399f90c4c227594a9d03", "nf3ebee137c53da28091ad7d140ce00c"], + ["nc3d0d49adf530bbaffe53630c184c0a", "nadfa68cb2503a39aac6626d6c72484a"], + ["na02956eba6f3a36ab9b0af2f2350213", "na2a8a54e68033d0a276eb88dbff91c3"], + ["n8daedbb02273a0fbc94cc118c90649f", "n07982b8985139249bca3a046f3a4379"], + ["n136c4fedebe3eb0ba932495aff6a945", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n9372fb07ad936cba31f3d4e440f395a", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n8d4568db0ad364692b0387e86a2f1e0", "n91fdaed36403d06a07f4afe85e2892c"], + ["n854753a77933562ae72ec87c365f23d", "n40d5f0ca4bc3dd99c0b264cb186f00f"], + ["n854753a77933562ae72ec87c365f23d", "n1d48483d8023439ad98d61d156c85fb"], + ["n9ab96f926d83a93a5d3ebe2888fd343", "n383748fe27434d582f0ca17af9d968a"], + ["ne36a6858a733430bffa4fec053dc1ab", "n0cd3b95c714388bacdf1a486ab432fc"], + ["n03ffc3b9e12316d8be63261cb9dec71", "nca50848d1aa340f8c2b4776ce81868d"], + ["ne8035dd8de732758c1cc623f80f2fc8", "n0ada7b4abe63771a43052eaf188dc4b"], + ["n51426abd4be3a4691c80a73c3f93b3c", "ned91fdb914c35f3a21f320f62d72ffd"], + ["nd4ee6c3248935ce9239e4bb20a81ab8", "nfaa0756a06f300495fb2e2e45e05ed3"], + ["n5bef4f4532a382eaf79a0af70b2396b", "ncb4e013a6a8348bab087cc8500a3876"], + ["ne26def77df1385caa206c64e7e3ea53", 
"n786694b5ed33295a885b5bcd8c7c1ce"], + ["n854753a77933562ae72ec87c365f23d", "ne8035dd8de732758c1cc623f80f2fc8"], + ["n374443fbdc1313d98ebbe19d535fec2", "ndb1cf7af0e2319c9868530d0df8fd93"], + ["nfaa0756a06f300495fb2e2e45e05ed3", "n8c7e35b0457300d9d6a96a6b1d18329"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n26725bdcc0931fab0bc73e7244545ca"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ncb2e01f0c5336fe82b0e0e496f2612b"], + ["ncb5843900903b4c8a0a8302474d8c51", "ncb4e013a6a8348bab087cc8500a3876"], + ["nf5448b3c66430f4a299d08208d313a6", "nf3ebee137c53da28091ad7d140ce00c"], + ["n20c4a0601193f268bfa168f1192eacd", "nd4ee6c3248935ce9239e4bb20a81ab8"], + ["nca50848d1aa340f8c2b4776ce81868d", "nc3d0d49adf530bbaffe53630c184c0a"], + ["na02956eba6f3a36ab9b0af2f2350213", "n03ffc3b9e12316d8be63261cb9dec71"], + ["n7dccd56c80233469a4609f684ebe457", "n8daedbb02273a0fbc94cc118c90649f"], + ["n0ada7b4abe63771a43052eaf188dc4b", "na02956eba6f3a36ab9b0af2f2350213"], + ["n9335d0718a937f9a39ec5b36d5637fe", "n99817348b4a36a6931854c93eed8c5f"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n5ced56bcc863060ac4977755f35a5f5"], + ["ncb4e013a6a8348bab087cc8500a3876", "ne26def77df1385caa206c64e7e3ea53"], + ["na02956eba6f3a36ab9b0af2f2350213", "n4790f8aa48e335aa712e2af757e180b"], + ["nc012287be793377b975b0230b35d713", "ncb4e013a6a8348bab087cc8500a3876"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ncb5843900903b4c8a0a8302474d8c51"], + ["n40d5f0ca4bc3dd99c0b264cb186f00f", "n0ada7b4abe63771a43052eaf188dc4b"], + ["n38c3dd0344a3f86bc7511c454bcdf4c", "n17cdc62c5d43976a413bda8f35634eb"], + ["n6e9d52e1ea53958a93e5b34022e7037", "n90b7ef55fe839b181879e036b4f8ffe"], + ["nf3ebee137c53da28091ad7d140ce00c", "n51426abd4be3a4691c80a73c3f93b3c"], + ["n99817348b4a36a6931854c93eed8c5f", "n89f083892a731d7b9d7edb0f372006d"], + ["n89f083892a731d7b9d7edb0f372006d", "n6dc118cd3f7341d9ef8c97c63e2e9d9"], + ["n8daedbb02273a0fbc94cc118c90649f", "n66a0562670e37648a3e05c243335bff"], + ["nadfa68cb2503a39aac6626d6c72484a", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n383748fe27434d582f0ca17af9d968a", "nef42d10350b3961b53df7af67e16d9b"], + ["na02956eba6f3a36ab9b0af2f2350213", "n03f39191e8a32629145ba6a677ed040"], + ["nae1218ddd2e3448b562bc79dc084401", "n383748fe27434d582f0ca17af9d968a"], + ["n26725bdcc0931fab0bc73e7244545ca", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n48bbfdc912334fc89c4f48c05e8969e", "n8a805c0cd02307bad9f7828880b53dc"], + ["ne7af4a7c3613b3d81fe9e6046425a36", "ncb4e013a6a8348bab087cc8500a3876"], + ["nfc1dcdd7476393b9a81a988c113e1cf", "n8d4568db0ad364692b0387e86a2f1e0"], + ["n0197f8f210b3a1b8a7fc2f90e94744e", "n99817348b4a36a6931854c93eed8c5f"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n302d25dfc9c369ab13104d5208e7119"], + ["n1d48483d8023439ad98d61d156c85fb", "n0ada7b4abe63771a43052eaf188dc4b"], + ["na2a8a54e68033d0a276eb88dbff91c3", "nca50848d1aa340f8c2b4776ce81868d"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n9372fb07ad936cba31f3d4e440f395a"], + ["ndb1cf7af0e2319c9868530d0df8fd93", "n2ad9caac5b737bd897d4c8844c85f12"], + ["n8492d92ab6a3da48c2b49d6fcb8a479", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ne7af4a7c3613b3d81fe9e6046425a36"], + ["n302d25dfc9c369ab13104d5208e7119", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n51426abd4be3a4691c80a73c3f93b3c", "n2c88d1c1d8b35aebf883cbf259fb6bc"], + ["n786694b5ed33295a885b5bcd8c7c1ce", "n0cd3b95c714388bacdf1a486ab432fc"], + ["n854753a77933562ae72ec87c365f23d", "n2890db24f6c3cd1bbcd6b7d8cf2c045"], + ["nc3d0d49adf530bbaffe53630c184c0a", "n476ddcb6dd33e2abac43596b08c2bc1"], + 
["n2c88d1c1d8b35aebf883cbf259fb6bc", "n383748fe27434d582f0ca17af9d968a"], + ["n0cd3b95c714388bacdf1a486ab432fc", "n854753a77933562ae72ec87c365f23d"], + ["n51426abd4be3a4691c80a73c3f93b3c", "nae1218ddd2e3448b562bc79dc084401"], + ["nc3d0d49adf530bbaffe53630c184c0a", "n01fb40259ad3cf285bb11a8bbbe59f2"], + ["ne1f86f902a23e7fa4a67192e8b38a05", "n374443fbdc1313d98ebbe19d535fec2"], + ["n0b9e36e6b633ddb906d2044f658f110", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["ncab9a48e79d357195dcee68dad3a31f", "ncbf4db2c48f3348b2c7081f9e3b363a"], + ["n8daedbb02273a0fbc94cc118c90649f", "n86a8b1a56f9399f90c4c227594a9d03"], + ["ncbf4db2c48f3348b2c7081f9e3b363a", "n99817348b4a36a6931854c93eed8c5f"], + ["n1430047af8537f88710c4bbf3cbfb0f", "ncab9a48e79d357195dcee68dad3a31f"], + ["n4790f8aa48e335aa712e2af757e180b", "nca50848d1aa340f8c2b4776ce81868d"], + ["ne26def77df1385caa206c64e7e3ea53", "ne36a6858a733430bffa4fec053dc1ab"], + ["ncab9a48e79d357195dcee68dad3a31f", "n31688b7ab44338e9e6cb8dcaf259eef"], + ["n07982b8985139249bca3a046f3a4379", "nf3ebee137c53da28091ad7d140ce00c"], + ["n66a0562670e37648a3e05c243335bff", "nf3ebee137c53da28091ad7d140ce00c"], + ["n03f39191e8a32629145ba6a677ed040", "nca50848d1aa340f8c2b4776ce81868d"], + ["n8c7e35b0457300d9d6a96a6b1d18329", "n38c3dd0344a3f86bc7511c454bcdf4c"], + ["n5ced56bcc863060ac4977755f35a5f5", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n2ad9caac5b737bd897d4c8844c85f12", "n48bbfdc912334fc89c4f48c05e8969e"], + ["n31688b7ab44338e9e6cb8dcaf259eef", "n99817348b4a36a6931854c93eed8c5f"], + ["n3934eef90463940a6a9cf4ba2e63b1c", "n7dccd56c80233469a4609f684ebe457"], + ["ncab9a48e79d357195dcee68dad3a31f", "n9335d0718a937f9a39ec5b36d5637fe"], + ], + ) + assert not graph4.has_cycle() + assert graph4.get_cycle() == [] + graph5 = Graph([1, 2, 3, 4, 5], [[1, 2], [2, 3], [2, 4], [4, 5], [5, 2]]) + assert graph5.has_cycle() + assert graph5.get_cycle() == [2, 4, 5, 2] diff --git a/bamboo_engine/utils/mako_safety.py b/bamboo_engine/utils/mako_safety.py new file mode 100644 index 00000000..12d6ba67 --- /dev/null +++ b/bamboo_engine/utils/mako_safety.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +""" +Mako 安全工具 +""" + +from ast import NodeVisitor + +from mako import parsetree + +from .mako_utils.code_extract import MakoNodeCodeExtractor +from .mako_utils.exceptions import ForbiddenMakoTemplateException + + +class SingleLineNodeVisitor(NodeVisitor): + """ + 遍历语法树节点,遇到魔术方法使用或 import 时,抛出异常 + """ + + def __init__(self, *args, **kwargs): + super(SingleLineNodeVisitor, self).__init__(*args, **kwargs) + + def visit_Attribute(self, node): + if node.attr.startswith("__"): + raise ForbiddenMakoTemplateException("can not access private attribute") + + def visit_Name(self, node): + if node.id.startswith("__"): + raise ForbiddenMakoTemplateException("can not access private method") + + def visit_Import(self, node): + raise ForbiddenMakoTemplateException("can not use import statement") + + def visit_ImportFrom(self, node): + self.visit_Import(node) + + +class SingleLinCodeExtractor(MakoNodeCodeExtractor): + def extract(self, node): + if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): + return node.text + elif isinstance(node, parsetree.Text): + return None + else: + raise ForbiddenMakoTemplateException( + "Unsupported node: [{}]".format(node.__class__.__name__) + ) diff --git a/bamboo_engine/utils/mako_utils/__init__.py b/bamboo_engine/utils/mako_utils/__init__.py new file mode 100644 index 00000000..03e37cda --- /dev/null +++ b/bamboo_engine/utils/mako_utils/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +Mako 相关工具模块 +""" diff --git a/bamboo_engine/utils/mako_utils/checker.py b/bamboo_engine/utils/mako_utils/checker.py new file mode 100644 index 00000000..ec664f43 --- /dev/null +++ b/bamboo_engine/utils/mako_utils/checker.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +import ast +from typing import List + +from mako import parsetree +from mako.exceptions import MakoException +from mako.lexer import Lexer + +from .code_extract import MakoNodeCodeExtractor +from .exceptions import ForbiddenMakoTemplateException + + +def parse_template_nodes( + nodes: List[parsetree.Node], + node_visitor: ast.NodeVisitor, + code_extractor: MakoNodeCodeExtractor, +): + """ + 解析mako模板节点,逐个节点解析抽象语法树并检查安全性 + :param nodes: mako模板节点列表 + :param node_visitor: 节点访问类,用于遍历AST节点 + :param code_extractor: Mako 词法节点处理器,用于提取 python 代码 + """ + for node in nodes: + code = code_extractor.extract(node) + if code is None: + continue + + ast_node = ast.parse(code, "", "exec") + node_visitor.visit(ast_node) + if hasattr(node, "nodes"): + parse_template_nodes(node.nodes, node_visitor) + + +def check_mako_template_safety( + text: str, node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor +) -> bool: + """ + 检查mako模板是否安全,若不安全直接抛出异常,安全则返回True + :param text: mako模板内容 + :param node_visitor: 节点访问器,用于遍历AST节点 + """ + try: + lexer_template = Lexer(text).parse() + except MakoException as mako_error: + raise ForbiddenMakoTemplateException( + "非mako模板,解析失败, {err_msg}".format(err_msg=mako_error.__class__.__name__) + ) + parse_template_nodes(lexer_template.nodes, node_visitor, code_extractor) + return True diff --git a/bamboo_engine/utils/mako_utils/code_extract.py b/bamboo_engine/utils/mako_utils/code_extract.py new file mode 100644 index 00000000..e8e4bde4 --- /dev/null +++ b/bamboo_engine/utils/mako_utils/code_extract.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import abc + +from mako import parsetree +from mako.ast import PythonFragment + +from .exceptions import ForbiddenMakoTemplateException + + +class MakoNodeCodeExtractor: + @abc.abstractmethod + def extract(self, node): + """ + 处理 Mako Lexer 分割出来的 code 对象,返回需要检测的 python 代码,返回 None 表示该节点不需要处理 + + :param node: mako parsetree node + :return: 需要处理的代码,或 None + """ + raise NotImplementedError() + + +class StrictMakoNodeCodeExtractor(MakoNodeCodeExtractor): + def extract(self, node): + if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): + return node.text + elif isinstance(node, parsetree.ControlLine): + if node.isend: + return None + return PythonFragment(node.text).code + elif isinstance(node, parsetree.Text): + return None + else: + raise ForbiddenMakoTemplateException( + "不支持[{}]节点".format(node.__class__.__name__) + ) diff --git a/bamboo_engine/utils/mako_utils/exceptions.py b/bamboo_engine/utils/mako_utils/exceptions.py new file mode 100644 index 00000000..384bbfb6 --- /dev/null +++ b/bamboo_engine/utils/mako_utils/exceptions.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class ForbiddenMakoTemplateException(Exception): + pass diff --git a/bamboo_engine/utils/mako_utils/visitors.py b/bamboo_engine/utils/mako_utils/visitors.py new file mode 100644 index 00000000..d46785e4 --- /dev/null +++ b/bamboo_engine/utils/mako_utils/visitors.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import _ast +import ast + +from werkzeug.utils import import_string + +from .exceptions import ForbiddenMakoTemplateException + + +class StrictNodeVisitor(ast.NodeVisitor): + """ + 遍历语法树节点,遇到魔术方法使用或import时,抛出异常 + """ + + BLACK_LIST_MODULE_METHODS = { + "os": dir(__import__("os")), + "subprocess": dir(__import__("subprocess")), + "shutil": dir(__import__("shutil")), + "ctypes": dir(__import__("ctypes")), + "codecs": dir(__import__("codecs")), + "sys": dir(__import__("sys")), + "socket": dir(__import__("socket")), + "webbrowser": dir(__import__("webbrowser")), + "threading": dir(__import__("threading")), + "sqlite3": dir(__import__("threading")), + "signal": dir(__import__("signal")), + "imaplib": dir(__import__("imaplib")), + "fcntl": dir(__import__("fcntl")), + "pdb": dir(__import__("pdb")), + "pty": dir(__import__("pty")), + "glob": dir(__import__("glob")), + "tempfile": dir(__import__("tempfile")), + "types": dir(import_string("types.CodeType")) + + dir(import_string("types.FrameType")), + "builtins": [ + "getattr", + "hasattr", + "breakpoint", + "compile", + "delattr", + "open", + "eval", + "exec", + "execfile", + "exit", + "dir", + "globals", + "locals", + "input", + "iter", + "next", + "quit", + "setattr", + "vars", + "memoryview", + "super", + "print", + ], + } + + BLACK_LIST_METHODS = [] + for module_name, methods in BLACK_LIST_MODULE_METHODS.items(): + BLACK_LIST_METHODS.append(module_name) + BLACK_LIST_METHODS.extend(methods) + BLACK_LIST_METHODS = set(BLACK_LIST_METHODS) + + WHITE_LIST_MODULES = ["datetime", "re", "random", "json", "math"] + + def __init__(self, black_list_methods=None, white_list_modules=None): + self.black_list_methods = black_list_methods or self.BLACK_LIST_METHODS + self.white_list_modules = white_list_modules or self.WHITE_LIST_MODULES + + @staticmethod + def is_white_list_ast_obj(ast_obj: _ast.AST) -> bool: + """ + 判断是否白名单对象,特殊豁免 + :param ast_obj: 抽象语法树节点 + :return: bool + """ + # re 正则表达式允许使用 compile + if isinstance(ast_obj, _ast.Attribute) and isinstance(ast_obj.value, _ast.Name): + if ast_obj.value.id == "re" and ast_obj.attr in ["compile"]: + return True + + return False + + def visit_Attribute(self, node): + if self.is_white_list_ast_obj(node): + return + + if node.attr in self.black_list_methods or node.attr.startswith("_"): + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_Name(self, node): + if node.id in self.black_list_methods or node.id.startswith("_"): + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_Import(self, node): + for name in node.names: + if name.name not in self.white_list_modules: + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_ImportFrom(self, node): + self.visit_Import(node) diff --git a/bamboo_engine/utils/object.py b/bamboo_engine/utils/object.py new file mode 100644 index 00000000..f1cc8726 --- /dev/null +++ b/bamboo_engine/utils/object.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class Representable: + def __str__(self): + return "<%s: %s>" % (self.__class__.__name__, self.__dict__) + + __repr__ = __str__ diff --git a/bamboo_engine/utils/string.py b/bamboo_engine/utils/string.py new file mode 100644 index 00000000..cdec19a0 --- /dev/null +++ b/bamboo_engine/utils/string.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +""" +字符串处理类工具 +""" + +import uuid + +ESCAPED_CHARS = {"\n": r"\n", "\r": r"\r", "\t": r"\t"} + + +def transform_escape_char(string: str) -> str: + """ + 对未转义的字符串进行转义,现有的转义字符包括\n, \r, \t + """ + if not isinstance(string, str): + return string + # 已转义的情况 + if len([c for c in ESCAPED_CHARS.values() if c in string]) > 0: + return string + for key, value in ESCAPED_CHARS.items(): + if key in string: + string = string.replace(key, value) + return string + + +def format_var_key(key: str) -> str: + """ + format key to ${key} + + :param key: key + :type key: str + :return: format key + :rtype: str + """ + return "${%s}" % key + + +def deformat_var_key(key: str) -> str: + """ + deformat ${key} to key + + :param key: key + :type key: str + :return: deformat key + :rtype: str + """ + return key[2:-1] + + +def unique_id(prefix: str) -> str: + if len(prefix) != 1: + raise ValueError("prefix length must be 1") + + return "{}{}".format(prefix, uuid.uuid4().hex) + + +def get_lower_case_name(text: str) -> str: + lst = [] + for index, char in enumerate(text): + if char.isupper() and index != 0: + lst.append("_") + lst.append(char) + + return "".join(lst).lower() diff --git a/bamboo_engine/validator/__init__.py b/bamboo_engine/validator/__init__.py new file mode 100644 index 00000000..c9170ab5 --- /dev/null +++ b/bamboo_engine/validator/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +from .api import validate_and_process_pipeline # noqa diff --git a/bamboo_engine/validator/api.py b/bamboo_engine/validator/api.py new file mode 100644 index 00000000..e01f0b93 --- /dev/null +++ b/bamboo_engine/validator/api.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from bamboo_engine.eri import NodeType +from bamboo_engine import exceptions + +from . import rules +from .connection import ( + validate_graph_connection, + validate_graph_without_circle, +) +from .gateway import validate_gateways, validate_stream +from .utils import format_pipeline_tree_io_to_list + + +def validate_and_process_pipeline(pipeline: dict, cycle_tolerate=False): + for subproc in [ + act + for act in pipeline["activities"].values() + if act["type"] == NodeType.SubProcess.value + ]: + validate_and_process_pipeline(subproc["pipeline"], cycle_tolerate) + + format_pipeline_tree_io_to_list(pipeline) + # 1. connection validation + validate_graph_connection(pipeline) + + # do not tolerate circle in flow + if not cycle_tolerate: + no_cycle = validate_graph_without_circle(pipeline) + if not no_cycle["result"]: + raise exceptions.TreeInvalidException(no_cycle["message"]) + + # 2. gateway validation + validate_gateways(pipeline) + + # 3. stream validation + validate_stream(pipeline) + + +def add_sink_type(node_type: str): + rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type) + rules.NODE_RULES[node_type] = rules.SINK_RULE diff --git a/bamboo_engine/validator/connection.py b/bamboo_engine/validator/connection.py new file mode 100644 index 00000000..07aa2e36 --- /dev/null +++ b/bamboo_engine/validator/connection.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine.utils.graph import Graph +from bamboo_engine.exceptions import ConnectionValidateError + +from .rules import NODE_RULES +from .utils import get_nodes_dict + + +def validate_graph_connection(data): + """ + 节点连接合法性校验 + """ + nodes = get_nodes_dict(data) + + result = {"result": True, "message": {}, "failed_nodes": []} + + for i in nodes: + node_type = nodes[i]["type"] + rule = NODE_RULES[node_type] + message = "" + for j in nodes[i]["target"]: + if nodes[j]["type"] not in rule["allowed_out"]: + message += "不能连接%s类型节点\n" % nodes[i]["type"] + if ( + rule["min_in"] > len(nodes[i]["source"]) + or len(nodes[i]["source"]) > rule["max_in"] + ): + message += "节点的入度最大为%s,最小为%s\n" % (rule["max_in"], rule["min_in"]) + if ( + rule["min_out"] > len(nodes[i]["target"]) + or len(nodes[i]["target"]) > rule["max_out"] + ): + message += "节点的出度最大为%s,最小为%s\n" % (rule["max_out"], rule["min_out"]) + if message: + result["failed_nodes"].append(i) + result["message"][i] = message + + if result["failed_nodes"]: + raise ConnectionValidateError( + failed_nodes=result["failed_nodes"], detail=result["message"] + ) + + +def validate_graph_without_circle(data): + """ + validate if a graph has not cycle + + return { + "result": False, + "message": "error message", + "error_data": ["node1_id", "node2_id", "node1_id"] + } + """ + + nodes = [data["start_event"]["id"], data["end_event"]["id"]] + nodes += list(data["gateways"].keys()) + list(data["activities"].keys()) + flows = [ + [flow["source"], flow["target"]] for _, flow in list(data["flows"].items()) + ] + cycle = Graph(nodes, flows).get_cycle() + if cycle: + return { + "result": False, + "message": "pipeline graph has circle", + "error_data": cycle, + } + return {"result": True, "data": []} diff --git a/bamboo_engine/validator/gateway.py b/bamboo_engine/validator/gateway.py new file mode 100644 index 00000000..ec05ed32 --- /dev/null +++ b/bamboo_engine/validator/gateway.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import queue + +from bamboo_engine import exceptions +from .utils import get_node_for_sequence, get_nodes_dict + +STREAM = "stream" +P_STREAM = "p_stream" +P = "p" +MAIN_STREAM = "main" + +PARALLEL_GATEWAYS = {"ParallelGateway", "ConditionalParallelGateway"} + + +def not_in_parallel_gateway(gateway_stack, start_from=None): + """ + check whether there is parallel gateway in stack from specific gateway + :param gateway_stack: + :param start_from: + :return: + """ + start = 0 + if start_from: + id_stack = [g["id"] for g in gateway_stack] + start = id_stack.index(start_from) + + for i in range(start, len(gateway_stack)): + gateway = gateway_stack[i] + if gateway["type"] in PARALLEL_GATEWAYS: + return False + return True + + +def matched_in_prev_blocks(gid, current_start, block_nodes): + """ + check whether gateway with gid is matched in previous block + :param gid: + :param current_start: + :param block_nodes: + :return: + """ + prev_nodes = set() + for prev_start, nodes in list(block_nodes.items()): + if prev_start == current_start: + continue + prev_nodes.update(nodes) + + return gid in prev_nodes + + +def match_converge( + converges, + gateways, + cur_index, + end_event_id, + block_start, + block_nodes, + converged, + dist_from_start, + converge_in_len, + stack=None, +): + """ + find converge for parallel and exclusive in blocks, and check sanity of gateway + :param converges: + :param gateways: + :param cur_index: + :param end_event_id: + :param block_start: + :param block_nodes: + :param converged: + :param dist_from_start: + :param stack: + :param converge_in_len: + :return: + """ + + if stack is None: + stack = [] + + if cur_index not in gateways: + return None, False + + # return if this node is already matched + if gateways[cur_index]["match"]: + return gateways[cur_index]["match"], gateways[cur_index]["share_converge"] + + current_gateway = gateways[cur_index] + target = gateways[cur_index]["target"] + stack.append(gateways[cur_index]) + stack_id_set = {g["id"] for g in stack} + + # find closest converge recursively + for i in range(len(target)): + + # do not process prev blocks nodes + if matched_in_prev_blocks(target[i], block_start, block_nodes): + target[i] = None + continue + + block_nodes[block_start].add(target[i]) + + # do not find self's converge node again + while target[i] in gateways and target[i] != current_gateway["id"]: + + if target[i] in stack_id_set: + # return to previous gateway + + if not_in_parallel_gateway(stack, start_from=target[i]): + # do not trace back + target[i] = None + break + else: + raise exceptions.ConvergeMatchError( + cur_index, "并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关" + ) + + converge_id, shared = match_converge( + converges=converges, + gateways=gateways, + cur_index=target[i], + end_event_id=end_event_id, + block_start=block_start, + block_nodes=block_nodes, + stack=stack, + converged=converged, + dist_from_start=dist_from_start, + converge_in_len=converge_in_len, + ) + if converge_id: + target[i] = converge_id + + if not shared: + # try to get next node fo converge which is not shared + target[i] = converges[converge_id]["target"][0] + + else: + # can't find corresponding converge gateway, which means this gateway will reach end event directly + target[i] = end_event_id + + if ( + target[i] in converges + and dist_from_start[target[i]] < dist_from_start[cur_index] + ): + # do not match previous converge + target[i] = None + + stack.pop() + + is_exg = current_gateway["type"] == "ExclusiveGateway" + converge_id = None + shared = False + 
cur_to_converge = len(target) + converge_end = False + + # gateway match validation + for i in range(len(target)): + + # mark first converge + if target[i] in converges and not converge_id: + converge_id = target[i] + + # same converge node + elif target[i] in converges and converge_id == target[i]: + pass + + # exclusive gateway point to end + elif is_exg and target[i] == end_event_id: + if not_in_parallel_gateway(stack): + converge_end = True + else: + raise exceptions.ConvergeMatchError( + cur_index, "并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关" + ) + + # exclusive gateway point back to self + elif is_exg and target[i] == current_gateway["id"]: + # not converge behavior + cur_to_converge -= 1 + pass + + # exclusive gateway converge at different converge gateway + elif is_exg and target[i] in converges and converge_id != target[i]: + raise exceptions.ConvergeMatchError(cur_index, "分支网关的所有分支第一个遇到的汇聚网关必须是同一个") + + # meet previous node + elif is_exg and target[i] is None: + # not converge behavior + cur_to_converge -= 1 + pass + + # invalid cases + else: + raise exceptions.ConvergeMatchError(cur_index, "非法网关,请检查其分支是否符合规则") + + if is_exg: + if converge_id in converges: + # this converge is shared by multiple gateway + # only compare to the number of positive incoming + shared = ( + converge_in_len[converge_id] > cur_to_converge + or converge_id in converged + ) + else: + # for parallel gateway + + converge_incoming = len(converges[converge_id]["incoming"]) + gateway_outgoing = len(target) + + if converge_incoming > gateway_outgoing: + for gateway_id in converged.get(converge_id, []): + # find another parallel gateway + if gateways[gateway_id]["type"] in PARALLEL_GATEWAYS: + raise exceptions.ConvergeMatchError( + converge_id, "汇聚网关只能汇聚来自同一个并行网关的分支" + ) + + shared = True + + elif converge_incoming < gateway_outgoing: + raise exceptions.ConvergeMatchError(converge_id, "汇聚网关没有汇聚其对应的并行网关的所有分支") + + current_gateway["match"] = converge_id + current_gateway["share_converge"] = shared + current_gateway["converge_end"] = converge_end + + converged.setdefault(converge_id, []).append(current_gateway["id"]) + block_nodes[block_start].add(current_gateway["id"]) + + return converge_id, shared + + +def distance_from(origin, node, tree, marked, visited=None): + """ + get max distance from origin to node + :param origin: + :param node: + :param tree: + :param marked: + :param visited: + :return: + """ + if visited is None: + visited = set() + + if node["id"] in marked: + return marked[node["id"]] + + if node["id"] == origin["id"]: + return 0 + + if node["id"] in visited: + # do not trace circle + return None + + visited.add(node["id"]) + + incoming_dist = [] + for incoming in node["incoming"]: + prev_node = get_node_for_sequence(incoming, tree, "source") + + # get incoming node's distance recursively + dist = distance_from( + origin=origin, node=prev_node, tree=tree, marked=marked, visited=visited + ) + + # if this incoming do not trace back to current node + if dist is not None: + incoming_dist.append(dist + 1) + + if not incoming_dist: + return None + + # get max distance + res = max(incoming_dist) + marked[node["id"]] = res + return res + + +def validate_gateways(tree): + """ + check sanity of gateways and find their converge gateway + :param tree: + :return: + """ + converges = {} + gateways = {} + all = {} + distances = {} + converge_positive_in = {} + process_order = [] + + # data preparation + for i, item in list(tree["gateways"].items()): + node = { + "incoming": item["incoming"] + if isinstance(item["incoming"], 
list) + else [item["incoming"]], + "outgoing": item["outgoing"] + if isinstance(item["outgoing"], list) + else [item["outgoing"]], + "type": item["type"], + "target": [], + "source": [], + "id": item["id"], + "match": None, + } + + # find all first reach nodes(ConvergeGateway, ExclusiveGateway, ParallelGateway, EndEvent) + # which is not ServiceActivity for each gateway + for index in node["outgoing"]: + index = tree["flows"][index]["target"] + while index in tree["activities"]: + index = tree["flows"][tree["activities"][index]["outgoing"]]["target"] + + # append this node's id to current gateway's target list + node["target"].append(index) + + # get current node's distance from start event + if not distance_from( + node=node, origin=tree["start_event"], tree=tree, marked=distances + ): + raise exceptions.ConvergeMatchError(node["id"], "无法获取该网关距离开始节点的距离") + + if item["type"] == "ConvergeGateway": + converges[i] = node + else: + process_order.append(i) + gateways[i] = node + + all[i] = node + + # calculate positive incoming number for converge + for nid, node in list(all.items()): + for t in node["target"]: + if t in converges and distances[t] > distances[nid]: + converge_positive_in[t] = converge_positive_in.setdefault(t, 0) + 1 + + process_order.sort(key=lambda gid: distances[gid]) + end_event_id = tree["end_event"]["id"] + converged = {} + block_nodes = {} + visited = set() + + # process in distance order + for gw in process_order: + if gw in visited or "match" in gw: + continue + visited.add(gw) + + block_nodes[gw] = set() + + match_converge( + converges=converges, + gateways=gateways, + cur_index=gw, + end_event_id=end_event_id, + converged=converged, + block_start=gw, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=converge_positive_in, + ) + + # set converge gateway + for i in gateways: + if gateways[i]["match"]: + tree["gateways"][i]["converge_gateway_id"] = gateways[i]["match"] + + return converged + + +def blend(source, target, custom_stream=None): + """ + blend source and target streams + :param source: + :param target: + :param custom_stream: + :return: + """ + + if custom_stream: + # use custom stream instead of source's stream + if isinstance(custom_stream, set): + for stream in custom_stream: + target[STREAM].add(stream) + else: + target[STREAM].add(custom_stream) + + return + + if len(source[STREAM]) == 0: + raise exceptions.InvalidOperationException( + "stream validation error, node(%s) stream is empty" % source["id"] + ) + + # blend + for s in source[STREAM]: + target[STREAM].add(s) + + +def streams_for_parallel(p): + streams = set() + for i, target_id in enumerate(p["target"]): + streams.add("{}_{}".format(p["id"], i)) + + return streams + + +def flowing(where, to, parallel_converges): + """ + mark target's stream from target + :param where: + :param to: + :param parallel_converges: + :return: + """ + is_parallel = where["type"] in PARALLEL_GATEWAYS + + stream = None + if is_parallel: + # add parallel's stream to its converge + parallel_converge = to[where["converge_gateway_id"]] + blend(source=where, target=parallel_converge, custom_stream=stream) + + if len(parallel_converge[STREAM]) > 1: + raise exceptions.StreamValidateError(node_id=parallel_converge) + + # flow to target + for i, target_id in enumerate(where["target"]): + target = to[target_id] + fake = False + + # generate different stream + if is_parallel: + stream = "{}_{}".format(where["id"], i) + + if target_id in parallel_converges: + + is_valid_branch = where[STREAM].issubset( + 
parallel_converges[target_id][P_STREAM] + ) + is_direct_connect = where.get("converge_gateway_id") == target_id + + if is_valid_branch or is_direct_connect: + # do not flow when branch of parallel converge to its converge gateway + fake = True + + if not fake: + blend(source=where, target=target, custom_stream=stream) + + # sanity check + if len(target[STREAM]) != 1: + raise exceptions.StreamValidateError(node_id=target_id) + + +def validate_stream(tree): + """ + validate flow stream + :param tree: pipeline tree + :return: + """ + # data preparation + start_event_id = tree["start_event"]["id"] + end_event_id = tree["end_event"]["id"] + nodes = get_nodes_dict(tree) + nodes[start_event_id][STREAM] = {MAIN_STREAM} + nodes[end_event_id][STREAM] = {MAIN_STREAM} + parallel_converges = {} + visited = set({}) + + for nid, node in list(nodes.items()): + node.setdefault(STREAM, set()) + + # set allow streams for parallel's converge + if node["type"] in PARALLEL_GATEWAYS: + parallel_converges[node["converge_gateway_id"]] = { + P_STREAM: streams_for_parallel(node), + P: nid, + } + + # build stream from start + node_queue = queue.Queue() + node_queue.put(nodes[start_event_id]) + while not node_queue.empty(): + + # get node + node = node_queue.get() + + if node["id"] in visited: + # flow again to validate stream, but do not add target to queue + flowing(where=node, to=nodes, parallel_converges=parallel_converges) + continue + + # add to queue + for target_id in node["target"]: + node_queue.put(nodes[target_id]) + + # mark as visited + visited.add(node["id"]) + + # flow + flowing(where=node, to=nodes, parallel_converges=parallel_converges) + + # data clean + for nid, n in list(nodes.items()): + if len(n[STREAM]) != 1: + raise exceptions.StreamValidateError(node_id=nid) + + # replace set to str + n[STREAM] = n[STREAM].pop() + + # isolate node check + for __, node in list(nodes.items()): + if not node[STREAM]: + raise exceptions.IsolateNodeError() + + return nodes diff --git a/bamboo_engine/validator/rules.py b/bamboo_engine/validator/rules.py new file mode 100644 index 00000000..51073430 --- /dev/null +++ b/bamboo_engine/validator/rules.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
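+
+Per-node-type connection rules: bounds on in/out degree and the node types
+each kind of node is allowed to connect to.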
+""" + +from bamboo_engine.eri import NodeType + +MAX_IN = 1000 +MAX_OUT = 1000 +FLOW_NODES_WITHOUT_STARTEVENT = [ + NodeType.ServiceActivity.value, + NodeType.SubProcess.value, + NodeType.EmptyEndEvent.value, + NodeType.ParallelGateway.value, + NodeType.ConditionalParallelGateway.value, + NodeType.ExclusiveGateway.value, + NodeType.ConvergeGateway.value, +] + +FLOW_NODES_WITHOUT_START_AND_END = [ + NodeType.ServiceActivity.value, + NodeType.SubProcess.value, + NodeType.ParallelGateway.value, + NodeType.ConditionalParallelGateway.value, + NodeType.ExclusiveGateway.value, + NodeType.ConvergeGateway.value, +] + +SOURCE_RULE = { + "min_in": 0, + "max_in": 0, + "min_out": 1, + "max_out": 1, + "allowed_out": FLOW_NODES_WITHOUT_START_AND_END, +} + +SINK_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 0, + "max_out": 0, + "allowed_out": [], +} + +ACTIVITY_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": 1, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +EMIT_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": MAX_OUT, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +CONVERGE_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": 1, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +# rules of activity graph +NODE_RULES = { + NodeType.EmptyStartEvent.value: SOURCE_RULE, + NodeType.EmptyEndEvent.value: SINK_RULE, + NodeType.ServiceActivity.value: ACTIVITY_RULE, + NodeType.ExclusiveGateway.value: EMIT_RULE, + NodeType.ParallelGateway.value: EMIT_RULE, + NodeType.ConditionalParallelGateway.value: EMIT_RULE, + NodeType.ConvergeGateway.value: CONVERGE_RULE, + NodeType.SubProcess.value: ACTIVITY_RULE, +} diff --git a/bamboo_engine/validator/utils.py b/bamboo_engine/validator/utils.py new file mode 100644 index 00000000..6b846a08 --- /dev/null +++ b/bamboo_engine/validator/utils.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from copy import deepcopy + +from bamboo_engine.exceptions import ValueError + + +def format_to_list(notype): + """ + format a data to list + :return: + """ + if isinstance(notype, list): + return notype + if not notype: + return [] + return [notype] + + +def format_node_io_to_list(node, i=True, o=True): + if i: + node["incoming"] = format_to_list(node["incoming"]) + + if o: + node["outgoing"] = format_to_list(node["outgoing"]) + + +def format_pipeline_tree_io_to_list(pipeline_tree): + """ + :summary: format incoming and outgoing to list + :param pipeline_tree: + :return: + """ + for act in list(pipeline_tree["activities"].values()): + format_node_io_to_list(act, o=False) + + for gateway in list(pipeline_tree["gateways"].values()): + format_node_io_to_list(gateway, o=False) + + format_node_io_to_list(pipeline_tree["end_event"], o=False) + + +def get_node_for_sequence(sid, tree, node_type): + target_id = tree["flows"][sid][node_type] + + if target_id in tree["activities"]: + return tree["activities"][target_id] + elif target_id in tree["gateways"]: + return tree["gateways"][target_id] + elif target_id == tree["end_event"]["id"]: + return tree["end_event"] + elif target_id == tree["start_event"]["id"]: + return tree["start_event"] + + raise ValueError("node(%s) not in data" % target_id) + + +def get_nodes_dict(data): + """ + get all FlowNodes of a pipeline + """ + data = deepcopy(data) + start = data["start_event"]["id"] + end = data["end_event"]["id"] + + nodes = {start: data["start_event"], end: data["end_event"]} + + nodes.update(data["activities"]) + nodes.update(data["gateways"]) + + for node in list(nodes.values()): + # format to list + node["incoming"] = format_to_list(node["incoming"]) + node["outgoing"] = format_to_list(node["outgoing"]) + + node["source"] = [ + data["flows"][incoming]["source"] for incoming in node["incoming"] + ] + node["target"] = [ + data["flows"][outgoing]["target"] for outgoing in node["outgoing"] + ] + + return nodes diff --git "a/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" new file mode 100644 index 00000000..14b0d1d3 --- /dev/null +++ "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" @@ -0,0 +1,11 @@ +series,x,y +bamboo_engine,100,1.33 +bamboo_engine,500,9.66 +bamboo_engine,1000,19.33 +bamboo_engine,5000,154.33 +bamboo_engine,10000,347.5 +pipeline,100,6 +pipeline,500,91 +pipeline,1000,545 +pipeline,5000, +pipeline,10000, \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" new file mode 100644 index 00000000..c10da1c4 --- /dev/null +++ "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" @@ -0,0 +1,163 @@ +{ + "id": "u-45eb-10b", + "data": [ + { + "series": "bamboo-engine", + "x": "100", + "y": 1.33 + }, + { + "series": "bamboo-engine", + "x": "500", + "y": 9.66 + }, + { + "series": "bamboo-engine", + "x": 
"1000", + "y": 19.33 + }, + { + "series": "bamboo-engine", + "x": "5000", + "y": 154.33 + }, + { + "series": "pipeline", + "y": 6, + "x": "100" + }, + { + "series": "pipeline", + "y": 91, + "x": "500" + }, + { + "series": "pipeline", + "y": 545, + "x": "1000" + }, + { + "series": "pipeline", + "y": null, + "x": "5000" + }, + { + "x": "10000", + "y": 347.5, + "series": "bamboo-engine" + }, + { + "x": "10000", + "y": null, + "series": "pipeline" + } + ], + "configs": { + "renderer": "canvas", + "title": { + "visible": true, + "text": "大流程执行" + }, + "description": { + "visible": true, + "text": "并行网关连接多个节点(100 gevent)" + }, + "padding": "auto", + "legend": { + "visible": true, + "position": "top-left", + "wordSpacing": 4, + "flipPage": false + }, + "tooltip": { + "visible": true, + "shared": true, + "crosshairs": { + "type": "y" + } + }, + "xAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": false, + "grid": { + "visible": false + }, + "line": { + "visible": true + }, + "tickLine": { + "visible": true + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "并发数" + } + }, + "yAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": true, + "grid": { + "visible": true + }, + "line": { + "visible": false + }, + "tickLine": { + "visible": false + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "耗时(s)" + } + }, + "label": { + "visible": true, + "type": "point" + }, + "connectNulls": false, + "smooth": true, + "lineSize": 2, + "lineStyle": { + "lineJoin": "round", + "lineCap": "round" + }, + "point": { + "visible": false, + "size": 0, + "style": { + "stroke": "#fff" + } + }, + "type": "Line", + "forceFit": false, + "animation": false, + "width": 1097, + "height": 532, + "data": { + "styles": { + "London": { + "color": "#5b8ff9" + } + } + }, + "xField": "x", + "yField": "y", + "seriesField": "series" + }, + "selection": [ + 2, + 4 + ] +} \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" new file mode 100644 index 00000000..3878a082 --- /dev/null +++ "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" @@ -0,0 +1,11 @@ +series,x,y +bamboo-engine,100,1.33 +bamboo-engine,500,9.66 +bamboo-engine,1000,19.33 +bamboo-engine,5000,154.33 +pipeline,100,6 +pipeline,500,91 +pipeline,1000,545 +pipeline,5000, +bamboo-engine,10000,347.5 +pipeline,10000, \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" new file mode 100644 index 00000000..d7b6db4a --- /dev/null +++ "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" @@ -0,0 +1,101 @@ +import * as G2Plot from '@antv/g2plot' +const container = document.getElementById('app'); +const data = [ + { + "series": "bamboo-engine", + "x": "100", + "y": 1.33 + }, + { + "series": "bamboo-engine", + "x": "500", + "y": 9.66 + }, + { + "series": "bamboo-engine", + "x": "1000", + "y": 19.33 + }, + { + "series": "bamboo-engine", + "x": "5000", + 
"y": 154.33 + }, + { + "series": "pipeline", + "y": 6, + "x": "100" + }, + { + "series": "pipeline", + "y": 91, + "x": "500" + }, + { + "series": "pipeline", + "y": 545, + "x": "1000" + }, + { + "series": "pipeline", + "y": null, + "x": "5000" + }, + { + "x": "10000", + "y": 347.5, + "series": "bamboo-engine" + }, + { + "x": "10000", + "y": null, + "series": "pipeline" + } +]; +const config = { + "title": { + "visible": true, + "text": "大流程执行" + }, + "description": { + "visible": true, + "text": "并行网关连接多个节点(100 gevent)" + }, + "legend": { + "flipPage": false + }, + "xAxis": { + "title": { + "visible": true, + "text": "并发数" + } + }, + "yAxis": { + "title": { + "visible": true, + "text": "耗时(s)" + } + }, + "label": { + "visible": true + }, + "smooth": true, + "point": { + "size": 0 + }, + "forceFit": false, + "width": 1097, + "height": 532, + "xField": "x", + "yField": "y", + "seriesField": "series", + "color": [ + "#5B8FF9", + "#5AD8A6" + ] +} +const plot = new G2Plot.Line(container, { + data, + ...config, +}); +plot.render(); diff --git "a/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" new file mode 100644 index 00000000..49938107 Binary files /dev/null and "b/benchmark/EXECUTION \345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" differ diff --git "a/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/EXECUTION \345\244\232\346\265\201\347\250\213.csv" "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/EXECUTION \345\244\232\346\265\201\347\250\213.csv" new file mode 100644 index 00000000..391c40bc --- /dev/null +++ "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/EXECUTION \345\244\232\346\265\201\347\250\213.csv" @@ -0,0 +1,9 @@ +series,x,y +bamboo_engine,100,25.98 +bamboo_engine,500,138 +bamboo_engine,1000,272 +bamboo_engine,5000,2442 +pipeline,100,48.77 +pipeline,500,311 +pipeline,1000,748 +pipeline,5000, \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" new file mode 100644 index 00000000..4ab5a144 --- /dev/null +++ "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" @@ -0,0 +1,153 @@ +{ + "id": "u-45eb-10b", + "data": [ + { + "series": "bamboo-engine", + "x": "100", + "y": 25.98 + }, + { + "series": "bamboo-engine", + "x": "500", + "y": 138 + }, + { + "series": "bamboo-engine", + "x": "1000", + "y": 272 + }, + { + "series": "bamboo-engine", + "x": "5000", + "y": 2442 + }, + { + "series": "pipeline", + "y": 48.77, + "x": "100" + }, + { + "series": "pipeline", + "y": 311, + "x": "500" + }, + { + "series": "pipeline", + "y": 748, + "x": "1000" + }, + { + "series": "pipeline", + "y": null, + "x": "5000" + } + ], + "configs": { + "renderer": "canvas", + "title": { + "visible": true, + "text": "多流程并发执行" + }, + "description": { + "visible": true, + "text": "17个节点,子流程,并行、分支网关(100 gevent)" + }, + "padding": "auto", + "legend": { + "visible": true, + "position": "top-left", + "wordSpacing": 4, + "flipPage": false + }, + "tooltip": { + "visible": true, + "shared": true, + "crosshairs": { + "type": "y" + } + }, + "xAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": false, + "grid": { + "visible": false + }, + 
"line": { + "visible": true + }, + "tickLine": { + "visible": true + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "并发数" + } + }, + "yAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": true, + "grid": { + "visible": true + }, + "line": { + "visible": false + }, + "tickLine": { + "visible": false + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "公平调度平均耗时(s)" + } + }, + "label": { + "visible": true, + "type": "point" + }, + "connectNulls": false, + "smooth": true, + "lineSize": 2, + "lineStyle": { + "lineJoin": "round", + "lineCap": "round" + }, + "point": { + "visible": false, + "size": 0, + "style": { + "stroke": "#fff" + } + }, + "type": "Line", + "forceFit": false, + "animation": false, + "width": 1097, + "height": 532, + "data": { + "styles": { + "London": { + "color": "#5b8ff9" + } + } + }, + "xField": "x", + "yField": "y", + "seriesField": "series" + }, + "selection": [ + 2, + 4 + ] +} \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.csv" "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.csv" new file mode 100644 index 00000000..b7d4190c --- /dev/null +++ "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.csv" @@ -0,0 +1,9 @@ +series,x,y +bamboo-engine,100,25.98 +bamboo-engine,500,138 +bamboo-engine,1000,272 +bamboo-engine,5000,2442 +pipeline,100,48.77 +pipeline,500,311 +pipeline,1000,748 +pipeline,5000, \ No newline at end of file diff --git "a/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.js" "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.js" new file mode 100644 index 00000000..08fb49b4 --- /dev/null +++ "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.js" @@ -0,0 +1,91 @@ +import * as G2Plot from '@antv/g2plot' +const container = document.getElementById('app'); +const data = [ + { + "series": "bamboo-engine", + "x": "100", + "y": 25.98 + }, + { + "series": "bamboo-engine", + "x": "500", + "y": 138 + }, + { + "series": "bamboo-engine", + "x": "1000", + "y": 272 + }, + { + "series": "bamboo-engine", + "x": "5000", + "y": 2442 + }, + { + "series": "pipeline", + "y": 48.77, + "x": "100" + }, + { + "series": "pipeline", + "y": 311, + "x": "500" + }, + { + "series": "pipeline", + "y": 748, + "x": "1000" + }, + { + "series": "pipeline", + "y": null, + "x": "5000" + } +]; +const config = { + "title": { + "visible": true, + "text": "多流程并发执行" + }, + "description": { + "visible": true, + "text": "17个节点,子流程,并行、分支网关(100 gevent)" + }, + "legend": { + "flipPage": false + }, + "xAxis": { + "title": { + "visible": true, + "text": "并发数" + } + }, + "yAxis": { + "title": { + "visible": true, + "text": "公平调度平均耗时(s)" + } + }, + "label": { + "visible": true + }, + "smooth": true, + "point": { + "size": 0 + }, + "forceFit": false, + "width": 1097, + "height": 532, + "xField": "x", + "yField": "y", + "seriesField": "series", + "color": [ + "#5B8FF9", + "#5AD8A6" + ] +} +const plot = new G2Plot.Line(container, { + data, + ...config, +}); +plot.render(); diff --git "a/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.png" "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.png" new file mode 100644 index 00000000..049c1603 Binary files /dev/null and "b/benchmark/EXECUTION \345\244\232\346\265\201\347\250\213/Line-20210309.png" 
differ diff --git "a/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.chartshaper" "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.chartshaper" new file mode 100644 index 00000000..fa5ceb2b --- /dev/null +++ "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.chartshaper" @@ -0,0 +1,153 @@ +{ + "id": "u-45eb-10b", + "data": [ + { + "series": "100流程(17节点)并发执行", + "x": "100", + "y": 25.98 + }, + { + "series": "100流程(17节点)并发执行", + "x": "200", + "y": 14.75 + }, + { + "series": "100流程(17节点)并发执行", + "x": "500", + "y": 8.29 + }, + { + "series": "100流程(17节点)并发执行", + "x": "1000", + "y": 6.78 + }, + { + "series": "1000节点大流程", + "y": 19.33, + "x": "100" + }, + { + "series": "1000节点大流程", + "y": 12.5, + "x": "200" + }, + { + "series": "1000节点大流程", + "y": 11, + "x": "500" + }, + { + "series": "1000节点大流程", + "y": 7.5, + "x": "1000" + } + ], + "configs": { + "renderer": "canvas", + "title": { + "visible": true, + "text": "水平扩展测试" + }, + "description": { + "visible": true, + "text": "" + }, + "padding": "auto", + "legend": { + "visible": true, + "position": "top-left", + "wordSpacing": 4, + "flipPage": false + }, + "tooltip": { + "visible": true, + "shared": true, + "crosshairs": { + "type": "y" + } + }, + "xAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": false, + "grid": { + "visible": false + }, + "line": { + "visible": true + }, + "tickLine": { + "visible": true + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "gevent worker 数" + } + }, + "yAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": true, + "grid": { + "visible": true + }, + "line": { + "visible": false + }, + "tickLine": { + "visible": false + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "流程执行耗时" + } + }, + "label": { + "visible": true, + "type": "point" + }, + "connectNulls": false, + "smooth": true, + "lineSize": 2, + "lineStyle": { + "lineJoin": "round", + "lineCap": "round" + }, + "point": { + "visible": false, + "size": 0, + "style": { + "stroke": "#fff" + } + }, + "type": "Line", + "forceFit": false, + "animation": false, + "width": 1097, + "height": 532, + "data": { + "styles": { + "London": { + "color": "#5b8ff9" + } + } + }, + "xField": "x", + "yField": "y", + "seriesField": "series" + }, + "selection": [ + 2, + 3 + ] +} \ No newline at end of file diff --git "a/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.csv" "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.csv" new file mode 100644 index 00000000..a3123553 --- /dev/null +++ "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.csv" @@ -0,0 +1,9 @@ +series,x,y +100流程(17节点)并发执行,100,25.98 +100流程(17节点)并发执行,200,14.75 +100流程(17节点)并发执行,500,8.29 +100流程(17节点)并发执行,1000,6.78 +1000节点大流程,100,19.33 +1000节点大流程,200,12.5 +1000节点大流程,500,11 +1000节点大流程,1000,7.5 \ No newline at end of file diff --git "a/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.js" "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.js" new file mode 100644 index 00000000..48acaaf2 --- /dev/null +++ "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.js" @@ -0,0 +1,90 @@ +import * as G2Plot from '@antv/g2plot' 
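+// Horizontal-scaling benchmark chart: flow execution time (s) against the
+// number of gevent workers, rendered with G2Plot.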
+const container = document.getElementById('app'); +const data = [ + { + "series": "100流程(17节点)并发执行", + "x": "100", + "y": 25.98 + }, + { + "series": "100流程(17节点)并发执行", + "x": "200", + "y": 14.75 + }, + { + "series": "100流程(17节点)并发执行", + "x": "500", + "y": 8.29 + }, + { + "series": "100流程(17节点)并发执行", + "x": "1000", + "y": 6.78 + }, + { + "series": "1000节点大流程", + "y": 19.33, + "x": "100" + }, + { + "series": "1000节点大流程", + "y": 12.5, + "x": "200" + }, + { + "series": "1000节点大流程", + "y": 11, + "x": "500" + }, + { + "series": "1000节点大流程", + "y": 7.5, + "x": "1000" + } +]; +const config = { + "title": { + "visible": true, + "text": "水平扩展测试" + }, + "description": { + "visible": true + }, + "legend": { + "flipPage": false + }, + "xAxis": { + "title": { + "visible": true, + "text": "gevent worker 数" + } + }, + "yAxis": { + "title": { + "visible": true, + "text": "流程执行耗时" + } + }, + "label": { + "visible": true + }, + "smooth": true, + "point": { + "size": 0 + }, + "forceFit": false, + "width": 1097, + "height": 532, + "xField": "x", + "yField": "y", + "seriesField": "series", + "color": [ + "#5B8FF9", + "#5AD8A6" + ] +} +const plot = new G2Plot.Line(container, { + data, + ...config, +}); +plot.render(); diff --git "a/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.png" "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.png" new file mode 100644 index 00000000..79e455ca Binary files /dev/null and "b/benchmark/EXECUTION \346\260\264\345\271\263\346\211\251\345\261\225/Line-20210309.png" differ diff --git "a/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" new file mode 100644 index 00000000..a7239b8b --- /dev/null +++ "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.chartshaper" @@ -0,0 +1,138 @@ +{ + "id": "u-45eb-10b", + "data": [ + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "100", + "y": 0.76 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "500", + "y": 1.68 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "1000", + "y": 3.19 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "5000", + "y": 13 + }, + { + "x": "10000", + "y": 30, + "series": "normal(20 nodes 14 vars) 100 并发" + } + ], + "configs": { + "renderer": "canvas", + "title": { + "visible": true, + "text": "任务创建耗时" + }, + "description": { + "visible": true, + "text": "" + }, + "padding": "auto", + "legend": { + "visible": true, + "position": "top-left", + "wordSpacing": 4, + "flipPage": false + }, + "tooltip": { + "visible": true, + "shared": true, + "crosshairs": { + "type": "y" + } + }, + "xAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": false, + "grid": { + "visible": false + }, + "line": { + "visible": true + }, + "tickLine": { + "visible": true + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "流程数" + } + }, + "yAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": true, + "grid": { + "visible": true + }, + "line": { + "visible": false + }, + "tickLine": { + "visible": false + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "耗时(s)" + } + }, + "label": { + "visible": true, + "type": "point" + }, + "connectNulls": false, + "smooth": true, + "lineSize": 2, + "lineStyle": { + 
"lineJoin": "round", + "lineCap": "round" + }, + "point": { + "visible": false, + "size": 0, + "style": { + "stroke": "#fff" + } + }, + "type": "Line", + "forceFit": false, + "animation": false, + "width": 1097, + "height": 532, + "data": { + "styles": { + "London": { + "color": "#5b8ff9" + } + } + }, + "xField": "x", + "yField": "y", + "seriesField": "series" + }, + "selection": [ + 1, + 4 + ] +} \ No newline at end of file diff --git "a/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.csv" "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.csv" new file mode 100644 index 00000000..b1ff9347 --- /dev/null +++ "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.csv" @@ -0,0 +1,6 @@ +series,x,y +normal(20 nodes 14 vars) 100 并发,100,0.76 +normal(20 nodes 14 vars) 100 并发,500,1.68 +normal(20 nodes 14 vars) 100 并发,1000,3.19 +normal(20 nodes 14 vars) 100 并发,5000,13 +normal(20 nodes 14 vars) 100 并发,10000,30 \ No newline at end of file diff --git "a/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.js" "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.js" new file mode 100644 index 00000000..b53bb69e --- /dev/null +++ "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.js" @@ -0,0 +1,74 @@ +import * as G2Plot from '@antv/g2plot' +const container = document.getElementById('app'); +const data = [ + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "100", + "y": 0.76 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "500", + "y": 1.68 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "1000", + "y": 3.19 + }, + { + "series": "normal(20 nodes 14 vars) 100 并发", + "x": "5000", + "y": 13 + }, + { + "x": "10000", + "y": 30, + "series": "normal(20 nodes 14 vars) 100 并发" + } +]; +const config = { + "title": { + "visible": true, + "text": "任务创建耗时" + }, + "description": { + "visible": true + }, + "legend": { + "flipPage": false + }, + "xAxis": { + "title": { + "visible": true, + "text": "流程数" + } + }, + "yAxis": { + "title": { + "visible": true, + "text": "耗时(s)" + } + }, + "label": { + "visible": true + }, + "smooth": true, + "point": { + "size": 0 + }, + "forceFit": false, + "width": 1097, + "height": 532, + "xField": "x", + "yField": "y", + "seriesField": "series", + "color": [ + "#5B8FF9" + ] +} +const plot = new G2Plot.Line(container, { + data, + ...config, +}); +plot.render(); diff --git "a/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.png" "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.png" new file mode 100644 index 00000000..46daa03f Binary files /dev/null and "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/Line-20210309.png" differ diff --git "a/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/PREPARE\345\244\232\346\265\201\347\250\213.csv" "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/PREPARE\345\244\232\346\265\201\347\250\213.csv" new file mode 100644 index 00000000..53c486a8 --- /dev/null +++ "b/benchmark/PREPARE \345\244\232\346\265\201\347\250\213/PREPARE\345\244\232\346\265\201\347\250\213.csv" @@ -0,0 +1,6 @@ +series,x,y +normal(20 nodes 14 vars) p100,100,0.76 +normal(20 nodes 14 vars) p100,500,1.68 +normal(20 nodes 14 vars) p100,1000,3.19 +normal(20 nodes 14 vars) p100,5000,13 +normal(20 nodes 14 vars) p100,10000,30 \ No newline at end of file diff --git 
"a/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" new file mode 100644 index 00000000..a86071da --- /dev/null +++ "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.chartshaper" @@ -0,0 +1,143 @@ +{ + "id": "u-45eb-10b", + "data": [ + { + "series": "单流程多节点", + "x": "100", + "y": 0.05 + }, + { + "series": "单流程多节点", + "x": "500", + "y": 0.05 + }, + { + "series": "单流程多节点", + "x": "1000", + "y": 0.09 + }, + { + "series": "单流程多节点", + "x": "5000", + "y": 0.45 + }, + { + "x": "10000", + "y": 0.91, + "series": "单流程多节点" + }, + { + "x": "100000", + "y": 9.32, + "series": "单流程多节点" + } + ], + "configs": { + "renderer": "canvas", + "title": { + "visible": false, + "text": "任务创建耗时" + }, + "description": { + "visible": true, + "text": "并行网关后多个节点" + }, + "padding": "auto", + "legend": { + "visible": true, + "position": "top-left", + "wordSpacing": 4, + "flipPage": false + }, + "tooltip": { + "visible": true, + "shared": true, + "crosshairs": { + "type": "y" + } + }, + "xAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": false, + "grid": { + "visible": false + }, + "line": { + "visible": true + }, + "tickLine": { + "visible": true + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "耗时(s)" + } + }, + "yAxis": { + "visible": true, + "autoHideLabel": false, + "autoRotateLabel": false, + "autoRotateTitle": true, + "grid": { + "visible": true + }, + "line": { + "visible": false + }, + "tickLine": { + "visible": false + }, + "label": { + "visible": true + }, + "title": { + "visible": true, + "offset": 12, + "text": "节点数" + } + }, + "label": { + "visible": true, + "type": "point" + }, + "connectNulls": false, + "smooth": true, + "lineSize": 2, + "lineStyle": { + "lineJoin": "round", + "lineCap": "round" + }, + "point": { + "visible": false, + "size": 0, + "style": { + "stroke": "#fff" + } + }, + "type": "Line", + "forceFit": false, + "animation": false, + "width": 1097, + "height": 532, + "data": { + "styles": { + "London": { + "color": "#5b8ff9" + } + } + }, + "xField": "x", + "yField": "y", + "seriesField": "series" + }, + "selection": [ + 1, + 0 + ] +} \ No newline at end of file diff --git "a/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" new file mode 100644 index 00000000..655fa645 --- /dev/null +++ "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.csv" @@ -0,0 +1,7 @@ +series,x,y +单流程多节点,100,0.05 +单流程多节点,500,0.05 +单流程多节点,1000,0.09 +单流程多节点,5000,0.45 +单流程多节点,10000,0.91 +单流程多节点,100000,9.32 \ No newline at end of file diff --git "a/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" new file mode 100644 index 00000000..8f2e3758 --- /dev/null +++ "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.js" @@ -0,0 +1,79 @@ +import * as G2Plot from '@antv/g2plot' +const container = document.getElementById('app'); +const data = [ + { 
+ "series": "单流程多节点", + "x": "100", + "y": 0.05 + }, + { + "series": "单流程多节点", + "x": "500", + "y": 0.05 + }, + { + "series": "单流程多节点", + "x": "1000", + "y": 0.09 + }, + { + "series": "单流程多节点", + "x": "5000", + "y": 0.45 + }, + { + "x": "10000", + "y": 0.91, + "series": "单流程多节点" + }, + { + "x": "100000", + "y": 9.32, + "series": "单流程多节点" + } +]; +const config = { + "title": { + "text": "任务创建耗时" + }, + "description": { + "visible": true, + "text": "并行网关后多个节点" + }, + "legend": { + "flipPage": false + }, + "xAxis": { + "title": { + "visible": true, + "text": "耗时(s)" + } + }, + "yAxis": { + "title": { + "visible": true, + "text": "节点数" + } + }, + "label": { + "visible": true + }, + "smooth": true, + "point": { + "size": 0 + }, + "forceFit": false, + "width": 1097, + "height": 532, + "xField": "x", + "yField": "y", + "seriesField": "series", + "color": [ + "#5B8FF9" + ] +} +const plot = new G2Plot.Line(container, { + data, + ...config, +}); +plot.render(); diff --git "a/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" new file mode 100644 index 00000000..f125a379 Binary files /dev/null and "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/Line-20210309.png" differ diff --git "a/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" new file mode 100644 index 00000000..16c65f1f --- /dev/null +++ "b/benchmark/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271/PREPARE\345\215\225\346\265\201\347\250\213\345\244\232\350\212\202\347\202\271.csv" @@ -0,0 +1,7 @@ +series,x,y +nodes flow,100,0.05 +nodes flow,500,0.05 +nodes flow,1000,0.09 +nodes flow,5000,0.45 +nodes flow,10000,0.91 +nodes flow,100000,9.32 \ No newline at end of file diff --git a/benchmark/cpu_usage.png b/benchmark/cpu_usage.png new file mode 100644 index 00000000..ca12974a Binary files /dev/null and b/benchmark/cpu_usage.png differ diff --git a/docs/assets/img/code_arch.png b/docs/assets/img/code_arch.png new file mode 100644 index 00000000..5763a511 Binary files /dev/null and b/docs/assets/img/code_arch.png differ diff --git a/docs/assets/img/simple_example.png b/docs/assets/img/simple_example.png new file mode 100644 index 00000000..3bcd9f50 Binary files /dev/null and b/docs/assets/img/simple_example.png differ diff --git a/docs/assets/img/user_guide_basic_concept/status.png b/docs/assets/img/user_guide_basic_concept/status.png new file mode 100644 index 00000000..e559195f Binary files /dev/null and b/docs/assets/img/user_guide_basic_concept/status.png differ diff --git a/docs/assets/img/user_guide_engine_message_lost/img1.png b/docs/assets/img/user_guide_engine_message_lost/img1.png new file mode 100644 index 00000000..d626e0c9 Binary files /dev/null and b/docs/assets/img/user_guide_engine_message_lost/img1.png differ diff --git a/docs/assets/img/user_guide_flow_orchestration/data_example.png b/docs/assets/img/user_guide_flow_orchestration/data_example.png new file mode 100644 index 00000000..54f6b8fe Binary files /dev/null and b/docs/assets/img/user_guide_flow_orchestration/data_example.png differ 
diff --git a/docs/assets/img/user_guide_flow_orchestration/data_input_example_1.png b/docs/assets/img/user_guide_flow_orchestration/data_input_example_1.png new file mode 100644 index 00000000..7abb9d18 Binary files /dev/null and b/docs/assets/img/user_guide_flow_orchestration/data_input_example_1.png differ diff --git a/docs/assets/img/user_guide_flow_orchestration/node_output_example_1.png b/docs/assets/img/user_guide_flow_orchestration/node_output_example_1.png new file mode 100644 index 00000000..003a5805 Binary files /dev/null and b/docs/assets/img/user_guide_flow_orchestration/node_output_example_1.png differ diff --git a/docs/assets/img/user_guide_flow_orchestration/rewritable_output_example_1.png b/docs/assets/img/user_guide_flow_orchestration/rewritable_output_example_1.png new file mode 100644 index 00000000..00b0bc7f Binary files /dev/null and b/docs/assets/img/user_guide_flow_orchestration/rewritable_output_example_1.png differ diff --git a/docs/assets/img/user_guide_flow_orchestration/var_sample.png b/docs/assets/img/user_guide_flow_orchestration/var_sample.png new file mode 100644 index 00000000..d3cdd309 Binary files /dev/null and b/docs/assets/img/user_guide_flow_orchestration/var_sample.png differ diff --git a/docs/assets/img/user_guide_splice_var/splice_example_1.png b/docs/assets/img/user_guide_splice_var/splice_example_1.png new file mode 100644 index 00000000..b4b21d3f Binary files /dev/null and b/docs/assets/img/user_guide_splice_var/splice_example_1.png differ diff --git a/docs/assets/img/user_guide_splice_var/splice_example_2.png b/docs/assets/img/user_guide_splice_var/splice_example_2.png new file mode 100644 index 00000000..3e9a450b Binary files /dev/null and b/docs/assets/img/user_guide_splice_var/splice_example_2.png differ diff --git a/docs/assets/img/user_guide_splice_var/splice_example_3.png b/docs/assets/img/user_guide_splice_var/splice_example_3.png new file mode 100644 index 00000000..6417dc49 Binary files /dev/null and b/docs/assets/img/user_guide_splice_var/splice_example_3.png differ diff --git a/docs/assets/img/user_guide_splice_var/splice_resolve.png b/docs/assets/img/user_guide_splice_var/splice_resolve.png new file mode 100644 index 00000000..1204dbfe Binary files /dev/null and b/docs/assets/img/user_guide_splice_var/splice_resolve.png differ diff --git a/docs/upgrade/bamboo_pipeline_to_bamboo_engine.md b/docs/upgrade/bamboo_pipeline_to_bamboo_engine.md new file mode 100644 index 00000000..b4acb2d5 --- /dev/null +++ b/docs/upgrade/bamboo_pipeline_to_bamboo_engine.md @@ -0,0 +1,76 @@ + + +- [如何从 bamboo-pipeline 升级至 bamboo-engine](#如何从-bamboo-pipeline-升级至-bamboo-engine) + - [依赖升级](#依赖升级) + - [修改项目配置](#修改项目配置) + - [启动额外的 Worker 进程](#启动额外的-worker-进程) + - [API 替换](#api-替换) + - [* 可选项](#-可选项) + + + +## 如何从 bamboo-pipeline 升级至 bamboo-engine + +`bamboo-engine` 和 `bamboo-pipeline` 不存在冲突,系统中可以同时启动和运行两套引擎 + +### 依赖升级 + +将 bamboo-pipeline 升级至 `3.1.0` 或以上版本 + +``` +$ pip install bamboo-pipeline +``` + +### 修改项目配置 + +```python +from pipeline.celery.settings import * +from pipeline.eri.celery import queues +from celery import Celery + +CELERY_QUEUES.extend(queues.CELERY_QUEUES) # 向 broker 队列中添加 bamboo-engine 专用队列 + +app = Celery("proj") + +app.config_from_object("django.conf:settings") + +# 添加 INSTALLED_APPS +INSTALLED_APPS = [ + ... + "pipeline.eri", + ... 
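+    # pipeline.eri 提供 bamboo-engine 运行时(EngineRuntimeInterface)的实现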
+] +``` + +### 启动额外的 Worker 进程 + +``` +$ python manage.py celery worker -Q er_execute,er_schedule -l info +``` + +### API 替换 + +按照如下映射进行 API 调用的替换: + +- pipeline.service.task_service.run_pipeline: bamboo_engine.api.run_pipeline +- pipeline.service.task_service.pause_pipeline : bamboo_engine.api.pause_pipeline +- pipeline.service.task_service.revoke_pipeline : bamboo_engine.api.revoke_pipeline +- pipeline.service.task_service.resume_pipeline : bamboo_engine.api.resume_pipeline +- pipeline.service.task_service.pause_activity : bamboo_engine.api.pause_node_appoint +- pipeline.service.task_service.resume_activity : bamboo_engine.api.resume_node_appoint +- pipeline.service.task_service.retry_activity : bamboo_engine.api.retry_node +- pipeline.service.task_service.skip_activity : bamboo_engine.api.skip_node +- pipeline.service.task_service.skip_exclusive_gateway : bamboo_engine.api.skip_exclusive_gateway +- pipeline.service.task_service.forced_fail : bamboo_engine.api.forced_fail_activity +- pipeline.service.task_service.get_state : bamboo_engine.api.get_pipeline_states +- pipeline.service.task_service.get_topo_tree : 不再支持 +- pipeline.service.task_service.get_inputs : bamboo_engine.api.get_execution_data_inputs +- pipeline.service.task_service.get_outputs : bamboo_engine.api.get_execution_data_outputs +- pipeline.service.task_service.get_activity_histories : bamboo_engine.api.get_node_histories +- pipeline.service.task_service.callback : bamboo_engine.api.callback +- pipeline.service.task_service.get_plain_log_for_node : pipeline.eri.runtime.BambooDjangoRuntime.get_plain_log_for_node + + +### * 可选项 + +- 将 pipeline.builder 包的所有引用切换至 bamboo_engine.builder 下 diff --git a/docs/upgrade/bamboo_pipeline_vs_bamboo_engine.md b/docs/upgrade/bamboo_pipeline_vs_bamboo_engine.md new file mode 100644 index 00000000..eb664546 --- /dev/null +++ b/docs/upgrade/bamboo_pipeline_vs_bamboo_engine.md @@ -0,0 +1,50 @@ + + +- [bamboo-pipeline 与 bamboo-engine 性能对比](#bamboo-pipeline-与-bamboo-engine-性能对比) + - [单个大流程执行](#单个大流程执行) + - [多流程并行执行](#多流程并行执行) + + + +## bamboo-pipeline 与 bamboo-engine 性能对比 + +测试环境: + +- MacBook Pro(16 英寸,2019) +- 处理器:2.6 GHz 六核Intel Core i7 +- 内存:32 GB 2667 MHz DDR4 +- OS:macOS Big Sur 11.2.1 +- Broker:RabbitMQ 3.8.2 +- MySQL:5.7.22 + +### 单个大流程执行 + +|引擎|节点数|执行耗时| +|-|-|-| +|bamboo-engine|100|1.33| +|bamboo-engine|500|9.66| +|bamboo-engine|1000|19.33| +|bamboo-engine|5000|154.33| +|bamboo-engine|10000|347.5| +|pipeline|100|6| +|pipeline|500|91| +|pipeline|1000|545| +|pipeline|5000|-| +|pipeline|10000|-| + +![](../../benchmark/EXECUTION%20单流程多节点/Line-20210309.png) + +### 多流程并行执行 + +|引擎|流程数|单个流程执行耗时| +|-|-|-| +|bamboo-engine|100|25.98| +|bamboo-engine|500|138| +|bamboo-engine|1000|272| +|bamboo-engine|5000|2442| +|pipeline|100|48.77| +|pipeline|500|311| +|pipeline|1000|748| +|pipeline|5000|-| + +![](../../benchmark/EXECUTION%20多流程/Line-20210309.png) diff --git a/docs/user_guide/basic_concept.md b/docs/user_guide/basic_concept.md new file mode 100644 index 00000000..2f6f03f6 --- /dev/null +++ b/docs/user_guide/basic_concept.md @@ -0,0 +1,86 @@ + + +- [流程](#流程) +- [节点](#节点) + - [event](#event) + - [activity](#activity) + - [gateway](#gateway) + - [subprocess](#subprocess) +- [状态](#状态) +- [顺序流](#顺序流) +- [ID](#id) +- [组件](#组件) +- [数据交换](#数据交换) + - [执行数据](#执行数据) + - [上下文](#上下文) + - [变量(Var)](#变量var) + + + +## 流程 + +以特定节点开始,特定节点结束的有向图 + +## 节点 + +流程图中除了边以外的元素,流程本身也是一个节点 + +### event + +一个流程至少会拥有两个事件,一个开始事件(`StartEvent`)和结束事件(`EndEvent`),开始事件的完成标志一个流程的执行开始,而结束事件的完成则标志一个 `Pipeline` 的执行完成。 + +### activity + 
+一个 `Activity` 对象代表了某项活动,比如发起网络请求,操作数据库,执行命令等等,`Activity` 的行为通常由用户来定义。 + +### gateway + +gateway 在流程中起到了引流的作用,网关会决定一个任务流程的执行走向和行为,如分支网关(`ExclusiveGateway`)决定了接下来需要引擎需要执行的路径,并行网关(`ParallelGateway`)会增加当前流程的执行进程数,条件并行网关(`ConditionalParallelGateway`)会根据当前上下文的信息来判断当前流程的增加的执行进程数,而汇聚网关(`ConvergeGateway`)则会减少当前流程的执行进程数。 + +### subprocess + +子流程(`SubProcess`)是流程中一种特殊的节点,当一个流程作为另一个流程的某个节点出现在其结构中,我们就把这个前者称为后者的子流程,通过使用子流程,开发者能够对流程重用,减少重复编码。 + +## 状态 + +每个节点都拥有一个对应的状态对象,节点状态流转示意如下 + +![](../assets/img/user_guide_basic_concept/status.png) + +## 顺序流 + +流程图中的有向边 + +## ID + +流程、节点和顺序流在系统中的唯一标志符 + +## 组件 + +特定业务逻辑的封装,用于定义流程活动节点的逻辑。 + +## 数据交换 + +### 执行数据 + +流程对象中每个节点都拥有一个执行数据,这个执行数据用于存储节点的输入和输出数据,每个节点之间的执行数据是相互隔离的,也就是说,节点1不能直接访问节点2的输出。 + +### 上下文 + +在整个流程执行的过程中,节点之间并不是完全孤立的,某些时候节点需要进行通信,例如,节点2需要获取节点1的执行结果,并根据该结果来决定接下来的行为,由于在一个流程中每个节点之间的数据是相互隔离的,无法在节点内实现对其他节点数据的直接访问,所以,每个流程会拥有一个用于进行节点通信和数据传递的上下文。 + +节点1能够将自己在执行过程中生成的某些数据写到上下文中,当其他节点需要使用的时候,只需要从上下文中获取这些数据即可。 + +执行数据与上下文之间的数据交换如下图所示: + +![上下文](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/data_context.png) + +> 为什么不能在节点中直接访问其他节点与上下文的数据?在节点中直接访问其他节点与上下文中的数据固然方便,但是这样可能会导致在实现组件时过度依赖当前上下文的结构与流程结构,从而破坏了组件的独立性与可复用性。pipeline 中的每种活动节点都是独立的个体,即:无论在什么结构下的流程中、在流程中的什么位置都能够正确的执行。 + +### 变量(Var) + +在定义流程时,通过变量(Var)能够声明上下文和节点执行数据中的数据,以及变量之间的引用关系,目前框架中提供了以下三种类型的变量: + +- PLAIN:常量类型的变量,其值在声明后就不会发生变化,这种变量的声明十分简单:`{'type': 'plain', 'value': 'var value'}` +- SPLICE:拼接类型的变量,这种变量的值能够引用其他变量并且根据需求进行一定程度的拼接和 python 操作,SPLICE 类型变量的详细使用说明可以参考 [SPLICE 变量使用](./user_guide_splice_var.md) 章节 +- LAZY:延迟获取值类型的变量,这种变量在进行解析前可以执行一段自定义的代码来获取特定的值,更加详细的说明可以参考 [LAZY 变量](./user_guide_lazy_variable.md) 章节 diff --git a/docs/user_guide/component_unit_test.md b/docs/user_guide/component_unit_test.md new file mode 100644 index 00000000..dc4b2de5 --- /dev/null +++ b/docs/user_guide/component_unit_test.md @@ -0,0 +1,327 @@ + + +- [组件单元测试](#%E7%BB%84%E4%BB%B6%E5%8D%95%E5%85%83%E6%B5%8B%E8%AF%95) + - [组件测试类](#%E7%BB%84%E4%BB%B6%E6%B5%8B%E8%AF%95%E7%B1%BB) + - [组件测试用例](#%E7%BB%84%E4%BB%B6%E6%B5%8B%E8%AF%95%E7%94%A8%E4%BE%8B) + - [执行断言](#%E6%89%A7%E8%A1%8C%E6%96%AD%E8%A8%80) + - [调度断言](#%E8%B0%83%E5%BA%A6%E6%96%AD%E8%A8%80) + - [调用断言](#%E8%B0%83%E7%94%A8%E6%96%AD%E8%A8%80) + - [如何patch ESB接口调用](#%E5%A6%82%E4%BD%95patch-esb%E6%8E%A5%E5%8F%A3%E8%B0%83%E7%94%A8) +- [示例](#%E7%A4%BA%E4%BE%8B) + + + +## 组件单元测试 + +在我们完成自定义组件的开发后,我们需要测试组件是否能够按照我们预期的那样运行。最简单的方式就是构造一个包含该节点的流程然后把流程跑起来观察其行为和输出是否符合预期。但是这种测试方式十分耗时而且是一次性的,下次若是修改了节点后需要再进行一遍相同的操作。 + +为了解决这个问题,框架内部提供了组件测试单元测试框架,框架会模拟组件在流程中执行的场景,并根据开发者编写的测试用例来执行组件并检测组件的行为是否符合预期。借助组件单元测试框架能够节省我们测试组件的时间,并且保证组件实现在发生变化后能够快速确认改动是否影响了组件的功能。 + +### 组件测试类 + +要使用框架提供的单元测试框架十分容易,只需要在定义测试类的时候将框架提供的测试混入类混入到当前类中即可: + +```python + +from django.test import TestCase +from pipeline.component_framework.test import ComponentTestMixin + +class AComponentTestCase(TestCase, ComponentTestMixin): + + @property + def component_cls(self): + # return the component class which should be tested + return TheScheduleComponent + + @property + def cases(self): + # return your component test cases here + return [] + +``` + +将测试类混入到当前类中后,还需要定义两个属性: + +- `component_cls`:该方法返回需要被测试的组件的类。 +- `cases`:该方法返回该组件的所有测试用例。 + +### 组件测试用例 + +对于一个组件可能我们会有若干个测试用例,分别测试不同情况下组件的行为是否符合我们的预期。下面的代码就定义了一个组件测试用例的实例: + +```python +from mock import patch, MagicMock, call +from pipeline.component_framework.test import (ComponentTestMixin, + ComponentTestCase, + CallAssertion, + ExecuteAssertion, + ScheduleAssertion) + +ComponentTestCase(name='case 1', + inputs={'e_call_1': True}, + parent_data={}, + 
                  execute_assertion=ExecuteAssertion(success=True,
                                                     outputs={}),
                  schedule_assertion=[
                      ScheduleAssertion(success=True,
                                        outputs={'count': 1},
                                        callback_data=None),
                      ScheduleAssertion(success=True,
                                        outputs={'count': 2},
                                        callback_data=None),
                      ScheduleAssertion(success=True,
                                        schedule_finished=True,
                                        outputs={'count': 2},
                                        callback_data=None)],
                  patchers=[
                      patch('pipeline_test_use.components.collections.experience.need_patch_1',
                            MagicMock()),
                      patch('pipeline_test_use.components.collections.experience.need_patch_2',
                            MagicMock())],
                  execute_call_assertion=[
                      CallAssertion(func='pipeline_test_use.components.collections.experience.need_patch_1',
                                    calls=[call()]),
                      CallAssertion(func='pipeline_test_use.components.collections.experience.need_patch_2',
                                    calls=[])],
                  schedule_call_assertion=[
                      CallAssertion(func='pipeline_test_use.components.collections.experience.need_patch_1',
                                    calls=[]),
                      CallAssertion(func='pipeline_test_use.components.collections.experience.need_patch_2',
                                    calls=[])])
```

下面让我们来看一下测试用例的构成:

- `name`:用例名,框架在用例运行失败时会使用当前用例名在日志信息中提示开发者,定义有意义的用例名能够方便我们快速了解该用例测试的功能,并在用例执行失败时快速定位。
- `inputs`:组件执行输入数据,其中定义的数据在测试用例执行前会被设置到被测组件所绑定服务的 `execute(data, parent_data)` 及 `schedule(self, data, parent_data, callback_data=None)` 方法中 `data` 对象的 `inputs` 中。
- `parent_data`:组件执行上下文数据,其中定义的数据在测试用例执行前会被设置到被测组件所绑定服务的 `execute(data, parent_data)` 及 `schedule(self, data, parent_data, callback_data=None)` 方法中 `parent_data` 对象的 `inputs` 中。
- `execute_assertion`:执行断言,用于检测本次测试中组件绑定服务的 `execute` 方法的行为是否符合预期。
- `schedule_assertion`:调度断言,用于检测本次测试中组件绑定服务的 `schedule` 方法的行为是否符合预期;对于非调度型的组件,该字段留空即可。
- `patchers`:其中定义的 `patcher` 会在当前测试用例执行前调用,用于 patch 组件在执行时调用的其他模块的方法或属性,以实现测试隔离。
- `execute_call_assertion`:执行调用断言,用于检测本次测试中组件绑定服务的 `execute` 方法是否以预期的方式调用了其他方法。
- `schedule_call_assertion`:调度调用断言,用于检测本次测试中组件绑定服务的 `schedule` 方法是否以预期的方式调用了其他方法。

#### 执行断言

执行断言能够帮助我们检测本次测试中组件服务的 `execute` 方法是否执行成功了,输出的数据是否符合预期:

```python
ExecuteAssertion(success=True, outputs={})
```

下面看一下执行断言的构成:

- `success`:断言本次测试中组件服务的 `execute` 方法是否执行成功。
- `outputs`:断言本次测试中组件服务的 `execute` 方法执行完成后当前节点的数据对象中 `outputs` 字段(即 `execute(data, parent_data)` 中 `data` 的 `outputs` 字段)的数据。

#### 调度断言

调度断言能够帮助我们检测本次测试中组件服务的 `schedule` 方法是否执行成功了,调度是否完成了,输出的数据是否符合预期;这里需要注意的是:**对于调度型的服务,测试框架会根据我们传入的调度断言的数量来进行相应次数的 `schedule` 方法调用**:

```python
ScheduleAssertion(success=True, schedule_finished=True, outputs={'count': 2}, callback_data=None)
```

下面看一下调度断言的构成:

- `success`:断言本次测试中组件服务的 `schedule` 方法是否执行成功。
- `schedule_finished`:断言本次测试中组件服务是否已经完成调度。
- `outputs`:断言本次测试中组件服务的 `schedule` 方法执行完成后当前节点的数据对象中 `outputs` 字段(即 `schedule(data, parent_data, callback_data=None)` 中 `data` 的 `outputs` 字段)的数据。
- `callback_data`:对于回调型的组件,通过该参数传入回调数据(即 `schedule(data, parent_data, callback_data=None)` 中的 `callback_data`)。

#### 调用断言

调用断言用于检测组件服务的 `execute` 或 `schedule` 方法是否按照预期调用了某些方法;这里需要注意的是:**进行调用断言的函数必须是被 `MagicMock` patch 过的函数**:

```python
CallAssertion(func='pipeline_test_use.components.collections.experience.need_patch_1',
              calls=[call(),
                     call(kwarg_1='', kwargs_2='')],
              any_order=False)
```

下面看一下调用断言的构成:

- `func`:进行调用断言的函数的全限定名。
- `calls`:对函数的调用断言,若要进行“没有被调用”的断言,传递空数组即可。
- `any_order`:是否对 `calls` 中的调用断言没有顺序要求。

#### 如何patch ESB接口调用

大部分插件都会调用 ESB 接口,在单元测试中,我们可以将这个调用过程进行 patch,使被测插件在执行时,接口调用并不实际发生,而是通过 MagicMock 返回我们给定的响应。比如对于作业平台 job.fast_execute_script 接口,我们可以编写这样的 Mock 类:

```python
class MockClient(object):
    def __init__(self, fast_execute_script_return=None):
        self.job = MagicMock()
        self.job.fast_execute_script = MagicMock(return_value=fast_execute_script_return)
```

实例化该类时,提供接口响应用例数据:

```python
success_result = {
    'result': True,
    'code': 0,
    'message': 'success',
    'data': {
        'job_instance_name': 'API Quick execution script1521100521303',
        'job_instance_id': 10000
    },
}
mock_client = MockClient(fast_execute_script_return=success_result)
```

而在测试用例中,将获取 client 的 get_client_by_user 函数 patch 为返回 mock_client:

```python
from pipeline.component_framework.test import Patcher

GET_CLIENT_BY_USER = 'pipeline_plugins.components.collections.sites.open.job.get_client_by_user'


ComponentTestCase(
    ...
    patchers=[
        Patcher(target=GET_CLIENT_BY_USER, return_value=mock_client)
    ]
    ...
)
```

## 示例

让我们针对下面代码中定义的组件来编写一个测试类:

```python
from pipeline.core.flow.activity import Service, StaticIntervalGenerator
from pipeline.component_framework.component import Component


class TheScheduleService(Service):
    __need_schedule__ = True
    interval = StaticIntervalGenerator(1)

    def execute(self, data, parent_data):

        # make execute failed if receive fail signal
        if data.inputs.get('fail', False):
            return False

        # write all inputs to outputs
        for k, v in data.inputs.items():
            data.outputs[k] = v

        # write all parent_data.inputs to outputs
        data.outputs.parent_data = {}
        for k, v in parent_data.inputs.items():
            data.outputs.parent_data[k] = v

        return True

    def schedule(self, data, parent_data, callback_data=None):

        # make schedule failed if receive fail signal
        if data.inputs.get('schedule_fail', False):
            return False

        # decide schedule state according to count
        count = data.get_one_of_outputs('count')
        if count is None:
            data.outputs.count = 1
        else:
            if count == 2:
                self.finish_schedule()
            else:
                data.outputs.count += 1

        return True


class TheScheduleComponent(Component):
    name = u'the schedule component'
    code = 'the_schedule_component'
    bound_service = TheScheduleService
```

上述代码中定义的组件是一个调度型组件,该组件的 `execute` 和 `schedule` 方法都会根据输入的某个参数来决定是否能够执行成功。并且在 `execute` 方法中会把传入的 `data` 和 `parent_data` 中的所有输入数据写到输出中。而在 `schedule` 方法中,会根据当前 `schedule` 执行的次数来决定是否完成调度。

根据这个组件的实现,我们能够构造出三个测试用例:

- 组件执行成功的测试用例
- `execute` 执行失败的测试用例
- `schedule` 执行失败的测试用例

> 由于被测代码比较简单,在实际情况中,建议根据黑盒和白盒测试中用例构造方式的指导来构造较为完备的测试用例,以保证能够覆盖到组件执行时所有可能出现的场景。

下面就是我们的测试代码:

```python
from django.test import TestCase

from pipeline.component_framework.test import (ComponentTestMixin,
                                               ComponentTestCase,
                                               ExecuteAssertion,
                                               ScheduleAssertion)

from pipeline_test_use.components.collections.experience import TheScheduleComponent


class TheScheduleComponentTest(TestCase, ComponentTestMixin):

    @property
    def component_cls(self):
        return TheScheduleComponent

    @property
    def cases(self):
        return [
            ComponentTestCase(name='success case',
                              inputs={'k1': 'v1',
                                      'k2': 'v2'},
                              parent_data={'k': 'v'},
                              execute_assertion=ExecuteAssertion(success=True,
                                                                 outputs={'k1': 'v1',
                                                                          'k2': 'v2',
                                                                          'parent_data': {'k': 'v'}}),
                              schedule_assertion=[ScheduleAssertion(success=True,
                                                                    outputs={'k1': 'v1',
                                                                             'k2': 'v2',
                                                                             'count': 1,
                                                                             'parent_data': {'k': 'v'}},
                                                                    callback_data=None),
                                                  ScheduleAssertion(success=True,
                                                                    outputs={'k1': 'v1',
                                                                             'k2': 'v2',
                                                                             'count': 2,
                                                                             'parent_data': {'k': 'v'}},
                                                                    callback_data=None),
                                                  ScheduleAssertion(success=True,
                                                                    schedule_finished=True,
                                                                    outputs={'k1': 'v1',
                                                                             'k2': 'v2',
                                                                             'count': 2,
                                                                             'parent_data': {'k': 'v'}},
                                                                    callback_data=None)]),
            ComponentTestCase(name='execute fail case',
                              inputs={'k1': 'v1',
                                      'k2': 'v2',
                                      'fail': True},
                              parent_data={'k': 'v'},
                              execute_assertion=ExecuteAssertion(success=False,
                                                                 outputs=None),
                              schedule_assertion=None),
            ComponentTestCase(name='schedule fail case',
                              inputs={'k1': 'v1',
                                      'k2': 'v2',
                                      'schedule_fail': True},
                              parent_data={'k': 'v'},
                              execute_assertion=ExecuteAssertion(success=True,
                                                                 outputs={'k1': 'v1',
                                                                          'k2': 'v2',
                                                                          'schedule_fail': True,
                                                                          'parent_data': {'k': 'v'}}),
                              schedule_assertion=ScheduleAssertion(success=False,
                                                                   outputs=None,
                                                                   callback_data=None))]
```

可以看到我们定义了三个测试用例:

- `success case`:在这个用例中,我们测试了组件成功执行的情况。在执行断言中,我们根据组件的行为对输出数据进行了断言;在调度断言中,我们定义了三个断言对象,并根据组件的行为分别对不同调度中的输出数据及调度完成情况进行断言。
- `execute fail case`:在这个用例中,我们测试了组件服务 `execute` 方法执行失败的情况,由于 `execute` 方法执行失败后不会再进入调度状态,所以我们没有设置调度断言。
- `schedule fail case`:在这个用例中,我们测试了组件服务 `schedule` 方法执行失败的情况。

diff --git a/docs/user_guide/custom_component.md b/docs/user_guide/custom_component.md
new file mode 100644
index 00000000..dd33bfd3
--- /dev/null
+++ b/docs/user_guide/custom_component.md
@@ -0,0 +1,410 @@

- [自定义组件](#%E8%87%AA%E5%AE%9A%E4%B9%89%E7%BB%84%E4%BB%B6)
  - [开发一个自定义组件](#%E5%BC%80%E5%8F%91%E4%B8%80%E4%B8%AA%E8%87%AA%E5%AE%9A%E4%B9%89%E7%BB%84%E4%BB%B6)
    - [使用 APP 统一管理你的组件](#%E4%BD%BF%E7%94%A8-app-%E7%BB%9F%E4%B8%80%E7%AE%A1%E7%90%86%E4%BD%A0%E7%9A%84%E7%BB%84%E4%BB%B6)
    - [编写 Service](#%E7%BC%96%E5%86%99-service)
    - [编写 Component](#%E7%BC%96%E5%86%99-component)
    - [执行一下刚刚编写的组件](#%E6%89%A7%E8%A1%8C%E4%B8%80%E4%B8%8B%E5%88%9A%E5%88%9A%E7%BC%96%E5%86%99%E7%9A%84%E7%BB%84%E4%BB%B6)
  - [组件的行为](#%E7%BB%84%E4%BB%B6%E7%9A%84%E8%A1%8C%E4%B8%BA)
    - [单次执行](#%E5%8D%95%E6%AC%A1%E6%89%A7%E8%A1%8C)
    - [周期性轮询](#%E5%91%A8%E6%9C%9F%E6%80%A7%E8%BD%AE%E8%AF%A2)
    - [单次回调](#%E5%8D%95%E6%AC%A1%E5%9B%9E%E8%B0%83)
    - [多次回调](#%E5%A4%9A%E6%AC%A1%E5%9B%9E%E8%B0%83)
  - [组件的注册](#%E7%BB%84%E4%BB%B6%E7%9A%84%E6%B3%A8%E5%86%8C)

## 自定义组件

pipeline 开放了自定义组件的能力,开发者们能够开发针对某些特定业务场景的组件,将其添加到引擎的组件库中,并在 ServiceActivity 中调用它们。如之前的示例所示:

```python
act = ServiceActivity(component_code='example_component')
```

### 开发一个自定义组件

#### 1. 使用 APP 统一管理你的组件

组件开发的最佳实践是创建一个独立的 APP,并在这个 APP 中单独管理自定义的组件和组件需要使用到的一些公共逻辑。pipeline 提供了快捷命令,能够让我们快速地创建一个用于存放自定义组件的 APP,在 Django 工程根目录下执行以下命令:

```bash
$ python manage.py create_plugins_app custom_plugins
```

该命令会在 Django 工程根目录下生成拥有以下目录结构的 APP:

```text
custom_plugins
├── __init__.py
├── components
│   ├── __init__.py
│   └── collections
│       ├── __init__.py
│       └── plugins.py
├── migrations
│   └── __init__.py
└── static
    └── custom_plugins
        └── plugins.js
```

别忘了把新创建的 APP 添加到 Django 配置的 `INSTALLED_APPS` 中:

```python
INSTALLED_APPS = (
    ...
    'custom_plugins',
    ...
)
```

#### 2. 编写 Service

组件服务 `Service` 是组件的核心,`Service` 定义了组件被调用时执行的逻辑。下面让我们实现一个计算传入的参数 `n` 的阶乘,并把结果写到输出中的 `Service`,在 `custom_plugins/components/collections/plugins.py` 中输入以下代码:

```python
import math
from pipeline.core.flow.activity import Service


class FactorialCalculateService(Service):

    def execute(self, data, parent_data):
        n = data.get_one_of_inputs('n')
        if not isinstance(n, int):
            data.outputs.ex_data = 'n must be an integer!'
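            # ex_data 为引擎内部的保留字段,节点执行失败的原因应写入该字段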
            return False

        data.outputs.factorial_of_n = math.factorial(n)
        return True

    def inputs_format(self):
        return [
            Service.InputItem(name='integer n', key='n', type='int', required=True)
        ]

    def outputs_format(self):
        return [
            Service.OutputItem(name='factorial of n', key='factorial_of_n', type='int')
        ]
```

首先我们继承了 `Service` 基类,并实现了 `execute()`、`inputs_format()` 和 `outputs_format()` 这三个方法,它们的作用如下:

- `execute`:组件被调用时执行的逻辑。接收 `data` 和 `parent_data` 两个参数,`data` 是当前节点的数据对象,这个数据对象存储了用户传递给当前节点的参数的值以及当前节点输出的值。`parent_data` 则是该节点所属流程对象的数据对象,通常会将一些全局使用的常量存储在该对象中,如当前流程的执行者、流程的开始时间等。
- `outputs_format`:组件执行成功时输出的字段,每个字段都包含字段名、字段键及字段类型的说明。这个方法必须返回一个 `OutputItem` 的数组,返回的这些信息能够用于确认某个组件在执行成功时输出的数据,便于在流程上下文或后续节点中进行引用。
- `inputs_format`:组件所需的输入字段,每个字段都包含字段名、字段键、字段类型及是否必填的说明。这个方法必须返回一个 `InputItem` 的数组,返回的这些信息能够用于确认某个组件需要获取什么样的输入数据。

下面我们来看一下 `execute()` 方法内部执行的逻辑。首先我们尝试从当前节点数据对象的输入中获取参数 `n`,如果获取到的参数不是一个 `int` 实例,那么我们会将异常信息写入到当前节点输出的 `ex_data` 字段中,**这个字段是引擎内部的保留字段,节点执行失败时产生的异常信息都应该写入到该字段中**。然后我们返回 `False` 代表组件本次执行失败,节点会随之进入失败状态:

```
n = data.get_one_of_inputs('n')
if not isinstance(n, int):
    data.outputs.ex_data = 'n must be an integer!'
    return False
```

若获取到的 `n` 是一个正常的 `int`,我们就调用 `math.factorial()` 函数来计算 `n` 的阶乘,计算完成后,我们会将结果写入到输出的 `factorial_of_n` 字段中,以供流程中的其他节点使用:

```
data.outputs.factorial_of_n = math.factorial(n)
return True
```

#### 3. 编写 Component

完成 `Service` 的编写后,我们需要将其与一个 `Component` 绑定起来,才能够注册到组件库中:

```python
from pipeline.component_framework.component import Component

class FactorialCalculateComponent(Component):
    name = 'FactorialCalculateComponent'
    code = 'fac_cal_comp'
    bound_service = FactorialCalculateService
```

我们定义了一个继承自基类 `Component` 的类 `FactorialCalculateComponent`,它拥有以下属性:

- `name`:组件名。
- `code`:组件代码,这个代码必须是全局唯一的。
- `bound_service`:与该组件绑定的 `Service`。

这样一来,我们就完成了一个自定义组件的开发。

#### 4. 执行一下刚刚编写的组件

完成组件的编写后,让我们在流程中执行一下刚刚编写好的组件,验证一下:

```python
from pipeline.eri.runtime import BambooDjangoRuntime
from bamboo_engine import api
from bamboo_engine import builder
from bamboo_engine.builder import Var
from bamboo_engine.builder import EmptyStartEvent, ServiceActivity, EmptyEndEvent


# 使用 builder 构造出流程描述结构
start = EmptyStartEvent()
act = ServiceActivity(component_code='fac_cal_comp')
act.component.inputs.n = Var(type=Var.PLAIN, value=4)
end = EmptyEndEvent()

start.extend(act).extend(end)

pipeline = builder.build_tree(start)
api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
```

可以看到,我们能够通过 `component_code` 来引用刚刚编写的组件,然后我们将该组件的输入 `n` 的值设置为 `4`:

```python
act = ServiceActivity(component_code='fac_cal_comp')
act.component.inputs.n = Var(type=Var.PLAIN, value=4)
```

流程运行完成后,获取节点的执行结果,可以看到,该节点输出了 `factorial_of_n`,并且值为 24(4 × 3 × 2 × 1),这正是我们需要的效果:

```bash
>>> api.get_execution_data_outputs(BambooDjangoRuntime(), act.id).data
{'_loop': 0, '_result': True, 'factorial_of_n': 24}
```

### 组件的行为

我们在上一节中定义的 `FactorialCalculateService` 在完成 `execute()` 的执行后即认为该组件已经执行完成。但是在某些场景下,这样的行为并不能满足我们的需求,例如调用第三方系统的接口启动一个任务,并周期性地轮询该任务的状态,随后根据任务状态确认执行结果;或是调用第三方系统启动任务后等待第三方系统回调,并根据回调数据确认执行结果。

为了满足上述的场景,pipeline 中的组件执行时的行为有以下几种:

- 单次执行:默认的执行方式,完成 `execute()` 的执行后即认为该组件已经执行完毕。
- 周期性轮询:完成 `execute()` 的执行后,还会周期性地执行 `schedule()` 方法,直至满足一定的条件为止。
- 单次回调:完成 `execute()` 的执行后,会等待外部回调,接收到回调后会执行一次 `schedule()` 方法。
- 多次回调:完成 `execute()` 的执行后,会等待外部回调,每次回调会执行一次 `schedule()` 方法。

总结起来,组件的执行方式可以用一条公式概括:`execute + n * schedule`。

#### 单次执行

这是组件默认的执行方式,在这种模式下,一旦 `execute()` 方法执行完成,该组件即视为执行完成。执行结果会根据 `execute()` 的返回值来判断:

- `False`:执行失败,节点会进入 FAILED 状态。
- `True` 或 `None`:执行成功,节点会进入 FINISHED 状态。

#### 周期性轮询

如果我们需要周期性地轮询第三方平台的接口,那么可以使用周期性轮询的执行方式,下面的代码定义了一个周期性轮询的组件服务:

```python
from pipeline.core.flow.activity import Service, StaticIntervalGenerator

class ScheduleService(Service):
    __need_schedule__ = True
    interval = StaticIntervalGenerator(2)

    def _get_poll_url(self):
        pass

    def _poll_status(self, poll_url):
        pass

    def execute(self, data, parent_data):
        poll_url = self._get_poll_url()
        data.outputs.poll_url = poll_url
        return True

    def schedule(self, data, parent_data, callback_data=None):

        poll_url = data.get_one_of_outputs('poll_url')
        status = self._poll_status(poll_url)

        if status == 0:
            self.finish_schedule()
        elif status < 0:
            data.outputs.ex_data = 'task failed with code: %s' % status
            return False

        return True
```

让我们来拆分一下这个组件服务的定义,一个周期性轮询组件服务必须包含两个类属性:

- `__need_schedule__`:表示当前组件服务是否需要调度,周期性轮询的方式下必须将该字段设置为 `True`
- `interval`:轮询间隔生成器,周期性轮询方式下该字段必须为 `AbstractIntervalGenerator` 子类的实例。

我们在 `execute()` 中调用第三方系统获取了用于轮询的 `poll_url`,并将其写入到输出中(**如果在 `execute()` 方法中返回了 `False`,那么当前节点会进入 FAILED 状态,不会进入之后的轮询阶段**):

```python
    def execute(self, data, parent_data):
        poll_url = self._get_poll_url()
        data.outputs.poll_url = poll_url
        return True
```

下面看看 `schedule()` 方法的定义,该方法接收三个参数:

- `data`:当前节点的数据对象,这个数据对象存储了用户传递给当前节点的参数的值以及当前节点输出的值。
- `parent_data`:该节点所属流程对象的数据对象。
- `callback_data`:回调数据,在等待回调模式下由第三方系统传入的数据。

我们在 `schedule()` 方法中,使用在 `execute()` 中设置到输出中的 `poll_url` 来轮询第三方系统的状态,并根据其返回值来决定该次轮询的结果:

- `True`:当次轮询成功,若轮询已完成则节点会进入 FINISHED 状态,否则仍然处于 RUNNING 状态,等待进入下次轮询。
- `False`:当次轮询失败,节点会进入 FAILED 状态。

当轮询完成后,即可调用 `finish_schedule()` 方法:

```python
    def schedule(self, data, parent_data, callback_data=None):

        poll_url = data.get_one_of_outputs('poll_url')
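        # 轮询第三方系统的任务状态,并据此决定本次调度的结果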
        status = self._poll_status(poll_url)

        if status == 0:
            self.finish_schedule()
        elif status < 0:
            data.outputs.ex_data = 'task failed with code: %s' % status
            return False

        return True
```

下面让我们了解一下轮询间隔生成器。间隔生成器必须拥有 `next()` 方法,该方法返回一个整数,代表每次轮询的时间间隔,单位为秒。一般我们会继承 `AbstractIntervalGenerator` 来定义新的生成器。下面的代码定义了一个间隔线性增长的生成器,轮询时间间隔会根据轮询次数的增长而增长:

```python
from pipeline.core.flow.activity import AbstractIntervalGenerator

class LinearIntervalGenerator(AbstractIntervalGenerator):
    def next(self):
        super(LinearIntervalGenerator, self).next()
        return self.count
```

`AbstractIntervalGenerator` 中的 `count` 属性表示本次轮询的轮次,**在实现自定义的 `next()` 方法时一定要调用父类的 `next()` 方法**。

#### 单次回调

如果第三方系统提供了回调机制,那我们就可以将组件服务设置为等待回调的模式:

```python
from pipeline.core.flow.activity import Service

class WaitCallbackService(Service):
    __need_schedule__ = True

    def _external_api_call(self):
        pass

    def execute(self, data, parent_data):
        self._external_api_call()
        return True

    def schedule(self, data, parent_data, callback_data=None):

        status = callback_data['status']

        if status < 0:
            data.outputs.ex_data = 'task failed with code: %s' % status
            return False

        self.finish_schedule()
```

让我们来拆分一下这个组件服务的定义,一个等待回调型组件服务必须包含这个类属性:

- `__need_schedule__`:表示当前组件服务是否需要调度,等待回调的方式下必须将该字段设置为 `True`

等待回调型的组件服务与周期性轮询型的差异在于 `interval` 这个类属性,周期性轮询型的服务该属性的值为间隔生成器,而回调型的服务该属性的值为 `None`。

我们在 `execute()` 方法中只做了一次 api 调用,然后就进入了等待回调的状态(**如果在 `execute()` 方法中返回了 `False`,那么当前节点会进入 FAILED 状态,不会进入之后的等待回调阶段**):

```python
    def execute(self, data, parent_data):
        self._external_api_call()
        return True
```

在 `schedule()` 方法中,我们检测第三方系统回调时传入的数据,来判断本次执行是否成功:

```python
    def schedule(self, data, parent_data, callback_data=None):

        status = callback_data['status']

        if status < 0:
            data.outputs.ex_data = 'task failed with code: %s' % status
            return False

        self.finish_schedule()
```

#### 多次回调

将组件的类属性 `__multi_callback_enabled__` 设置为 `True`,该组件将会支持多次回调:

```python
class WaitCallbackService(Service):
    __need_schedule__ = True
    __multi_callback_enabled__ = True

    def _external_api_call(self):
        pass

    def execute(self, data, parent_data):
        self._external_api_call()
        return True

    def schedule(self, data, parent_data, callback_data=None):

        status = callback_data['status']

        if status < 0:
            data.outputs.ex_data = 'task failed with code: %s' % status
            return False
        elif status < 1:
            return True

        self.finish_schedule()
```

### 组件的注册

pipeline 通过插件自动发现机制,在启动 SaaS 服务时扫描每个已经注册到 Django 中的 APP(INSTALLED_APPS)下特定的目录(包括子目录),自动发现并注册合法的插件,这些待扫描的目录能够通过 Django settings 下的 `COMPONENT_PATH` 进行配置:

```python
COMPONENT_PATH = [
    'custom.components.path',
]
```

pipeline 默认会扫描已注册 APP 的 `components.collections` 目录,尝试从该目录下(包括子目录)所有的 Python 模块中发现并注册合法的标准插件。

pipeline 插件自动发现机制的实现代码可以参考 `pipeline.component_framework.apps` 和 `pipeline.utils.register` 模块。

现在回过头来看看我们之前创建的 APP,其目录结构与 pipeline 默认扫描的路径一致,所以我们在 `custom_plugins.components.collections.plugins` 模块中定义的组件就会自动地被注册到组件库中:

```text
custom_plugins
├── __init__.py
├── components
│   ├── __init__.py
│   └── collections
│       ├── __init__.py
│       └── plugins.py
├── migrations
│   └── __init__.py
└── static
    └── custom_plugins
        └── plugins.js
```

diff --git a/docs/user_guide/engine_api.md b/docs/user_guide/engine_api.md
new file mode 100644
index 00000000..65e9b1a0
--- /dev/null
+++ b/docs/user_guide/engine_api.md
@@ -0,0 +1,845 @@

- [Engine API](#1-EngineAPI)
  - [run_pipeline](#11-run_pipeline)
    - [example](#111-example)
  - [pause_pipeline](#12-pause_pipeline)
    - [example](#121-example)
  - [revoke_pipeline](#13-revoke_pipeline)
    - [example](#131-example)
  - [resume_pipeline](#14-resume_pipeline)
    - [example](#141-example)
  - [pause_node_appoint](#15-pause_node_appoint)
    - [example](#151-example)
  - [resume_node_appoint](#16-resume_node_appoint)
    - [example](#161-example)
  - [retry_node](#17-retry_node)
    - [example](#171-example)
  - [retry_subprocess](#18-retry_subprocess)
    - [example](#181-example)
  - [skip_node](#19-skip_node)
    - [example](#191-example)
  - [skip_exclusive_gateway](#110-skip_exclusive_gateway)
    - [example](#1101-example)
  - [forced_fail_activity](#111-forced_fail_activity)
    - [example](#1111-example)
  - [callback](#112-callback)
    - [example](#1121-example)
  - [get_pipeline_states](#113-get_pipeline_states)
    - [example](#1131-example)
  - [get_children_states](#114-get_children_states)
    - [example](#1141-example)
  - [get_execution_data_inputs](#115-get_execution_data_inputs)
    - [example](#1151-example)
  - [get_execution_data_outputs](#116-get_execution_data_outputs)
    - [example](#1161-example)
  - [get_execution_data](#117-get_execution_data)
    - [example](#1171-example)
  - [get_data](#118-get_data)
    - [example](#1181-example)
  - [get_node_histories](#119-get_node_histories)
    - [example](#1191-example)
  - [get_node_short_histories](#120-get_node_short_histories)
    - [example](#1201-example)

# 1. Engine API

所有与 bamboo_engine 的交互都应该通过 bamboo_engine.api 来进行,所有的 Engine API 的返回对象均为 `bamboo_engine.api.EngineAPIResult`:

```python
class EngineAPIResult:
    def __init__(
        self,
        result: bool,
        message: str,
        exc: Optional[Exception] = None,
        data: Optional[Any] = None,
    ):
        """
        :param result: 是否执行成功
        :type result: bool
        :param message: 附加消息,result 为 False 时关注
        :type message: str
        :param exc: 异常对象
        :type exc: Exception
        :param data: 数据
        :type data: Any
        """
        self.result = result
        self.message = message
        self.exc = exc
        self.data = data
```

## 1.1. run_pipeline

```python
def run_pipeline(
    runtime: EngineRuntimeInterface,
    pipeline: dict,
    root_pipeline_data: Optional[dict] = None,
    root_pipeline_context: Optional[dict] = None,
    subprocess_context: Optional[dict] = None,
    **options
):
    """
    运行流程

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param pipeline: 流程数据
    :type pipeline: dict
    :param root_pipeline_data: 根流程数据
    :type root_pipeline_data: dict
    :param root_pipeline_context: 根流程上下文
    :type root_pipeline_context: dict
    :param subprocess_context: 子流程预置流程上下文
    :type subprocess_context: dict
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.1.1. example

```python
start = EmptyStartEvent()
act = ServiceActivity(component_code="example_component")
end = EmptyEndEvent()

start.extend(act).extend(end)

pipeline = builder.build_tree(start)

runtime = BambooDjangoRuntime()
api.run_pipeline(runtime=runtime, pipeline=pipeline).result
# True
```

## 1.2. pause_pipeline

```python
def pause_pipeline(
    runtime: EngineRuntimeInterface, pipeline_id: str
) -> EngineAPIResult:
    """
    暂停 pipeline 的执行

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param pipeline_id: pipeline id
    :type pipeline_id: str
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.2.1. example

```python
runtime = BambooDjangoRuntime()
api.pause_pipeline(runtime=runtime, pipeline_id="pipeline id").result
# True
```

## 1.3. 
revoke_pipeline + +```python +def revoke_pipeline( + runtime: EngineRuntimeInterface, pipeline_id: str +) -> EngineAPIResult: + """ + 撤销 pipeline,使其无法继续执行 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param pipeline_id: pipeline id + :type pipeline_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.3.1. example + +```python +runtime = BambooDjangoRuntime() +api.revoke_pipeline(runtime=runtime, pipeline_id="pipeline id").result +# True +``` + + + +## 1.4. resume_pipeline + +```python +def resume_pipeline( + runtime: EngineRuntimeInterface, pipeline_id: str +) -> EngineAPIResult: + """ + 继续被 pause_pipeline 接口暂停的 pipeline 的执行 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param pipeline_id: pipeline id + :type pipeline_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.4.1. example + +```python +runtime = BambooDjangoRuntime() +api.resume_pipeline(runtime=runtime, pipeline_id="pipeline id").result +# True +``` + + + + +## 1.5. pause_node_appoint + +```python +def pause_node_appoint( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 预约暂停某个节点的执行 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 id + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.5.1. example + +```python +runtime = BambooDjangoRuntime() +api.pause_node_appoint(runtime=runtime, node_id="node_id").result +# True +``` + + + +## 1.6. resume_node_appoint + +```python +def resume_node_appoint( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 继续由于某个节点而暂停的 pipeline 的执行 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 id + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.6.1. example + +```python +runtime = BambooDjangoRuntime() +api.resume_node_appoint(runtime=runtime, node_id="node_id").result +# True +``` + + + +## 1.7. retry_node + +```python +def retry_node( + runtime: EngineRuntimeInterface, node_id: str, data: Optional[dict] = None +) -> EngineAPIResult: + """ + 重试某个执行失败的节点 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 失败的节点 id + :type node_id: str + :param data: 重试时使用的节点执行输入 + :type data: dict + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.7.1. example + +```python +runtime = BambooDjangoRuntime() +api.retry_node(runtime=runtime, node_id="node_id", data={"key": "value"}).result +# True +``` + + + +## 1.8. retry_subprocess + +```python +def retry_subprocess(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: + """ + 重试进入失败的子流程节点 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 子流程节点 id + :type node_id: str + :return: [description] + :rtype: EngineAPIResult + """ +``` + + + +### 1.8.1. example + +```python +runtime = BambooDjangoRuntime() +api.retry_subprocess(runtime=runtime, node_id="node_id").result +# True +``` + + + +## 1.9. skip_node + +```python +def skip_node(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: + """ + 跳过某个执行失败的节点(仅限 event,activity) + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 失败的节点 id + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.9.1. example + +```python +runtime = BambooDjangoRuntime() +api.skip_node(runtime=runtime, node_id="node_id").result +# True +``` + + + +## 1.10. 
skip_exclusive_gateway

```python
def skip_exclusive_gateway(
    runtime: EngineRuntimeInterface, node_id: str, flow_id: str
) -> EngineAPIResult:
    """
    跳过某个执行失败的分支网关

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param node_id: 失败的分支网关 id
    :type node_id: str
    :param flow_id: 需要往下执行的 flow id
    :type flow_id: str
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.10.1. example

```python
runtime = BambooDjangoRuntime()
api.skip_exclusive_gateway(runtime=runtime, node_id="node_id", flow_id="flow_id").result
# True
```

## 1.11. forced_fail_activity

```python
def forced_fail_activity(
    runtime: EngineRuntimeInterface, node_id: str, ex_data: str
) -> EngineAPIResult:
    """
    强制失败某个 activity 节点

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param node_id: 节点 ID
    :type node_id: str
    :param ex_data: 异常信息
    :type ex_data: str
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.11.1. example

```python
runtime = BambooDjangoRuntime()
api.forced_fail_activity(runtime=runtime, node_id="node_id", ex_data="forced fail by me").result
# True
```

## 1.12. callback

```python
def callback(
    runtime: EngineRuntimeInterface, node_id: str, version: str, data: dict
) -> EngineAPIResult:
    """
    回调某个节点

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param node_id: 节点 ID
    :type node_id: str
    :param version: 节点执行版本
    :type version: str
    :param data: 回调数据
    :type data: dict
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.12.1. example

```python
runtime = BambooDjangoRuntime()
api.callback(runtime=runtime, node_id="node_id", version="version", data={"key": "value"}).result
# True
```

## 1.13. get_pipeline_states

```python
def get_pipeline_states(
    runtime: EngineRuntimeInterface, root_id: str, flat_children=True
) -> EngineAPIResult:
    """
    返回某个任务的状态树

    :param runtime: 引擎运行时实例
    :type runtime: EngineRuntimeInterface
    :param root_id: 根节点 ID
    :type root_id: str
    :param flat_children: 是否将所有子节点展开
    :type flat_children: bool
    :return: 执行结果
    :rtype: EngineAPIResult
    """
```

### 1.13. 
example + +```python +runtime = BambooDjangoRuntime() +api.get_pipeline_states(runtime=runtime, root_id="pipeline_id").data + +{'pc31c89e6b85a4e2c8c5db477978c1a57': {'id': 'pc31c89e6b85a4e2c8c5db477978c1a57', # 节点 ID + 'state': 'FINISHED', # 节点状态 + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', # 根流程 ID + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', # 父流程 ID + 'version': 'vaf47e56f2f31401e979c3c47b2a0c285', # 状态版本 + 'loop': 1, # 重入次数 + 'retry': 0, # 重试次数 + 'skip': False, # 是否被跳过 + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 688664, tzinfo=), # 状态数据创建时间 + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 688423, tzinfo=), # 节点开始执行时间 + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 775165, tzinfo=), # 执行完成(成功或失败)时间 + 'children': {'e42035b3f98374062921a191115fc602e': {'id': 'e42035b3f98374062921a191115fc602e', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 've2d0fa10d7d842a1bcac25984620232a', + 'loop': 1, + 'retry': 0, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'skip': False, + 'children': {}, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 744490, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 744308, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 746690, tzinfo=)}, + 'e327f83de42df4ebfab375c271bf63d29': {'id': 'e327f83de42df4ebfab375c271bf63d29', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 'v893cdc14150d4df5b20f2db32ba142b3', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'children': {}, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 753321, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 753122, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 758697, tzinfo=)}, + 'e6c7d7a3721ca4b19a5a7f3b34d8387bf': {'id': 'e6c7d7a3721ca4b19a5a7f3b34d8387bf', + 'state': 'FINISHED', + 'root_id:': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'parent_id': 'pc31c89e6b85a4e2c8c5db477978c1a57', + 'version': 'v0c661ee6994d4eb4bdbfe5260f9a9f22', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'children': {}, + 'created_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 767563, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 767384, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 3, 45, 54, 773341, tzinfo=)}}}} +``` + + + +## 1.14. get_children_states + +```python +def get_children_states( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 返回某个节点及其所有子节点的状态 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 父流程 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.14.1. 
example + +```python +runtime = BambooDjangoRuntime() +api.get_children_states(runtime=runtime, node_id="pipeline_id").data + + +{'p07926dd8e81a4f0d9cd484d4856afd42': {'id': 'p07926dd8e81a4f0d9cd484d4856afd42', # 节点 ID + 'state': 'FINISHED', # 节点状态 + 'root_id:': 'p07926dd8e81a4f0d9cd484d4856afd42', # 根流程 ID + 'parent_id': 'p07926dd8e81a4f0d9cd484d4856afd42', # 父流程 ID + 'version': 'v512822ec7fbc4c3180bddb4a6e3f72ad', # 状态版本 + 'loop': 1, # 重入次数 + 'retry': 0, # 重试次数 + 'skip': False, # 是否被跳过 + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'created_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 725395, tzinfo=), # 状态数据创建时间 + 'started_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 725130, tzinfo=), # 节点开始执行时间 + 'archived_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 842400, tzinfo=), # 执行完成(成功或失败)时间 + 'children': {'e571501dfbf204e679347c4a74a4ad2ae': {'id': 'e571501dfbf204e679347c4a74a4ad2ae', + 'state': 'FINISHED', + 'root_id:': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'parent_id': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'version': 'vf72134b379224b5e95bd1b1c887b2b1e', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'created_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 806533, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 806038, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 809831, tzinfo=)}, + 'ea3e45c2685e148e9849e4a34e992a562': {'id': 'ea3e45c2685e148e9849e4a34e992a562', + 'state': 'FINISHED', + 'root_id:': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'parent_id': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'version': 'vbca6dd994806449bbfdfb372457189bc', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'created_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 817497, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 817295, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 823874, tzinfo=)}, + 'efdb8de56dec5419baa0c68ae9af6a671': {'id': 'efdb8de56dec5419baa0c68ae9af6a671', + 'state': 'FINISHED', + 'root_id:': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'parent_id': 'p07926dd8e81a4f0d9cd484d4856afd42', + 'version': 'v957e052ef10d4d14b3fc039893ec70ae', + 'loop': 1, + 'retry': 0, + 'skip': False, + 'error_ignorable': False, # 是否出错后自动跳过(老版本 API 兼容字段) + 'error_ignored': False, # 是否出错后自动跳过 + 'created_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 834135, tzinfo=), + 'started_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 833957, tzinfo=), + 'archived_time': datetime.datetime(2021, 3, 10, 11, 5, 22, 840337, tzinfo=)}}}} +``` + + + +## 1.15. get_execution_data_inputs + +```python +def get_execution_data_inputs( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 获取某个节点执行数据的输入数据 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.15.1. example + +```python +runtime = BambooDjangoRuntime() +api.get_execution_data_inputs(runtime=runtime, node_id="node_id").data + +{'_loop': 1} +``` + + + +## 1.16. 
get_execution_data_outputs + +```python +def get_execution_data_outputs( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 获取某个节点的执行数据输出 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.16.1. example + +```python +runtime = BambooDjangoRuntime() +api.get_execution_data_outputs(runtime=runtime, node_id="node_id").data + +{} +``` + + + +## 1.17. get_execution_data + +```python +def get_execution_data( + runtime: EngineRuntimeInterface, node_id: str +) -> EngineAPIResult: + """ + 获取某个节点的执行数据 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.17.1. example + +```python +runtime = BambooDjangoRuntime() +api.get_execution_data(runtime=runtime, node_id="node_id").data + +{'inputs': {'_loop': 1}, 'outputs': {}} +``` + + + +## 1.18. get_data + +```python +def get_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: + """ + 获取某个节点的原始输入数据 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + +### 1.18.1. example + +```python +runtime = BambooDjangoRuntime() +api.get_data(runtime=runtime, node_id="node_id").data + +{'inputs': {'_loop': 1}, 'outputs': {}} +``` + + + +## 1.19. get_node_histories + +> 注意,只有进行过重试、跳过、重入的节点才会记录执行历史 + +```python +def get_node_histories( + runtime: EngineRuntimeInterface, node_id: str, loop: int = -1 +) -> EngineAPIResult: + """ + 获取某个节点的历史记录概览 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.19.1. example + +```python +runtime = BambooDjangoRuntime() +api.get_node_histories(runtime=runtime, node_id="node_id").data + + +[ + { + "id": 1, # 历史 ID + "node_id": "e34ef61258b134ffaae42efee2ab9ff1b", # 节点 ID + "started_time": datetime.datetime(2021, 3, 10, 11, 10, 9, 350028, tzinfo=), # 节点开始执行时间 + "archived_time": datetime.datetime(2021, 3, 10, 11, 10, 9, 352609, tzinfo=), # 执行完成(成功或失败)时间 + "loop": 1, # 重入次数 + "skip": False, # 是否被跳过 + "version": "vg4ef61258b134ffaae42efee2ab9ff1b", # 状态版本 + "inputs": {}, # 输入执行数据 + "outputs": {}, # 输出执行数据 + } +] +``` + + + +## 1.20. get_node_short_histories + +> 注意,只有进行过重试、跳过、重入的节点才会记录执行历史 + +```python +def get_node_short_histories( + runtime: EngineRuntimeInterface, node_id: str, loop: int = -1 +) -> EngineAPIResult: + """ + 获取某个节点的简要历史记录 + + :param runtime: 引擎运行时实例 + :type runtime: EngineRuntimeInterface + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 执行结果 + :rtype: EngineAPIResult + """ +``` + + + +### 1.20.1. 
example + +```python +runtime = BambooDjangoRuntime() +api.get_node_histories(runtime=runtime, node_id="node_id").data + + +[ + { + "id": 1, # 历史 ID + "node_id": "e34ef61258b134ffaae42efee2ab9ff1b", # 节点 ID + "started_time": datetime.datetime(2021, 3, 10, 11, 10, 9, 350028, tzinfo=), # 节点开始执行时间 + "archived_time": datetime.datetime(2021, 3, 10, 11, 10, 9, 352609, tzinfo=), # 执行完成(成功或失败)时间 + "loop": 1, # 重入次数 + "skip": False, # 是否被跳过 + "version": "vg4ef61258b134ffaae42efee2ab9ff1b", # 状态版本 + } +] +``` \ No newline at end of file diff --git a/docs/user_guide/flow_builder.md b/docs/user_guide/flow_builder.md new file mode 100644 index 00000000..5b800790 --- /dev/null +++ b/docs/user_guide/flow_builder.md @@ -0,0 +1,527 @@ + + +- [流程构造器](#流程构造器) +- [构造元素](#构造元素) + - [event](#event) + - [EmptyStartEvent](#emptystartevent) + - [EmptyEndEvent](#emptyendevent) + - [ExecutableEndEvent](#executableendevent) + - [activity](#activity) + - [ServiceActivity](#serviceactivity) + - [SubProcess](#subprocess) + - [gateway](#gateway) + - [ParallelGateway](#parallelgateway) + - [ExclusiveGateway](#exclusivegateway) + - [ConditionalParallelGateway](#conditionalparallelgateway) + - [ConvergeGateway](#convergegateway) + - [data](#data) + - [Data](#data-1) + - [Var](#var) + - [NodeOutput](#nodeoutput) + - [RewritableNodeOutput](#rewritablenodeoutput) + - [Params](#params) + - [DataInput](#datainput) +- [连接构造元素](#连接构造元素) + - [extend](#extend) + - [connect](#connect) + - [converge](#converge) + - [to](#to) +- [生成流程描述结构](#生成流程描述结构) + + +## 流程构造器 + +回到 Quick Start 中的例子: + + +```python +from bamboo_engine import api +from bamboo_engine.builder import * +from pipeline.eri.runtime import BambooDjangoRuntime + +# 使用 builder 构造出流程描述结构 +start = EmptyStartEvent() +# 这里先使用 bamboo-pipeline 自带的示例组件,我们会在后续的章节中学习如何自定义组件 +act = ServiceActivity(component_code="example_component") +end = EmptyEndEvent() + +start.extend(act).extend(end) + +pipeline = builder.build_tree(start) + +# 执行流程对象 +runtime = BambooDjangoRuntime() + +api.run_pipeline(runtime=runtime, pipeline=pipeline) +``` + +流程构造器的职责是降低我们构造流程描述结构的成本,可以看到上面的例子中构造的流程只有三个节点,但是通过这三个节点生成的描述结构却十分的复杂: + +```bash +>>> pipeline = builder.build_tree(start) +>>> pipeline +{'activities': {'d29a8ef1ec7f367e9724415e03de22ab': {'component': {'code': 'example_component', + 'inputs': {}}, + 'error_ignorable': False, + 'id': 'd29a8ef1ec7f367e9724415e03de22ab', + 'incoming': ['ee7124a9bcf337308aff8fcc0a674782'], + 'name': None, + 'optional': False, + 'outgoing': 'c43b3a60c86b36ac91e177b02abe7800', + 'type': 'ServiceActivity'}}, + 'data': {'inputs': {}, 'outputs': {}}, + 'end_event': {'id': '6930365c0c73358dbefb9c2d25922e0f', + 'incoming': ['c43b3a60c86b36ac91e177b02abe7800'], + 'name': None, + 'outgoing': '', + 'type': 'EmptyEndEvent'}, + 'flows': {'c43b3a60c86b36ac91e177b02abe7800': {'id': 'c43b3a60c86b36ac91e177b02abe7800', + 'is_default': False, + 'source': 'd29a8ef1ec7f367e9724415e03de22ab', + 'target': '6930365c0c73358dbefb9c2d25922e0f'}, + 'ee7124a9bcf337308aff8fcc0a674782': {'id': 'ee7124a9bcf337308aff8fcc0a674782', + 'is_default': False, + 'source': '5740b0a1f8b03f9fb82c3690a41c6b10', + 'target': 'd29a8ef1ec7f367e9724415e03de22ab'}}, + 'gateways': {}, + 'id': '3a07e1b279a83df2bf15f6b094901303', + 'start_event': {'id': '5740b0a1f8b03f9fb82c3690a41c6b10', + 'incoming': '', + 'name': None, + 'outgoing': 'ee7124a9bcf337308aff8fcc0a674782', + 'type': 'EmptyStartEvent'}} +``` + +如果要手动去拼接这样的一个结构,这简直就是一个灾难,所以,使用流程构造器能够大大的降低我们构造复杂流程的成本。 + +## 构造元素 + 
+要使用流程构造器,首先我们需要创建构造流程时要使用到的构造元素,**构造元素指的是*流程元素*的替代对象**,每个构造元素都拥有一个到流程元素的唯一映射,且构造元素拥有与其所对应的流程元素相同的类名。下面所展示的代码片段中创建除了三个流程元素:`start`,`act` 及 `end`。 + +```python +from bamboo_engine import builder +from bamboo_engine.builder import EmptyStartEvent, ServiceActivity, EmptyEndEvent +from pipeline.parser import PipelineParser + +start = EmptyStartEvent() +act = ServiceActivity(component_code='example_component') +end = EmptyEndEvent() +``` + +目前可用的构造元素(等同于可用的流程元素)如下所示: + +- event 类型 + - `EmptyStartEvent`:空开始事件。 + - `EmptyEndEvent`:空结束事件。 + - `ExecutableEndEvent`:可执行结束事件。 +- activity 类型 + - `ServiceActivity`:服务活动。 + - `SubProcess`:子流程。 +- gateway 类型 + - `ParallelGateway`:并行网关。 + - `ExclusiveGateway`:分支网关。 + - `ConditionalParallelGateway`:条件并行网关。 + - `ConvergeGateway`:汇聚网关。 +- data 类型 + - `Data`:Data 所对应的构造对象 + - `Params`:声明子流程中全局变量对父流程中中全局变量引用的参数对象 + - `Var`:代表流程中变量的构造对象 + - `DataInput`:用于声明子流程 Data 中对外暴露参数的构造对象 + - `NodeOutput`:用于声明对其他节点输出的结果引用的构造对象 + - `RewritableNodeOutput`:用于声明对多个节点输出的结果引用的构造对象,每次其引用的节点执行后,该变量的值会刷新为该节点中对应的输出值 + +### event + +#### EmptyStartEvent + +空开始事件 + +```python +EmptyStartEvent(id=None, name=None, outgoing=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 + +#### EmptyEndEvent + +空结束事件 + +```python +EmptyEndEvent(id=None, name=None, outgoing=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 + +#### ExecutableEndEvent + +可自定义执行逻辑的结束事件 + +```python +ExecutableEndEvent(type, id=None, name=None, outgoing=None) +``` +- type:自定义业务逻辑结束节点的名称(在 `FlowNodeClsFactory` 中注册的 name) +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 + +### activity + +#### ServiceActivity + +服务节点,可绑定特定的组件执行特定的逻辑 + +```python +ServiceActivity(id=None, + name=None, + outgoing=None, + component_code=None, + error_ignorable=False, + timeout=None, + skippable=True, + retryable=True) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 +- component_code:服务节点绑定的 [component](./custom_component.md) code +- error_ignorable:是否忽略执行中的错误或执行失败 +- timeout:执行超时时间,单位为秒 +- skippable:在执行出错或失败后是否能够手动跳过 +- retryable:在执行出错或失败后是否能够手动重试 + +#### SubProcess + +子流程节点,可关联特定的流程 + +```python +SubProcess(id=None, + name=None, + outgoing=None, + start=None, + data=None, + params=None, + replace_id=False, + template_id=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 +- start:子流程引用流程的 `EmptyStartEvent` builder 对象,start 与 template_id 参数不能同时为空,当两者都不为空时优先使用 template_id +- data:子流程的 `Data` 对象 +- params:子流程的传入参数,可以是 `dict` 或是 `pipeline.builder.flow.data.Params` 类型 +- replace_id:是否需要在 `build_tree` 时替换该子流程节点所引用的所有 builder 对象的 ID,该选项只有在使用 start 关键字关联流程时生效 +- template_id:该子流程节点所关联的 `pipeline.models.PipelineTemplate` 的 template_id,start 与 template_id 参数不能同时为空,当两者都不为空时优先使用 template_id,PipelineTemplate 相关的使用方式可参考[流程管理](./flow_management.md) + +### gateway + +#### ParallelGateway + +并行网关 + +```python +ParallelGateway(id=None, name=None, outgoing=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 + + +#### ExclusiveGateway + +分支网关 + +```python +ExclusiveGateway(id=None, name=None, outgoing=None, condition=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 +- condition:分支网关中每个输出节点对应的条件映射,应为 `{node_index: "condition", ...}` 形式的字典,其中 node_index 为输出节点在 ExclusiveGateway outgoing 字段中的下标,condition 为节点执行需要满足的分支条件 + +#### ConditionalParallelGateway + +条件并行网关 + +```python +ConditionalParallelGateway(id=None, name=None, 
outgoing=None, condition=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 +- condition:条件并行网关中每个输出节点对应的条件映射,应为 `{node_index: "condition", ...}` 形式的字典,其中 node_index 为输出节点在 ConditionalParallelGateway outgoing 字段中的下标,condition 为节点执行需要满足的分支条件 + +#### ConvergeGateway + +汇聚网关 + +```python +ConvergeGateway(id=None, name=None, outgoing=None) +``` + +- id:节点 ID,可为空,为空时框架自动生成 +- name:节点名,可为空,为空时框架自动生成 +- outgoing:输出节点数组,可为空 + + +### data + +#### Data + +Data 所对应的构造对象 + +```python +Data(inputs=None, outputs=None) +``` + +- inputs:Data 对象的 inputs 字典,默认为 `{}` +- outputs:Data 对象的 outputs 字典,默认为 `{}` + +#### Var + +代表流程中变量的构造对象 + +```python +Var(type, value, custom_type=None) +``` + +- type:变量的类型,其取值范围为 `{Var.PLAIN, Var.SPLICE, Var.LAZY}` +- value:变量的值 +- custom_type type 为 `Var.LAZY` 时,该变量对应的 [LazyVariable](./lazy_variable.md) 的 code + +#### NodeOutput + +用于声明对其他节点输出的结果引用的构造对象 + +```python +NodeOutput(type, source_act, source_key) +``` + +- type:变量的类型,其取值范围为 `{Var.PLAIN, Var.SPLICE, Var.LAZY}`,该字段的取值目前不会影响 NodeOutput 的行为 +- source_act:要引用的变量所属的输出节点 ID +- source_key:要引用的变量在其节点被输出后的 key + +#### RewritableNodeOutput + +用于声明对多个节点输出的结果引用的构造对象,每次其引用的节点执行后,该变量的值会刷新为该节点中对应的输出值 + +```python +RewritableNodeOutput(type, source_act) +``` + +- type:变量的类型,其取值范围为 `{Var.PLAIN, Var.SPLICE, Var.LAZY}`,该字段的取值目前不会影响 RewritableNodeOutput 的行为 +- source_act:要引用的输出节点与其变量的映射数组,其形式应为 `[{'source_act': act_id, 'source_key': key}, ...]`,其中 source_act 为要引用的变量所属的输出节点 ID,source_key 为要引用的变量在其节点被输出后的 key + +#### Params + +声明子流程中全局变量对父流程中中全局变量引用的参数对象 + +```python +Params(params=None) +``` + +- params:记录子流程全局变量对父流程变量引用的 `dict`,其形式应为 `{key: Var(...)}`,其中 key 为子流程中变量的 key,Var 为 `Var` 构造对象实例,相关概念可以参考[子流程参数传递](./basic_concept.md#子流程参数传递(Param)) + +#### DataInput + +用于声明子流程 Data 中对外暴露参数的构造对象,相关概念可以参考[子流程参数传递](./basic_concept.md#子流程参数传递(Param)) + +```python +Var(type, value, source_tag=None) +``` + +- type:变量的类型,其取值范围为 `{Var.PLAIN, Var.SPLICE, Var.LAZY}` +- value:变量的值 +- source_tag:当 type 为 `Var.LAZY` 时,该变量对应的 [LazyVariable](./lazy_variable.md) 的 code + +## 连接构造元素 + +当我们创建好了构造元素之后,我们还需要将这些构造元素根据我们的需要连接起来,构造元素提供了若干方法来帮助我们应对各种场景下的元素连接操作。 + +### extend + +`extend()` 方法会创建一条从调用者到传入元素的连接,并返回作为参数传入的构造元素: + + +```bash +>>> from bamboo_engine.builder import ServiceActivity +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') + +>>> act_1.extend(act_2) + +``` + +通过使用 `extend()` 链式调用能够快速构造出一个简单的串行流程: + +```bash +>>> from bamboo_engine.builder import EmptyStartEvent, ServiceActivity, EmptyEndEvent +>>> start = EmptyStartEvent(name='start') +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') +>>> act_3 = ServiceActivity(name='act_3') +>>> end = EmptyEndEvent(name='end') + +>>> start.extend(act_1).extend(act_2).extend(act_3).extend(end) + + +``` + +### connect + +`connect()` 方法能够接收多个构造元素作为参数,并为每一个传入的构造元素构建一条由调用者到该元素的连接,并返回当前调用对象。当我们要构造的流程中含有分支或是并行结构时,这个方法能够帮助我们快速构造出这样的结构: + +```bash +>>> from bamboo_engine.builder import ServiceActivity, ParallelGateway +>>> parallel_gateway = ParallelGateway(name='parallel_gateway') +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') +>>> act_3 = ServiceActivity(name='act_3') + +>>> parallel_gateway.connect(act_1, act_2, act_3) + + +>>> parallel_gateway.outgoing +[, + , + ] +``` + +### converge + +`converge()` 方法会将所有从调用者出发的连接汇聚到传入的节点上,并返回该节点,使用 `converge()` 能够快速的实现从网关发散出去的连接的汇聚操作: + +```bash +>>> from bamboo_engine.builder import ServiceActivity, ParallelGateway, ConvergeGateway +>>> 
parallel_gateway = ParallelGateway(name='parallel_gateway') +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') +>>> act_3 = ServiceActivity(name='act_3') +>>> act_4 = ServiceActivity(name='act_4') +>>> converge_gateway = ConvergeGateway(name='converge_gateway') + +>>> act_3.extend(act_4) + + +>>> parallel_gateway.connect(act_1, act_2, act_3) + + +>>> parallel_gateway.converge(converge_gateway) + + +>>> for act in [act_1, act_2, act_4]: + print(act.outgoing) +[] +[] +[] +``` + +### to + +`to()` 方法是一个辅助方法,其内部什么都不做,只会原封不动的返回传入的构造元素。使用 `to()` 方法能够让我们在链式调用的过程中改变方法的调用者: + +```bash +>>> from bamboo_engine.builder import ServiceActivity, ParallelGateway, ConvergeGateway +>>> parallel_gateway = ParallelGateway(name='parallel_gateway') +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') +>>> act_3 = ServiceActivity(name='act_3') +>>> act_4 = ServiceActivity(name='act_4') + +>>> parallel_gateway.connect(act_1, act_2) \ + .to(act_1).extend(act_3) \ + .to(act_2).extend(act_4) + + +>>> parallel_gateway.outgoing +[, + ] + +>>> act_1.outgoing +[] + +>>> act_2.outgoing +[] +``` + +## 生成流程描述结构 + +当完成了构造元素的连接后,我们就能够通过构造元素来生成描述结构了,使用 `build_tree()` 函数,传入开始事件节点,流程构造器就会返回由这些构造元素连接成的流程描述结构: + +```bash +>>> from bamboo_engine.builder import EmptyStartEvent, ServiceActivity, EmptyEndEvent, build_tree +>>> start = EmptyStartEvent(name='start') +>>> act_1 = ServiceActivity(name='act_1') +>>> act_2 = ServiceActivity(name='act_2') +>>> act_3 = ServiceActivity(name='act_3') +>>> end = EmptyEndEvent(name='end') + +>>> start.extend(act_1).extend(act_2).extend(act_3).extend(end) + + +>>> build_tree(start_elem=start) +{'activities': {'15b9a9ffcd7d3d289cb99886c4b66aa0': {'component': {'code': None, + 'inputs': {}}, + 'error_ignorable': False, + 'id': '15b9a9ffcd7d3d289cb99886c4b66aa0', + 'incoming': ['df79daafd73c36f3965a2f8b36058aa5'], + 'name': 'act_1', + 'optional': False, + 'outgoing': 'd67e43325e3b389cba471562bd7e2a73', + 'type': 'ServiceActivity'}, + '7c3cfddb114c35ecbe18b88f5a519c58': {'component': {'code': None, + 'inputs': {}}, + 'error_ignorable': False, + 'id': '7c3cfddb114c35ecbe18b88f5a519c58', + 'incoming': ['d67e43325e3b389cba471562bd7e2a73'], + 'name': 'act_2', + 'optional': False, + 'outgoing': 'abf3d80c6e363156bc4076c7dd0324c4', + 'type': 'ServiceActivity'}, + 'ffe2edb847e335c5861d35c747d6d5f9': {'component': {'code': None, + 'inputs': {}}, + 'error_ignorable': False, + 'id': 'ffe2edb847e335c5861d35c747d6d5f9', + 'incoming': ['abf3d80c6e363156bc4076c7dd0324c4'], + 'name': 'act_3', + 'optional': False, + 'outgoing': 'c12f7f231c353d3ab5762fe6a18f7efb', + 'type': 'ServiceActivity'}}, + 'data': {'inputs': {}, 'outputs': {}}, + 'end_event': {'id': '05c932bb9d8735729c6aaf1aba52ee53', + 'incoming': ['c12f7f231c353d3ab5762fe6a18f7efb'], + 'name': 'end', + 'outgoing': '', + 'type': 'EmptyEndEvent'}, + 'flows': {'abf3d80c6e363156bc4076c7dd0324c4': {'id': 'abf3d80c6e363156bc4076c7dd0324c4', + 'is_default': False, + 'source': '7c3cfddb114c35ecbe18b88f5a519c58', + 'target': 'ffe2edb847e335c5861d35c747d6d5f9'}, + 'c12f7f231c353d3ab5762fe6a18f7efb': {'id': 'c12f7f231c353d3ab5762fe6a18f7efb', + 'is_default': False, + 'source': 'ffe2edb847e335c5861d35c747d6d5f9', + 'target': '05c932bb9d8735729c6aaf1aba52ee53'}, + 'd67e43325e3b389cba471562bd7e2a73': {'id': 'd67e43325e3b389cba471562bd7e2a73', + 'is_default': False, + 'source': '15b9a9ffcd7d3d289cb99886c4b66aa0', + 'target': '7c3cfddb114c35ecbe18b88f5a519c58'}, + 'df79daafd73c36f3965a2f8b36058aa5': {'id': 
'df79daafd73c36f3965a2f8b36058aa5', + 'is_default': False, + 'source': 'c582a8976e673ac39db8519a75f8baaa', + 'target': '15b9a9ffcd7d3d289cb99886c4b66aa0'}}, + 'gateways': {}, + 'id': '3149bd721e94377e8baee990e9fc4622', + 'start_event': {'id': 'c582a8976e673ac39db8519a75f8baaa', + 'incoming': '', + 'name': 'start', + 'outgoing': 'df79daafd73c36f3965a2f8b36058aa5', + 'type': 'EmptyStartEvent'}} +``` diff --git a/docs/user_guide/flow_orchestration.md b/docs/user_guide/flow_orchestration.md new file mode 100644 index 00000000..fa5ced2a --- /dev/null +++ b/docs/user_guide/flow_orchestration.md @@ -0,0 +1,727 @@ + + +- [如何编排流程](#%E5%A6%82%E4%BD%95%E7%BC%96%E6%8E%92%E6%B5%81%E7%A8%8B) + - [开始与结束](#%E5%BC%80%E5%A7%8B%E4%B8%8E%E7%BB%93%E6%9D%9F) + - [活动](#%E6%B4%BB%E5%8A%A8) + - [网关](#%E7%BD%91%E5%85%B3) + - [并行网关](#%E5%B9%B6%E8%A1%8C%E7%BD%91%E5%85%B3) + - [分支网关](#%E5%88%86%E6%94%AF%E7%BD%91%E5%85%B3) + - [条件并行网关](#%E6%9D%A1%E4%BB%B6%E5%B9%B6%E8%A1%8C%E7%BD%91%E5%85%B3) + - [子流程](#%E5%AD%90%E6%B5%81%E7%A8%8B) + - [数据交换](#%E6%95%B0%E6%8D%AE%E4%BA%A4%E6%8D%A2) + - [Data](#data) + - [Var](#var) + - [NodeOutput](#nodeoutput) + - [RewritableNodeOutput](#rewritablenodeoutput) + - [DataInput 与 Params](#datainput-%E4%B8%8E-params) + - [流程结构合法性](#%E6%B5%81%E7%A8%8B%E7%BB%93%E6%9E%84%E5%90%88%E6%B3%95%E6%80%A7) + - [节点合法性](#%E8%8A%82%E7%82%B9%E5%90%88%E6%B3%95%E6%80%A7) + - [网关配对合法性](#%E7%BD%91%E5%85%B3%E9%85%8D%E5%AF%B9%E5%90%88%E6%B3%95%E6%80%A7) + - [流程合法性](#%E6%B5%81%E7%A8%8B%E5%90%88%E6%B3%95%E6%80%A7) + + + +## 如何编排流程 + +在开始使用流程引擎之前,我们要先了解一下如何才能够编排出我们想要的流程。 + +### 开始与结束 + +在一个合法的流程中,开始事件和结束事件是必须存在的,也就是说,我们其实是在开始事件节点与结束事件节点之间自由的定义我们的流程结构,然后再将这些节点连接起来: + +```python +from bamboo_engine.builder import EmptyStartEvent, ServiceActivity, EmptyEndEvent + +start = EmptyStartEvent() +act_1 = ServiceActivity() +act_2 = ServiceActivity() +... +end = EmptyEndEvent() + + +start.extend(act1) \ + .extend(act2) \ + ... 
\
    .extend(end)
```

### 活动

活动是流程中最重要的元素,活动节点中能够执行任意的逻辑,例如发起网络请求、操作数据库、执行命令等等,流程中活动节点的类型及其编排的顺序决定了这个流程的能力。在 pipeline 中,活动本身是没有定义自身行为的,其通过与**组件**进行绑定从而获得组件执行的能力,你可以通过开发自定义组件来扩充活动节点的能力范围。pipeline 中的每一个组件都拥有一个唯一的 `code`,在 Quick Start 的例子中,活动就是通过与 `code` 为 example_component 的组件绑定而获得了该组件的执行能力:

```python
from bamboo_engine.builder import ServiceActivity

act = ServiceActivity(component_code='example_component')
```

pipeline 提供的 example_component 这个组件在执行的过程中什么都不做:

```python
class SimpleExampleService(Service):
    def execute(self, data, parent_data):
        return True

    def outputs_format(self):
        return []

class SimpleExampleComponent(Component):
    name = u'example component'
    code = 'example_component'
    bound_service = SimpleExampleService
```

关于自定义组件的知识,在后续的章节中会详细说明。

### 网关

网关在流程执行的过程中起到了引流的作用,同时,网关可能会改变流程的执行模式。目前 pipeline 中可用的网关及其作用如下:

- 分支网关:分支网关可以发散出多条顺序流,每条顺序流会与一个布尔表达式进行绑定,当流程执行到分支网关时,引擎会在当前流程的数据上下文中执行该分支网关中的分支表达式,并选择与第一个结果为真的表达式所绑定的顺序流往下继续执行,分支网关并不会改变流程的执行模式。
- 并行网关:并行网关可以发散出多条顺序流,同时其会改变当前流程的执行模式,当流程执行到并行网关时,流程会进入并行执行的模式,此时并行网关后的每一条分支会被同时执行,直到其遇到一个汇聚网关。pipeline 中同一个并行网关发散的所有分支都必须汇聚到同一个汇聚网关中。
- 条件并行网关:条件并行网关可以发散出多条顺序流,并且每条顺序流也会与一个布尔表达式绑定,同时其会改变当前流程的执行模式,当流程执行到条件并行网关时,流程会进入并行执行的模式,但是此时引擎只会执行在当前数据上下文中布尔表达式结果为真的分支。
- 汇聚网关:汇聚网关能够连接多条输入顺序流,并只能输出一条顺序流。汇聚网关的作用是将其他网关发散出去的多条顺序流汇聚成一条顺序流。

#### 并行网关

并行网关允许我们让流程的执行进入并行执行的状态,下列代码创建了一个包含三个活动节点(act_1, act_2, act_3)及一个并行网关的流程,并行网关后的 act_1, act_2, act_3 会被引擎并行执行:

![并行网关示例](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/parallel_gateway_example.png)

```python
from pipeline.eri.runtime import BambooDjangoRuntime
from bamboo_engine import api
from bamboo_engine.builder import (
    build_tree,
    EmptyStartEvent,
    ServiceActivity,
    EmptyEndEvent,
    ParallelGateway,
    ConvergeGateway
)

start = EmptyStartEvent()
pg = ParallelGateway()
act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
act_2 = ServiceActivity(component_code='pipe_example_component', name='act_2')
act_3 = ServiceActivity(component_code='pipe_example_component', name='act_3')
cg = ConvergeGateway()
end = EmptyEndEvent()

start.extend(pg).connect(act_1, act_2, act_3).to(pg).converge(cg).extend(end)

pipeline = build_tree(start)
api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
```

这里需要注意的是,并行网关发散出去的所有分支最后都要汇聚到同一个汇聚网关中,上述代码中的 `to(pg).converge(cg)` 调用实现了这个操作。

#### 分支网关

分支网关允许我们通过当前流程数据上下文中的数据来决定之后流程执行的方向,下列代码创建了一个包含三个活动节点(act_1, act_2, act_3)及一个分支网关的流程,分支网关会根据 act_1 节点输出到数据上下文中的变量来判断接下来是执行 act_2 还是 act_3 节点:

![分支网关示例](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/exclusive_gateway.png)

```python
from pipeline.eri.runtime import BambooDjangoRuntime
from bamboo_engine import api
from bamboo_engine.builder import (
    build_tree,
    EmptyStartEvent,
    ServiceActivity,
    EmptyEndEvent,
    ExclusiveGateway,
    Var,
    Data,
    NodeOutput
)

start = EmptyStartEvent()
act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
eg = ExclusiveGateway(
    conditions={
        0: '${act_1_output} < 0',
        1: '${act_1_output} >= 0'
    },
    name='act_2 or act_3'
)
act_2 = ServiceActivity(component_code='pipe_example_component', name='act_2')
act_3 = ServiceActivity(component_code='pipe_example_component', name='act_3')
end = EmptyEndEvent()

start.extend(act_1).extend(eg).connect(act_2, act_3).to(eg).converge(end)

act_1.component.inputs.input_a = Var(type=Var.SPLICE, value='${input_a}')
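# 下面构造流程的初始数据上下文,并声明对 act_1 输出数据的引用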
+pipeline_data = Data()
+pipeline_data.inputs['${input_a}'] = Var(type=Var.PLAIN, value=0)
+pipeline_data.inputs['${act_1_output}'] = NodeOutput(type=Var.SPLICE, source_act=act_1.id, source_key='input_a')
+
+pipeline = build_tree(start, data=pipeline_data)
+api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
+```
+
+我们一步步来看这个流程的声明。首先我们创建了绑定了 pipe_example_component 组件的活动节点,该组件会把其接收到的输入参数原封不动地写到当前节点的输出中:
+
+```python
+act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
+```
+
+然后我们创建了分支网关,并设定了其要绑定到输出顺序流上的布尔表达式。`conditions` 字段接收一个字典,该字典表示将 value 指定的表达式绑定到第 key 个输出顺序流上,示例中的 `conditions` 表示将 `${act_1_output} < 0` 表达式绑定到分支网关的第 `0` 个输出顺序流上。
+
+```python
+eg = ExclusiveGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '${act_1_output} >= 0'
+    },
+    name='act_2 or act_3'
+)
+```
+
+然后我们完成了流程的编排(分支网关输出顺序流的排列与节点的连接顺序相关,`connect(act_2, act_3)` 调用中参数传递的顺序决定了 `eg -> act_2` 为 eg 的第 `0` 个输出顺序流):
+
+```python
+start.extend(act_1).extend(eg).connect(act_2, act_3).to(eg).converge(end)
+```
+
+完成了流程的编排后,我们需要配置这个流程中数据交互的方式,我们需要做两件事情:
+
+- 配置 act_1 的输入
+- 配置 act_1 到流程数据上下文的输出
+
+首先,我们为 act_1 配置了一个 key 为 `input_a` 的输入参数,该参数会引用流程上下文中的 `${input_a}` 变量:
+
+```python
+act_1.component.inputs.input_a = Var(type=Var.SPLICE, value='${input_a}')
+```
+
+然后,我们创建了一个初始的流程数据上下文,并为其设置了一个 key 为 `${input_a}`、值为 `0` 的全局变量:
+
+```python
+pipeline_data = Data()
+pipeline_data.inputs['${input_a}'] = Var(type=Var.PLAIN, value=0)
+```
+
+随后,我们声明了一个 key 为 `${act_1_output}` 的全局变量,该变量是对 act_1 输出数据中 key 为 `input_a` 的变量的引用:
+
+```python
+pipeline_data.inputs['${act_1_output}'] = NodeOutput(type=Var.SPLICE, source_act=act_1.id, source_key='input_a')
+```
+
+下图展示了这个流程中数据交换的过程:
+
+![数据交换](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/eg_data_exchange.png)
+
+后面就是构造流程描述结构并执行了(注意这里要将 pipeline_data 传入 `build_tree`):
+
+```python
+pipeline = build_tree(start, data=pipeline_data)
+api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
+```
+
+执行后获取流程状态可以看到,act_2 与 act_3 中只有 act_3 执行了(`${act_1_output} >= 0` 绑定的是 `eg -> act_3` 顺序流):
+
+```bash
+>>> api.get_pipeline_states(BambooDjangoRuntime(), pipeline["id"])
+{
+    ...
+    u'bdb6de805a183c18a131ea49509197b8': {'finish_time': '2019-03-28 08:08:54',
+                                          'id': u'bdb6de805a183c18a131ea49509197b8',
+                                          'loop': 1L,
+                                          'name': u'act_3',
+                                          'retry': 0L,
+                                          'skip': False,
+                                          'start_time': '2019-03-28 08:08:53',
+                                          'state': 'FINISHED'},
+    u'e5bce7ca4da63f6da89e47d012e2773c': {'finish_time': '2019-03-28 08:08:53',
+                                          'id': u'e5bce7ca4da63f6da89e47d012e2773c',
+                                          'loop': 1L,
+                                          'name': u'act_1',
+                                          'retry': 0L,
+                                          'skip': False,
+                                          'start_time': '2019-03-28 08:08:53',
+                                          'state': 'FINISHED'},
+    ...
+    },
+    ...
+}
+```
+
+把流程上下文中的全局变量 `${input_a}` 的值改为 `-1` 后再次执行上述代码,可以看到这次 act_2 与 act_3 中只有 act_2 执行了:
+
+```bash
+>>> api.get_pipeline_states(BambooDjangoRuntime(), pipeline["id"])
+{
+    ...
+    u'191e41ec8c06365f8d30a06b331ed533': {'finish_time': '2019-03-28 08:20:22',
+                                          'id': u'191e41ec8c06365f8d30a06b331ed533',
+                                          'loop': 1L,
+                                          'name': u'act_2',
+                                          'retry': 0L,
+                                          'skip': False,
+                                          'start_time': '2019-03-28 08:20:22',
+                                          'state': 'FINISHED'},
+    u'efd7e68f479438f9941103a89a2130f4': {'finish_time': '2019-03-28 08:20:22',
+                                          'id': u'efd7e68f479438f9941103a89a2130f4',
+                                          'loop': 1L,
+                                          'name': u'act_1',
+                                          'retry': 0L,
+                                          'skip': False,
+                                          'start_time': '2019-03-28 08:20:22',
+                                          'state': 'FINISHED'}
+    ...
+}
+```
+
+如果分支网关中的所有表达式执行结果都不为真,那么流程会进入执行失败的状态。
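+
+如果希望流程在所有条件都不满足时仍能继续推进,一个简单的做法是把最后一个分支绑定为恒真表达式,作为默认分支(以下仅是基于上文 `conditions` 用法的示意,各分支表达式的判定顺序以引擎实现为准):
+
+```python
+eg = ExclusiveGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '1 == 1',  # 恒真表达式,前面的条件都不满足时走这个分支
+    },
+    name='act_2 or default'
+)
+```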
+
+#### 条件并行网关
+
+条件并行网关允许我们只并行执行部分在当前上下文下满足特定条件的分支:
+
+![条件并行网关示例](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/conditional_pg_example.png)
+
+```python
+from pipeline.eri.runtime import BambooDjangoRuntime
+from bamboo_engine import api
+from bamboo_engine.builder import (
+    build_tree,
+    EmptyStartEvent,
+    ServiceActivity,
+    EmptyEndEvent,
+    ConditionalParallelGateway,
+    ConvergeGateway,
+    Var,
+    Data,
+    NodeOutput
+)
+
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
+cpg = ConditionalParallelGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '${act_1_output} >= 0',
+        2: '${act_1_output} >= 0'
+    },
+    name='[act_2] or [act_3 and act_4]'
+)
+act_2 = ServiceActivity(component_code='pipe_example_component', name='act_2')
+act_3 = ServiceActivity(component_code='pipe_example_component', name='act_3')
+act_4 = ServiceActivity(component_code='pipe_example_component', name='act_4')
+cg = ConvergeGateway()
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(cpg).connect(act_2, act_3, act_4).to(cpg).converge(cg).extend(end)
+
+act_1.component.inputs.input_a = Var(type=Var.SPLICE, value='${input_a}')
+
+pipeline_data = Data()
+pipeline_data.inputs['${input_a}'] = Var(type=Var.PLAIN, value=0)
+pipeline_data.inputs['${act_1_output}'] = NodeOutput(type=Var.SPLICE, source_act=act_1.id, source_key='input_a')
+
+pipeline = build_tree(start, data=pipeline_data)
+api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
+```
+
+首先,创建条件并行网关与创建分支网关类似,都需要为输出顺序流按序绑定布尔表达式:
+
+```python
+cpg = ConditionalParallelGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '${act_1_output} >= 0',
+        2: '${act_1_output} >= 0'
+    },
+    name='[act_2] or [act_3 and act_4]'
+)
+```
+
+由于条件并行网关会改变流程的执行模式,所以最后还是需要将其发散出去的分支汇聚到同一个汇聚网关上:
+
+```python
+extend(cpg).connect(act_2, act_3, act_4).to(cpg).converge(cg)
+```
+
+查看流程执行状态,可以发现只有 act_3 与 act_4 被执行了:
+
+```bash
+>>> api.get_pipeline_states(BambooDjangoRuntime(), pipeline["id"])
+{'children': {u'377d513285963a40951eea983e62f899': {'finish_time': '2019-03-28 13:48:43',
+                                                    'id': u'377d513285963a40951eea983e62f899',
+                                                    'loop': 1L,
+                                                    'name': u'act_4',
+                                                    'retry': 0L,
+                                                    'skip': False,
+                                                    'start_time': '2019-03-28 13:48:43',
+                                                    'state': 'FINISHED'},
+              u'45d24e4595dd32b3ab65feddc73a75c3': {'finish_time': '2019-03-28 13:48:43',
+                                                    'id': u'45d24e4595dd32b3ab65feddc73a75c3',
+                                                    'loop': 1L,
+                                                    'name': u'act_1',
+                                                    'retry': 0L,
+                                                    'skip': False,
+                                                    'start_time': '2019-03-28 13:48:43',
+                                                    'state': 'FINISHED'},
+              u'a4ee2f04cbdb3e87a5563f2907d5c42f': {'finish_time': '2019-03-28 13:48:43',
+                                                    'id': u'a4ee2f04cbdb3e87a5563f2907d5c42f',
+                                                    'loop': 1L,
+                                                    'name': u'[act_2] or [act_3 and act_4]',
+                                                    'retry': 0L,
+                                                    'skip': False,
+                                                    'start_time': '2019-03-28 13:48:43',
+                                                    'state': 'FINISHED'},
+              u'bacfee844f313c7285b0e3f83f187745': {'finish_time': '2019-03-28 13:48:43',
+                                                    'id': u'bacfee844f313c7285b0e3f83f187745',
+                                                    'loop': 1L,
+                                                    'name': u'act_3',
+                                                    'retry': 0L,
+                                                    'skip': False,
+                                                    'start_time': '2019-03-28 13:48:43',
+                                                    'state': 'FINISHED'},
+              ...
+    }
+}
+```
+
+同样地,如果条件并行网关中的所有表达式执行结果都不为真,那么流程会进入执行失败的状态。
+
+### 子流程
+
+子流程允许我们将一个包含多个节点的复杂操作放在一个节点中执行。子流程通常用于公共流程的抽离,你可以将子流程理解为编程语言中的函数,通过将一些重复的执行逻辑放到子流程中,能够减少重复逻辑的编排:
+
+![子流程示例](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/subprocess_example.png)
+
+```python
+from pipeline.eri.runtime import BambooDjangoRuntime
+from bamboo_engine import api
+from bamboo_engine.builder import (
+    build_tree,
+    EmptyStartEvent,
+    ServiceActivity,
+    EmptyEndEvent,
+    ExclusiveGateway,
+    SubProcess,
+    Var,
+    Data,
+    NodeOutput
+)
+
+
+def sub_process(data):
+    subproc_start = EmptyStartEvent()
+    subproc_act = ServiceActivity(component_code='pipe_example_component', name='sub_act')
+    subproc_end = EmptyEndEvent()
+
+    subproc_start.extend(subproc_act).extend(subproc_end)
+
+    subproc_act.component.inputs.sub_input = Var(type=Var.SPLICE, value='${sub_input}')
+
+    return SubProcess(start=subproc_start, data=data)
+
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
+eg = ExclusiveGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '${act_1_output} >= 0'
+    },
+    name='subproc_1 or subproc_2'
+)
+
+sub_pipeline_data_1 = Data(inputs={'${sub_input}': Var(type=Var.PLAIN, value=1)})
+subproc_1 = sub_process(sub_pipeline_data_1)
+
+sub_pipeline_data_2 = Data(inputs={'${sub_input}': Var(type=Var.PLAIN, value=2)})
+subproc_2 = sub_process(sub_pipeline_data_2)
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(eg).connect(subproc_1, subproc_2).converge(end)
+
+act_1.component.inputs.input_a = Var(type=Var.SPLICE, value='${input_a}')
+
+pipeline_data = Data()
+pipeline_data.inputs['${input_a}'] = Var(type=Var.PLAIN, value=0)
+pipeline_data.inputs['${act_1_output}'] = NodeOutput(type=Var.SPLICE, source_act=act_1.id, source_key='input_a')
+
+pipeline = build_tree(start, data=pipeline_data)
+api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
+```
+
+首先我们创建了一个子流程构造函数,该函数会返回一个全新的子流程。为了简单说明子流程的作用,示例中的子流程只有一个活动节点,这个节点的输入中只有一个 `sub_input` 变量,该变量引用了子流程数据上下文中的 `${sub_input}` 变量,也就是说,subproc_act 节点的输出会随着 `${sub_input}` 的变化而变化:
+
+```python
+def sub_process(data):
+    subproc_start = EmptyStartEvent()
+    subproc_act = ServiceActivity(component_code='pipe_example_component', name='sub_act')
+    subproc_end = EmptyEndEvent()
+
+    subproc_start.extend(subproc_act).extend(subproc_end)
+
+    subproc_act.component.inputs.sub_input = Var(type=Var.SPLICE, value='${sub_input}')
+
+    return SubProcess(start=subproc_start, data=data)
+```
+
+随后,我们创建了一个带有分支网关的流程,这个流程会根据 act_1 节点的输出来选择 subproc_1 或 subproc_2 其中一个子流程执行:
+
+```python
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='pipe_example_component', name='act_1')
+eg = ExclusiveGateway(
+    conditions={
+        0: '${act_1_output} < 0',
+        1: '${act_1_output} >= 0'
+    },
+    name='subproc_1 or subproc_2'
+)
+
+sub_pipeline_data_1 = Data(inputs={'${sub_input}': Var(type=Var.PLAIN, value=1)})
+subproc_1 = sub_process(sub_pipeline_data_1)
+
+sub_pipeline_data_2 = Data(inputs={'${sub_input}': Var(type=Var.PLAIN, value=2)})
+subproc_2 = sub_process(sub_pipeline_data_2)
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(eg).connect(subproc_1, subproc_2).converge(end)
+```
+
+这里需要说明一下子流程的声明方式。首先我们为子流程创建了一个数据上下文对象,这一步与流程对象的数据上下文声明一致,因为子流程本质上也是一个流程对象:
+
+```python
+sub_pipeline_data_1 = Data(inputs={'${sub_input}': Var(type=Var.PLAIN, value=1)})
+```
+
+之后,我们创建了一个子流程节点,这个子流程节点接收子流程的开始节点和数据上下文对象。
+
+```python
+subproc_1 = sub_process(sub_pipeline_data_1)
+```
+
+### 数据交换
+
+#### Data
+
+就如我们在[数据对象](./basic_concept.md#数据对象)一节中所提到的,我们可以使用 `Data` 构造对象来定义一个流程的 data context:
+
+![](../assets/img/user_guide_flow_orchestration/data_example.png)
+
+```python
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='debug_node')
+act_1.component.inputs.param_1 = Var(type=Var.SPLICE, value='${constant_1}')
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(end)
+
+pipeline_data = Data()
+pipeline_data.inputs['${constant_1}'] = Var(type=Var.PLAIN, value='value_1')
+
+pipeline = build_tree(start, data=pipeline_data)
+```
+
+在上面的流程中,我们先在 act_1 节点的输入中定义了一个 param_1 参数,该参数引用流程 data context 中的 `${constant_1}` 变量,随后我们通过 `Data` 构造了一个 data context,并在其输入域中定义了 `${constant_1}` 变量。
+
+#### Var
+
+下面是一个使用了 `SPLICE`、`LAZY`、`PLAIN` 三种类型变量的流程示例。我们为 act_1 的输入添加了三个变量:引用了全局变量的 param_1,custom_type 为 `upper_case` 的 LAZY 变量 param_2(这个变量会在被使用前将 value 转换成大写格式),以及普通变量 param_3:
+
+![](../assets/img/user_guide_flow_orchestration/var_sample.png)
+
+```python
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='debug_node')
+act_1.component.inputs.param_1 = Var(type=Var.SPLICE, value='${constant_1}')
+act_1.component.inputs.param_2 = Var(type=Var.LAZY, custom_type='upper_case', value='abc')
+act_1.component.inputs.param_3 = Var(type=Var.PLAIN, value='normal var')
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(end)
+
+pipeline_data = Data()
+pipeline_data.inputs['${constant_1}'] = Var(type=Var.PLAIN, value='value_1')
+
+pipeline = build_tree(start, data=pipeline_data)
+```
+
+#### NodeOutput
+
+当我们要引用其他节点输出的变量时,就需要使用 `NodeOutput` 来声明这种引用关系,下面是一个引用之前节点输出的变量的例子:
+
+![](../assets/img/user_guide_flow_orchestration/node_output_example_1.png)
+
+```python
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='debug_node')
+act_1.component.inputs.param_1 = Var(type=Var.PLAIN, value='output_value_1')
+act_2 = ServiceActivity(component_code='debug_node')
+act_2.component.inputs.param_2 = Var(type=Var.SPLICE, value='${act_1_output}')
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(act_2).extend(end)
+
+pipeline_data = Data()
+pipeline_data.inputs['${act_1_output}'] = NodeOutput(source_act=act_1.id,
+                                                     source_key='param_1',
+                                                     type=Var.SPLICE,
+                                                     value='')
+
+pipeline = build_tree(start, data=pipeline_data)
+```
+
+数据上下文中的 `${act_1_output}` 变量引用了 act_1 输出的 param_1。当 act_1 执行完后,`${act_1_output}` 的值为 `output_value_1`,act_2 中的 param_2 解析后的值为 `output_value_1`。
+
+#### RewritableNodeOutput
+
+`RewritableNodeOutput` 能够帮助我们在数据上下文中声明一个引用多个节点输出的变量,这个变量的值会在其引用的节点执行完后进行刷新,下面是一个使用了 `RewritableNodeOutput` 的例子:
+
+![](../assets/img/user_guide_flow_orchestration/rewritable_output_example_1.png)
+
+```python
+start = EmptyStartEvent()
+act_1 = ServiceActivity(component_code='debug_node')
+act_1.component.inputs.param_1 = Var(type=Var.PLAIN, value='output_value_1')
+act_2 = ServiceActivity(component_code='debug_node')
+act_2.component.inputs.context_var = Var(type=Var.SPLICE, value='${rewritable_output}')
+act_2.component.inputs.param_2 = Var(type=Var.PLAIN, value='output_value_2')
+act_3 = ServiceActivity(component_code='debug_node')
+act_3.component.inputs.context_var = Var(type=Var.SPLICE, value='${rewritable_output}')
+end = EmptyEndEvent()
+
+start.extend(act_1).extend(act_2).extend(act_3).extend(end)
+
+pipeline_data = Data()
+pipeline_data.inputs['${rewritable_output}'] = RewritableNodeOutput(source_act=[
+    {
+        'source_act': act_1.id,
+        'source_key': 'param_1'
+    },
+    {
+        'source_act': act_2.id,
+        'source_key': 'param_2'
+    }], type=Var.SPLICE, value='')
+
+pipeline = build_tree(start, data=pipeline_data)
+```
+
+数据上下文中的 `${rewritable_output}` 变量引用了来自 act_1 及 act_2 两个节点的输出字段。当 act_1 执行完成时,`${rewritable_output}` 的值为 `output_value_1`,act_2 中 context_var 解析后的值为 `output_value_1`;当 act_2 执行完成后,`${rewritable_output}` 的值变为 `output_value_2`,act_3 中 context_var 解析后的值为 `output_value_2`。
+
+#### DataInput 与 Params
+
+`DataInput` 可以让我们将子流程数据上下文中的变量暴露出来,使得父流程能够通过 `Params` 来向子流程传递参数:
+
+![](../assets/img/user_guide_flow_orchestration/data_input_example_1.png)
+
+```python
+sub_start = EmptyStartEvent()
+sub_act_1 = ServiceActivity(component_code='debug_node')
+sub_act_1.component.inputs.param_1 = Var(type=Var.SPLICE, value='${sub_constant_1}')
+sub_end = EmptyEndEvent()
+
+sub_start.extend(sub_act_1).extend(sub_end)
+
+sub_pipeline_data = Data()
+sub_pipeline_data.inputs['${sub_constant_1}'] = DataInput(type=Var.PLAIN, value='default_value')
+
+start = EmptyStartEvent()
+params = Params({
+    '${sub_constant_1}': Var(type=Var.SPLICE, value='${constant_1}')
+})
+subprocess = SubProcess(start=sub_start, data=sub_pipeline_data, params=params)
+end = EmptyEndEvent()
+
+start.extend(subprocess).extend(end)
+
+pipeline_data = Data()
+pipeline_data.inputs['${constant_1}'] = Var(type=Var.PLAIN, value='value_1')
+
+pipeline = build_tree(start, data=pipeline_data)
+```
+
+上述例子定义的流程中,我们的子流程中包含一个 sub_act_1 节点,该节点引用了子流程数据上下文中的 `${sub_constant_1}`,而该子流程通过 `DataInput` 将 `${sub_constant_1}` 设置成了向父流程暴露的参数。如果父流程在使用子流程时没有修改这个变量的定义,该变量就会使用默认的定义,即 `Var(type=Var.PLAIN, value='default_value')`:
+
+```python
+sub_pipeline_data.inputs['${sub_constant_1}'] = DataInput(type=Var.PLAIN, value='default_value')
+```
+
+当数据上下文中的变量作为参数暴露出来之后,父流程就能够通过 `Params` 修改子流程暴露出来的变量,示例中这个变量此时引用了父流程数据上下文中的 `${constant_1}`:
+
+```python
+params = Params({
+    '${sub_constant_1}': Var(type=Var.SPLICE, value='${constant_1}')
+})
+subprocess = SubProcess(start=sub_start, data=sub_pipeline_data, params=params)
+```
+
+所以,最后 sub_act_1 拿到的是父流程传递进来的变量,值为 `value_1`。
+
+### 流程结构合法性
+
+通过使用流程构造器,我们能够构造任意结构的流程,但是在 pipeline 流程引擎中,并不是所有的流程结构都是合法的。出于引擎实现上的一些考虑,我们从三个角度对流程的合法性进行了限制:
+
+- 节点合法性
+- 网关配对合法性
+- 流程合法性
+
+只有同时满足以上三种合法性规则的流程才算是一个合法的流程。
+
+#### 节点合法性
+
+节点合法性是对流程中每个节点的出入度的限制(这些约束可以直接在 `build_tree` 的产物上验证,见本节末尾的示意代码):
+
+- StartEvent
+  - 入度:`0`
+  - 出度:`1`
+- EndEvent
+  - 入度:`>= 1`
+  - 出度:`0`
+- ServiceActivity
+  - 入度:`>= 1`
+  - 出度:`1`
+- SubProcess
+  - 入度:`>= 1`
+  - 出度:`1`
+- ParallelGateway
+  - 入度:`>= 1`
+  - 出度:`>= 1`
+- ConditionalParallelGateway
+  - 入度:`>= 1`
+  - 出度:`>= 1`
+- ExclusiveGateway
+  - 入度:`>= 1`
+  - 出度:`>= 1`
+- ConvergeGateway
+  - 入度:`>= 1`
+  - 出度:`1`
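+
+下面是一段验证出入度约束的示意代码,统计逻辑基于 `build_tree` 产物中 flows 的 source/target 字段(这里假设 pipeline 为前文 `build_tree` 的返回值):
+
+```python
+from collections import Counter
+
+
+def degrees(tree):
+    # 根据 flows 中记录的 source/target 统计每个节点的出度与入度
+    out_degree, in_degree = Counter(), Counter()
+    for flow in tree["flows"].values():
+        out_degree[flow["source"]] += 1
+        in_degree[flow["target"]] += 1
+    return out_degree, in_degree
+
+
+out_degree, in_degree = degrees(pipeline)
+start_id = pipeline["start_event"]["id"]
+assert out_degree[start_id] == 1  # StartEvent 出度为 1
+assert in_degree[start_id] == 0   # StartEvent 入度为 0
+```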
+
+#### 网关配对合法性
+
+在了解网关配对的概念前,先了解正向顺序流和反向顺序流的概念:
+
+正向顺序流:源节点与开始节点的最远距离**小于**目标节点与开始节点的最远距离的顺序流
+
+反向顺序流:源节点与开始节点的最远距离**大于**目标节点与开始节点的最远距离的顺序流
+
+分支网关和并行网关只会与其正向顺序流所能够到达的汇聚网关进行匹配,只能通过反向顺序流到达的汇聚网关不会被视为与该网关匹配的汇聚网关。
+
+例如,下图中的汇聚网关2不会被视为与分支网关6匹配的汇聚网关,但其会作为分支网关2的汇聚网关:
+
+![网关配对](https://raw.githubusercontent.com/homholueng/md_pic/master/pipeline_doc/gateway_match.png)
+
+在这个前提下,合法的网关配对必须满足:
+
+- 并行网关必须且只能与一个汇聚网关进行配对,且汇聚网关只能与一个并行网关进行匹配,与并行网关匹配的汇聚网关必须汇聚该并行网关创建出的所有分支。
+- 分支网关可以且只能与一个汇聚网关进行匹配,也可以不进行匹配,没有匹配汇聚网关的分支网关被称为悬挂分支。(分支网关之间或分支网关与并行网关可以共享汇聚网关)
+
+
+#### 流程合法性
+
+流程合法性保证了流程在执行的过程中能够合法推进。
+
+由开始节点发散出的分支被称为主分支。
+
+所有由并行网关发散出的分支被称为并行分支,并行网关发散出的每一个并行分支彼此之间会被判定为不同的分支。
+
+流程不能改变分支的属性,分支的属性只能通过网关来进行改变,如通过并行网关将主分支扩展成多个并行分支,通过汇聚网关将来自同一个并行网关的并行分支合并成主分支。
diff --git a/docs/user_guide/monitor.md b/docs/user_guide/monitor.md
new file mode 100644
index 00000000..603088ff
--- /dev/null
+++ b/docs/user_guide/monitor.md
@@ -0,0 +1,45 @@
+
+## 监控
+
+bamboo-engine 目前会向外暴露以下 prometheus metrics:
+
+- engine_running_processes(Gauge):正在执行的引擎进程数
+- engine_running_schedules(Gauge):正在执行的引擎调度数
+- engine_process_running_time(Histogram):进程每次执行耗时
+- engine_schedule_running_time(Histogram):调度每次执行耗时
+- engine_node_execute_time(Histogram):每种节点类型每次执行耗时
+- engine_node_schedule_time(Histogram):每种节点类型每次调度耗时
+
+bamboo-engine 定义了运行时应该记录并向外暴露的 prometheus metrics:
+
+- engine_runtime_context_value_read_time(Histogram):运行时读取上下文数据耗时
+- engine_runtime_context_ref_read_time(Histogram):运行时读取上下文引用数据耗时
+- engine_runtime_context_value_upsert_time(Histogram):运行时更新上下文数据耗时
+- engine_runtime_data_inputs_read_time(Histogram):运行时读取节点输入数据耗时
+- engine_runtime_data_outputs_read_time(Histogram):运行时读取节点输出配置耗时
+- engine_runtime_data_read_time(Histogram):运行时读取节点输入数据和输出配置耗时
+- engine_runtime_exec_data_inputs_read_time(Histogram):运行时读取节点执行数据输入耗时
+- engine_runtime_exec_data_outputs_read_time(Histogram):运行时读取节点执行数据输出耗时
+- engine_runtime_exec_data_read_time(Histogram):运行时读取节点执行数据耗时
+- engine_runtime_exec_data_inputs_write_time(Histogram):运行时写入节点执行数据输入耗时
+- engine_runtime_exec_data_outputs_write_time(Histogram):运行时写入节点执行数据输出耗时
+- engine_runtime_exec_data_write_time(Histogram):运行时写入节点执行数据耗时
+- engine_runtime_callback_data_read_time(Histogram):运行时读取节点回调数据耗时
+- engine_runtime_schedule_read_time(Histogram):运行时读取调度对象耗时
+- engine_runtime_schedule_write_time(Histogram):运行时写入调度对象耗时
+- engine_runtime_state_read_time(Histogram):运行时读取节点状态对象耗时
+- engine_runtime_state_write_time(Histogram):运行时写入节点状态对象耗时
+- engine_runtime_node_read_time(Histogram):运行时读取节点数据耗时
+- engine_runtime_process_read_time(Histogram):运行时读取进程对象耗时
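+
+运行时在记录这类指标时,通常的做法是用 prometheus-client 的 Histogram 观测耗时。下面是一个最小示意(指标名取自上面的列表;`_do_get_state` 是示意用的桩函数,并非真实的运行时接口):
+
+```python
+import time
+
+from prometheus_client import Histogram
+
+STATE_READ_TIME = Histogram("engine_runtime_state_read_time", "运行时读取节点状态对象耗时")
+
+
+def _do_get_state(node_id):
+    # 桩实现,真实运行时会从数据库等存储中读取状态对象
+    return {"id": node_id, "state": "FINISHED"}
+
+
+def get_state(node_id):
+    start = time.time()
+    try:
+        return _do_get_state(node_id)
+    finally:
+        # 将本次读取的耗时记录到 Histogram 中
+        STATE_READ_TIME.observe(time.time() - start)
+```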
+
+## 采集入口
+
+### bamboo-pipeline
+
+目前 bamboo-pipeline 仅支持采集以 gevent 模式启动的 worker 记录的 metrics,celery worker 以 gevent 模式启动的命令如下:
+
+```shell
+$ python manage.py celery worker -Q er_execute,er_schedule -l info -P gevent
+```
+
+worker 进程会在本地 `:8001` 端口暴露 metrics 采集入口。
\ No newline at end of file
diff --git a/docs/user_guide/run_your_component.md b/docs/user_guide/run_your_component.md
new file mode 100644
index 00000000..ae530261
--- /dev/null
+++ b/docs/user_guide/run_your_component.md
@@ -0,0 +1,51 @@
+
+
+- [执行你的组件](#执行你的组件)
+  - [Component Runner](#component-runner)
+  - [示例](#示例)
+
+
+
+## 执行你的组件
+
+当我们编写好组件后,可能需要把它运行起来看看执行的效果,但如果为了执行一个组件而特地编排一个流程来测试,未免过于麻烦。这时候可以使用框架提供的 component runner 来模拟组件的运行时,直接在命令行中执行组件。
+
+### Component Runner
+
+要在命令行执行组件很简单,只需执行如下命令:
+
+```shell
+python manage.py run_component your_component_code
+```
+
+该命令的可选选项如下:
+
+- `-d`:data 字段,会传递给插件 `execute` 方法的 `data` 参数,格式为 JSON 字符串
+- `-p`:parent_data 字段,会传递给插件 `execute` 方法的 `parent_data` 参数,格式为 JSON 字符串
+- `-c`:callback data 字段,会传递给插件 `schedule` 方法的 `callback_data` 参数,格式为 JSON 字符串
+
+> 注意:component runner 不会进行任何的 mock 操作,插件中的代码会被真正执行,对第三方系统接口的调用也会生效。
+
+组件中调用 `self.logger` 打印的日志会被输出到当前命令行会话中。
+
+### 示例
+
+下面展示了在命令行中执行 code 为 `schedule_node` 的组件,并传递相应参数的示范:
+
+```shell
+python manage.py run_component -d '{"k": "v"}' -p '{"1": "2"}' -c '{"3": "4"}' schedule_node
+2019-11-06 06:46:48,946 - INFO - Start to run component [schedule_node] with data: , parent_data: 
+2019-11-06 06:46:48,947 - INFO - Schedule 1 with data: , parent_data: 
+2019-11-06 06:46:48,947 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:46:50,951 - INFO - Schedule 2 with data: , parent_data: 
+2019-11-06 06:46:50,952 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:46:52,954 - INFO - Schedule 3 with data: , parent_data: 
+2019-11-06 06:46:52,955 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:46:54,956 - INFO - Schedule 4 with data: , parent_data: 
+2019-11-06 06:46:54,956 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:46:56,958 - INFO - Schedule 5 with data: , parent_data: 
+2019-11-06 06:46:56,959 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:46:58,959 - INFO - Schedule 6 with data: , parent_data: 
+2019-11-06 06:46:58,960 - INFO - Schedule return [True], wait for next schedule in 2s
+2019-11-06 06:47:00,962 - INFO - Schedule finished
+```
\ No newline at end of file
diff --git a/docs/user_guide/splice_var.md b/docs/user_guide/splice_var.md
new file mode 100644
index 00000000..83c94ec8
--- /dev/null
+++ b/docs/user_guide/splice_var.md
@@ -0,0 +1,120 @@
+
+
+
+- [SPLICE 变量](#splice-变量)
+  - [变量引用](#变量引用)
+  - [python 操作](#python-操作)
+
+
+
+## SPLICE 变量
+
+SPLICE 类型的变量能够帮助我们声明变量与变量间的引用关系,灵活使用 SPLICE 类型的变量能够在流程中实现很多复杂的数据交互操作。**我们通过 `${xxxx}` 的形式来声明对数据上下文中某个变量的引用;另外,数据上下文中只有 key 为 `${xxxx}` 形式的变量能够被 SPLICE 变量引用。**
+
+所有的 SPLICE 类型的变量在创建时仅仅声明了它们的引用关系,并没有进行解析操作。在流程中的节点执行前,引擎会解析该节点数据对象输入中所有的 SPLICE 变量,在解析的过程中,直接或间接被这些 SPLICE 变量引用的其他 SPLICE 变量也会被解析。以下面的场景为例:
+
+![](../assets/img/user_guide_splice_var/splice_resolve.png)
+
+当 act_1 执行前,引擎会解析其数据对象输入中的 `param_1` 变量,因为 `param_1` 直接引用了数据上下文中的 `${constant_1}` 变量,所以 `${constant_1}` 的值也会被解析,一直解析到 `${constant_2}`。而数据上下文中的 `${constant_3}`、`${constant_4}` 因为没有被 act_1 中的变量引用,所以就没有进行解析。
+
+### 变量引用
+
+以下面一个场景为例,我们的 act_1 的 `param_1` 输入需要引用数据上下文中定义的 `${constant_1}` 变量:
+
+![](../assets/img/user_guide_splice_var/splice_example_1.png)
+
+那么我们可以按照以下的方式来声明这两个变量之间的关系,当 act_1 执行前,`param_1` 解析后其值应为 `value_1`。
+
+```python
+# in act_1 component inputs
+{
+    'param_1': {
+        'type': 'splice',
+        'value': '${constant_1}'
+    }
+}
+
+# in data context inputs
+{
+    '${constant_1}': {
+        'type': 'plain',
+        'value': 'value_1'
+    }
+}
+```
+
+当然,变量之间也能够存在链式的引用关系:
+
+![](../assets/img/user_guide_splice_var/splice_example_2.png)
+
+声明方式如下,`param_1` 解析后其值应为 `value_1_value_2_value_3`:
+
+```python
+# in act_1 component inputs
+{
+    'param_1': {
+        'type': 'splice',
+        'value': '${constant_1}'
+    }
+}
+
+# in data context inputs
+{
+    '${constant_1}': {
+        'type': 'plain',
+        'value': 'value_1_${constant_2}'
+    },
+    '${constant_2}': {
+        'type': 'plain',
+        'value': 'value_2_${constant_3}'
+    },
+    '${constant_3}': {
+        'type': 'plain',
+        'value': 'value_3'
+    },
+}
+
+```
+
+一个变量引用多个变量当然也是支持的:
+
+![](../assets/img/user_guide_splice_var/splice_example_3.png)
+
+声明方式如下,`param_1` 解析后其值应为 `value_1__value_2`:
+
+```python
+# in act_1 component inputs
+{
+    'param_1': {
+        'type': 'splice',
+        'value': '${constant_1}__${constant_2}'
+    }
+}
+
+# in data context inputs
+{
+    '${constant_1}': {
+        'type': 'plain',
+        'value': 'value_1'
+    },
+    '${constant_2}': {
+        'type': 'plain',
+        'value': 'value_2'
+    }
+}
+```
+
+### python 操作
+
+除了通过 `${xxx}` 语法引用数据上下文中的变量,引擎还支持对变量执行一些 python 操作:
+
+- 字符串拼接:`${"prefix" + KEY}`、`${"prefix%s" % KEY}`、`${"prefix{}".format(KEY)}`、`${"%s%s" % (KEY1, KEY2)}`
+
+- 字符串变换:`${KEY.upper()}`、`${KEY.replace("\n", ",")}`、`${KEY[0:2]}`、`${KEY.strip()}`
+
+- 数字运算:`${int(KEY) + 1}`、`${int(KEY)/10}`
+
+- 类型转换:`${KEY.split("\n")}`、`${KEY.count()}`、`${list(KEY)}`、`${[item.strip() for item in KEY.split("\n")]}`
+
+基本上 python 中能够在一行语句中实现的操作,都能够在 SPLICE 变量中实现。
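+
+下面给出一个在流程中声明这类变量的简单示意(沿用前文 builder 的写法,`debug_node` 这个 component_code 仅为示例):
+
+```python
+from bamboo_engine.builder import Data, ServiceActivity, Var
+
+act_1 = ServiceActivity(component_code='debug_node')
+# param_1 解析后的值应为 "HELLO,WORLD":先转大写,再把空格替换为逗号
+act_1.component.inputs.param_1 = Var(type=Var.SPLICE, value='${KEY.upper().replace(" ", ",")}')
+
+pipeline_data = Data()
+pipeline_data.inputs['${KEY}'] = Var(type=Var.PLAIN, value='hello world')
+```
+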
diff --git a/docs/user_guide/workers.md b/docs/user_guide/workers.md
new file mode 100644
index 00000000..be66ab36
--- /dev/null
+++ b/docs/user_guide/workers.md
@@ -0,0 +1,64 @@
+
+
+- [Celery Worker 配置](#celery-worker-配置)
+  - [Worker Pool 的选择](#worker-pool-的选择)
+- [任务队列隔离](#任务队列隔离)
+
+
+
+## Celery Worker 配置
+
+引擎中对不同类型的调度任务划分了不同的队列,建议按照如下的方式来启动 Celery Worker:
+
+处理流程推进的 worker:
+
+```shell
+python manage.py celery worker -Q er_execute
+```
+
+处理轮询调度及回调请求的 worker:
+
+```shell
+python manage.py celery worker -Q er_schedule
+```
+
+### Worker Pool 的选择
+
+Celery worker 默认使用 prefork 模式启动,如果要提高系统的流程执行并发量,建议安装 gevent 并以 gevent 模式启动:
+
+```shell
+$ pip install gevent
+$ python manage.py celery worker -Q er_execute,er_schedule -P gevent -c 500
+```
+
+更多 celery worker pool 的介绍请参考 [celery workers](https://docs.celeryproject.org/en/stable/userguide/workers.html#concurrency)
+
+## 任务队列隔离
+
+在某些使用场景中,我们不希望一些任务的执行受到其他任务执行的影响,这时可以通过添加自定义的队列来解决这个问题:
+
+```python
+from pipeline.eri.celery.queues import *
+from celery import Celery
+
+# 添加 API 队列
+CELERY_QUEUES.extend(QueueResolver("api").queues())
+
+app = Celery("proj")
+
+app.config_from_object("django.conf:settings")
+```
+
+这样我们就能够在执行任务的时候选择我们创建的自定义队列:
+
+```python
+api.run_pipeline(runtime, pipeline, queue='api')
+```
+
+当然,为了实现队列隔离的效果,我们要为我们自定义的队列启动专用的 worker:
+
+```shell
+python manage.py celery worker -Q er_execute_api,er_schedule_api
+```
+
+如果你添加了多个自定义队列,就要为每个队列都启动一批 worker。
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..e6c68b54
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,16 @@
+[tool.black]
+line-length = 120
+include = '\.pyi?$'
+exclude = '''
+/(
+    \.git
+  | \.hg
+  | \.mypy_cache
+  | \.tox
+  | \.venv
+  | _build
+  | buck-out
+  | build
+  | dist
+)/
+'''
\ No newline at end of file
diff --git a/release.md b/release.md
new file mode 100644
index 00000000..0b6fe2be
--- /dev/null
+++ b/release.md
@@ -0,0 +1,49 @@
+## 1.3.2
+
+- optimization:
+  - 对无效的 schedule 请求增加防御机制,防止受到单个节点 schedule 请求风暴的影响
+
+## 1.3.1
+
+- bugfix:
+  - 修复 preview_node_inputs 查看子流程节点数据失败的问题
+
+## 1.3.0
+
+- feature:
+  - (eri 3.0)增加 retry_subprocess API,支持对进入失败的子流程进行重试
+
+## 1.2.1
+
+- bugfix:
+  - 修复 engine 执行过程异常时,`process_info` 未声明导致的问题
+
+## 1.2.0
+
+- feature:
+  - (eri 2.1) API 增加 get_data 接口
+- optimization:
+  - 优化引擎在 context.hydrate 时的异常处理逻辑
+- bugfix:
+  - 修复 Settings.MAKO_SANDBOX_IMPORT_MODULES 无法导入多层级模块的问题
+  - 修复子流程节点没有输出 `_inner_loop` 字段的问题
+
+## 1.1.9
+
+- feature:
+  - (eri 2.0)引擎添加 inner_loop 记录当前流程循环次数功能
+  - 添加 bamboo_engine.api.preview_node_inputs
+
+## 1.1.8
+
+- bugfix:
+  - 修复 service_activity 节点执行后 outputs 中没有 _loop 的问题
+
+## 1.1.7
+
+- improvement:
+  - 变量预渲染放在流程开始节点,支持变量引用的情况
+
+## 1.1.6
+
+- feature:
+  - engine run_pipeline API 支持配置子流程预置上下文
+  - eri version bump to 1.0.0
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000..cd2d225f
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+Werkzeug>=1.0.1,<2.0
+pyparsing>=2.2.0,<3.0
+mako>=1.1.4,<2.0
+prometheus-client>=0.9.0,<1.0
\ No newline at end of file
diff --git a/requirements_test.txt b/requirements_test.txt
new file mode 100644
index 00000000..3bc347d7
--- /dev/null
+++ b/requirements_test.txt
@@ -0,0 +1,8 @@
+Werkzeug>=1.0.1,<2.0
+pyparsing>=2.2.0,<3.0
+mako>=1.1.4,<2.0
+mock==4.0.3
+pytest==6.2.2
+black==20.8b1
+prometheus-client>=0.9.0,<1.0
+tox==3.24.0
diff --git a/runtime/bamboo-pipeline/LICENSE.txt b/runtime/bamboo-pipeline/LICENSE.txt
new file mode 100644
index 00000000..fe63c5cb
--- /dev/null
+++ b/runtime/bamboo-pipeline/LICENSE.txt
@@ -0,0 +1,377 @@
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+If you have downloaded a copy of the 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) binary from Tencent, please note that the 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) binary is licensed under the MIT License. +If you have downloaded a copy of the 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) source code from Tencent, please note that 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition). +A copy of the MIT License is included in this file. + + +Other dependencies and licenses: + +Open Source Software Licensed Under the BSD 3-Clause License: +The below software in this distribution may have been modified by THL A29 Limited (“Tencent Modifications”). All Tencent Modifications are Copyright (C) 2017-2021 THL A29 Limited. +---------------------------------------------------------------------------------------- +1. Django 1.8.11 +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +2. Flask 0.10.1 +Copyright (c) 2013 by Armin Ronacher and contributors. See AUTHORS for more details. +Some rights reserved. + +3. Flask-Babel 0.11.2 +Copyright (c) 2010 by Armin Ronacher. +Some rights reserved. + +4. qs 6.5.2 +Copyright (c) 2014 Nathan LaFreniere and other contributors. +All rights reserved. + + +Terms of the BSD 3-Clause License: +-------------------------------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: +Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +Neither the name of [copyright holder] nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +Open Source Software Licensed Under the Apache License, Version 2.0: +---------------------------------------------------------------------------------------- +1. select2 3.5.3 +Copyright 2014 Igor Vaynberg + +2. bootstrap 2.0.4 +Copyright 2012 Twitter, Inc. + +3. bootstrap 2.3.2 +Copyright 2012 Twitter, Inc. + +4. 
webfontloader 1.6.24 +Copyright (c) 2010 Adobe Systems Incorporated, Google Incorporated. + +5. bootstrap-treeview 1.0.2 +Copyright 2013 Jonathan Miles + +6. bootstrap-datepicker master +Copyright 2012 Stefan Petre +Improvements by Andrew Rowls + +7. bootstrap-paginator 1.0.0 +Copyright 2013 Yun Lai + +8. sls-admin-vue v1_async_router +Copyright sailengsi sailengsi@126.com + +9. NUTZ-ONEKEY 4.0 +Copyright © 2018 - Kerbores. All Rights Reserved + +10. solr 7.0.0 +Copyright 2006-2017 The Apache Software Foundation + + +Terms of the Apache License, Version 2.0: +--------------------------------------------------- +Apache License

Version 2.0, January 2004

http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +“License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +“Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +“Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +“You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. + +“Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +“Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +“Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” + +“Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + +b) You must cause any modified files to carry prominent notices stating that You changed the files; and + +c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +d) If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + + + +Open Source Software Licensed under the Boost Software License-Version 1.0: +-------------------------------------------------------------------- +1. 
Boost 1.34.1 +Copyright 2000-2003 Beman Dawes + + +Terms of the Boost Software License-Version 1.0: +-------------------------------------------------------------------- + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +Open Source Software Licensed Under the MIT License: +---------------------------------------------------------------------------------------- +1. jquery 1.7.2 +Copyright (c) 2011 John Resig, http://jquery.com/ + +2. jquery 1.8.2 +Copyright 2012 jQuery Foundation and other contributors +http://jquery.com/ + +3. jquery 1.8.3 +Copyright 2012 jQuery Foundation and other contributors +http://jquery.com/ + +4. jquery 1.9.1 +Copyright 2012 jQuery Foundation and other contributors +http://jquery.com/ + +5. jquery 1.10.2 +Copyright 2013 jQuery Foundation and other contributors +http://jquery.com/ + +6. jquery 1.11.0 +Copyright 2014 jQuery Foundation and other contributors +http://jquery.com/ + +7. jquery 1.11.2 +Copyright 2014 jQuery Foundation and other contributors +http://jquery.com/ + +8. jquery-json 2.6.0 +Copyright 2009-2011 Brantley Harris +Copyright 2010-2016 Timo Tijhof + +9. bootstrap 3.1.1 +Copyright (c) 2011-2014 Twitter, Inc + +10. bootstrap 3.3.4 +Copyright (c) 2011-2015 Twitter, Inc + +11. bootstrap 3.3.5 +Copyright (c) 2011-2015 Twitter, Inc + +12. bootstrap 3.3.7 +Copyright (c) 2011-2016 Twitter, Inc. + +13. jquery-ui 1.10.4 +Copyright 2014 jQuery Foundation and other contributors, +http://jqueryui.com/ + +14. jquery-ui 1.11.0 +Copyright 2014 jQuery Foundation and other contributors, +http://jqueryui.com/ + +15. path-to-regexp 1.7.0 +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +16. polyfill +Copyright (c) 2013 Philip Walton + +17. html5shiv 3.7.3 +Copyright (c) 2014 Alexander Farkas (aFarkas) + +18. bootstrap-daterangepicker 2.0.5 +Copyright (c) 2012-2015 Dan Grossman + +19. moment 2.10.3 +Copyright (c) 2011-2015 Tim Wood, Iskren Chernev, Moment.js contributors + +20. noty 2.4.1 +Copyright (c) 2012 Nedim Arabacı + +21. samrtmenus 1.0.1 +Copyright (c) Vasil Dinkov, Vadikom Web Ltd. + +22. handlebars.js 3.0.1 +Copyright (C) 2011-2014 by Yehuda Katz + +23. handlebars.js 4.0.5 +Copyright (C) 2011-2015 by Yehuda Katz + +24. jquery-cookie 1.4.1 +Copyright 2014 Klaus Hartl + +25. 
requirejs 2.1.15 +Copyright (c) 2010-2014, The Dojo Foundation + +26. jquery.zeroclipboard 0.2.0 +Copyright (c) 2014 James M. Greene + +27. switchery 0.8.2 +Copyright (c) 2013-2015 Alexander Petkov + +28. underscore 1.8.3 +Copyright (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative +Reporters & Editors + +29. List.js 1.3.0
Copyright (c) 2011-2014 Jonny Strömberg, jonnystromberg.com + +30. jquery-validation 1.11.0 +Copyright (c) 2013 Jörn Zaefferer + +31. Respond 1.4.2 +Copyright (c) 2012 Scott Jehl + +32. metismenu 2.6.1 +Copyright (c) 2015 Osman Nuri Okumuş + +33. easy-pie-chart 2.1.6 +Copyright (c) 2013 Robert Fleischmann + +34. radialIndicator 1.2.0 +Copyright (c) <2015> + +35. vue 2.4.2 +Copyright (c) 2013-present, Yuxi (Evan) You + +36. lodash 2.4.1 +Copyright 2012-2013 The Dojo Foundation +Based on Underscore.js 1.5.2, copyright 2009-2013 Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors http://underscorejs.org/ + +37. Sugar 1.4.1 +Copyright © 2011 Andrew Plummer + +38. jquery-mousewheel 3.0.6 +Copyright 2011, Brandon Aaron (http://brandonaaron.net/) + +39. jquery-mousewheel 3.1.12 +Copyright (c) 2013, Brandon Aaron (http://brandon.aaron.sh) + +40. DataTables 1.10.7 +Copyright (c) 2008-2013 SpryMedia Limited +http://datatables.net + +41. tablesorter 2.24.6 +Copyright (c) 2007 Christian Bach + +42. schedule 0.4.2 +Copyright (c) 2013 Daniel Bader (http://dbader.org) + +43. c3 0.4.10 +Copyright (c) 2013 Masayuki Tanaka + +44. Gritter for jQuery 1.7.4 +Copyright (c) 2012 Jordan Boesch + +45. daterangepicker 2.0.0 +Copyright (c) 2012-2014 Dan Grossman + +46. bootstrap-sortable 1.11.2 +Copyright (c) 2013 Matúš Brliť (drvic10k), bootstrap-sortable contributors +Copyright (c) 2011-2013 Tim Wood, Iskren Chernev, Moment.js contributors + +47. bootstrap-switch 3.3.4 +Copyright (c) 2013-2015 The authors of Bootstrap Switch + +48. clipboard.js 1.5.15 +© Zeno Rocha + +49. jquery-mobile 1.4.2 +Copyright 2010, 2014 jQuery Foundation, Inc. and other contributors, +http://jquery.com/ + +50. element 2.4.1 +Copyright (c) 2016 ElemeFE + +51. art-template 4.13.0 +Copyright (c) 2016 糖饼 + +52. axios 0.18.0 +Copyright (c) 2014-present Matt Zabriskie + +53. jsplumb 2.7.3 +Copyright (c) 2010 - 2014 jsPlumb, http://jsplumbtoolkit.com/ + +54. plotly.js 1.39.4 +Copyright (c) 2018 Plotly, Inc + +55. popper.js 1.2.0 +Copyright © 2016 Federico Zivolo and contributors + +56. vue-router 3.0.1 +Copyright (c) 2013-2016 Evan You + +57. vee-validate 2.1.0-beta.6 +Copyright (c) Abdelrahman Awad logaretm1@gmail.com + +58. vuex 3.0.1 +Copyright (c) 2015-2016 Evan You + +59. Vue.Draggable 2.16.0 +Copyright (c) 2016 David Desmaisons + +60. ngx-treeview master +Copyright (c) 2015-2017 + +61. redux-devtools-extension 2.15.0 +Copyright (c) 2015-present Mihail Diordiev + +62. nunuStudio v0.8.9.22-alpha +Copyright (c) 2018 Tentone + + + +Terms of the MIT License: +-------------------------------------------------------------------- +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/runtime/bamboo-pipeline/pipeline/__init__.py b/runtime/bamboo-pipeline/pipeline/__init__.py new file mode 100644 index 00000000..35b04733 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +default_app_config = "pipeline.apps.PipelineConfig" + +__version__ = "3.6.2" diff --git a/runtime/bamboo-pipeline/pipeline/admin.py b/runtime/bamboo-pipeline/pipeline/admin.py new file mode 100644 index 00000000..5c87ed01 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/admin.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline import models + + +@admin.register(models.PipelineTemplate) +class PipelineTemplateAdmin(admin.ModelAdmin): + list_display = ["id", "template_id", "name", "create_time", "edit_time"] + list_filter = ["is_deleted"] + search_fields = ["name"] + raw_id_fields = ["snapshot"] + + +@admin.register(models.PipelineInstance) +class PipelineInstanceAdmin(admin.ModelAdmin): + list_display = ["id", "template", "name", "instance_id", "create_time", "start_time", "finish_time", "is_deleted"] + list_filter = ["is_started", "is_finished", "is_revoked", "is_deleted"] + search_fields = ["name"] + raw_id_fields = ["template", "snapshot", "execution_snapshot", "tree_info"] diff --git a/runtime/bamboo-pipeline/pipeline/apps.py b/runtime/bamboo-pipeline/pipeline/apps.py new file mode 100644 index 00000000..0432d0f0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/apps.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import sys +import logging +import traceback + +import redis +from django.apps import AppConfig +from django.conf import settings +from redis.sentinel import Sentinel +from rediscluster import RedisCluster + +logger = logging.getLogger("root") + + +def get_client_through_sentinel(): + kwargs = {"sentinel_kwargs": {}} + sentinel_pwd = settings.REDIS.get("sentinel_password") + if sentinel_pwd: + kwargs["sentinel_kwargs"]["password"] = sentinel_pwd + if "password" in settings.REDIS: + kwargs["password"] = settings.REDIS["password"] + host = settings.REDIS["host"] + port = settings.REDIS["port"] + sentinels = list(zip([h.strip() for h in host.split(",")], [p.strip() for p in str(port).split(",")],)) + rs = Sentinel(sentinels, **kwargs) + # avoid None value in settings.REDIS + r = rs.master_for(settings.REDIS.get("service_name") or "mymaster") + # try to connect master + r.echo("Hello Redis") + return r + + +def get_cluster_client(): + kwargs = {"startup_nodes": [{"host": settings.REDIS["host"], "port": settings.REDIS["port"]}]} + if "password" in settings.REDIS: + kwargs["password"] = settings.REDIS["password"] + + r = RedisCluster(**kwargs) + r.echo("Hello Redis") + return r + + +def get_single_client(): + kwargs = { + "host": settings.REDIS["host"], + "port": settings.REDIS["port"], + } + if "password" in settings.REDIS: + kwargs["password"] = settings.REDIS["password"] + if "db" in settings.REDIS: + kwargs["db"] = settings.REDIS["db"] + + pool = redis.ConnectionPool(**kwargs) + return redis.StrictRedis(connection_pool=pool) + + +CLIENT_GETTER = { + "replication": get_client_through_sentinel, + "cluster": get_cluster_client, + "single": get_single_client, +} + + +class PipelineConfig(AppConfig): + name = "pipeline" + verbose_name = "Pipeline" + + def ready(self): + from pipeline.signals.handlers import pipeline_template_post_save_handler # noqa + from pipeline.validators.handlers import post_new_end_event_register_handler # noqa + + # init redis pool + if hasattr(settings, "REDIS"): + mode = settings.REDIS.get("mode") or "single" + try: + settings.redis_inst = CLIENT_GETTER[mode]() + except Exception: + # fall back to single node mode + logger.error("redis client init error: %s" % traceback.format_exc()) + elif ( + getattr(settings, "PIPELINE_DATA_BACKEND", None) + == "pipeline.engine.core.data.redis_backend.RedisDataBackend" + ): + logger.error("can not find REDIS in settings!") + + # avoid big flow pickle raise maximum recursion depth exceeded error + sys.setrecursionlimit(10000) diff --git a/runtime/bamboo-pipeline/pipeline/builder/__init__.py b/runtime/bamboo-pipeline/pipeline/builder/__init__.py new file mode 100644 index 00000000..01852d18 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .builder import * # noqa +from .flow import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/builder/builder.py b/runtime/bamboo-pipeline/pipeline/builder/builder.py new file mode 100644 index 00000000..41e8fe93 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/builder.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import copy +import queue + +from pipeline.builder.flow.data import Data, Params +from pipeline.builder.flow.event import ExecutableEndEvent +from pipeline.core.constants import PE +from pipeline.parser.utils import replace_all_id +from pipeline.utils.uniqid import uniqid + +__all__ = ["build_tree"] + +__skeleton = { + PE.id: None, + PE.start_event: None, + PE.end_event: None, + PE.activities: {}, + PE.gateways: {}, + PE.flows: {}, + PE.data: {PE.inputs: {}, PE.outputs: {}}, +} + +__node_type = { + PE.ServiceActivity: PE.activities, + PE.SubProcess: PE.activities, + PE.EmptyEndEvent: PE.end_event, + PE.EmptyStartEvent: PE.start_event, + PE.ParallelGateway: PE.gateways, + PE.ConditionalParallelGateway: PE.gateways, + PE.ExclusiveGateway: PE.gateways, + PE.ConvergeGateway: PE.gateways, +} + +__start_elem = {PE.EmptyStartEvent} + +__end_elem = {PE.EmptyEndEvent} + +__multiple_incoming_type = { + PE.ServiceActivity, + PE.ConvergeGateway, + PE.EmptyEndEvent, + PE.ParallelGateway, + PE.ConditionalParallelGateway, + PE.ExclusiveGateway, + PE.SubProcess, +} + +__incoming = "__incoming" + + +def build_tree(start_elem, id=None, data=None, replace_id=False): + tree = copy.deepcopy(__skeleton) + elem_queue = queue.Queue() + processed_elem = set() + + tree[__incoming] = {} + elem_queue.put(start_elem) + + while not elem_queue.empty(): + # get elem + elem = elem_queue.get() + + # update node when we meet again + if elem.id in processed_elem: + __update(tree, elem) + continue + + # add to queue + for e in elem.outgoing: + elem_queue.put(e) + + # mark as processed + processed_elem.add(elem.id) + + # tree grow + __grow(tree, elem) + + del tree[__incoming] + tree[PE.id] = id or uniqid() + user_data = data.to_dict() if isinstance(data, Data) else data + tree[PE.data] = user_data or tree[PE.data] + if replace_id: + replace_all_id(tree) + return tree + + +def __update(tree, elem): + node_type = __node_type[elem.type()] + node = tree[node_type] if node_type == PE.end_event 
else tree[node_type][elem.id] + node[PE.incoming] = tree[__incoming][elem.id] + + +def __grow(tree, elem): + if elem.type() in __start_elem: + outgoing = uniqid() + tree[PE.start_event] = { + PE.incoming: "", + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.id: elem.id, + PE.name: elem.name, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() in __end_elem or isinstance(elem, ExecutableEndEvent): + tree[PE.end_event] = { + PE.incoming: tree[__incoming][elem.id], + PE.outgoing: "", + PE.type: elem.type(), + PE.id: elem.id, + PE.name: elem.name, + } + + elif elem.type() == PE.ServiceActivity: + outgoing = uniqid() + + tree[PE.activities][elem.id] = { + PE.incoming: tree[__incoming][elem.id], + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.id: elem.id, + PE.name: elem.name, + PE.error_ignorable: elem.error_ignorable, + PE.timeout: elem.timeout, + PE.skippable: elem.skippable, + PE.retryable: elem.retryable, + PE.component: elem.component_dict(), + PE.optional: False, + PE.failure_handler: elem.failure_handler, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() == PE.SubProcess: + outgoing = uniqid() + + subprocess_param = elem.params.to_dict() if isinstance(elem.params, Params) else elem.params + + subprocess = { + PE.id: elem.id, + PE.incoming: tree[__incoming][elem.id], + PE.name: elem.name, + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.params: subprocess_param, + } + + if elem.template_id: + subprocess[PE.template_id] = elem.template_id + else: + subprocess[PE.pipeline] = build_tree( + start_elem=elem.start, id=elem.id, data=elem.data, replace_id=elem.replace_id + ) + + tree[PE.activities][elem.id] = subprocess + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + elif elem.type() == PE.ParallelGateway: + outgoing = [uniqid() for _ in range(len(elem.outgoing))] + + tree[PE.gateways][elem.id] = { + PE.id: elem.id, + PE.incoming: tree[__incoming][elem.id], + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.name: elem.name, + } + + for i, next_elem in enumerate(elem.outgoing): + __grow_flow(tree, outgoing[i], elem, next_elem) + + elif elem.type() in {PE.ExclusiveGateway, PE.ConditionalParallelGateway}: + outgoing = [uniqid() for _ in range(len(elem.outgoing))] + + tree[PE.gateways][elem.id] = { + PE.id: elem.id, + PE.incoming: tree[__incoming][elem.id], + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.name: elem.name, + PE.conditions: elem.link_conditions_with(outgoing), + } + + for i, next_elem in enumerate(elem.outgoing): + __grow_flow(tree, outgoing[i], elem, next_elem) + + elif elem.type() == PE.ConvergeGateway: + outgoing = uniqid() + + tree[PE.gateways][elem.id] = { + PE.id: elem.id, + PE.incoming: tree[__incoming][elem.id], + PE.outgoing: outgoing, + PE.type: elem.type(), + PE.name: elem.name, + } + + next_elem = elem.outgoing[0] + __grow_flow(tree, outgoing, elem, next_elem) + + else: + raise Exception() + + +def __grow_flow(tree, outgoing, elem, next_element): + tree[PE.flows][outgoing] = {PE.is_default: False, PE.source: elem.id, PE.target: next_element.id, PE.id: outgoing} + if next_element.type() in __multiple_incoming_type: + tree[__incoming].setdefault(next_element.id, []).append(outgoing) + else: + tree[__incoming][next_element.id] = outgoing diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/__init__.py b/runtime/bamboo-pipeline/pipeline/builder/flow/__init__.py new file mode 100644 index 00000000..3d636925 
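`build_tree` above is the heart of the builder: it walks the element graph breadth-first from the start event, fills in the `activities`, `gateways`, and `flows` sections of the skeleton, and patches nodes with multiple incoming flows on revisit via `__update`. Flow ids are generated lazily with `uniqid()` as each node grows. A minimal sketch of driving it directly; the parallel layout and the component code are illustrative only:

```python
from pipeline.builder import (
    ConvergeGateway,
    EmptyEndEvent,
    EmptyStartEvent,
    ParallelGateway,
    ServiceActivity,
    build_tree,
)

start = EmptyStartEvent()
pg = ParallelGateway()
act_1 = ServiceActivity(component_code="example_component")  # illustrative code
act_2 = ServiceActivity(component_code="example_component")
cg = ConvergeGateway()
end = EmptyEndEvent()

# connect() fans the gateway out to both activities,
# converge() joins every open branch onto the converge gateway
start.extend(pg).connect(act_1, act_2).converge(cg).extend(end)

tree = build_tree(start)
# the skeleton filled in above: id, start_event, end_event,
# activities, gateways, flows, data
print(sorted(tree.keys()))
```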
--- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .activity import * # noqa +from .event import * # noqa +from .gateway import * # noqa +from .data import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/activity.py b/runtime/bamboo-pipeline/pipeline/builder/flow/activity.py new file mode 100644 index 00000000..90b41c68 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/activity.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.builder.flow.base import * # noqa +from pipeline.utils.collections import FancyDict + +__all__ = ["ServiceActivity", "SubProcess"] + + +class ServiceActivity(Element): + def __init__( + self, + component_code=None, + failure_handler=None, + error_ignorable=False, + timeout=None, + skippable=True, + retryable=True, + *args, + **kwargs + ): + self.component = FancyDict({"code": component_code, "inputs": FancyDict({})}) + self.failure_handler = ( + "{module}.{name}".format(module=failure_handler.__module__, name=failure_handler.__name__) + if failure_handler + else None + ) + self.error_ignorable = error_ignorable + self.timeout = timeout + self.skippable = skippable + self.retryable = retryable + super(ServiceActivity, self).__init__(*args, **kwargs) + + def type(self): + return PE.ServiceActivity + + def component_dict(self): + return { + "code": self.component.code, + "inputs": {key: var.to_dict() for key, var in list(self.component.inputs.items())}, + } + + +class SubProcess(Element): + def __init__( + self, + start=None, + data=None, + params=None, + global_outputs=None, + replace_id=False, + template_id=None, + *args, + **kwargs + ): + self.start = start + self.data = data + self.params = params or {} + self.replace_id = replace_id + self.global_outputs = FancyDict(global_outputs or {}) + self.template_id = template_id + super(SubProcess, self).__init__(*args, **kwargs) + + def type(self): + return PE.SubProcess diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/base.py b/runtime/bamboo-pipeline/pipeline/builder/flow/base.py new file mode 100644 index 00000000..f9e21c2e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/base.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.core.constants import PE +from pipeline.utils.uniqid import uniqid + +__all__ = ["Element", "PE"] + + +class Element(object): + def __init__(self, id=None, name=None, outgoing=None): + self.id = id or uniqid() + self.name = name + self.outgoing = outgoing or [] + + def extend(self, element): + """ + build a connection from self to element and return element + :param element: target + :rtype: Element + """ + self.outgoing.append(element) + return element + + def connect(self, *args): + """ + build connections from self to elements in args and return self + :param args: target elements + :rtype: Element + """ + for e in args: + self.outgoing.append(e) + return self + + def converge(self, element): + """ + converge all connection those diverge from self to element and return element + :param element: target + :rtype: Element + """ + for e in self.outgoing: + e.tail().connect(element) + return element + + def to(self, element): + return element + + def tail(self): + """ + get tail element for self + :rtype: Element + """ + is_tail = len(self.outgoing) == 0 + e = self + + while not is_tail: + e = e.outgoing[0] + is_tail = len(e.outgoing) == 0 + + return e + + def type(self): + raise NotImplementedError() + + def __eq__(self, other): + return self.id == other.id + + def __repr__(self): + return "<{cls} {name}:{id}>".format(cls=type(self).__name__, name=self.name, id=self.id) diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/data.py b/runtime/bamboo-pipeline/pipeline/builder/flow/data.py new file mode 100644 index 00000000..e121fe53 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/data.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.core.constants import PE +from pipeline.utils.collections import FancyDict + + +class Data(object): + def __init__(self, inputs=None, outputs=None): + self.inputs = FancyDict(inputs or {}) + self.outputs = outputs or [] + + def to_dict(self): + base = {"inputs": {}, "outputs": self.outputs} + + for key, value in list(self.inputs.items()): + base["inputs"][key] = value.to_dict() if isinstance(value, Var) else value + + return base + + +class Params(object): + def __init__(self, params=None): + self.params = FancyDict(params or {}) + + def to_dict(self): + base = {} + + for key, value in list(self.params.items()): + base[key] = value.to_dict() if isinstance(value, Var) else value + + return base + + +class Var(object): + PLAIN = PE.plain + SPLICE = PE.splice + LAZY = PE.lazy + + def __init__(self, type, value, custom_type=None): + self.type = type + self.value = value + self.custom_type = custom_type + + def to_dict(self): + base = {"type": self.type, "value": self.value} + if self.type == self.LAZY: + base["custom_type"] = self.custom_type + + return base + + +class DataInput(Var): + def __init__(self, *args, **kwargs): + super(DataInput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(DataInput, self).to_dict() + base["is_param"] = True + return base + + +class NodeOutput(Var): + def __init__(self, source_act, source_key, *args, **kwargs): + self.source_act = source_act + self.source_key = source_key + kwargs["value"] = None + super(NodeOutput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(NodeOutput, self).to_dict() + base["source_act"] = self.source_act + base["source_key"] = self.source_key + return base + + +class RewritableNodeOutput(Var): + def __init__(self, source_act, *args, **kwargs): + self.source_act = source_act + kwargs["value"] = None + super(RewritableNodeOutput, self).__init__(*args, **kwargs) + + def to_dict(self): + base = super(RewritableNodeOutput, self).to_dict() + base["source_act"] = self.source_act + return base diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/event.py b/runtime/bamboo-pipeline/pipeline/builder/flow/event.py new file mode 100644 index 00000000..3be3d1a3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/event.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.builder.flow.base import * # noqa + +__all__ = ["EmptyEndEvent", "EmptyStartEvent", "ExecutableEndEvent"] + + +class EmptyStartEvent(Element): + def type(self): + return PE.EmptyStartEvent + + +class EmptyEndEvent(Element): + def type(self): + return PE.EmptyEndEvent + + +class ExecutableEndEvent(Element): + def __init__(self, type, **kwargs): + self._type = type + super(ExecutableEndEvent, self).__init__(**kwargs) + + def type(self): + return self._type diff --git a/runtime/bamboo-pipeline/pipeline/builder/flow/gateway.py b/runtime/bamboo-pipeline/pipeline/builder/flow/gateway.py new file mode 100644 index 00000000..e9d2d034 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/builder/flow/gateway.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.builder.flow.base import * # noqa + +__all__ = ["ParallelGateway", "ExclusiveGateway", "ConvergeGateway", "ConditionalParallelGateway"] + + +class ParallelGateway(Element): + def type(self): + return PE.ParallelGateway + + +class ConditionGateway(Element): + def __init__(self, conditions=None, *args, **kwargs): + self.conditions = conditions or {} + super(ConditionGateway, self).__init__(*args, **kwargs) + + def add_condition(self, index, evaluate): + self.conditions[index] = evaluate + + def link_conditions_with(self, outgoing): + conditions = {} + for i, out in enumerate(outgoing): + conditions[out] = {PE.evaluate: self.conditions[i]} + + return conditions + + +class ConditionalParallelGateway(ConditionGateway): + def type(self): + return PE.ConditionalParallelGateway + + +class ExclusiveGateway(ConditionGateway): + def type(self): + return PE.ExclusiveGateway + + +class ConvergeGateway(Element): + def type(self): + return PE.ConvergeGateway diff --git a/runtime/bamboo-pipeline/pipeline/celery/__init__.py b/runtime/bamboo-pipeline/pipeline/celery/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/celery/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/celery/queues.py b/runtime/bamboo-pipeline/pipeline/celery/queues.py new file mode 100644 index 00000000..1a5ef20d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/celery/queues.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class ScalableQueues(object): + _queues = {} + + @classmethod + def queues(cls): + return cls._queues + + @classmethod + def add(cls, name, routing_key="", queue_arguments=None): + queue_arguments = queue_arguments or {} + cls._queues[name] = {"name": name, "routing_key": routing_key or name, "queue_arguments": queue_arguments} + + @classmethod + def has_queue(cls, queue): + return queue in cls._queues + + @classmethod + def routing_key_for(cls, queue): + return cls._queues[queue]["routing_key"] diff --git a/runtime/bamboo-pipeline/pipeline/celery/settings.py b/runtime/bamboo-pipeline/pipeline/celery/settings.py new file mode 100644 index 00000000..710d5d4b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/celery/settings.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import copy + +from kombu import Exchange, Queue + +from pipeline.celery.queues import ScalableQueues +from pipeline.constants import PIPELINE_MAX_PRIORITY + +default_exchange = Exchange("default", type="direct") + +# 设置时区 +CELERY_TIMEZONE = "Asia/Shanghai" +# 启动时区设置 +CELERY_ENABLE_UTC = False + +# new priority queues +PUSH_DEFAULT_QUEUE_NAME = "pipeline_priority" +PUSH_DEFAULT_ROUTING_KEY = "pipeline_push_priority" + +SCHEDULE_DEFAULT_QUEUE_NAME = "service_schedule_priority" +SCHEDULE_DEFAULT_ROUTING_KEY = "schedule_service_priority" + +ADDITIONAL_DEFAULT_QUEUE_NAME = "pipeline_additional_task_priority" +ADDITIONAL_DEFAULT_ROUTING_KEY = "additional_task_priority" + +STATISTICS_PRIORITY_QUEUE_NAME = "pipeline_statistics_priority" +STATISTICS_PRIORITY_ROUTING_KEY = "pipeline_statistics_priority" + +SCALABLE_QUEUES_CONFIG = { + PUSH_DEFAULT_QUEUE_NAME: {"name": PUSH_DEFAULT_QUEUE_NAME, "routing_key": PUSH_DEFAULT_ROUTING_KEY}, + SCHEDULE_DEFAULT_QUEUE_NAME: {"name": SCHEDULE_DEFAULT_QUEUE_NAME, "routing_key": SCHEDULE_DEFAULT_ROUTING_KEY}, +} + +PIPELINE_PRIORITY_ROUTING = { + "queue": PUSH_DEFAULT_QUEUE_NAME, + "routing_key": PUSH_DEFAULT_ROUTING_KEY, +} + +PIPELINE_SCHEDULE_PRIORITY_ROUTING = { + "queue": SCHEDULE_DEFAULT_QUEUE_NAME, + "routing_key": SCHEDULE_DEFAULT_ROUTING_KEY, +} + +PIPELINE_ADDITIONAL_PRIORITY_ROUTING = { + "queue": ADDITIONAL_DEFAULT_QUEUE_NAME, + "routing_key": ADDITIONAL_DEFAULT_ROUTING_KEY, +} + +PIPELINE_STATISTICS_PRIORITY_ROUTING = { + "queue": STATISTICS_PRIORITY_QUEUE_NAME, + "routing_key": STATISTICS_PRIORITY_ROUTING_KEY, +} + +CELERY_ROUTES = { + # schedule + "pipeline.engine.tasks.service_schedule": PIPELINE_SCHEDULE_PRIORITY_ROUTING, + # pipeline + "pipeline.engine.tasks.batch_wake_up": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.dispatch": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.process_wake_up": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.start": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.wake_from_schedule": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.wake_up": PIPELINE_PRIORITY_ROUTING, + "pipeline.engine.tasks.process_unfreeze": PIPELINE_PRIORITY_ROUTING, + # another + "pipeline.log.tasks.clean_expired_log": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + "pipeline.engine.tasks.node_timeout_check": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + "pipeline.contrib.periodic_task.tasks.periodic_task_start": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + "pipeline.contrib.periodic_task.tasks.bamboo_engine_periodic_task_start": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + "pipeline.engine.tasks.heal_zombie_process": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + "pipeline.engine.tasks.expired_tasks_clean": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, + # statistics + "pipeline.contrib.statistics.tasks.pipeline_post_save_statistics_task": PIPELINE_STATISTICS_PRIORITY_ROUTING, + "pipeline.contrib.statistics.tasks.pipeline_archive_statistics_task": PIPELINE_STATISTICS_PRIORITY_ROUTING, +} + + +class QueueResolver(object): + def __init__(self, queue): + self.queue = queue + + def default_setting_for(self, task, setting_key): + if not isinstance(task, str): + task = task.name + + return CELERY_ROUTES[task][setting_key] + + def resolve_task_routing_key(self, task): + default_key = self.default_setting_for(task, "routing_key") + default_queue = self.default_setting_for(task, "queue") + + if default_queue not in SCALABLE_QUEUES_CONFIG or not self.queue: + return default_key + + return self.resolve_routing_key(default_key) + + def 
resolve_task_queue_name(self, task): + default_queue = self.default_setting_for(task, "queue") + + return self.resolve_queue_name(default_queue) + + def resolve_queue_name(self, default_name): + if not self.queue: + return default_name + + return "{}_{}".format(self.queue, default_name) + + def resolve_routing_key(self, default_key): + if not self.queue: + return default_key + + return "{}_{}".format(ScalableQueues.routing_key_for(self.queue), default_key) + + +USER_QUEUES = [] + +for name, queue in ScalableQueues.queues().items(): + queue_arguments = copy.copy(queue["queue_arguments"]) + queue_arguments["x-max-priority"] = PIPELINE_MAX_PRIORITY + + for config in SCALABLE_QUEUES_CONFIG.values(): + resolver = QueueResolver(name) + USER_QUEUES.append( + Queue( + resolver.resolve_queue_name(config["name"]), + default_exchange, + routing_key=resolver.resolve_routing_key(config["routing_key"]), + queue_arguments=queue_arguments, + ) + ) + +CELERY_QUEUES = [ + # user queues + *USER_QUEUES, # noqa + # keep old queue to process message left in broker, remove on next version + Queue("default", default_exchange, routing_key="default"), + Queue("pipeline", default_exchange, routing_key="pipeline_push"), + Queue("service_schedule", default_exchange, routing_key="schedule_service"), + Queue("pipeline_additional_task", default_exchange, routing_key="additional_task"), + # priority queues + Queue( + PUSH_DEFAULT_QUEUE_NAME, + default_exchange, + routing_key=PUSH_DEFAULT_ROUTING_KEY, + queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, + ), + Queue( + SCHEDULE_DEFAULT_QUEUE_NAME, + default_exchange, + routing_key=SCHEDULE_DEFAULT_ROUTING_KEY, + queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, + ), + Queue( + ADDITIONAL_DEFAULT_QUEUE_NAME, + default_exchange, + routing_key=ADDITIONAL_DEFAULT_ROUTING_KEY, + queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, + ), + Queue( + STATISTICS_PRIORITY_QUEUE_NAME, + default_exchange, + routing_key=STATISTICS_PRIORITY_ROUTING_KEY, + queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, + ), +] + +CELERY_DEFAULT_QUEUE = "default" +CELERY_DEFAULT_EXCHANGE = "default" +CELERY_DEFAULT_ROUTING_KEY = "default" + +CELERYBEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler" + +CELERY_ACCEPT_CONTENT = ["json", "pickle", "msgpack", "yaml"] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/__init__.py b/runtime/bamboo-pipeline/pipeline/component_framework/__init__.py new file mode 100644 index 00000000..5c691bb7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
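A sketch of the name derivation `QueueResolver` performs for a registered custom queue. Note that `ScalableQueues.add` must run before `pipeline.celery.settings` is imported for `USER_QUEUES` to include the queue; the resolver methods themselves only consult the registry at call time. The queue name is illustrative:

```python
from pipeline.celery.queues import ScalableQueues
from pipeline.celery.settings import QueueResolver

ScalableQueues.add(name="my_queue")  # illustrative queue name

resolver = QueueResolver("my_queue")
# default names get prefixed with the custom queue name / routing key
print(resolver.resolve_queue_name("pipeline_priority"))        # my_queue_pipeline_priority
print(resolver.resolve_routing_key("pipeline_push_priority"))  # my_queue_pipeline_push_priority
```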
+""" + +default_app_config = "pipeline.component_framework.apps.ComponentFrameworkConfig" diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/admin.py b/runtime/bamboo-pipeline/pipeline/component_framework/admin.py new file mode 100644 index 00000000..339cc051 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/admin.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline.component_framework import models + + +@admin.register(models.ComponentModel) +class ComponentModelAdmin(admin.ModelAdmin): + list_display = ["id", "code", "name", "status"] + search_fields = ["code", "name"] + list_filter = ["status"] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/apps.py b/runtime/bamboo-pipeline/pipeline/component_framework/apps.py new file mode 100644 index 00000000..ebcd3000 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/apps.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import sys + +from django.apps import AppConfig +from django.db.utils import InternalError, OperationalError, ProgrammingError + +from pipeline.conf import settings +from pipeline.component_framework import context +from pipeline.utils.register import autodiscover_collections + +logger = logging.getLogger("root") + +DJANGO_MANAGE_CMD = "manage.py" +INIT_PASS_TRIGGER = {"migrate"} + + +class ComponentFrameworkConfig(AppConfig): + name = "pipeline.component_framework" + verbose_name = "PipelineComponentFramework" + + def ready(self): + """ + @summary: 注册公共部分和当前RUN_VER下的标准插件到数据库 + @return: + """ + + if sys.argv and sys.argv[0] == DJANGO_MANAGE_CMD: + try: + command = sys.argv[1] + except IndexError: + return + else: + if command in INIT_PASS_TRIGGER: + print("ignore components init for command: {}".format(sys.argv)) + return + + for path in settings.COMPONENT_AUTO_DISCOVER_PATH: + autodiscover_collections(path) + + if context.skip_update_comp_models(): + return + + from pipeline.component_framework.models import ComponentModel + from pipeline.component_framework.library import ComponentLibrary + + try: + print("update component models") + ComponentModel.objects.all().update(status=False) + for code in ComponentLibrary.codes(): + ComponentModel.objects.filter(code=code, version__in=ComponentLibrary.versions(code)).update( + status=True + ) + print("update component models finish") + except InternalError as e: + # version field migration + logger.exception(e) + except (ProgrammingError, OperationalError) as e: + # first migrate + logger.exception(e) diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/base.py b/runtime/bamboo-pipeline/pipeline/component_framework/base.py new file mode 100644 index 00000000..65004164 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/base.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import importlib +import logging + +from django.db.utils import ProgrammingError + +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.component_framework import context +from pipeline.core.flow.activity import Service + +logger = logging.getLogger(__name__) + + +class ComponentMeta(type): + def __new__(cls, name, bases, attrs): + super_new = super(ComponentMeta, cls).__new__ + + # Also ensure initialization is only performed for subclasses of Model + # (excluding Model class itself). 
+ parents = [b for b in bases if isinstance(b, ComponentMeta)] + if not parents: + return super_new(cls, name, bases, attrs) + + # Create the class + module_name = attrs.pop("__module__") + new_class = super_new(cls, name, bases, {"__module__": module_name}) + module = importlib.import_module(new_class.__module__) + + # Add all attributes to the class + attrs.setdefault("desc", "") + attrs.setdefault("base", "") + for obj_name, obj in list(attrs.items()): + setattr(new_class, obj_name, obj) + + # check + if not getattr(new_class, "name", None): + raise ValueError("component %s name can't be empty" % new_class.__name__) + + if not getattr(new_class, "code", None): + raise ValueError("component %s code can't be empty" % new_class.__name__) + + if not getattr(new_class, "bound_service", None) or not issubclass(new_class.bound_service, Service): + raise ValueError("component %s service can't be empty and must be subclass of Service" % new_class.__name__) + + if not getattr(new_class, "form", None): + setattr(new_class, "form", None) + + if not getattr(new_class, "output_form", None): + setattr(new_class, "output_form", None) + + if not getattr(new_class, "version", None): + setattr(new_class, "version", LEGACY_PLUGINS_VERSION) + + # category/group name + group_name = getattr(module, "__group_name__", new_class.__module__.split(".")[-1].title()) + setattr(new_class, "group_name", group_name) + new_name = "{}-{}".format(group_name, new_class.name) + + # category/group name + group_icon = getattr(module, "__group_icon__", "") + setattr(new_class, "group_icon", group_icon) + + if not getattr(module, "__register_ignore__", False): + ComponentLibrary.register_component( + component_code=new_class.code, version=new_class.version, component_cls=new_class + ) + + if context.skip_update_comp_models(): + return new_class + + try: + print("update {} component model".format(new_class.code)) + ComponentModel.objects.update_or_create( + code=new_class.code, version=new_class.version, defaults={"name": new_name, "status": __debug__} + ) + except Exception as e: + if not isinstance(e, ProgrammingError): + logging.exception(e) + + return new_class diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/component.py b/runtime/bamboo-pipeline/pipeline/component_framework/component.py new file mode 100644 index 00000000..1707b088 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/component.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
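`ComponentMeta` above validates and registers every concrete `Component` subclass at class-creation time, defaulting `version` to `LEGACY_PLUGINS_VERSION`. A minimal sketch of a class that satisfies its checks, assuming a configured Django environment and the `Service.execute(self, data, parent_data)` signature from `pipeline.core.flow.activity`, which is outside this diff; the service body and code are illustrative:

```python
from pipeline.component_framework.component import Component
from pipeline.core.flow.activity import Service


class HelloWorldService(Service):
    def execute(self, data, parent_data):
        # illustrative no-op body; returning True marks the node successful
        return True


class HelloWorldComponent(Component):
    # name and code must be non-empty and bound_service must subclass
    # Service, otherwise ComponentMeta raises ValueError at class creation
    name = "hello world component"
    code = "hello_world"  # illustrative code
    bound_service = HelloWorldService
```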
+""" + +from pipeline.component_framework.base import ComponentMeta +from pipeline.core.data.base import DataObject +from pipeline.core.data.converter import get_variable +from pipeline.exceptions import ComponentDataLackException + + +class Component(object, metaclass=ComponentMeta): + def __init__(self, data_dict): + self.data_dict = data_dict + + @classmethod + def outputs_format(cls): + outputs = cls.bound_service().outputs() + outputs = [oi.as_dict() for oi in outputs] + return outputs + + @classmethod + def inputs_format(cls): + inputs = cls.bound_service().inputs() + inputs = [ii.as_dict() for ii in inputs] + return inputs + + @classmethod + def _get_item_schema(cls, type, key): + items = getattr(cls.bound_service(), type)() + for item in items: + if item.key == key: + return item + + return None + + @classmethod + def get_output_schema(cls, key): + return cls._get_item_schema(type="outputs", key=key).schema + + @classmethod + def get_input_schema(cls, key): + return cls._get_item_schema(type="inputs", key=key).schema + + @classmethod + def form_is_embedded(cls): + return getattr(cls, "embedded_form", False) + + def clean_execute_data(self, context): + """ + @summary: hook for subclass of Component to clean execute data with context + @param context: + @return: + """ + return self.data_dict + + def data_for_execution(self, context, pipeline_data): + data_dict = self.clean_execute_data(context) + inputs = {} + + for key, tag_info in list(data_dict.items()): + if tag_info is None: + raise ComponentDataLackException("Lack of inputs: %s" % key) + + inputs[key] = get_variable(key, tag_info, context, pipeline_data) + + return DataObject(inputs) + + def service(self): + return self.bound_service() diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/constant.py b/runtime/bamboo-pipeline/pipeline/component_framework/constant.py new file mode 100644 index 00000000..5f9016e2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/constant.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import copy + +from pipeline.core.data.expression import ConstantTemplate, deformat_constant_key +from pipeline.exceptions import ConstantNotExistException, ConstantReferenceException +from pipeline.utils.graph import Graph + + +class ConstantPool(object): + def __init__(self, pool, lazy=False): + self.raw_pool = pool + self.pool = None + + if not lazy: + self.resolve() + + def resolve(self): + if self.pool: + return + + refs = self.get_reference_info() + + nodes = list(refs.keys()) + flows = [] + for node in nodes: + for ref in refs[node]: + if ref in nodes: + flows.append([node, ref]) + graph = Graph(nodes, flows) + # circle reference check + trace = graph.get_cycle() + if trace: + raise ConstantReferenceException("Exist circle reference between constants: %s" % "->".join(trace)) + + # resolve the constants reference + pool = {} + temp_pool = copy.deepcopy(self.raw_pool) + # get those constants which are referenced only(not refer other constants) + referenced_only = ConstantPool._get_referenced_only(temp_pool) + while temp_pool: + for ref in referenced_only: + value = temp_pool[ref]["value"] + + # resolve those constants which reference the 'ref' + for key, info in list(temp_pool.items()): + maps = {deformat_constant_key(ref): value} + temp_pool[key]["value"] = ConstantTemplate(info["value"]).resolve_data(maps) + + pool[ref] = temp_pool[ref] + temp_pool.pop(ref) + referenced_only = ConstantPool._get_referenced_only(temp_pool) + + self.pool = pool + + @staticmethod + def _get_referenced_only(pool): + referenced_only = [] + for key, info in list(pool.items()): + reference = ConstantTemplate(info["value"]).get_reference() + formatted_reference = ["${%s}" % ref for ref in reference] + reference = [c for c in formatted_reference if c in pool] + if not reference: + referenced_only.append(key) + return referenced_only + + def get_reference_info(self, strict=True): + refs = {} + for key, info in list(self.raw_pool.items()): + reference = ConstantTemplate(info["value"]).get_reference() + formatted_reference = ["${%s}" % ref for ref in reference] + ref = [c for c in formatted_reference if not strict or c in self.raw_pool] + refs[key] = ref + return refs + + def resolve_constant(self, constant): + if not self.pool: + self.resolve() + + if constant not in self.pool: + raise ConstantNotExistException("constant %s not exist." % constant) + return self.pool[constant]["value"] + + def resolve_value(self, val): + if not self.pool: + self.resolve() + + maps = {deformat_constant_key(key): self.pool[key]["value"] for key in self.pool} + + return ConstantTemplate(val).resolve_data(maps) diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/constants.py b/runtime/bamboo-pipeline/pipeline/component_framework/constants.py new file mode 100644 index 00000000..afdfe246 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/constants.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +LEGACY_PLUGINS_VERSION = "legacy" diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/context.py b/runtime/bamboo-pipeline/pipeline/component_framework/context.py new file mode 100644 index 00000000..590f9af8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/context.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.conf import settings +from pipeline.utils import env + +UPDATE_TRIGGER = "update_component_models" + + +def skip_update_comp_models(): + if settings.AUTO_UPDATE_COMPONENT_MODELS: + return False + + django_command = env.get_django_command() + if django_command is None: + return True + + return django_command != UPDATE_TRIGGER diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/library.py b/runtime/bamboo-pipeline/pipeline/component_framework/library.py new file mode 100644 index 00000000..6083147c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/library.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.exceptions import ComponentNotExistException + + +class ComponentLibrary(object): + components = {} + + def __new__(cls, *args, **kwargs): + if args: + component_code = args[0] + else: + component_code = kwargs.get("component_code", None) + version = kwargs.get("version", None) + if not component_code: + raise ValueError( + "please pass a component_code in args or kwargs: " + "ComponentLibrary('code') or ComponentLibrary(component_code='code')" + ) + return cls.get_component_class(component_code=component_code, version=version) + + @classmethod + def component_list(cls): + components = [] + for _, component_map in cls.components.items(): + components.extend(component_map.values()) + + return components + + @classmethod + def get_component_class(cls, component_code, version=None): + version = version or LEGACY_PLUGINS_VERSION + component_cls = cls.components.get(component_code, {}).get(version) + if component_cls is None: + raise ComponentNotExistException("component %s does not exist." % component_code) + return component_cls + + @classmethod + def get_component(cls, component_code, data_dict, version=None): + version = version or LEGACY_PLUGINS_VERSION + return cls.get_component_class(component_code=component_code, version=version)(data_dict) + + @classmethod + def register_component(cls, component_code, version, component_cls): + cls.components.setdefault(component_code, {})[version] = component_cls + + @classmethod + def codes(cls): + return cls.components.keys() + + @classmethod + def versions(cls, code): + return cls.components.get(code, {}).keys() diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/management/__init__.py b/runtime/bamboo-pipeline/pipeline/component_framework/management/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/management/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/__init__.py b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/run_component.py b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/run_component.py new file mode 100644 index 00000000..84e7eb7e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/run_component.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import sys + +import ujson as json +from django.core.management import BaseCommand + +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.runner import ComponentRunner +from pipeline.exceptions import ComponentNotExistException + + +class Command(BaseCommand): + + help = "Run the specified component" + + def add_arguments(self, parser): + parser.add_argument("code", nargs=1, type=str) + parser.add_argument("-d", dest="data", nargs="?", type=str) + parser.add_argument("-p", dest="parent_data", nargs="?", type=str) + parser.add_argument("-c", dest="callbackdata", nargs="?", type=str) + + def handle(self, *args, **options): + code = options["code"][0] + data = options["data"] + parent_data = options["parent_data"] + callbackdata = options["callbackdata"] + + try: + data = json.loads(data) if data else {} + except Exception: + sys.stdout.write("data is not a valid json.\n") + exit(1) + + try: + parent_data = json.loads(parent_data) if parent_data else {} + except Exception: + sys.stdout.write("parent_data is not a valid json.\n") + exit(1) + + try: + callbackdata = json.loads(callbackdata) if callbackdata else {} + except Exception: + sys.stdout.write("callbackdata is not a valid json.\n") + exit(1) + + try: + component_cls = ComponentLibrary.get_component_class(code) + except ComponentNotExistException: + sys.stdout.write("component [{}] does not exist.\n".format(code)) + exit(1) + + runner = ComponentRunner(component_cls) + runner.run(data, parent_data, callbackdata) diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/update_component_models.py b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/update_component_models.py new file mode 100644 index 00000000..b7f1787e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/management/commands/update_component_models.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 
(BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.core.management import BaseCommand + + +class Command(BaseCommand): + def handle(self, *args, **options): + # do not need to do anything, the app ready will handle model update work + print("component models update finished.") diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0001_initial.py new file mode 100644 index 00000000..39c4f0e1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0001_initial.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import models, migrations + + +class Migration(migrations.Migration): + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="ComponentModel", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("code", models.CharField(unique=True, max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), + ("name", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u540d\u79f0")), + ("status", models.BooleanField(default=True, verbose_name="\u7ec4\u4ef6\u662f\u5426\u53ef\u7528")), + ], + options={"ordering": ["-id"], "verbose_name": "\u7ec4\u4ef6", "verbose_name_plural": "\u7ec4\u4ef6"}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0002_delete_componentmodel.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0002_delete_componentmodel.py new file mode 100644 index 00000000..153c86c3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0002_delete_componentmodel.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import models, migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("component_framework", "0001_initial"), + ] + + operations = [ + migrations.DeleteModel(name="ComponentModel",), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0003_componentmodel.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0003_componentmodel.py new file mode 100644 index 00000000..3dbd40fe --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0003_componentmodel.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +# Generated by Django 1.11.2 on 2017-11-15 12:09 + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("component_framework", "0002_delete_componentmodel"), + ] + + operations = [ + migrations.CreateModel( + name="ComponentModel", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("code", models.CharField(max_length=255, unique=True, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), + ("name", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u540d\u79f0")), + ("status", models.BooleanField(default=True, verbose_name="\u7ec4\u4ef6\u662f\u5426\u53ef\u7528")), + ], + options={"ordering": ["-id"], "verbose_name": "\u7ec4\u4ef6", "verbose_name_plural": "\u7ec4\u4ef6"}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0004_auto_20180413_1800.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0004_auto_20180413_1800.py new file mode 100644 index 00000000..4a2fb04e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0004_auto_20180413_1800.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("component_framework", "0003_componentmodel"), + ] + + operations = [ + migrations.AlterModelOptions( + name="componentmodel", + options={ + "ordering": ["-id"], + "verbose_name": "\u7ec4\u4ef6 Component", + "verbose_name_plural": "\u7ec4\u4ef6 Component", + }, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0005_auto_20190723_1806.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0005_auto_20190723_1806.py new file mode 100644 index 00000000..7fb3029c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0005_auto_20190723_1806.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("component_framework", "0004_auto_20180413_1800"), + ] + + operations = [ + migrations.AddField( + model_name="componentmodel", + name="version", + field=models.CharField(default=b"legacy", max_length=64, verbose_name="\u7ec4\u4ef6\u7248\u672c"), + ), + migrations.AlterField( + model_name="componentmodel", + name="code", + field=models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0006_auto_20200213_0743.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0006_auto_20200213_0743.py new file mode 100644 index 00000000..e915a6ef --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0006_auto_20200213_0743.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-02-13 07:43 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("component_framework", "0005_auto_20190723_1806"), + ] + + operations = [ + migrations.AlterField( + model_name="componentmodel", + name="version", + field=models.CharField(default="legacy", max_length=64, verbose_name="组件版本"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0007_auto_20201112_2244.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0007_auto_20201112_2244.py new file mode 100644 index 00000000..084242b3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/0007_auto_20201112_2244.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("component_framework", "0006_auto_20200213_0743"), + ] + + operations = [ + migrations.AlterField( + model_name="componentmodel", + name="code", + field=models.CharField(db_index=True, max_length=255, verbose_name="组件编码"), + ), + migrations.AlterField( + model_name="componentmodel", + name="version", + field=models.CharField(db_index=True, default="legacy", max_length=64, verbose_name="组件版本"), + ), + migrations.AlterUniqueTogether(name="componentmodel", unique_together=set([("code", "version")]),), + ] diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/models.py b/runtime/bamboo-pipeline/pipeline/component_framework/models.py new file mode 100644 index 00000000..040bd191 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/models.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.component_framework.library import ComponentLibrary + + +class ComponentManager(models.Manager): + def get_component_dict(self): + """ + 获得标准插件对应的dict类型 + :return: + """ + components = self.filter(status=True) + component_dict = {} + for bundle in components: + name = bundle.name.split("-") + group_name = _(name[0]) + name = _(name[1]) + component_dict[bundle.code] = "{}-{}".format(group_name, name) + return component_dict + + def get_component_dicts(self, other_component_list): + """ + :param other_component_list: 结果集 + :param index: 结果集中指标字段 + :return: + """ + components = self.filter(status=True).values("code", "version", "name") + total = components.count() + groups = [] + for comp in components: + version = comp["version"] + # 插件名国际化 + name = comp["name"].split("-") + name = "{}-{}-{}".format(_(name[0]), _(name[1]), version) + code = "{}-{}".format(comp["code"], comp["version"]) + value = 0 + for oth_com_tmp in other_component_list: + if comp["code"] == oth_com_tmp[1] and comp["version"] == oth_com_tmp[2]: + value = oth_com_tmp[0] + groups.append({"code": code, "name": name, "value": value}) + return total, groups + + +class ComponentModel(models.Model): + """ + 注册的组件 + """ + + code = models.CharField(_("组件编码"), max_length=255, db_index=True) + version = models.CharField(_("组件版本"), max_length=64, default=LEGACY_PLUGINS_VERSION, db_index=True) + name = models.CharField(_("组件名称"), max_length=255) + status = models.BooleanField(_("组件是否可用"), default=True) + + objects = ComponentManager() + + class Meta: + verbose_name = _("组件 Component") + verbose_name_plural = _("组件 Component") + ordering = ["-id"] + unique_together = (("code", "version"),) + + def __unicode__(self): + return self.name + + @property + def group_name(self): + return ComponentLibrary.get_component_class(self.code, self.version).group_name + + @property + def group_icon(self): + return ComponentLibrary.get_component_class(self.code, self.version).group_icon diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/runner.py b/runtime/bamboo-pipeline/pipeline/component_framework/runner.py new file mode 100644 index 00000000..59e33aa9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/runner.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import time + +from pipeline.core.data.base import DataObject +from pipeline.utils.uniqid import uniqid + + +def get_console_logger(): + # create logger + logger = logging.getLogger("simple_example") + logger.setLevel(logging.DEBUG) + + # create console handler and set level to debug + ch = logging.StreamHandler() + ch.setLevel(logging.DEBUG) + + # create formatter + formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") + + # add formatter to ch + ch.setFormatter(formatter) + + # add ch to logger + logger.addHandler(ch) + + return logger + + +logger = get_console_logger() + + +class ComponentRunner: + def __init__(self, component_cls): + self.component_cls = component_cls + + def run(self, data, parent_data, callback_data=None): + + service = self.component_cls.bound_service() + + setattr(service, "id", uniqid()) + setattr(service, "logger", logger) + + data_object = DataObject(inputs=data) + parent_data_object = DataObject(inputs=parent_data) + + logger.info( + "Start to run component [{}] with data: {}, parent_data: {}".format( + self.component_cls.code, data_object, parent_data_object + ) + ) + + result = service.execute(data_object, parent_data_object) + + if result is False: + logger.info("Execute return [{}], stop running.".format(result)) + return + + if not service.need_schedule(): + logger.info("Execute return [{}], and component do not need schedule, finish running".format(result)) + return + + if service.interval is None: + logger.info("Start to callback component with callbackdata: {}".format(callback_data)) + result = service.schedule(data_object, parent_data_object, callback_data) + + if result is False: + logger.info("Schedule return [{}], stop running.".format(result)) + return + else: + logger.info("Schedule return [{}], finish running".format(result)) + else: + + schedue_times = 0 + + while not service.is_schedule_finished(): + + schedue_times += 1 + + logger.info( + "Schedule {} with data: {}, parent_data: {}".format(schedue_times, data_object, parent_data_object) + ) + + result = service.schedule(data_object, parent_data_object, None) + + if result is False: + logger.info("Schedule return [{}], stop running.".format(result)) + return + + interval = service.interval.next() + logger.info("Schedule return [{}], wait for next schedule in {}s".format(result, interval)) + time.sleep(interval) + + logger.info("Schedule finished") diff --git a/runtime/bamboo-pipeline/pipeline/component_framework/test.py b/runtime/bamboo-pipeline/pipeline/component_framework/test.py new file mode 100644 index 00000000..2149b7d9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/component_framework/test.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import importlib +import sys +import traceback +from contextlib import contextmanager + +from mock import MagicMock, call, patch + +from pipeline.core.data.base import DataObject +from pipeline.core.flow.io import SimpleItemSchema, ArrayItemSchema, ObjectItemSchema, ItemSchema +from pipeline.utils.uniqid import uniqid + + +@contextmanager +def patch_context(patchers): + for patcher in patchers: + patcher.start() + + yield + + for patcher in patchers: + patcher.stop() + + +class ComponentTestMixin(object): + def component_cls(self): + raise NotImplementedError() + + def cases(self): + raise NotImplementedError() + + def input_output_format_valid(self): + component = self._component_cls({}) + bound_service = component.service() + inputs_format = bound_service.inputs() + self._format_valid(inputs_format, ["name", "key", "type", "schema", "required"]) + outputs_format = bound_service.outputs() + self._format_valid(outputs_format, ["name", "key", "type", "schema"]) + + @property + def _cases(self): + return self.cases() + + @property + def _component_cls(self): + return self.component_cls() + + @property + def _component_cls_name(self): + return self._component_cls.__name__ + + @property + def _failed_cases(self): + return getattr(self, "__failed_cases", None) + + def _format_valid(self, component_format, format_keys): + assert isinstance(component_format, list) + for item in component_format: + assert set(item.as_dict().keys()) == set( + format_keys + ), "item {} is expected to contain attributes {} but {} obtained".format( + item.key, str(format_keys), str(item.as_dict().keys()) + ) + if item.schema is not None: + assert item.type == item.schema.type, "type of {} is expected to be {} but {} obtained".format( + item.key, item.schema.type, item.type + ) + self._item_schema_valid(item.schema) + + def _item_schema_valid(self, item_schema): + common_keys = {"type", "description", "enum"} + assert common_keys.issubset( + set(item_schema.as_dict().keys()) + ), "ItemSchema should contain attributes type, description and enum" + + if isinstance(item_schema, SimpleItemSchema): + return + if isinstance(item_schema, ArrayItemSchema): + assert hasattr(item_schema, "item_schema") and isinstance( + item_schema.item_schema, ItemSchema + ), "ArrayItemSchema should contain attribute item_schema" + self._item_schema_valid(item_schema.item_schema) + return + if isinstance(item_schema, ObjectItemSchema): + assert hasattr(item_schema, "property_schemas") and isinstance( + item_schema.property_schemas, dict + ), "ObjectItemSchema should contain attribute property_schemas with type dict" + for child_item_schema in item_schema.property_schemas.values(): + self._item_schema_valid(child_item_schema) + return + raise AssertionError("item_schema type error: {}".format(item_schema.description)) + + def _format_failure_message(self, no, name, msg): + return "[{component_cls} case {no}] - [{name}] fail: {msg}".format( + component_cls=self._component_cls_name, no=no + 1, name=name, msg=msg + ) + + def _do_case_assert(self, service, method, assertion, no, name, args=None, kwargs=None): + + args = args or [service] + kwargs = kwargs or {} + + data = kwargs.get("data") or args[0] + + result = getattr(service, method)(*args, **kwargs) + + assert_success = result in [None, True] # return none will consider as success + do_continue = not assert_success + + assert_method = "assertTrue" if assert_success else "assertFalse" + + getattr(self, assert_method)( + assertion.success, + msg=self._format_failure_message( + no=no, + 
+    def test_component(self):
+        self.input_output_format_valid()
+
+        component = self._component_cls({})
+
+        for no, case in enumerate(self._cases):
+            try:
+
+                patchers = [patcher.mock_patcher() for patcher in case.patchers]
+
+                with patch_context(patchers):
+
+                    bound_service = component.service()
+
+                    setattr(bound_service, "id", case.service_id)
+                    setattr(bound_service, "logger", MagicMock())
+
+                    data = DataObject(inputs=case.inputs)
+                    parent_data = DataObject(inputs=case.parent_data)
+
+                    # execute result check
+                    do_continue = self._do_case_assert(
+                        service=bound_service,
+                        method="execute",
+                        args=(data, parent_data),
+                        assertion=case.execute_assertion,
+                        no=no,
+                        name=case.name,
+                    )
+
+                    for call_assertion in case.execute_call_assertion:
+                        self._do_call_assertion(name=case.name, no=no, assertion=call_assertion)
+
+                    if do_continue:
+                        self._case_pass(case)
+                        continue
+
+                    if bound_service.need_schedule():
+
+                        if bound_service.interval is None:
+                            # callback case
+                            self._do_case_assert(
+                                service=bound_service,
+                                method="schedule",
+                                args=(data, parent_data, case.schedule_assertion.callback_data),
+                                assertion=case.schedule_assertion,
+                                no=no,
+                                name=case.name,
+                            )
+
+                        else:
+                            # schedule case
+                            assertions = case.schedule_assertion
+                            assertions = assertions if isinstance(assertions, list) else [assertions]
+
+                            for assertion in assertions:
+                                do_continue = self._do_case_assert(
+                                    service=bound_service,
+                                    method="schedule",
+                                    args=(data, parent_data),
+                                    assertion=assertion,
+                                    no=no,
+                                    name=case.name,
+                                )
+
+                                self.assertEqual(
+                                    assertion.schedule_finished,
+                                    bound_service.is_schedule_finished(),
+                                    msg=self._format_failure_message(
+                                        no=no,
+                                        name=case.name,
+                                        msg="schedule_finished assertion failed:"
+                                        "\nexpected: {expected}\nactual: {actual}".format(
+                                            expected=assertion.schedule_finished,  # noqa
+                                            actual=bound_service.is_schedule_finished(),
+                                        ),
+                                    ),
+                                )  # noqa
+
+                                if do_continue:
+                                    break
+
+                    for call_assertion in case.schedule_call_assertion:
+                        self._do_call_assertion(name=case.name, no=no, assertion=call_assertion)
+
+                    self._case_pass(case)
+
+            except Exception:
+                self._case_fail(case)
+                sys.stdout.write("{}\n".format(traceback.format_exc()))
+
+        if self._failed_cases:
+            self._test_fail()
+
+
+class ComponentTestCase(object):
+    def __init__(
+        self,
+        inputs,
+        parent_data,
+        
execute_assertion,
+        schedule_assertion,
+        name="",
+        patchers=None,
+        execute_call_assertion=None,
+        schedule_call_assertion=None,
+        service_id=None,
+    ):
+        self.inputs = inputs
+        self.parent_data = parent_data
+        self.execute_assertion = execute_assertion
+        self.execute_call_assertion = execute_call_assertion or []
+        self.schedule_call_assertion = schedule_call_assertion or []
+        self.schedule_assertion = schedule_assertion
+        self.name = name
+        self.patchers = patchers or []
+        self.service_id = service_id or uniqid()
+
+
+class CallAssertion(object):
+    def __init__(self, func, calls, any_order=False):
+        self.func = func
+        self.calls = calls
+        self.any_order = any_order
+
+    def do_assert(self):
+        if not callable(self.func):
+            module_and_func = self.func.rsplit(".", 1)
+            mod_path = module_and_func[0]
+            func_name = module_and_func[1]
+            mod = importlib.import_module(mod_path)
+            func = getattr(mod, func_name)
+        else:
+            func = self.func
+
+        if not self.calls:
+            func.assert_not_called()
+        else:
+            assert func.call_count == len(self.calls), (
+                "Expected 'mock' to have been called {expect} times. "
+                "Called {actual} times".format(expect=len(self.calls), actual=func.call_count)
+            )
+            func.assert_has_calls(calls=self.calls, any_order=self.any_order)
+
+        func.reset_mock()
+
+
+class Assertion(object):
+    def __init__(self, success, outputs):
+        self.success = success
+        self.outputs = outputs
+
+
+class ExecuteAssertion(Assertion):
+    pass
+
+
+class ScheduleAssertion(Assertion):
+    def __init__(self, callback_data=None, schedule_finished=False, *args, **kwargs):
+        self.callback_data = callback_data
+        self.schedule_finished = schedule_finished
+        super(ScheduleAssertion, self).__init__(*args, **kwargs)
+
+
+class Patcher(object):
+    def __init__(self, target, return_value=None, side_effect=None):
+        self.target = target
+        self.return_value = return_value
+        self.side_effect = side_effect
+
+    def mock_patcher(self):
+        return patch(target=self.target, new=MagicMock(return_value=self.return_value, side_effect=self.side_effect))
+
+
+Call = call
diff --git a/runtime/bamboo-pipeline/pipeline/components/__init__.py b/runtime/bamboo-pipeline/pipeline/components/__init__.py
new file mode 100644
index 00000000..40097292
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/components/__init__.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/components/collections/__init__.py b/runtime/bamboo-pipeline/pipeline/components/collections/__init__.py
new file mode 100644
index 00000000..40097292
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/components/collections/__init__.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/components/collections/examples.py b/runtime/bamboo-pipeline/pipeline/components/collections/examples.py new file mode 100644 index 00000000..564612b3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/components/collections/examples.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + +from pipeline.component_framework.component import Component +from pipeline.conf import settings +from pipeline.core.flow.activity import Service + +logger = logging.getLogger("celery") + +__register_ignore__ = not settings.ENABLE_EXAMPLE_COMPONENTS + + +class SimpleExampleService(Service): + def execute(self, data, parent_data): + return True + + def outputs_format(self): + return [] + + +class SimpleExampleComponent(Component): + name = "example component" + code = "example_component" + bound_service = SimpleExampleService + + +class PipeExampleService(Service): + def execute(self, data, parent_data): + for key, val in list(data.inputs.items()): + data.set_outputs(key, val) + return True + + def outputs_format(self): + return [] + + +class PipeExampleComponent(Component): + name = "pipe example component" + code = "pipe_example_component" + bound_service = PipeExampleService diff --git a/runtime/bamboo-pipeline/pipeline/conf/__init__.py b/runtime/bamboo-pipeline/pipeline/conf/__init__.py new file mode 100644 index 00000000..3ba02d5b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/conf/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.conf import settings as django_settings + +from pipeline.conf import default_settings + + +class PipelineSettings(object): + def __getattr__(self, key): + if hasattr(django_settings, key): + return getattr(django_settings, key) + + if hasattr(default_settings, key): + return getattr(default_settings, key) + + raise AttributeError("Settings object has no attribute %s" % key) + + +settings = PipelineSettings() diff --git a/runtime/bamboo-pipeline/pipeline/conf/default_settings.py b/runtime/bamboo-pipeline/pipeline/conf/default_settings.py new file mode 100644 index 00000000..0cdc651b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/conf/default_settings.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.conf import settings + +# pipeline template context module, to use this, you need +# 1) config PIPELINE_TEMPLATE_CONTEXT in your django settings, such as +# PIPELINE_TEMPLATE_CONTEXT = 'home_application.utils.get_template_context' +# 2) define get_template_context function in your app, which show accept one arg, such as +# def get_template_context(obj): +# context = { +# 'biz_cc_id': '1', +# 'biz_cc_name': 'test1', +# } +# if obj is not None: +# context.update({'template': '1'}) +# return context + +PIPELINE_TEMPLATE_CONTEXT = getattr(settings, "PIPELINE_TEMPLATE_CONTEXT", "") +PIPELINE_INSTANCE_CONTEXT = getattr(settings, "PIPELINE_INSTANCE_CONTEXT", "") + +PIPELINE_ENGINE_ADAPTER_API = getattr( + settings, "PIPELINE_ENGINE_ADAPTER_API", "pipeline.service.pipeline_engine_adapter.adapter_api", +) + +PIPELINE_DATA_BACKEND = getattr( + settings, "PIPELINE_DATA_BACKEND", "pipeline.engine.core.data.mysql_backend.MySQLDataBackend", +) +PIPELINE_DATA_CANDIDATE_BACKEND = getattr(settings, "PIPELINE_DATA_CANDIDATE_BACKEND", None) +PIPELINE_DATA_BACKEND_AUTO_EXPIRE = getattr(settings, "PIPELINE_DATA_BACKEND_AUTO_EXPIRE", False) +PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS = int( + getattr(settings, "PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS", 60 * 60 * 24) +) + +PIPELINE_END_HANDLER = getattr( + settings, "PIPELINE_END_HANDLER", "pipeline.engine.signals.handlers.pipeline_end_handler", +) +PIPELINE_WORKER_STATUS_CACHE_EXPIRES = getattr(settings, "PIPELINE_WORKER_STATUS_CACHE_EXPIRES", 30) +PIPELINE_RERUN_MAX_TIMES = getattr(settings, "PIPELINE_RERUN_MAX_TIMES", 0) +PIPELINE_RERUN_INDEX_OFFSET = getattr(settings, "PIPELINE_RERUN_INDEX_OFFSET", -1) + +COMPONENT_AUTO_DISCOVER_PATH = [ + "components.collections", +] + +COMPONENT_AUTO_DISCOVER_PATH += getattr(settings, "COMPONENT_PATH", []) + +AUTO_UPDATE_COMPONENT_MODELS = getattr(settings, "AUTO_UPDATE_COMPONENT_MODELS", True) + +VARIABLE_AUTO_DISCOVER_PATH = [ + "variables.collections", +] + +VARIABLE_AUTO_DISCOVER_PATH += getattr(settings, "VARIABLE_PATH", []) + +AUTO_UPDATE_VARIABLE_MODELS = getattr(settings, 
"AUTO_UPDATE_VARIABLE_MODELS", True) + +PIPELINE_PARSER_CLASS = getattr(settings, "PIPELINE_PARSER_CLASS", "pipeline.parser.pipeline_parser.PipelineParser") + +ENABLE_EXAMPLE_COMPONENTS = getattr(settings, "ENABLE_EXAMPLE_COMPONENTS", True) + +UUID_DIGIT_STARTS_SENSITIVE = getattr(settings, "UUID_DIGIT_STARTS_SENSITIVE", False) + +PIPELINE_LOG_LEVEL = getattr(settings, "PIPELINE_LOG_LEVEL", "INFO") + +# 远程插件包源默认配置 +EXTERNAL_PLUGINS_SOURCE_PROXY = getattr(settings, "EXTERNAL_PLUGINS_SOURCE_PROXY", None) +EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT = getattr(settings, "EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT", True) + +# 僵尸进程扫描配置 +ENGINE_ZOMBIE_PROCESS_DOCTORS = getattr(settings, "ENGINE_ZOMBIE_PROCESS_DOCTORS", None) +ENGINE_ZOMBIE_PROCESS_HEAL_CRON = getattr(settings, "ENGINE_ZOMBIE_PROCESS_HEAL_CRON", {"minute": "*/10"}) + +# 过期任务运行时清理配置 +EXPIRED_TASK_CLEAN = getattr(settings, "EXPIRED_TASK_CLEAN", False) +EXPIRED_TASK_CLEAN_CRON = getattr(settings, "EXPIRED_TASK_CLEAN_CRON", {"minute": "37", "hour": "*"}) +EXPIRED_TASK_CLEAN_NUM_LIMIT = getattr(settings, "EXPIRED_TASK_CLEAN_NUM_LIMIT", 100) +TASK_EXPIRED_MONTH = getattr(settings, "TASK_EXPIRED_MONTH", 6) + +# MAKO sandbox config +MAKO_SANDBOX_SHIELD_WORDS = getattr(settings, "MAKO_SANDBOX_SHIELD_WORDS", []) +MAKO_SANDBOX_IMPORT_MODULES = getattr(settings, "MAKO_SANDBOX_IMPORT_MODULES", {}) +MAKO_SAFETY_CHECK = getattr(settings, "MAKO_SAFETY_CHECK", True) + +# 开发者自定义插件和变量异常类 +PLUGIN_SPECIFIC_EXCEPTIONS = getattr(settings, "PLUGIN_SPECIFIC_EXCEPTIONS", ()) +VARIABLE_SPECIFIC_EXCEPTIONS = getattr(settings, "VARIABLE_SPECIFIC_EXCEPTIONS", ()) diff --git a/runtime/bamboo-pipeline/pipeline/constants.py b/runtime/bamboo-pipeline/pipeline/constants.py new file mode 100644 index 00000000..9fb38054 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/constants.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +PIPELINE_DEFAULT_PRIORITY = 100 +PIPELINE_MIN_PRIORITY = 0 +PIPELINE_MAX_PRIORITY = 255 diff --git a/runtime/bamboo-pipeline/pipeline/contrib/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/__init__.py new file mode 100644 index 00000000..7acd738b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +default_app_config = "pipeline.contrib.external_plugins.apps.ExternalPluginsConfig" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/admin.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/admin.py new file mode 100644 index 00000000..4edcab03 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/admin.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline.contrib.external_plugins.models import FileSystemSource, GitRepoSource, S3Source +from pipeline.contrib.external_plugins.models.forms import JsonFieldModelForm + +# Register your models here. 
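+# The three admins below share JsonFieldModelForm so that each model's
+# JSONTextField column ("packages") is rendered as an editable JSON string
+# in the admin site.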
+
+
+@admin.register(GitRepoSource)
+class GitRepoSourceAdmin(admin.ModelAdmin):
+    form = JsonFieldModelForm
+    list_display = ["name", "from_config", "repo_raw_address", "branch"]
+    search_fields = ["name", "branch", "repo_raw_address"]
+
+
+@admin.register(S3Source)
+class S3SourceAdmin(admin.ModelAdmin):
+    form = JsonFieldModelForm
+    list_display = ["name", "from_config", "service_address", "bucket", "source_dir"]
+    search_fields = ["name", "bucket", "service_address"]
+
+
+@admin.register(FileSystemSource)
+class FileSystemSourceAdmin(admin.ModelAdmin):
+    form = JsonFieldModelForm
+    list_display = ["name", "from_config", "path"]
+    search_fields = ["name", "path"]
diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/apps.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/apps.py
new file mode 100644
index 00000000..cb420c69
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/apps.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+import sys
+import traceback
+
+from django.apps import AppConfig
+from django.conf import settings
+from django.db.utils import ProgrammingError
+
+from pipeline.utils import env
+
+logger = logging.getLogger("root")
+
+DJANGO_MANAGE_CMD = "manage.py"
+DEFAULT_TRIGGERS = {"runserver", "celery", "worker", "uwsgi", "shell", "update_component_models"}
+
+
+class ExternalPluginsConfig(AppConfig):
+    name = "pipeline.contrib.external_plugins"
+    label = "pipeline_external_plugins"
+    verbose_name = "PipelineExternalPlugins"
+
+    def ready(self):
+        from pipeline.contrib.external_plugins import loader  # noqa
+        from pipeline.contrib.external_plugins.models import ExternalPackageSource  # noqa
+
+        # load external components when the start command is in the trigger list
+        if self.should_load_external_module():
+            try:
+                logger.info("Start to update package source from config file...")
+                ExternalPackageSource.update_package_source_from_config(
+                    getattr(settings, "COMPONENTS_PACKAGE_SOURCES", {})
+                )
+            except ProgrammingError:
+                logger.warning(
+                    "update package source failed, maybe the first migration has not been applied yet? "
+                    "exception: {traceback}".format(traceback=traceback.format_exc())
+                )
+                # the tables do not exist before the first migration, so stop here
+                return
+
+            logger.info("Start to load external modules...")
+
+            loader.load_external_modules()
+
+    @staticmethod
+    def should_load_external_module():
+        django_command = env.get_django_command()
+        if django_command is None:
+            print("app is not started with a django manage command, current argv: {argv}".format(argv=sys.argv))
+            return True
+
+        triggers = getattr(settings, "EXTERNAL_COMPONENTS_LOAD_TRIGGER", DEFAULT_TRIGGERS)
+        print("should_load_external_module: {}".format(django_command in triggers))
+        return django_command in triggers
diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/exceptions.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/exceptions.py
new file mode 100644
index 00000000..56ce1d2d
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/exceptions.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+
+class InvalidOperationException(Exception):
+    pass
diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/loader.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/loader.py
new file mode 100644
index 00000000..52cd1031
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/loader.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +import importlib +import logging +import traceback + +from pipeline.contrib.external_plugins.models import source_cls_factory +from pipeline.contrib.external_plugins.utils.importer import importer_context + +logger = logging.getLogger("root") + + +def load_external_modules(): + for source_type, source_model_cls in list(source_cls_factory.items()): + # get all external source + sources = source_model_cls.objects.all() + + # get importer for source + for source in sources: + _import_modules_in_source(source) + + +def _import_modules_in_source(source): + try: + importer = source.importer() + + with importer_context(importer): + for mod in source.modules: + importlib.import_module(mod) + except Exception: + logger.error("An error occurred when loading {{{}}}: {}".format(source.name, traceback.format_exc())) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0001_initial.py new file mode 100644 index 00000000..c420b286 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0001_initial.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models +import pipeline.contrib.external_plugins.models.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FileSystemSource", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), + ( + "from_config", + models.BooleanField( + default=False, + verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", + ), + ), + ( + "packages", + pipeline.contrib.external_plugins.models.fields.JSONTextField( + verbose_name="\u6a21\u5757\u914d\u7f6e" + ), + ), + ("path", models.TextField(verbose_name="\u6587\u4ef6\u7cfb\u7edf\u8def\u5f84")), + ], + options={"abstract": False}, + ), + migrations.CreateModel( + name="GitRepoSource", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), + ( + "from_config", + models.BooleanField( + default=False, + verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", + ), + ), + ( + "packages", + pipeline.contrib.external_plugins.models.fields.JSONTextField( + verbose_name="\u6a21\u5757\u914d\u7f6e" + ), + ), + ("repo_raw_address", models.TextField(verbose_name="\u6587\u4ef6\u6258\u7ba1\u4ed3\u5e93\u94fe\u63a5")), + ("branch", models.CharField(max_length=128, verbose_name="\u5206\u652f\u540d")), + ], + options={"abstract": False}, + ), + migrations.CreateModel( + name="S3Source", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), + ( + "from_config", + models.BooleanField( + default=False, + verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", + ), + ), + ( + "packages", + pipeline.contrib.external_plugins.models.fields.JSONTextField( + verbose_name="\u6a21\u5757\u914d\u7f6e" + ), + ), + ("service_address", models.TextField(verbose_name="\u5bf9\u8c61\u5b58\u50a8\u670d\u52a1\u5730\u5740")), + ("bucket", models.TextField(verbose_name="bucket \u540d")), + ("access_key", models.TextField(verbose_name="access key")), + ("secret_key", models.TextField(verbose_name="secret key")), + ], + options={"abstract": False}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py new file mode 100644 index 00000000..7367e19b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-11-09 11:01 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline_external_plugins", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="s3source", name="source_dir", field=models.TextField(default="", verbose_name="源目录名"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/__init__.py 
b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/__init__.py new file mode 100644 index 00000000..8a81d526 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.contrib.external_plugins.models.base import source_cls_factory # noqa +from pipeline.contrib.external_plugins.models.source import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/base.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/base.py new file mode 100644 index 00000000..755fb8f2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/base.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import sys +from abc import abstractmethod +from copy import deepcopy + +from django.db import IntegrityError, models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.component_framework.library import ComponentLibrary +from pipeline.contrib.external_plugins import exceptions +from pipeline.contrib.external_plugins.models.fields import JSONTextField + +GIT = "git" +S3 = "s3" +FILE_SYSTEM = "fs" +logger = logging.getLogger("root") +source_cls_factory = {} + + +def package_source(cls): + source_cls_factory[cls.type()] = cls + return cls + + +class SourceManager(models.Manager): + def create_source(self, name, packages, from_config, **kwargs): + create_kwargs = deepcopy(kwargs) + create_kwargs.update({"name": name, "packages": packages, "from_config": from_config}) + return self.create(**create_kwargs) + + def remove_source(self, source_id): + source = self.get(id=source_id) + + if source.from_config: + raise exceptions.InvalidOperationException("Can not remove source create from config") + + source.delete() + + def update_source_from_config(self, configs): + + sources_from_config = self.filter(from_config=True).all() + existing_source_names = {source.name for source in sources_from_config} + source_name_in_config = {config["name"] for config in configs} + + invalid_source_names = existing_source_names - source_name_in_config + + # remove invalid source + self.filter(name__in=invalid_source_names).delete() + + # update and create source + for config in configs: + defaults = deepcopy(config["details"]) + defaults["packages"] = config["packages"] + + try: + self.update_or_create(name=config["name"], from_config=True, defaults=defaults) + except IntegrityError: + raise exceptions.InvalidOperationException( + 'There is a external source named "{source_name}" but not create from config, ' + "can not do source update operation".format(source_name=config["name"]) + ) + + +class ExternalPackageSource(models.Model): + name = models.CharField(_("包源名"), max_length=128, unique=True) + from_config = models.BooleanField(_("是否是从配置文件中读取的"), default=False) + packages = JSONTextField(_("模块配置")) + + objects = SourceManager() + + class Meta: + abstract = True + + @staticmethod + @abstractmethod + def type(): + raise NotImplementedError() + + @abstractmethod + def importer(self): + raise NotImplementedError() + + @abstractmethod + def details(self): + raise NotImplementedError() + + @property + def imported_plugins(self): + plugins = [] + try: + importer = self.importer() + except ValueError as e: + logger.exception("ExternalPackageSource[name={}] call importer error: {}".format(self.name, e)) + return plugins + for component in ComponentLibrary.component_list(): + component_importer = getattr(sys.modules[component.__module__], "__loader__", None) + if isinstance(component_importer, type(importer)) and component_importer.name == self.name: + plugins.append( + { + "code": component.code, + "name": component.name, + "group_name": component.group_name, + "class_name": component.__name__, + "module": component.__module__, + } + ) + return plugins + + @property + def modules(self): + modules = [] + + for package_info in list(self.packages.values()): + modules.extend(package_info["modules"]) + + return modules + + @staticmethod + def update_package_source_from_config(source_configs): + classified_config = {source_type: [] for source_type in list(source_cls_factory.keys())} + + for config in deepcopy(source_configs): + classified_config.setdefault(config.pop("type"), 
[]).append(config) + + for source_type, configs in list(classified_config.items()): + try: + source_model_cls = source_cls_factory[source_type] + except KeyError: + raise KeyError("Unsupported external source type: %s" % source_type) + source_model_cls.objects.update_source_from_config(configs=configs) + + @staticmethod + def package_source_types(): + return list(source_cls_factory.keys()) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/fields.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/fields.py new file mode 100644 index 00000000..0f61078d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/fields.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import ujson as json +from django.db import models + + +class JSONTextField(models.TextField): + def __init__(self, *args, **kwargs): + super(JSONTextField, self).__init__(*args, **kwargs) + + def get_prep_value(self, value): + return json.dumps(value) + + def to_python(self, value): + value = super(JSONTextField, self).to_python(value) + return json.loads(value) + + def from_db_value(self, value, expression, connection, context): + return self.to_python(value) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/forms.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/forms.py new file mode 100644 index 00000000..99ced3fd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/forms.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import ujson as json +from django import forms + +from pipeline.contrib.external_plugins.models.fields import JSONTextField + + +class JsonFieldModelForm(forms.ModelForm): + def __init__(self, *args, **kwargs): + super(JsonFieldModelForm, self).__init__(*args, **kwargs) + # for edit in django admin web + all_fields = self.instance.__class__._meta.get_fields() + for field in all_fields: + if isinstance(field, JSONTextField): + self.initial[field.name] = json.dumps(getattr(self.instance, field.name)) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/source.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/source.py new file mode 100644 index 00000000..5082a66f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/models/source.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.conf import settings +from pipeline.contrib.external_plugins.models.base import FILE_SYSTEM, GIT, S3, ExternalPackageSource, package_source +from pipeline.contrib.external_plugins.utils.importer import FSModuleImporter, GitRepoModuleImporter, S3ModuleImporter + + +@package_source +class GitRepoSource(ExternalPackageSource): + repo_raw_address = models.TextField(_("文件托管仓库链接")) + branch = models.CharField(_("分支名"), max_length=128) + + @staticmethod + def type(): + return GIT + + def importer(self): + return GitRepoModuleImporter( + name=self.name, + repo_raw_url=self.repo_raw_address, + branch=self.branch, + modules=list(self.packages.keys()), + proxy=settings.EXTERNAL_PLUGINS_SOURCE_PROXY, + secure_only=settings.EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT, + ) + + def details(self): + return {"repo_raw_address": self.repo_raw_address, "branch": self.branch} + + +@package_source +class S3Source(ExternalPackageSource): + service_address = models.TextField(_("对象存储服务地址")) + bucket = models.TextField(_("bucket 名")) + source_dir = models.TextField(_("源目录名"), default="") + access_key = models.TextField(_("access key")) + secret_key = models.TextField(_("secret key")) + + @staticmethod + def type(): + return S3 + + def importer(self): + return S3ModuleImporter( + name=self.name, + modules=list(self.packages.keys()), + service_address=self.service_address, + bucket=self.bucket, + source_dir=self.source_dir, + access_key=self.access_key, + secret_key=self.secret_key, + secure_only=settings.EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT, + ) + + def details(self): + return { + "service_address": self.service_address, + "bucket": self.bucket, + "source_dir": self.source_dir, + "access_key": self.access_key, + "secret_key": self.secret_key, + } + + +@package_source +class FileSystemSource(ExternalPackageSource): + path = models.TextField(_("文件系统路径")) + + 
@staticmethod + def type(): + return FILE_SYSTEM + + def importer(self): + return FSModuleImporter(name=self.name, modules=list(self.packages.keys()), path=self.path) + + def details(self): + return {"path": self.path} diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock.py new file mode 100644 index 00000000..5f6a9201 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from mock import MagicMock, call, patch # noqa + + +def mock_s3_resource(resource, **kwargs): + ret = {"resource": resource} + ret.update(kwargs) + return ret + + +class Object(object): + pass + + +class MockResponse(object): + def __init__(self, **kwargs): + self.content = kwargs.get("content") + self.ok = kwargs.get("ok", True) + + +class MockPackageSourceManager(object): + def __init__(self, **kwargs): + self.all = MagicMock(return_value=kwargs.get("all")) + + +class MockPackageSourceClass(object): + def __init__(self, **kwargs): + self.objects = MockPackageSourceManager(all=kwargs.get("all")) + + +class MockPackageSource(object): + def __init__(self, **kwargs): + self.type = MagicMock(return_value=kwargs.get("type")) + self.importer = MagicMock(return_value=kwargs.get("importer")) + self.modules = kwargs.get("modules", []) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock_settings.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock_settings.py new file mode 100644 index 00000000..79681dae --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/mock_settings.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +SYS_MODULES = "sys.modules" +SYS_META_PATH = "sys.meta_path" +IMP_ACQUIRE_LOCK = "imp.acquire_lock" +IMP_RELEASE_LOCK = "imp.release_lock" +REQUESTS_GET = "requests.get" +BOTO3_RESOURCE = "boto3.resource" +OS_PATH_EXISTS = "os.path.exists" + +IMPORTLIB_IMPORT_MODULE = "importlib.import_module" + +MODELS_BASE_SOURCE_CLS_FACTORY = "pipeline.contrib.external_plugins.models.base.source_cls_factory" +MODELS_SOURCE_MANAGER_UPDATE_SOURCE_FROM_CONFIG = ( + "pipeline.contrib.external_plugins.models.base.SourceManager.update_source_from_config" +) + +LOADER_SOURCE_CLS_FACTORY = "pipeline.contrib.external_plugins.loader.source_cls_factory" +LOADER__IMPORT_MODULES_IN_SOURCE = "pipeline.contrib.external_plugins.loader._import_modules_in_source" + +UTILS_IMPORTER_BASE_EXECUTE_SRC_CODE = ( + "pipeline.contrib.external_plugins.utils.importer.base.NonstandardModuleImporter._execute_src_code" +) +UTILS_IMPORTER_GIT__FETCH_REPO_FILE = ( + "pipeline.contrib.external_plugins.utils.importer.git.GitRepoModuleImporter._fetch_repo_file" +) +UTILS_IMPORTER_GIT__FILE_URL = "pipeline.contrib.external_plugins.utils.importer.git.GitRepoModuleImporter._file_url" +UTILS_IMPORTER_GIT_GET_SOURCE = "pipeline.contrib.external_plugins.utils.importer.git.GitRepoModuleImporter.get_source" +UTILS_IMPORTER_GIT_GET_FILE = "pipeline.contrib.external_plugins.utils.importer.git.GitRepoModuleImporter.get_file" +UTILS_IMPORTER_GIT_IS_PACKAGE = "pipeline.contrib.external_plugins.utils.importer.git.GitRepoModuleImporter.is_package" +UTILS_IMPORTER__SETUP_IMPORTER = "pipeline.contrib.external_plugins.utils.importer.utils._setup_importer" +UTILS_IMPORTER__REMOVE_IMPORTER = "pipeline.contrib.external_plugins.utils.importer.utils._remove_importer" + +UTILS_IMPORTER_S3__FETCH_OBJ_CONTENT = ( + "pipeline.contrib.external_plugins.utils.importer.s3.S3ModuleImporter._fetch_obj_content" +) +UTILS_IMPORTER_S3_GET_SOURCE = "pipeline.contrib.external_plugins.utils.importer.s3.S3ModuleImporter.get_source" +UTILS_IMPORTER_S3_GET_FILE = "pipeline.contrib.external_plugins.utils.importer.s3.S3ModuleImporter.get_file" +UTILS_IMPORTER_S3_IS_PACKAGE = "pipeline.contrib.external_plugins.utils.importer.s3.S3ModuleImporter.is_package" +UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT = ( + "pipeline.contrib.external_plugins.utils.importer.s3.S3ModuleImporter._get_s3_obj_content" +) + +UTILS_IMPORTER_FS_GET_SOURCE = "pipeline.contrib.external_plugins.utils.importer.fs.FSModuleImporter.get_source" +UTILS_IMPORTER_FS_GET_FILE = "pipeline.contrib.external_plugins.utils.importer.fs.FSModuleImporter.get_file" +UTILS_IMPORTER_FS_IS_PACKAGE = "pipeline.contrib.external_plugins.utils.importer.fs.FSModuleImporter.is_package" +UTILS_IMPORTER_FS__FETCH_FILE_CONTENT = ( + "pipeline.contrib.external_plugins.utils.importer.fs.FSModuleImporter._fetch_file_content" +) +UTILS_IMPORTER_FS__GET_FILE_CONTENT = ( + "pipeline.contrib.external_plugins.utils.importer.fs.FSModuleImporter._get_file_content" +) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_base.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_base.py new file mode 100644 index 00000000..7a9ef5a3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_base.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins.models import base +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa + + +class BaseModuleTestCase(TestCase): + def test_package_source(self): + source_type = "source_type" + + cls_factory = {} + + with patch(MODELS_BASE_SOURCE_CLS_FACTORY, cls_factory): + + @base.package_source + class APackageSource(object): + @staticmethod + def type(): + return source_type + + self.assertIs(cls_factory[source_type], APackageSource) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_external_package_source.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_external_package_source.py new file mode 100644 index 00000000..7a312a78 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/base/test_external_package_source.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from copy import deepcopy + +from django.test import TestCase + +from pipeline.contrib.external_plugins import exceptions +from pipeline.contrib.external_plugins.models import ExternalPackageSource, GitRepoSource, source_cls_factory +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa + +SOURCE_NAME = "source_name" +PACKAGES = { + "root_package_1": {"version": "", "modules": ["test1", "test2"]}, + "root_package_2": {"version": "", "modules": ["test3", "test4"]}, + "root_package_3": {"version": "", "modules": ["test5", "test6"]}, +} +FROM_CONFIG = True +REPO_RAW_ADDRESS = "REPO_RAW_ADDRESS" +BRANCH = "master" + +OLD_SOURCE_1 = { + "name": "source_1", + "details": {"repo_raw_address": "old_address", "branch": "stage"}, + "packages": {"root_package": {"version": "", "modules": ["test1"]}}, +} + +OLD_SOURCE_3 = { + "name": "source_3", + "details": {"repo_raw_address": "old_address_3", "branch": "master"}, + "packages": {"root_package": {"version": "", "modules": ["test5"]}}, +} + +SOURCE_1 = { + "name": "source_1", + "details": {"repo_raw_address": "https://github.com/homholueng/plugins_example_1", "branch": "master"}, + "packages": {"root_package": {"version": "", "modules": ["test1", "test2"]}}, +} + +SOURCE_2 = { + "name": "source_2", + "details": {"repo_raw_address": "https://github.com/homholueng/plugins_example_2", "branch": "master"}, + "packages": {"root_package": {"version": "", "modules": ["test3", "test4"]}}, +} + +SOURCE_4 = { + "name": "source_4", + "details": {"repo_raw_address": "https://github.com/homholueng/plugins_example_4", "branch": "master"}, + "packages": {"root_package": {"version": "", "modules": ["test5", "test6"]}}, +} + +GIT_SOURCE_CONFIGS = 
[SOURCE_1, SOURCE_2, SOURCE_4] + + +class ExternalPackageSourceTestCase(TestCase): + def setUp(self): + GitRepoSource.objects.create_source( + name=SOURCE_NAME, + packages=PACKAGES, + from_config=FROM_CONFIG, + repo_raw_address=REPO_RAW_ADDRESS, + branch=BRANCH, + ) + + def tearDown(self): + GitRepoSource.objects.all().delete() + + def test_create_source(self): + source_1 = GitRepoSource.objects.get(name=SOURCE_NAME) + self.assertEqual(source_1.name, SOURCE_NAME) + self.assertEqual(source_1.packages, PACKAGES) + self.assertEqual(source_1.from_config, FROM_CONFIG) + self.assertEqual(source_1.repo_raw_address, REPO_RAW_ADDRESS) + self.assertEqual(source_1.branch, BRANCH) + + def test_remove_source(self): + source_1 = GitRepoSource.objects.get(name=SOURCE_NAME) + + self.assertRaises(exceptions.InvalidOperationException, GitRepoSource.objects.remove_source, source_1.id) + + source_1.from_config = False + source_1.save() + + GitRepoSource.objects.remove_source(source_1.id) + + self.assertFalse(GitRepoSource.objects.filter(id=source_1.id).exists()) + + def _assert_source_equals_config(self, source, config): + self.assertEqual(source.name, config["name"]) + self.assertEqual(source.packages, config["packages"]) + self.assertEqual(source.repo_raw_address, config["details"]["repo_raw_address"]) + self.assertEqual(source.branch, config["details"]["branch"]) + + def test_update_source_from_config(self): + GitRepoSource.objects.all().delete() + + for source in [OLD_SOURCE_1, OLD_SOURCE_3]: + GitRepoSource.objects.create_source( + name=source["name"], + packages=source["packages"], + from_config=True, + repo_raw_address=source["details"]["repo_raw_address"], + branch=source["details"]["branch"], + ) + + GitRepoSource.objects.update_source_from_config(GIT_SOURCE_CONFIGS) + + self.assertFalse(GitRepoSource.objects.filter(name=OLD_SOURCE_3["name"]).exists()) + + for config in GIT_SOURCE_CONFIGS: + source = GitRepoSource.objects.get(name=config["name"]) + self.assertTrue(source.from_config) + self._assert_source_equals_config(source, config) + + def test_modules(self): + source = GitRepoSource.objects.get(name=SOURCE_NAME) + + modules = [] + for package_info in list(PACKAGES.values()): + modules.extend(package_info["modules"]) + + self.assertEqual(source.modules, modules) + + @patch(MODELS_SOURCE_MANAGER_UPDATE_SOURCE_FROM_CONFIG, MagicMock()) + def test_update_package_source_from_config__empty_configs(self): + ExternalPackageSource.update_package_source_from_config([]) + for source_model_cls in list(source_cls_factory.values()): + source_model_cls.objects.update_source_from_config.assert_called_with(configs=[]) + + @patch(MODELS_SOURCE_MANAGER_UPDATE_SOURCE_FROM_CONFIG, MagicMock()) + def test_update_package_source_from_config__normal_case(self): + source_configs = [ + {"name": "1", "type": "git"}, + {"name": "2", "type": "git"}, + {"name": "3", "type": "s3"}, + {"name": "4", "type": "fs"}, + ] + ExternalPackageSource.update_package_source_from_config(source_configs) + GitRepoSource.objects.update_source_from_config.assert_has_calls( + [call(configs=[{"name": "1"}, {"name": "2"}]), call(configs=[{"name": "3"}]), call(configs=[{"name": "4"}])] + ) + + def test_update_package_source_from_config__unsupported_source_type(self): + source_configs = [{"name": "1", "type": "wrong_type"}] + self.assertRaises(KeyError, ExternalPackageSource.update_package_source_from_config, source_configs) + + def test_update_source_from_config__name_conflict(self): + source = deepcopy(SOURCE_1) + source["type"] = "git" + 
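# The first sync creates the source from config; flipping from_config to False + # and syncing again must then hit the unique-name conflict and raise. +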
ExternalPackageSource.update_package_source_from_config([source]) + GitRepoSource.objects.filter(name=source["name"]).update(from_config=False) + self.assertRaises( + exceptions.InvalidOperationException, ExternalPackageSource.update_package_source_from_config, [source] + ) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/test_source.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/test_source.py new file mode 100644 index 00000000..4170f94c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/models/test_source.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins.models import FileSystemSource, GitRepoSource, S3Source +from pipeline.contrib.external_plugins.models.base import FILE_SYSTEM, GIT, S3, ExternalPackageSource + + +class SourceTestCase(TestCase): + def test_source_cls(self): + self.assertTrue(issubclass(GitRepoSource, ExternalPackageSource)) + self.assertTrue(issubclass(S3Source, ExternalPackageSource)) + self.assertTrue(issubclass(FileSystemSource, ExternalPackageSource)) + + def test_source_type(self): + self.assertEqual(GitRepoSource.type(), GIT) + self.assertEqual(S3Source.type(), S3) + self.assertEqual(FileSystemSource.type(), FILE_SYSTEM) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/test_loader.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/test_loader.py new file mode 100644 index 00000000..1df475f1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/test_loader.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins import loader +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa + + +class LoaderTestCase(TestCase): + def test__import_modules_in_source(self): + import_module = MagicMock() + + with patch(IMPORTLIB_IMPORT_MODULE, import_module): + modules = [1, 2, 3, 4] + source = MockPackageSource(importer="importer", modules=modules) + loader._import_modules_in_source(source) + import_module.assert_has_calls( + calls=[call(modules[0]), call(modules[1]), call(modules[2]), call(modules[3])] + ) + + @patch(LOADER__IMPORT_MODULES_IN_SOURCE, MagicMock()) + def test_load_external_modules(self): + cls_factory = Object() + setattr( + cls_factory, + "items", + MagicMock( + return_value=[ + ("type_1", MockPackageSourceClass(all=["source_1", "source_2"])), + ("type_2", MockPackageSourceClass(all=["source_3", "source_4"])), + ] + ), + ) + with patch(LOADER_SOURCE_CLS_FACTORY, cls_factory): + loader.load_external_modules() + loader._import_modules_in_source.assert_has_calls( + calls=[call("source_1"), call("source_2"), call("source_3"), call("source_4")] + ) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_base.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_base.py new file mode 100644 index 00000000..90c98426 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_base.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import imp +import sys + +from django.test import TestCase + +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa +from pipeline.contrib.external_plugins.utils.importer.base import NonstandardModuleImporter + + +class DummyImporter(NonstandardModuleImporter): + def __init__(self, **kwargs): + super(DummyImporter, self).__init__(modules=kwargs.get("modules", [])) + self._is_package = kwargs.get("is_package") + self._get_code = kwargs.get("get_code") + self._get_source = kwargs.get("get_source") + self._get_file = kwargs.get("get_file") + self._get_path = kwargs.get("get_path") + + self._accept_find_module_request_hook = MagicMock() + self._pre_load_module_hook = MagicMock() + self._post_load_module_hook = MagicMock() + self._import_error_hook = MagicMock() + + def is_package(self, fullname): + return self._is_package + + def get_code(self, fullname): + return self._get_code + + def get_source(self, fullname): + return self._get_source + + def get_file(self, fullname): + return self._get_file + + def get_path(self, fullname): + return self._get_path + + def accept_find_module_request_hook(self, fullname, path): + self._accept_find_module_request_hook(fullname=fullname, path=path) + + def pre_load_module_hook(self, fullname, module): + self._pre_load_module_hook(fullname=fullname, module=module) + + def post_load_module_hook(self, fullname, module): + self._post_load_module_hook(fullname=fullname, module=module) + + def import_error_hook(self, fullname): + self._import_error_hook(fullname=fullname) + + +class NonstandardModuleImporterTestCase(TestCase): + def setUp(self): + self.imp_acquire_lock_patcher = patch(IMP_ACQUIRE_LOCK, MagicMock()) + self.imp_release_lock_patcher = patch(IMP_RELEASE_LOCK, MagicMock()) + self.importer_exec_src_code_patcher = patch(UTILS_IMPORTER_BASE_EXECUTE_SRC_CODE, MagicMock()) + + self.imp_acquire_lock_patcher.start() + self.imp_release_lock_patcher.start() + self.importer_exec_src_code_patcher.start() + + def tearDown(self): + self.imp_acquire_lock_patcher.stop() + self.imp_release_lock_patcher.stop() + self.importer_exec_src_code_patcher.stop() + + def test_find_module__module_not_in_self_modules(self): + importer = DummyImporter() + + self.assertIsNone(importer.find_module("django")) + importer._accept_find_module_request_hook.assert_not_called() + + 
self.assertIsNone(importer.find_module("django.test")) + importer._accept_find_module_request_hook.assert_not_called() + + self.assertIsNone(importer.find_module("django.test.utils")) + importer._accept_find_module_request_hook.assert_not_called() + + def test_find_module__module_in_built_in(self): + importer = DummyImporter() + + self.assertIsNone(importer.find_module("math")) + importer._accept_find_module_request_hook.assert_not_called() + + def test_find_module__module_has_name_repetition(self): + importer = DummyImporter(modules=["magic_module"]) + + self.assertIsNone(importer.find_module("magic_module.magic_sub_module.magic_module")) + importer._accept_find_module_request_hook.assert_not_called() + + def test_find_module__accept(self): + importer = DummyImporter(modules=["magic_module"]) + + fullname = "magic_module" + self.assertIs(importer, importer.find_module(fullname)) + importer._accept_find_module_request_hook.assert_called_once_with(fullname=fullname, path=None) + importer._accept_find_module_request_hook.reset_mock() + + fullname = "magic_module.magic_sub_module_1" + self.assertIs(importer, importer.find_module(fullname)) + importer._accept_find_module_request_hook.assert_called_once_with(fullname=fullname, path=None) + importer._accept_find_module_request_hook.reset_mock() + + fullname = "magic_module.magic_sub_module_1.magic_sub_module_2" + self.assertIs(importer, importer.find_module(fullname)) + importer._accept_find_module_request_hook.assert_called_once_with(fullname=fullname, path=None) + importer._accept_find_module_request_hook.reset_mock() + + def test_load_module__module_already_in_sys_modules(self): + fullname = "exist_module" + mod = Object() + importer = DummyImporter() + + with patch(SYS_MODULES, {fullname: mod}): + self.assertEqual(importer.load_module(fullname=fullname), mod) + imp.acquire_lock.assert_called_once() + imp.release_lock.assert_called_once() + + def test_load_module__get_source_raise_import_error(self): + sub_module = "sub_module" + fullname = "exist_module.sub_module" + mod = Object() + importer = DummyImporter() + importer.get_source = MagicMock(side_effect=ImportError) + + with patch(SYS_MODULES, {sub_module: mod}): + self.assertIsNone(importer.load_module(fullname=fullname)) + imp.acquire_lock.assert_called_once() + imp.release_lock.assert_called_once() + + def test_load_module__is_package(self): + src_code = "src_code" + fullname = "magic_module" + _file = "file" + path = "path" + importer = DummyImporter(is_package=True, get_source=src_code, get_file=_file, get_path=path) + + with patch(SYS_MODULES, {}): + mod = importer.load_module(fullname=fullname) + + self.assertIs(sys.modules[fullname], mod) + self.assertEqual(mod.__file__, _file) + self.assertIs(mod.__loader__, importer) + self.assertEqual(mod.__path__, path) + self.assertEqual(mod.__package__, fullname) + + imp.acquire_lock.assert_called_once() + importer._pre_load_module_hook.assert_called_once_with(fullname=fullname, module=mod) + importer._execute_src_code.assert_called_once_with(src_code=src_code, module=mod) + importer._post_load_module_hook.assert_called_once_with(fullname=fullname, module=mod) + imp.release_lock.assert_called_once() + + def test_load_module__is_not_package(self): + src_code = "src_code" + fullname = "magic_module.sub_module" + _file = "file" + importer = DummyImporter(is_package=False, get_source=src_code, get_file=_file) + + with patch(SYS_MODULES, {}): + mod = importer.load_module(fullname=fullname) + + self.assertIs(sys.modules[fullname], mod) + 
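# A plain module gets no __path__, and its __package__ falls back to the parent package name. +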
self.assertEqual(mod.__file__, _file) + self.assertIs(mod.__loader__, importer) + self.assertEqual(mod.__package__, fullname.rpartition(".")[0]) + + imp.acquire_lock.assert_called_once() + importer._pre_load_module_hook.assert_called_once_with(fullname=fullname, module=mod) + importer._execute_src_code.assert_called_once_with(src_code=src_code, module=mod) + importer._post_load_module_hook.assert_called_once_with(fullname=fullname, module=mod) + imp.release_lock.assert_called_once() + + def test_load_module__raise_exception_before_add_module(self): + fullname = "magic_module.sub_module" + importer = DummyImporter(is_package=False) + importer.get_source = MagicMock(side_effect=Exception()) + importer._import_error_hook = MagicMock(side_effect=Exception()) + + with patch(SYS_MODULES, {}): + self.assertRaises(ImportError, importer.load_module, fullname) + self.assertNotIn(fullname, sys.modules) + + importer._import_error_hook.assert_called_once() + imp.release_lock.assert_called_once() + + def test_load_module__raise_exception_after_add_module(self): + fullname = "magic_module.sub_module" + importer = DummyImporter(is_package=False) + importer.get_file = MagicMock(side_effect=Exception()) + + with patch(SYS_MODULES, {}): + self.assertRaises(ImportError, importer.load_module, fullname) + self.assertNotIn(fullname, sys.modules) + + importer._import_error_hook.assert_called_once() + imp.release_lock.assert_called_once() diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_fs.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_fs.py new file mode 100644 index 00000000..a35b25d4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_fs.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa +from pipeline.contrib.external_plugins.utils.importer.fs import FSModuleImporter + +GET_FILE_RETURN = "GET_FILE_RETURN" +GET_SOURCE_RETURN = "a=1" +IS_PACKAGE_RETURN = True +_FETCH_FILE_RETURN = "_FETCH_FILE_RETURN" + + +class FSModuleImporterTestCase(TestCase): + def setUp(self): + self.path = "/usr/imp/custom_components/" + self.path_without_salsh = "/usr/imp/custom_components" + self.fullname = "module1.module2.module3" + self.module_url = "/usr/imp/custom_components/module1/module2/module3.py" + self.package_url = "/usr/imp/custom_components/module1/module2/module3/__init__.py" + + def test__init__(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + self.assertEqual(self.path, importer.path) + + importer = FSModuleImporter(name="name", modules=[], path=self.path_without_salsh) + self.assertEqual(self.path, importer.path) + + def test_is_package(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + with patch(OS_PATH_EXISTS, MagicMock(return_value=True)): + self.assertTrue(importer.is_package(self.fullname)) + + with patch(OS_PATH_EXISTS, MagicMock(return_value=False)): + self.assertFalse(importer.is_package(self.fullname)) + + @patch(UTILS_IMPORTER_FS_GET_FILE, MagicMock(return_value=GET_FILE_RETURN)) + @patch(UTILS_IMPORTER_FS_GET_SOURCE, MagicMock(return_value=GET_SOURCE_RETURN)) + def test_get_code(self): + expect_code = compile(GET_SOURCE_RETURN, GET_FILE_RETURN, "exec") + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + self.assertEqual(expect_code, importer.get_code(self.fullname)) + + @patch(UTILS_IMPORTER_FS_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_FS__FETCH_FILE_CONTENT, MagicMock(return_value=_FETCH_FILE_RETURN)) + def test_get_source(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + self.assertEqual(_FETCH_FILE_RETURN, importer.get_source(self.fullname)) + importer._fetch_file_content.assert_called_once_with( + importer._file_path(self.fullname, is_pkg=IS_PACKAGE_RETURN) + ) + + @patch(UTILS_IMPORTER_FS_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_FS__FETCH_FILE_CONTENT, MagicMock(return_value=None)) + def test_get_source__fetch_none(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + self.assertRaises(ImportError, importer.get_source, self.fullname) + importer._fetch_file_content.assert_called_once_with( + importer._file_path(self.fullname, is_pkg=IS_PACKAGE_RETURN) + ) + + def test_get_path(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + self.assertEqual(importer.get_path(self.fullname), ["/usr/imp/custom_components/module1/module2/module3"]) + + def test_get_file(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + with patch(UTILS_IMPORTER_FS_IS_PACKAGE, MagicMock(return_value=True)): + self.assertEqual(importer.get_file(self.fullname), self.package_url) + + with patch(UTILS_IMPORTER_FS_IS_PACKAGE, MagicMock(return_value=False)): + self.assertEqual(importer.get_file(self.fullname), self.module_url) + + def test__file_path(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + self.assertEqual(importer._file_path(self.fullname, is_pkg=True), self.package_url) + 
self.assertEqual(importer._file_path(self.fullname, is_pkg=False), self.module_url) + + def test__fetch_file__nocache(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path, use_cache=False) + + first_file_content = "first_file_content" + second_file_content = "second_file_content" + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=first_file_content)): + self.assertEqual(importer._fetch_file_content(self.module_url), first_file_content) + self.assertEqual(importer.file_cache, {}) + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=second_file_content)): + self.assertEqual(importer._fetch_file_content(self.module_url), second_file_content) + self.assertEqual(importer.file_cache, {}) + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=None)): + self.assertIsNone(importer._fetch_file_content(self.module_url)) + self.assertEqual(importer.file_cache, {}) + + def test__fetch_file__use_cache(self): + importer = FSModuleImporter(name="name", modules=[], path=self.path) + + first_file_content = "first_file_content" + second_file_content = "second_file_content" + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=first_file_content)): + self.assertEqual(importer._fetch_file_content(self.module_url), first_file_content) + self.assertEqual(importer.file_cache[self.module_url], first_file_content) + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=second_file_content)): + self.assertEqual(importer._fetch_file_content(self.module_url), first_file_content) + self.assertEqual(importer.file_cache[self.module_url], first_file_content) + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=None)): + self.assertIsNone(importer._fetch_file_content(self.package_url)) + self.assertEqual(importer.file_cache[self.package_url], None) + + with patch(UTILS_IMPORTER_FS__GET_FILE_CONTENT, MagicMock(return_value=second_file_content)): + self.assertIsNone(importer._fetch_file_content(self.package_url)) + self.assertEqual(importer.file_cache[self.package_url], None) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_git.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_git.py new file mode 100644 index 00000000..a9edaf9f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_git.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa +from pipeline.contrib.external_plugins.utils.importer.git import GitRepoModuleImporter + +GET_FILE_RETURN = "GET_FILE_RETURN" +GET_SOURCE_RETURN = "a=1" +IS_PACKAGE_RETURN = False +_FILE_URL_RETURN = "_FILE_URL_RETURN" +_FETCH_REPO_FILE_RETURN = "_FETCH_REPO_FILE_RETURN" + + +class GitRepoModuleImporterTestCase(TestCase): + def setUp(self): + self.repo_raw_url = "https://test-git-repo-raw/" + self.repo_raw_url_without_slash = "https://test-git-repo-raw" + self.branch = "master" + self.fullname = "module1.module2.module3" + self.module_url = "https://test-git-repo-raw/master/module1/module2/module3.py" + self.package_url = "https://test-git-repo-raw/master/module1/module2/module3/__init__.py" + + def test__init__(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + self.assertEqual(importer.repo_raw_url, self.repo_raw_url) + self.assertEqual(importer.branch, self.branch) + + importer = GitRepoModuleImporter( + name="name", modules=[], repo_raw_url=self.repo_raw_url_without_slash, branch=self.branch + ) + self.assertEqual(importer.repo_raw_url, self.repo_raw_url) + self.assertEqual(importer.branch, self.branch) + + self.assertRaises( + ValueError, + GitRepoModuleImporter, + name="name", + modules=[], + repo_raw_url="http://repo-addr/", + branch=self.branch, + ) + + GitRepoModuleImporter( + name="name", modules=[], repo_raw_url="http://repo-addr/", branch=self.branch, secure_only=False + ) + + def test__file_url(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + self.assertEqual(importer._file_url(self.fullname, is_pkg=True), self.package_url) + self.assertEqual(importer._file_url(self.fullname, is_pkg=False), self.module_url) + + def test__fetch_repo_file__no_cache(self): + importer = GitRepoModuleImporter( + name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch, use_cache=False + ) + first_resp = MockResponse(content="first_request_content") + second_resp = MockResponse(content="second_request_content") + + with patch(REQUESTS_GET, MagicMock(return_value=first_resp)): + self.assertEqual(importer._fetch_repo_file(self.module_url), first_resp.content) + self.assertEqual(importer.file_cache, {}) + + with patch(REQUESTS_GET, MagicMock(return_value=second_resp)): + self.assertEqual(importer._fetch_repo_file(self.module_url), second_resp.content) + self.assertEqual(importer.file_cache, {}) + + with patch(REQUESTS_GET, MagicMock(return_value=MockResponse(ok=False))): + self.assertIsNone(importer._fetch_repo_file(self.module_url)) + self.assertEqual(importer.file_cache, {}) + + def test__fetch_repo_file__use_cache(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + first_resp = MockResponse(content="first_request_content") + second_resp = MockResponse(content="second_request_content") + + with patch(REQUESTS_GET, MagicMock(return_value=first_resp)): + self.assertEqual(importer._fetch_repo_file(self.module_url), first_resp.content) + self.assertEqual(importer.file_cache[self.module_url], first_resp.content) + + with patch(REQUESTS_GET, MagicMock(return_value=second_resp)): + self.assertEqual(importer._fetch_repo_file(self.module_url), first_resp.content) + 
self.assertEqual(importer.file_cache[self.module_url], first_resp.content) + + with patch(REQUESTS_GET, MagicMock(return_value=MockResponse(ok=False))): + self.assertIsNone(importer._fetch_repo_file(self.package_url)) + self.assertIsNone(importer.file_cache[self.package_url]) + + with patch(REQUESTS_GET, MagicMock(return_value=second_resp)): + self.assertIsNone(importer._fetch_repo_file(self.package_url)) + self.assertIsNone(importer.file_cache[self.package_url]) + + def test_is_package(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + with patch(UTILS_IMPORTER_GIT__FETCH_REPO_FILE, MagicMock(return_value=None)): + self.assertFalse(importer.is_package(self.fullname)) + importer._fetch_repo_file.assert_called_once_with(importer._file_url(self.fullname, is_pkg=True)) + + with patch(UTILS_IMPORTER_GIT__FETCH_REPO_FILE, MagicMock(return_value="")): + self.assertTrue(importer.is_package(self.fullname)) + importer._fetch_repo_file.assert_called_once_with(importer._file_url(self.fullname, is_pkg=True)) + + @patch(UTILS_IMPORTER_GIT_GET_FILE, MagicMock(return_value=GET_FILE_RETURN)) + @patch(UTILS_IMPORTER_GIT_GET_SOURCE, MagicMock(return_value=GET_SOURCE_RETURN)) + def test_get_code(self): + expect_code = compile(GET_SOURCE_RETURN, GET_FILE_RETURN, "exec") + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + self.assertEqual(expect_code, importer.get_code(self.fullname)) + + @patch(UTILS_IMPORTER_GIT_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_GIT__FETCH_REPO_FILE, MagicMock(return_value=_FETCH_REPO_FILE_RETURN)) + def test_get_source(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + source = importer.get_source(self.fullname) + + self.assertEqual(source, _FETCH_REPO_FILE_RETURN) + importer._fetch_repo_file.assert_called_once_with(importer._file_url(self.fullname, is_pkg=IS_PACKAGE_RETURN)) + + @patch(UTILS_IMPORTER_GIT_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_GIT__FETCH_REPO_FILE, MagicMock(return_value=None)) + def test_get_source__fetch_none(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + self.assertRaises(ImportError, importer.get_source, self.fullname) + importer._fetch_repo_file.assert_called_once_with(importer._file_url(self.fullname, is_pkg=IS_PACKAGE_RETURN)) + + def test_get_path(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + self.assertEqual(importer.get_path(self.fullname), ["https://test-git-repo-raw/master/module1/module2/module3"]) + + def test_get_file(self): + importer = GitRepoModuleImporter(name="name", modules=[], repo_raw_url=self.repo_raw_url, branch=self.branch) + + with patch(UTILS_IMPORTER_GIT_IS_PACKAGE, MagicMock(return_value=False)): + self.assertEqual(importer.get_file(self.fullname), self.module_url) + + with patch(UTILS_IMPORTER_GIT_IS_PACKAGE, MagicMock(return_value=True)): + self.assertEqual(importer.get_file(self.fullname), self.package_url) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_s3.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_s3.py new file mode 100644 index 00000000..0fc2f780 --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_s3.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa +from pipeline.contrib.external_plugins.utils.importer.s3 import CONFIG, S3ModuleImporter + +GET_FILE_RETURN = "GET_FILE_RETURN" +GET_SOURCE_RETURN = "a=1" +IS_PACKAGE_RETURN = True +_FETCH_OBJ_CONTENT_RETURN = "_FETCH_OBJ_CONTENT_RETURN" + + +class S3ModuleImporterTestCase(TestCase): + def setUp(self): + self.service_address = "https://test-s3-address/" + self.service_address_without_slash = "https://test-s3-address" + self.not_secure_service_address = "http://no-secure-address/" + self.bucket = "bucket" + self.access_key = "access_key" + self.secret_key = "secret_key" + self.fullname = "module1.module2.module3" + self.module_url = "https://test-s3-address/bucket/module1/module2/module3.py" + self.package_url = "https://test-s3-address/bucket/module1/module2/module3/__init__.py" + self.module_key = "module1/module2/module3.py" + self.package_key = "module1/module2/module3/__init__.py" + + @patch(BOTO3_RESOURCE, mock_s3_resource) + def test__init__(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + self.assertEqual(self.service_address, importer.service_address) + self.assertEqual( + importer.s3, + mock_s3_resource( + "s3", + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + endpoint_url=self.service_address, + config=CONFIG, + ), + ) + + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address_without_slash, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + self.assertEqual(self.service_address, importer.service_address) + self.assertEqual( + importer.s3, + mock_s3_resource( + "s3", + aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + endpoint_url=self.service_address, + config=CONFIG, + ), + ) + + self.assertRaises( + ValueError, + S3ModuleImporter, + name="name", + modules=[], + service_address=self.not_secure_service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.not_secure_service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + secure_only=False, + ) + self.assertEqual(self.not_secure_service_address, importer.service_address) + self.assertEqual( + importer.s3, + mock_s3_resource( + "s3", + 
aws_access_key_id=self.access_key, + aws_secret_access_key=self.secret_key, + endpoint_url=self.not_secure_service_address, + config=CONFIG, + ), + ) + + def test_is_package(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + with patch(UTILS_IMPORTER_S3__FETCH_OBJ_CONTENT, MagicMock(return_value="")): + self.assertTrue(importer.is_package("a.b.c")) + + with patch(UTILS_IMPORTER_S3__FETCH_OBJ_CONTENT, MagicMock(return_value=None)): + self.assertFalse(importer.is_package("a.b.c")) + + @patch(UTILS_IMPORTER_S3_GET_FILE, MagicMock(return_value=GET_FILE_RETURN)) + @patch(UTILS_IMPORTER_S3_GET_SOURCE, MagicMock(return_value=GET_SOURCE_RETURN)) + def test_get_code(self): + expect_code = compile(GET_SOURCE_RETURN, GET_FILE_RETURN, "exec") + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + self.assertEqual(expect_code, importer.get_code(self.fullname)) + + @patch(UTILS_IMPORTER_S3_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_S3__FETCH_OBJ_CONTENT, MagicMock(return_value=_FETCH_OBJ_CONTENT_RETURN)) + def test_get_source(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + self.assertEqual(_FETCH_OBJ_CONTENT_RETURN, importer.get_source(self.fullname)) + importer._fetch_obj_content.assert_called_once_with(importer._obj_key(self.fullname, is_pkg=IS_PACKAGE_RETURN)) + + @patch(UTILS_IMPORTER_S3_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + @patch(UTILS_IMPORTER_S3__FETCH_OBJ_CONTENT, MagicMock(return_value=None)) + def test_get_source__fetch_none(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + self.assertRaises(ImportError, importer.get_source, self.fullname) + importer._fetch_obj_content.assert_called_once_with(importer._obj_key(self.fullname, is_pkg=IS_PACKAGE_RETURN)) + + @patch(UTILS_IMPORTER_S3_IS_PACKAGE, MagicMock(return_value=IS_PACKAGE_RETURN)) + def test_get_path(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + self.assertEqual(importer.get_path(self.fullname), ["https://test-s3-address/bucket/module1/module2/module3"]) + + def test_get_file(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + with patch(UTILS_IMPORTER_S3_IS_PACKAGE, MagicMock(return_value=False)): + self.assertEqual(importer.get_file(self.fullname), self.module_url) + + with patch(UTILS_IMPORTER_S3_IS_PACKAGE, MagicMock(return_value=True)): + self.assertEqual(importer.get_file(self.fullname), self.package_url) + + def test__obj_key(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + self.assertEqual("module1/module2/module3/__init__.py", importer._obj_key(self.fullname, is_pkg=True)) + 
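# The dotted module path maps onto an S3 object key; packages resolve to their __init__.py object. +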
self.assertEqual("module1/module2/module3.py", importer._obj_key(self.fullname, is_pkg=False)) + + def test__fetch_obj_content__no_cache(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + use_cache=False, + ) + + first_obj_content = "first_obj_content" + second_obj_content = "second_obj_content" + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=first_obj_content)): + self.assertEqual(importer._fetch_obj_content(self.module_key), first_obj_content) + self.assertEqual(importer.obj_cache, {}) + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=second_obj_content)): + self.assertEqual(importer._fetch_obj_content(self.module_key), second_obj_content) + self.assertEqual(importer.obj_cache, {}) + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=None)): + self.assertIsNone(importer._fetch_obj_content(self.module_key)) + self.assertEqual(importer.obj_cache, {}) + + def test__fetch_obj_content__use_cache(self): + importer = S3ModuleImporter( + name="name", + modules=[], + service_address=self.service_address, + bucket=self.bucket, + access_key=self.access_key, + secret_key=self.secret_key, + ) + + first_obj_content = "first_obj_content" + second_obj_content = "second_obj_content" + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=first_obj_content)): + self.assertEqual(importer._fetch_obj_content(self.module_key), first_obj_content) + self.assertEqual(importer.obj_cache[self.module_key], first_obj_content) + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=second_obj_content)): + self.assertEqual(importer._fetch_obj_content(self.module_key), first_obj_content) + self.assertEqual(importer.obj_cache[self.module_key], first_obj_content) + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=None)): + self.assertIsNone(importer._fetch_obj_content(self.package_key)) + self.assertIsNone(importer.obj_cache[self.package_key]) + + with patch(UTILS_IMPORTER_S3__GET_S3_OBJ_CONTENT, MagicMock(return_value=first_obj_content)): + self.assertIsNone(importer._fetch_obj_content(self.package_key)) + self.assertIsNone(importer.obj_cache[self.package_key]) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_utils.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_utils.py new file mode 100644 index 00000000..bdb3f056 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/tests/utils/importer/test_utils.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import sys + +from django.test import TestCase + +from pipeline.contrib.external_plugins.tests.mock import * # noqa +from pipeline.contrib.external_plugins.tests.mock_settings import * # noqa +from pipeline.contrib.external_plugins.utils.importer import GitRepoModuleImporter, utils + + +class UtilsTestCase(TestCase): + @patch(SYS_META_PATH, []) + def test__set_up_importer(self): + utils._setup_importer("1") + utils._setup_importer("2") + + self.assertEqual(sys.meta_path, ["2", "1"]) + + def test__remove_importer(self): + importer_1 = GitRepoModuleImporter( + name="name", modules=["module_1"], repo_raw_url="https://url_1", branch="master" + ) + importer_2 = GitRepoModuleImporter( + name="name", modules=["module_2"], repo_raw_url="https://url_2", branch="master" + ) + importer_3 = GitRepoModuleImporter( + name="name", modules=["module_3"], repo_raw_url="https://url_3", branch="master" + ) + importer_4 = GitRepoModuleImporter( + name="name", modules=["module_4"], repo_raw_url="https://url_4", branch="master" + ) + + with patch(SYS_META_PATH, [importer_1, importer_2, importer_3]): + utils._remove_importer(importer_4) + self.assertEqual(sys.meta_path, [importer_1, importer_2, importer_3]) + utils._remove_importer(importer_1) + self.assertEqual(sys.meta_path, [importer_2, importer_3]) + utils._remove_importer(importer_3) + self.assertEqual(sys.meta_path, [importer_2]) + utils._remove_importer(importer_2) + self.assertEqual(sys.meta_path, []) + + @patch(UTILS_IMPORTER__SETUP_IMPORTER, MagicMock()) + @patch(UTILS_IMPORTER__REMOVE_IMPORTER, MagicMock()) + def test_importer_context__normal(self): + importer = "importer" + with utils.importer_context(importer): + pass + utils._setup_importer.assert_called_once_with(importer) + utils._remove_importer.assert_called_once_with(importer) + + @patch(UTILS_IMPORTER__SETUP_IMPORTER, MagicMock()) + @patch(UTILS_IMPORTER__REMOVE_IMPORTER, MagicMock()) + def test_importer_context__raise_exception(self): + importer = "importer" + + class CustomException(Exception): + pass + + try: + with utils.importer_context(importer): + raise CustomException() + except CustomException: + pass + + utils._setup_importer.assert_called_once_with(importer) + utils._remove_importer.assert_called_once_with(importer) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/__init__.py new file mode 100644 index 00000000..fd3bfda2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.contrib.external_plugins.utils.importer.utils import importer_context # noqa +from pipeline.contrib.external_plugins.utils.importer.git import GitRepoModuleImporter # noqa +from pipeline.contrib.external_plugins.utils.importer.s3 import S3ModuleImporter # noqa +from pipeline.contrib.external_plugins.utils.importer.fs import FSModuleImporter # noqa diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/base.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/base.py new file mode 100644 index 00000000..772e31f4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/base.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import imp +import logging +import sys +import traceback +from abc import ABCMeta, abstractmethod +from contextlib import contextmanager + +from pipeline.contrib.external_plugins.utils import requirement + +logger = logging.getLogger("root") + + +@contextmanager +def hook_sandbox(hook, fullname): + hook_name = hook.__func__.__name__ + try: + logger.info("Execute {hook_name} for {module}".format(module=fullname, hook_name=hook_name)) + yield + except Exception: + logger.error( + "{module} {hook_name} raise exception: {traceback}".format( + module=fullname, hook_name=hook_name, traceback=traceback.format_exc() + ) + ) + + +class NonstandardModuleImporter(object, metaclass=ABCMeta): + def __init__(self, modules, name=None): + self.name = name + self.modules = modules + + def find_module(self, fullname, path=None): + logger.info("=============FINDER: {cls}".format(cls=self.__class__.__name__)) + logger.info("Try to find module: {module} in path: {path}".format(module=fullname, path=path)) + + logger.info("Check if in declared nonstandard modules: {modules}".format(modules=self.modules)) + root_parent = fullname.split(".")[0] + if root_parent not in self.modules: + logger.info("Root module({module}) are not find in nonstandard modules".format(module=root_parent)) + return None + + logger.info("Check if is built-in module") + try: + loader = imp.find_module(fullname, path) + if loader: + logger.info("Found {module} locally".format(module=fullname)) + return None + except ImportError: + pass + + logger.info("Checking if is name repetition") + if fullname.split(".").count(fullname.split(".")[-1]) > 1: + logger.info("Found {module} locally".format(module=fullname)) + return None + + with hook_sandbox(fullname=fullname, hook=self.accept_find_module_request_hook): + self.accept_find_module_request_hook(fullname=fullname, path=path) + + return self + + def load_module(self, fullname): + try: + imp.acquire_lock() + + logger.info("=============LOADER: {cls}".format(cls=self.__class__.__name__)) + logger.info("Try to load module: {module}".format(module=fullname)) + + if fullname in sys.modules: + logger.info("Module {module} already loaded".format(module=fullname)) + return sys.modules[fullname] + + is_pkg = self.is_package(fullname) + + try: + src_code = self.get_source(fullname) + except ImportError as e: + logger.info("Get source code for {module} error: {message}".format(module=fullname, message=e)) + return None + + logger.info("Importing {module}".format(module=fullname)) + mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) + + with hook_sandbox(fullname=fullname, hook=self.pre_load_module_hook): + self.pre_load_module_hook(fullname=fullname, module=mod) + + mod.__file__ = self.get_file(fullname) + mod.__loader__ = self + mod.__name__ = fullname + if is_pkg: + mod.__path__ = self.get_path(fullname) + mod.__package__ = fullname + else: + mod.__package__ = fullname.rpartition(".")[0] + + logger.info("Module prepared, ready to execute source code for {module}".format(module=fullname)) + logger.info("Source code for {module}:\n{src_code}".format(module=fullname, src_code=src_code)) + + self._execute_src_code(src_code=src_code, module=mod) + + with hook_sandbox(fullname=fullname, hook=self.post_load_module_hook): + self.post_load_module_hook(fullname=fullname, module=mod) + + return mod + + except Exception: + + with hook_sandbox(fullname=fullname, hook=self.import_error_hook): + self.import_error_hook(fullname) + + err_msg = "{module} import raise exception: {traceback}".format( 
+ module=fullname, traceback=traceback.format_exc() + ) + logger.error(err_msg) + + if fullname in sys.modules: + logger.info("Remove module {module} from sys.modules".format(module=fullname)) + del sys.modules[fullname] + + raise ImportError(err_msg) + + finally: + imp.release_lock() + + def _execute_src_code(self, src_code, module): + exec(src_code, module.__dict__) + + @abstractmethod + def is_package(self, fullname): + raise NotImplementedError() + + @abstractmethod + def get_code(self, fullname): + raise NotImplementedError() + + @abstractmethod + def get_source(self, fullname): + raise NotImplementedError() + + @abstractmethod + def get_file(self, fullname): + raise NotImplementedError() + + @abstractmethod + def get_path(self, fullname): + raise NotImplementedError() + + def accept_find_module_request_hook(self, fullname, path): + pass + + def pre_load_module_hook(self, fullname, module): + pass + + def post_load_module_hook(self, fullname, module): + pass + + def import_error_hook(self, fullname): + pass + + +class AutoInstallRequirementsImporter(NonstandardModuleImporter, metaclass=ABCMeta): + def post_load_module_hook(self, fullname, module): + requirements = getattr(module, "__requirements__", []) + if not isinstance(requirements, list) or not requirements: + return + + sys.stdout.write( + "Start to install requirements({reqs}) for module({mod})\n".format( + reqs=",".join(requirements), mod=fullname + ) + ) + requirement.install(requirements) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/fs.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/fs.py new file mode 100644 index 00000000..0c43742e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/fs.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License.
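`NonstandardModuleImporter` implements the legacy `find_module`/`load_module` finder-loader protocol and leaves five abstract methods to subclasses. As a minimal sketch (not part of this diff), a subclass that serves source from an in-memory dict only has to answer those five questions; the `SOURCES` store and module name are made up for illustration:

```python
from pipeline.contrib.external_plugins.utils.importer.base import NonstandardModuleImporter

SOURCES = {"demo": "VALUE = 1"}  # hypothetical module source store


class DictModuleImporter(NonstandardModuleImporter):
    def is_package(self, fullname):
        return False  # this toy store only holds flat modules

    def get_code(self, fullname):
        return compile(self.get_source(fullname), self.get_file(fullname), "exec")

    def get_source(self, fullname):
        if fullname not in SOURCES:
            raise ImportError("can not find %s in SOURCES" % fullname)
        return SOURCES[fullname]

    def get_file(self, fullname):
        return "<memory>/%s.py" % fullname

    def get_path(self, fullname):
        return []
```

With `DictModuleImporter(modules=["demo"])` on `sys.meta_path`, `import demo` executes the stored source and leaves `demo.VALUE == 1`.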
+""" + +import logging +import os +import traceback + +from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter + +logger = logging.getLogger("root") + + +class FSModuleImporter(AutoInstallRequirementsImporter): + def __init__(self, name, modules, path, use_cache=True): + super(FSModuleImporter, self).__init__(name=name, modules=modules) + + self.path = path if path.endswith("/") else "%s/" % path + self.use_cache = use_cache + self.file_cache = {} + + def is_package(self, fullname): + return os.path.exists(self._file_path(fullname, is_pkg=True)) + + def get_code(self, fullname): + return compile(self.get_source(fullname), self.get_file(fullname), "exec") + + def get_source(self, fullname): + source_code = self._fetch_file_content(self._file_path(fullname, is_pkg=self.is_package(fullname))) + + if source_code is None: + raise ImportError("Can not find {module} in {path}".format(module=fullname, path=self.path)) + + return source_code + + def get_path(self, fullname): + return [self._file_path(fullname, is_pkg=True).rpartition("/")[0]] + + def get_file(self, fullname): + return self._file_path(fullname, is_pkg=self.is_package(fullname)) + + def _file_path(self, fullname, is_pkg=False): + base_path = "{path}{file_path}".format(path=self.path, file_path=fullname.replace(".", "/")) + file_path = "%s/__init__.py" % base_path if is_pkg else "%s.py" % base_path + return file_path + + def _fetch_file_content(self, file_path): + logger.info("Try to fetch file {file_path}".format(file_path=file_path)) + + if self.use_cache and file_path in self.file_cache: + logger.info("Use content in cache for file: {file_path}".format(file_path=file_path)) + return self.file_cache[file_path] + + file_content = self._get_file_content(file_path) + + if self.use_cache: + self.file_cache[file_path] = file_content + + return file_content + + def _get_file_content(self, file_path): + try: + with open(file_path) as f: + file_content = f.read() + except IOError: + logger.info( + "Error occurred when read {file_path} content: {trace}".format( + file_path=file_path, trace=traceback.format_exc() + ) + ) + file_content = None + + return file_content diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/git.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/git.py new file mode 100644 index 00000000..3507e734 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/git.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import urllib.parse + +import requests + +from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter + +logger = logging.getLogger("root") + + +class GitRepoModuleImporter(AutoInstallRequirementsImporter): + def __init__(self, name, modules, repo_raw_url, branch, use_cache=True, secure_only=True, proxy=None): + super(GitRepoModuleImporter, self).__init__(name=name, modules=modules) + + if secure_only and not repo_raw_url.startswith("https"): + raise ValueError("Only accept https when secure_only is True.") + elif not secure_only: + logger.warning("Using not secure protocol is extremely dangerous!!") + + self.repo_raw_url = repo_raw_url if repo_raw_url.endswith("/") else "%s/" % repo_raw_url + self.branch = branch + self.use_cache = use_cache + self.file_cache = {} + self.proxy = proxy or {} + + def is_package(self, fullname): + return self._fetch_repo_file(self._file_url(fullname, is_pkg=True)) is not None + + def get_code(self, fullname): + return compile(self.get_source(fullname), self.get_file(fullname), "exec") + + def get_source(self, fullname): + source_code = self._fetch_repo_file(self._file_url(fullname, is_pkg=self.is_package(fullname))) + + if source_code is None: + raise ImportError( + "Can not find {module} in {repo}{branch}".format( + module=fullname, repo=self.repo_raw_url, branch=self.branch + ) + ) + return source_code + + def get_path(self, fullname): + return [self._file_url(fullname, is_pkg=True).rpartition("/")[0]] + + def get_file(self, fullname): + return self._file_url(fullname, is_pkg=self.is_package(fullname)) + + def _file_url(self, fullname, is_pkg=False): + base_url = "%s/" % urllib.parse.urljoin(self.repo_raw_url, self.branch) + path = fullname.replace(".", "/") + file_name = "%s/__init__.py" % path if is_pkg else "%s.py" % path + return urllib.parse.urljoin(base_url, file_name) + + def _fetch_repo_file(self, file_url): + logger.info("Try to fetch git file: {file_url}".format(file_url=file_url)) + + if self.use_cache and file_url in self.file_cache: + logger.info("Use content in cache for git file: {file_url}".format(file_url=file_url)) + return self.file_cache[file_url] + + resp = requests.get(file_url, timeout=10, proxies=self.proxy) + + file_content = resp.content if resp.ok else None + + if self.use_cache: + self.file_cache[file_url] = file_content + logger.info("Content cached for git file: {file_url}".format(file_url=file_url)) + + return file_content diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/s3.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/s3.py new file mode 100644 index 00000000..b4d05592 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/s3.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + +import boto3 +from botocore.client import Config +from botocore.exceptions import ClientError + +from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter + +logger = logging.getLogger("root") +CONFIG = Config(connect_timeout=10, read_timeout=10, retries={"max_attempts": 2}) + + +class S3ModuleImporter(AutoInstallRequirementsImporter): + def __init__( + self, + name, + modules, + service_address, + bucket, + access_key, + secret_key, + use_cache=True, + secure_only=True, + source_dir="", + ): + super(S3ModuleImporter, self).__init__(name=name, modules=modules) + + if secure_only and not service_address.startswith("https"): + raise ValueError("Only accept https when secure_only is True.") + elif not secure_only: + logger.warning("Using an insecure protocol is extremely dangerous!") + + self.service_address = service_address if service_address.endswith("/") else "%s/" % service_address + self.bucket = bucket + self.source_dir = source_dir if source_dir == "" or source_dir.endswith("/") else "%s/" % source_dir + self.use_cache = use_cache + self.s3 = boto3.resource( + "s3", + aws_access_key_id=access_key, + aws_secret_access_key=secret_key, + endpoint_url=self.service_address, + config=CONFIG, + ) + self.obj_cache = {} + + def is_package(self, fullname): + return self._fetch_obj_content(self._obj_key(fullname, is_pkg=True)) is not None + + def get_code(self, fullname): + return compile(self.get_source(fullname), self.get_file(fullname), "exec") + + def get_source(self, fullname): + source_code = self._fetch_obj_content(self._obj_key(fullname, is_pkg=self.is_package(fullname))) + + if source_code is None: + raise ImportError( + "Can not find {module} in {service_address}{bucket}/{source_dir}".format( + module=fullname, + service_address=self.service_address, + bucket=self.bucket, + source_dir=self.source_dir, + ) + ) + + return source_code + + def get_path(self, fullname): + return [self.get_file(fullname).rpartition("/")[0]] + + def get_file(self, fullname): + return "{service_address}{bucket}/{key}".format( + service_address=self.service_address, + bucket=self.bucket, + key=self._obj_key(fullname, is_pkg=self.is_package(fullname)), + ) + + def _obj_key(self, fullname, is_pkg): + base_key = self.source_dir + fullname.replace(".", "/") + key = "%s/__init__.py" % base_key if is_pkg else "%s.py" % base_key + return key + + def _fetch_obj_content(self, key): + logger.info("Try to fetch object: {key}".format(key=key)) + + if self.use_cache and key in self.obj_cache: + logger.info("Use content in cache for s3 object: {key}".format(key=key)) + return self.obj_cache[key] + + obj_content = self._get_s3_obj_content(key) + + if self.use_cache: + self.obj_cache[key] = obj_content + + return obj_content + + def _get_s3_obj_content(self, key): + obj = self.s3.Object(bucket_name=self.bucket, key=key) + + try: + resp = obj.get() + obj_content = resp["Body"].read() + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + obj_content = None + else: + raise + + return obj_content diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/utils.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/utils.py new file mode 100644 index 00000000..5d1dc296 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/importer/utils.py @@ -0,0 +1,42 @@ +# -*-
coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import sys +from contextlib import contextmanager + +logger = logging.getLogger("root") + + +@contextmanager +def importer_context(importer): + _setup_importer(importer) + try: + yield + finally: + _remove_importer(importer) + + +def _setup_importer(importer): + logger.info("========== setup importer: %s" % importer) + sys.meta_path.insert(0, importer) + + +def _remove_importer(importer): + for hooked_importer in sys.meta_path: + if hooked_importer is importer: + logger.info("========== remove importer: %s" % importer) + sys.meta_path.remove(hooked_importer) + return diff --git a/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/requirement.py b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/requirement.py new file mode 100644 index 00000000..869228b7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/external_plugins/utils/requirement.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +try: + from pip import main as pipmain +except ImportError: + from pip._internal import main as pipmain + + +def install(requirements): + for r in requirements: + if pipmain(["install", r]) != 0: + raise RuntimeError("can not install requirement %s" % r) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/__init__.py new file mode 100644 index 00000000..e794cecf --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
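`requirement.install` is what `AutoInstallRequirementsImporter.post_load_module_hook` (in `base.py` above) calls after executing a module that declares a `__requirements__` list. A hypothetical plugin module served by one of these importers might therefore look like this; the package and version pin are examples only:

```python
# my_plugins/__init__.py -- hypothetical plugin source served by an importer
__requirements__ = ["six==1.16.0"]  # pip-installed right after this module executes


def entry():
    # import inside the function: requirements are only guaranteed to be
    # installed once post_load_module_hook has run
    import six

    return six.__version__
```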
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +default_app_config = "pipeline.contrib.periodic_task.apps.PeriodicTaskConfig" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/admin.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/admin.py new file mode 100644 index 00000000..1323cabc --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/admin.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline.contrib.periodic_task import models + + +@admin.register(models.PeriodicTask) +class PeriodicTaskAdmin(admin.ModelAdmin): + list_display = ["id", "name", "total_run_count", "last_run_at", "creator"] + search_fields = ["id", "name"] + raw_id_fields = ["template", "celery_task", "snapshot"] + + +@admin.register(models.PeriodicTaskHistory) +class PeriodicTaskHistoryAdmin(admin.ModelAdmin): + list_display = ["id", "start_at", "ex_data", "start_success", "periodic_task"] + search_fields = ["periodic_task__id"] + raw_id_fields = ["periodic_task", "pipeline_instance"] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/apps.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/apps.py new file mode 100644 index 00000000..39653d3a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/apps.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.apps import AppConfig + + +class PeriodicTaskConfig(AppConfig): + name = "pipeline.contrib.periodic_task" + verbose_name = "PipelinePeriodicTask" + + def ready(self): + from pipeline.contrib.periodic_task.tasks import periodic_task_start # noqa # register task diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/context.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/context.py new file mode 100644 index 00000000..69745f9f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/context.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.utils.module_loading import import_string + +from pipeline.conf import settings + + +def get_periodic_task_root_pipeline_context(root_pipeline_data: dict): + try: + provider = import_string(settings.BAMBOO_PERIODIC_TASK_ROOT_PIPELINE_CONTEXT_PROVIER) + except ImportError: + return {} + + return provider(root_pipeline_data) + + +def get_periodic_task_subprocess_context(root_pipeline_data: dict): + try: + provider = import_string(settings.BAMBOO_PERIODIC_TASK_SUBPROCESS_CONTEXT_PROVIER) + except ImportError: + return {} + + return provider(root_pipeline_data) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/compat.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/compat.py new file mode 100644 index 00000000..4cafa717 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/compat.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import sys + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + + +def python_2_unicode_compatible(cls): + """Taken from Django project (django/utils/encoding.py) & modified a bit to + always have __unicode__ method available. + """ + if "__str__" not in cls.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % cls.__name__ + ) + + cls.__unicode__ = cls.__str__ + + if PY2: + cls.__str__ = lambda self: self.__unicode__().encode("utf-8") + + return cls diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/db.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/db.py new file mode 100644 index 00000000..4359cd60 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/db.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from contextlib import contextmanager + +import django +from django.db import transaction + +if django.VERSION < (1, 6): # pragma: no cover + + def get_queryset(s): + return s.get_query_set() + + +else: + + def get_queryset(s): # noqa + return s.get_queryset() + + +try: + from django.db.transaction import atomic # noqa +except ImportError: # pragma: no cover + + try: + from django.db.transaction import Transaction # noqa + except ImportError: + + @contextmanager + def commit_on_success(*args, **kwargs): + try: + transaction.enter_transaction_management(*args, **kwargs) + transaction.managed(True, *args, **kwargs) + try: + yield + except Exception: + if transaction.is_dirty(*args, **kwargs): + transaction.rollback(*args, **kwargs) + raise + else: + if transaction.is_dirty(*args, **kwargs): + try: + transaction.commit(*args, **kwargs) + except Exception: + transaction.rollback(*args, **kwargs) + raise + finally: + transaction.leave_transaction_management(*args, **kwargs) + + else: # pragma: no cover + from django.db.transaction import commit_on_success # noqa + + commit_unless_managed = transaction.commit_unless_managed + rollback_unless_managed = transaction.rollback_unless_managed +else: + + @contextmanager + def commit_on_success(using=None): # noqa + connection = transaction.get_connection(using) + if connection.features.autocommits_when_autocommit_is_off: + # ignore stupid warnings and errors + yield + else: + with transaction.atomic(using): + yield + + def commit_unless_managed(*args, **kwargs): # noqa + pass + + def rollback_unless_managed(*args, **kwargs): # noqa + pass diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/managers.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/managers.py new file mode 100644 index 00000000..c0c6049d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/managers.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import warnings +from functools import wraps +from itertools import count + +from django.conf import settings +from django.db import connection, models +from django.db.models.query import QuerySet + +from pipeline.contrib.periodic_task.djcelery.db import ( + commit_on_success, + get_queryset, + rollback_unless_managed, +) +from pipeline.contrib.periodic_task.djcelery.utils import now + +try: + from django.db import connections, router +except ImportError: # pre-Django 1.2 + connections = router = None # noqa + + +try: + from celery.utils.timeutils import maybe_timedelta +except ImportError: + from celery.utils.time import maybe_timedelta + + +def update_model_with_dict(obj, fields): + [setattr(obj, attr_name, attr_value) for attr_name, attr_value in list(fields.items())] + obj.save() + return obj + + +class TxIsolationWarning(UserWarning): + pass + + +def transaction_retry(max_retries=1): + """Decorator for methods doing database operations. + + If the database operation fails, it will retry the operation + at most ``max_retries`` times. + + """ + + def _outer(fun): + @wraps(fun) + def _inner(*args, **kwargs): + _max_retries = kwargs.pop("exception_retry_count", max_retries) + for retries in count(0): + try: + return fun(*args, **kwargs) + except Exception: # pragma: no cover + # Depending on the database backend used we can experience + # various exceptions. E.g. psycopg2 raises an exception + # if some operation breaks the transaction, so saving + # the task result won't be possible until we rollback + # the transaction. + if retries >= _max_retries: + raise + try: + rollback_unless_managed() + except Exception: + pass + + return _inner + + return _outer + + +class ExtendedQuerySet(QuerySet): + def update_or_create(self, **kwargs): + obj, created = self.get_or_create(**kwargs) + + if not created: + fields = dict(kwargs.pop("defaults", {})) + fields.update(kwargs) + update_model_with_dict(obj, fields) + + return obj, created + + +class ExtendedManager(models.Manager): + def get_queryset(self): + return ExtendedQuerySet(self.model) + + get_query_set = get_queryset # Pre django 1.6 + + def update_or_create(self, **kwargs): + return get_queryset(self).update_or_create(**kwargs) + + def connection_for_write(self): + if connections: + return connections[router.db_for_write(self.model)] + return connection + + def connection_for_read(self): + if connections: + return connections[self.db] + return connection + + def current_engine(self): + try: + return settings.DATABASES[self.db]["ENGINE"] + except AttributeError: + return settings.DATABASE_ENGINE + + +class ResultManager(ExtendedManager): + def get_all_expired(self, expires): + """Get all expired task results.""" + return self.filter(date_done__lt=now() - maybe_timedelta(expires)) + + def delete_expired(self, expires): + """Delete all expired taskset results.""" + meta = self.model._meta + with commit_on_success(): + self.get_all_expired(expires).update(hidden=True) + cursor = self.connection_for_write().cursor() + cursor.execute( + "DELETE FROM {0.db_table} WHERE hidden=%s".format(meta), (True,), + ) + + +class PeriodicTaskManager(ExtendedManager): + def enabled(self): + return self.filter(enabled=True) + + +class TaskManager(ResultManager): + """Manager for :class:`celery.models.Task` models.""" + + _last_id = None + + def get_task(self, task_id): + """Get task meta for task by ``task_id``. + + :keyword exception_retry_count: How many times to retry by + transaction rollback on exception. 
This could theoretically + happen in a race condition if another worker is trying to + create the same task. The default is to retry once. + + """ + try: + return self.get(task_id=task_id) + except self.model.DoesNotExist: + if self._last_id == task_id: + self.warn_if_repeatable_read() + self._last_id = task_id + return self.model(task_id=task_id) + + @transaction_retry(max_retries=2) + def store_result(self, task_id, result, status, traceback=None, children=None): + """Store the result and status of a task. + + :param task_id: task id + + :param result: The return value of the task, or an exception + instance raised by the task. + + :param status: Task status. See + :meth:`celery.result.AsyncResult.get_status` for a list of + possible status values. + + :keyword traceback: The traceback at the point of exception (if the + task failed). + + :keyword children: List of serialized results of subtasks + of this task. + + :keyword exception_retry_count: How many times to retry by + transaction rollback on exception. This could theoretically + happen in a race condition if another worker is trying to + create the same task. The default is to retry twice. + + """ + return self.update_or_create( + task_id=task_id, + defaults={"status": status, "result": result, "traceback": traceback, "meta": {"children": children}}, + ) + + def warn_if_repeatable_read(self): + if "mysql" in self.current_engine().lower(): + cursor = self.connection_for_read().cursor() + if cursor.execute("SELECT @@tx_isolation"): + isolation = cursor.fetchone()[0] + if isolation == "REPEATABLE-READ": + warnings.warn( + TxIsolationWarning( + "Polling results with transaction isolation level " + "repeatable-read within the same transaction " + "may give outdated results. Be sure to commit the " + "transaction for each poll iteration." + ) + ) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/migrate.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/migrate.py new file mode 100644 index 00000000..3921dac3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/migrate.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
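`transaction_retry` rolls the transaction back on failure and retries the wrapped operation up to `max_retries` times, with a per-call override available through the `exception_retry_count` keyword. A sketch of applying it to a hypothetical manager method, mirroring how `TaskManager.store_result` is decorated above:

```python
from pipeline.contrib.periodic_task.djcelery.managers import (
    ExtendedManager,
    transaction_retry,
)


class DemoManager(ExtendedManager):  # hypothetical manager
    @transaction_retry(max_retries=2)
    def mark_started(self, task_id):
        # retried after rollback if the backend raises mid-transaction
        return self.update_or_create(task_id=task_id, defaults={"status": "STARTED"})
```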
+""" + +from django.db import transaction +from django_celery_beat.models import ( + IntervalSchedule, + CrontabSchedule, + PeriodicTask, +) + +from pipeline.contrib.periodic_task.djcelery.models import ( + IntervalSchedule as DjCeleryIntervalSchedule, + CrontabSchedule as DjCeleryCrontabSchedule, + DjCeleryPeriodicTask, +) + +BATCH_SIZE = 500 + + +@transaction.atomic +def try_to_migrate_to_django_celery_beat(): + """ + try to migrate djcelery to django_celery_beat + if django_celery_beat models has data, indicate that pipeline is first use in project + (because old version pipeline is not compatible with django celery beat) + so we will not do the migration works + """ + if IntervalSchedule.objects.exists() or CrontabSchedule.objects.exists() or PeriodicTask.objects.exists(): + print("django_celery_beat in used, skip pipeline djcelery migration works") + return + + # migrate IntervalScheudle + old_intervals = DjCeleryIntervalSchedule.objects.all() + new_intervals = [] + for oi in old_intervals: + new_intervals.append(IntervalSchedule(id=oi.id, every=oi.every, period=oi.period)) + IntervalSchedule.objects.bulk_create(new_intervals, batch_size=BATCH_SIZE) + print("[pipeline]migrate {} interval objects".format(len(new_intervals))) + + # migrate CrontabSchedule + old_crontabs = DjCeleryCrontabSchedule.objects.all() + new_crontabs = [] + for oc in old_crontabs: + new_crontabs.append( + CrontabSchedule( + id=oc.id, + minute=oc.minute, + hour=oc.hour, + day_of_week=oc.day_of_week, + day_of_month=oc.day_of_month, + month_of_year=oc.month_of_year, + timezone=oc.timezone, + ) + ) + CrontabSchedule.objects.bulk_create(new_crontabs, batch_size=BATCH_SIZE) + print("[pipeline]migrate {} crontab objects".format(len(new_crontabs))) + + # migrate PeriodicTask + old_tasks = DjCeleryPeriodicTask.objects.all() + new_tasks = [] + for ot in old_tasks: + new_tasks.append( + PeriodicTask( + id=ot.id, + name=ot.name, + task=ot.task, + interval_id=ot.interval_id, + crontab_id=ot.crontab_id, + solar_id=None, + clocked_id=None, + args=ot.args, + kwargs=ot.kwargs, + queue=ot.queue, + exchange=ot.exchange, + routing_key=ot.routing_key, + expires=ot.expires, + enabled=ot.enabled, + last_run_at=ot.last_run_at, + total_run_count=ot.total_run_count, + date_changed=ot.date_changed, + description=ot.description, + ) + ) + PeriodicTask.objects.bulk_create(new_tasks, batch_size=BATCH_SIZE) + print("[pipeline]migrate {} periodic tasks".format(len(new_tasks))) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/models.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/models.py new file mode 100644 index 00000000..7bdb1646 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/models.py @@ -0,0 +1,218 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from datetime import timedelta + +import timezone_field +from celery import schedules +from django.core.exceptions import MultipleObjectsReturned, ValidationError +from django.db import models +from django.db.models import signals +from django.utils.translation import ugettext_lazy as _ +from pipeline.contrib.periodic_task.djcelery import managers +from pipeline.contrib.periodic_task.djcelery.tzcrontab import TzAwareCrontab +from pipeline.contrib.periodic_task.djcelery.utils import now +from pipeline.contrib.periodic_task.djcelery.compat import python_2_unicode_compatible + + +PERIOD_CHOICES = ( + ("days", _("Days")), + ("hours", _("Hours")), + ("minutes", _("Minutes")), + ("seconds", _("Seconds")), + ("microseconds", _("Microseconds")), +) + + +@python_2_unicode_compatible +class IntervalSchedule(models.Model): + every = models.IntegerField(_("every"), null=False) + period = models.CharField(_("period"), max_length=24, choices=PERIOD_CHOICES,) + + class Meta: + verbose_name = _("interval") + verbose_name_plural = _("intervals") + ordering = ["period", "every"] + + @property + def schedule(self): + return schedules.schedule(timedelta(**{self.period: self.every})) + + @classmethod + def from_schedule(cls, schedule, period="seconds"): + every = max(schedule.run_every.total_seconds(), 0) + try: + return cls.objects.get(every=every, period=period) + except cls.DoesNotExist: + return cls(every=every, period=period) + except MultipleObjectsReturned: + cls.objects.filter(every=every, period=period).delete() + return cls(every=every, period=period) + + def __str__(self): + if self.every == 1: + return _("every {0.period_singular}").format(self) + return _("every {0.every:d} {0.period}").format(self) + + @property + def period_singular(self): + return self.period[:-1] + + +def cronexp(field): + return field and str(field).replace(" ", "") or "*" + + +@python_2_unicode_compatible +class CrontabSchedule(models.Model): + minute = models.CharField(_("minute"), max_length=64, default="*") + hour = models.CharField(_("hour"), max_length=64, default="*") + day_of_week = models.CharField(_("day of week"), max_length=64, default="*",) + day_of_month = models.CharField(_("day of month"), max_length=64, default="*",) + month_of_year = models.CharField(_("month of year"), max_length=64, default="*",) + timezone = timezone_field.TimeZoneField(default="UTC") + + class Meta: + verbose_name = _("crontab") + verbose_name_plural = _("crontabs") + ordering = ["month_of_year", "day_of_month", "day_of_week", "hour", "minute"] + + def __str__(self): + return "{} {} {} {} {} (m/h/d/dM/MY)".format( + cronexp(self.minute), + cronexp(self.hour), + cronexp(self.day_of_week), + cronexp(self.day_of_month), + cronexp(self.month_of_year), + ) + + @property + def schedule(self): + return TzAwareCrontab( + minute=self.minute, + hour=self.hour, + day_of_week=self.day_of_week, + day_of_month=self.day_of_month, + month_of_year=self.month_of_year, + tz=self.timezone, + ) + + @classmethod + def from_schedule(cls, schedule): + spec = { + "minute": schedule._orig_minute, + "hour": schedule._orig_hour, + "day_of_week": schedule._orig_day_of_week, + "day_of_month": schedule._orig_day_of_month, + "month_of_year": schedule._orig_month_of_year, + "timezone": schedule.tz, + } + try: + return cls.objects.get(**spec) + except cls.DoesNotExist: + return cls(**spec) + except MultipleObjectsReturned: + cls.objects.filter(**spec).delete() + return cls(**spec) + + +class DjCeleryPeriodicTasks(models.Model): + ident = 
models.SmallIntegerField(default=1, primary_key=True, unique=True) + last_update = models.DateTimeField(null=False) + + objects = managers.ExtendedManager() + + @classmethod + def changed(cls, instance, **kwargs): + if not instance.no_changes: + cls.objects.update_or_create(ident=1, defaults={"last_update": now()}) + + @classmethod + def last_change(cls): + try: + return cls.objects.get(ident=1).last_update + except cls.DoesNotExist: + pass + + +@python_2_unicode_compatible +class DjCeleryPeriodicTask(models.Model): + name = models.CharField(_("name"), max_length=200, unique=True, help_text=_("Useful description"),) + task = models.CharField(_("task name"), max_length=200) + interval = models.ForeignKey( + IntervalSchedule, null=True, blank=True, verbose_name=_("interval"), on_delete=models.CASCADE, + ) + crontab = models.ForeignKey( + CrontabSchedule, + null=True, + blank=True, + verbose_name=_("crontab"), + on_delete=models.CASCADE, + help_text=_("Use one of interval/crontab"), + ) + args = models.TextField(_("Arguments"), blank=True, default="[]", help_text=_("JSON encoded positional arguments"),) + kwargs = models.TextField( + _("Keyword arguments"), blank=True, default="{}", help_text=_("JSON encoded keyword arguments"), + ) + queue = models.CharField( + _("queue"), max_length=200, blank=True, null=True, default=None, help_text=_("Queue defined in CELERY_QUEUES"), + ) + exchange = models.CharField(_("exchange"), max_length=200, blank=True, null=True, default=None,) + routing_key = models.CharField(_("routing key"), max_length=200, blank=True, null=True, default=None,) + expires = models.DateTimeField(_("expires"), blank=True, null=True,) + enabled = models.BooleanField(_("enabled"), default=True,) + last_run_at = models.DateTimeField(auto_now=False, auto_now_add=False, editable=False, blank=True, null=True,) + total_run_count = models.PositiveIntegerField(default=0, editable=False,) + date_changed = models.DateTimeField(auto_now=True) + description = models.TextField(_("description"), blank=True) + + objects = managers.PeriodicTaskManager() + no_changes = False + + class Meta: + verbose_name = _("djcelery periodic task") + verbose_name_plural = _("djcelery periodic tasks") + + def validate_unique(self, *args, **kwargs): + super(DjCeleryPeriodicTask, self).validate_unique(*args, **kwargs) + if not self.interval and not self.crontab: + raise ValidationError({"interval": ["One of interval or crontab must be set."]}) + if self.interval and self.crontab: + raise ValidationError({"crontab": ["Only one of interval or crontab must be set"]}) + + def save(self, *args, **kwargs): + self.exchange = self.exchange or None + self.routing_key = self.routing_key or None + self.queue = self.queue or None + if not self.enabled: + self.last_run_at = None + super(DjCeleryPeriodicTask, self).save(*args, **kwargs) + + def __str__(self): + fmt = "{0.name}: {{no schedule}}" + if self.interval: + fmt = "{0.name}: {0.interval}" + if self.crontab: + fmt = "{0.name}: {0.crontab}" + return fmt.format(self) + + @property + def schedule(self): + if self.interval: + return self.interval.schedule + if self.crontab: + return self.crontab.schedule + + +signals.pre_delete.connect(DjCeleryPeriodicTasks.changed, sender=DjCeleryPeriodicTask) +signals.pre_save.connect(DjCeleryPeriodicTasks.changed, sender=DjCeleryPeriodicTask) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/tzcrontab.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/tzcrontab.py new file mode 100644 index 
00000000..b0fdee7d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/tzcrontab.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +from collections import namedtuple +from datetime import datetime + +import pytz +from celery import schedules +from celery.utils.time import is_naive, make_aware + +schedstate = namedtuple("schedstate", ("is_due", "next")) +logger = logging.getLogger("celery") + + +class TzAwareCrontab(schedules.crontab): + """Timezone Aware Crontab.""" + + def __init__( + self, minute="*", hour="*", day_of_week="*", day_of_month="*", month_of_year="*", tz=pytz.utc, app=None, + ): + """Overwrite Crontab constructor to include a timezone argument.""" + self.tz = tz + + nowfun = self.nowfunc + + super(TzAwareCrontab, self).__init__( + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + month_of_year=month_of_year, + nowfun=nowfun, + app=app, + ) + + def nowfunc(self): + return self.tz.normalize(pytz.utc.localize(datetime.utcnow())) + + def is_due(self, last_run_at): + """Calculate when the next run will take place. + Return tuple of (is_due, next_time_to_check). + The last_run_at argument needs to be timezone aware. 
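+        The next value in the returned tuple is a delay in seconds until the schedule should be checked again.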
+ """ + logger.debug("################### is_due begin ###################") + logger.debug("native last_run_at: %s" % last_run_at) + + last_run_at = last_run_at.astimezone(self.tz) + + now = datetime.now(self.tz) + logger.debug("last_run_at: %s" % last_run_at) + logger.debug("now: %s" % now) + + rem_delta = self.remaining_estimate(last_run_at) + + logger.debug("rem_delta: %s" % rem_delta) + logger.debug("next run at: %s" % (now + rem_delta)) + + rem = max(rem_delta.total_seconds(), 0) + due = rem == 0 + if due: + rem_delta = self.remaining_estimate(self.now()) + rem = max(rem_delta.total_seconds(), 0) + + logger.debug("self: %s" % self) + logger.debug("due: {} {} {}".format(self.tz, due, rem)) + logger.debug("################### is_due end ###################") + return schedstate(due, rem) + + # Needed to support pickling + def __repr__(self): + return ( + "".format(self) + ) + + def __reduce__(self): + return ( + self.__class__, + ( + self._orig_minute, + self._orig_hour, + self._orig_day_of_week, + self._orig_day_of_month, + self._orig_month_of_year, + self.tz, + ), + None, + ) + + def __eq__(self, other): + if isinstance(other, schedules.crontab): + return ( + other.month_of_year == self.month_of_year + and other.day_of_month == self.day_of_month + and other.day_of_week == self.day_of_week + and other.hour == self.hour + and other.minute == self.minute + and other.tz == self.tz + ) + return NotImplemented + + def maybe_make_aware(self, dt): + if not is_naive(dt): + return dt + return make_aware(dt, self.tz) + + def to_local(self, dt): + return self.maybe_make_aware(dt) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/utils.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/utils.py new file mode 100644 index 00000000..4d3a182c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/djcelery/utils.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +# -- XXX This module must not use translation as that causes +# -- a recursive loader import! + + +from django.conf import settings +from django.utils import timezone + + +def make_aware(value): + if settings.USE_TZ: + # naive datetimes are assumed to be in UTC. + if timezone.is_naive(value): + value = timezone.make_aware(value, timezone.utc) + # then convert to the Django configured timezone. 
+ default_tz = timezone.get_default_timezone() + value = timezone.localtime(value, default_tz) + return value + + +def now(): + return make_aware(timezone.now()) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0001_initial.py new file mode 100644 index 00000000..7cc1120c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0001_initial.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models +import django.db.models.deletion +import timezone_field.fields +import pipeline.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0016_auto_20181220_0958"), + ] + + operations = [ + migrations.CreateModel( + name="CrontabSchedule", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), + ("minute", models.CharField(default=b"*", max_length=64, verbose_name="minute"),), + ("hour", models.CharField(default=b"*", max_length=64, verbose_name="hour"),), + ("day_of_week", models.CharField(default=b"*", max_length=64, verbose_name="day of week"),), + ("day_of_month", models.CharField(default=b"*", max_length=64, verbose_name="day of month"),), + ("month_of_year", models.CharField(default=b"*", max_length=64, verbose_name="month of year"),), + ("timezone", timezone_field.fields.TimeZoneField(default=b"UTC")), + ], + options={ + "ordering": ["month_of_year", "day_of_month", "day_of_week", "hour", "minute",], + "verbose_name": "crontab", + "verbose_name_plural": "crontabs", + }, + ), + migrations.CreateModel( + name="DjCeleryPeriodicTask", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), + ( + "name", + models.CharField(help_text="Useful description", unique=True, max_length=200, verbose_name="name",), + ), + ("task", models.CharField(max_length=200, verbose_name="task name")), + ( + "args", + models.TextField( + default=b"[]", + help_text="JSON encoded positional arguments", + verbose_name="Arguments", + blank=True, + ), + ), + ( + "kwargs", + models.TextField( + default=b"{}", + help_text="JSON encoded keyword arguments", + verbose_name="Keyword arguments", + blank=True, + ), + ), + ( + "queue", + models.CharField( + default=None, + max_length=200, + blank=True, + help_text="Queue defined in CELERY_QUEUES", + null=True, + verbose_name="queue", + ), + ), + ( + "exchange", + models.CharField(default=None, max_length=200, null=True, verbose_name="exchange", blank=True,), + ), + ( + "routing_key", + models.CharField(default=None, max_length=200, null=True, verbose_name="routing key", blank=True,), + ), + ("expires", models.DateTimeField(null=True, 
verbose_name="expires", blank=True),), + ("enabled", models.BooleanField(default=True, verbose_name="enabled")), + ("last_run_at", models.DateTimeField(null=True, editable=False, blank=True),), + ("total_run_count", models.PositiveIntegerField(default=0, editable=False),), + ("date_changed", models.DateTimeField(auto_now=True)), + ("description", models.TextField(verbose_name="description", blank=True),), + ( + "crontab", + models.ForeignKey( + blank=True, + to="periodic_task.CrontabSchedule", + help_text="Use one of interval/crontab", + null=True, + verbose_name="crontab", + on_delete=models.CASCADE, + ), + ), + ], + options={"verbose_name": "periodic task", "verbose_name_plural": "periodic tasks",}, + ), + migrations.CreateModel( + name="DjCeleryPeriodicTasks", + fields=[ + ("ident", models.SmallIntegerField(default=1, unique=True, serialize=False, primary_key=True),), + ("last_update", models.DateTimeField()), + ], + ), + migrations.CreateModel( + name="IntervalSchedule", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), + ("every", models.IntegerField(verbose_name="every")), + ( + "period", + models.CharField( + max_length=24, + verbose_name="period", + choices=[ + (b"days", "Days"), + (b"hours", "Hours"), + (b"minutes", "Minutes"), + (b"seconds", "Seconds"), + (b"microseconds", "Microseconds"), + ], + ), + ), + ], + options={"ordering": ["period", "every"], "verbose_name": "interval", "verbose_name_plural": "intervals",}, + ), + migrations.CreateModel( + name="PeriodicTask", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), + ("name", models.CharField(max_length=64, verbose_name="\u5468\u671f\u4efb\u52a1\u540d\u79f0",),), + ("cron", models.CharField(max_length=128, verbose_name="\u8c03\u5ea6\u7b56\u7565"),), + ("total_run_count", models.PositiveIntegerField(default=0, verbose_name="\u6267\u884c\u6b21\u6570"),), + ("last_run_at", models.DateTimeField(null=True, verbose_name="\u4e0a\u6b21\u8fd0\u884c\u65f6\u95f4"),), + ("creator", models.CharField(default=b"", max_length=32, verbose_name="\u521b\u5efa\u8005"),), + ("extra_info", pipeline.models.CompressJSONField(verbose_name="\u989d\u5916\u4fe1\u606f", null=True),), + ( + "celery_task", + models.ForeignKey( + verbose_name="celery \u5468\u671f\u4efb\u52a1\u5b9e\u4f8b", + to="periodic_task.DjCeleryPeriodicTask", + null=True, + on_delete=models.SET_NULL, + ), + ), + ( + "snapshot", + models.ForeignKey( + related_name="periodic_tasks", + verbose_name="\u7528\u4e8e\u521b\u5efa\u6d41\u7a0b\u5b9e\u4f8b\u7684\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + on_delete=models.DO_NOTHING, + ), + ), + ( + "template", + models.ForeignKey( + related_name="periodic_tasks", + on_delete=django.db.models.deletion.SET_NULL, + verbose_name="\u5468\u671f\u4efb\u52a1\u5bf9\u5e94\u7684\u6a21\u677f", + to_field="template_id", + to="pipeline.PipelineTemplate", + null=True, + ), + ), + ], + ), + migrations.CreateModel( + name="PeriodicTaskHistory", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), + ("ex_data", models.TextField(verbose_name="\u5f02\u5e38\u4fe1\u606f")), + ("start_at", models.DateTimeField(auto_now_add=True, verbose_name="\u5f00\u59cb\u65f6\u95f4"),), + ( + "start_success", + models.BooleanField(default=True, verbose_name="\u662f\u5426\u542f\u52a8\u6210\u529f",), + ), + ( + "periodic_task", + models.ForeignKey( + related_name="instance_rel", + 
verbose_name="\u5468\u671f\u4efb\u52a1", + to="periodic_task.PeriodicTask", + null=True, + on_delete=models.DO_NOTHING, + ), + ), + ( + "pipeline_instance", + models.ForeignKey( + related_name="periodic_task_rel", + verbose_name="Pipeline \u5b9e\u4f8b", + to_field="instance_id", + to="pipeline.PipelineInstance", + null=True, + on_delete=models.DO_NOTHING, + ), + ), + ], + ), + migrations.AddField( + model_name="djceleryperiodictask", + name="interval", + field=models.ForeignKey( + verbose_name="interval", + blank=True, + to="periodic_task.IntervalSchedule", + null=True, + on_delete=models.CASCADE, + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py new file mode 100644 index 00000000..aeb98a61 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("periodic_task", "0001_initial"), + ] + + operations = [ + migrations.AlterModelOptions( + name="djceleryperiodictask", + options={"verbose_name": "djcelery periodic task", "verbose_name_plural": "djcelery periodic tasks",}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py new file mode 100644 index 00000000..0f221eae --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2019-12-13 08:19 +from __future__ import unicode_literals + +import timezone_field.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("periodic_task", "0002_auto_20190103_1918"), + ] + + operations = [ + migrations.AddField( + model_name="periodictask", + name="priority", + field=models.IntegerField(default=100, verbose_name="流程优先级"), + ), + migrations.AddField( + model_name="periodictask", + name="queue", + field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), + ), + migrations.AlterField( + model_name="crontabschedule", + name="day_of_month", + field=models.CharField( + default="*", max_length=64, verbose_name="day of month" + ), + ), + migrations.AlterField( + model_name="crontabschedule", + name="day_of_week", + field=models.CharField( + default="*", max_length=64, verbose_name="day of week" + ), + ), + migrations.AlterField( + model_name="crontabschedule", + name="hour", + field=models.CharField(default="*", 
max_length=64, verbose_name="hour"), + ), + migrations.AlterField( + model_name="crontabschedule", + name="minute", + field=models.CharField(default="*", max_length=64, verbose_name="minute"), + ), + migrations.AlterField( + model_name="crontabschedule", + name="month_of_year", + field=models.CharField( + default="*", max_length=64, verbose_name="month of year" + ), + ), + migrations.AlterField( + model_name="crontabschedule", + name="timezone", + field=timezone_field.fields.TimeZoneField(default="UTC"), + ), + migrations.AlterField( + model_name="djceleryperiodictask", + name="args", + field=models.TextField( + blank=True, + default="[]", + help_text="JSON encoded positional arguments", + verbose_name="Arguments", + ), + ), + migrations.AlterField( + model_name="djceleryperiodictask", + name="kwargs", + field=models.TextField( + blank=True, + default="{}", + help_text="JSON encoded keyword arguments", + verbose_name="Keyword arguments", + ), + ), + migrations.AlterField( + model_name="intervalschedule", + name="period", + field=models.CharField( + choices=[ + ("days", "Days"), + ("hours", "Hours"), + ("minutes", "Minutes"), + ("seconds", "Seconds"), + ("microseconds", "Microseconds"), + ], + max_length=24, + verbose_name="period", + ), + ), + migrations.AlterField( + model_name="periodictask", + name="creator", + field=models.CharField(default="", max_length=32, verbose_name="创建者"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py new file mode 100644 index 00000000..3571e4d3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2019-12-13 08:28 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("periodic_task", "0003_auto_20191213_0819"), + ] + + operations = [ + migrations.AddField( + model_name="periodictaskhistory", + name="priority", + field=models.IntegerField(default=100, verbose_name="流程优先级"), + ), + migrations.AddField( + model_name="periodictaskhistory", + name="queue", + field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py new file mode 100644 index 00000000..759220b4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py @@ -0,0 +1,22 @@ +# Generated by Django 2.2.6 on 2020-12-16 02:56 + +from django.db import migrations + + +def reverse_func(apps, schema_editor): + raise Exception("task migrate cannot reverse") + + +def forward_func(apps, schema_editor): + from pipeline.contrib.periodic_task.djcelery import migrate + + migrate.try_to_migrate_to_django_celery_beat() + + +class Migration(migrations.Migration): + + dependencies = [ + ("periodic_task", "0004_auto_20191213_0828"), + ] + + operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py new file mode 100644 index 00000000..44f177ec --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py @@ -0,0 +1,24 @@ +# Generated by Django 2.2.6 on 2020-11-02 13:19 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("periodic_task", "0005_migrate_task"), + ] + + operations = [ + migrations.AlterField( + model_name="periodictask", + name="celery_task", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="django_celery_beat.PeriodicTask", + verbose_name="celery 周期任务实例", + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/models.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/models.py new file mode 100644 index 00000000..5ff5bffb --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/models.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
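The `PeriodicTaskManager.create_task` defined just below is the main entry point of this module: it snapshots the execution data, gets or creates a `django_celery_beat` crontab schedule, and wires both to a deliberately disabled beat entry. A minimal usage sketch (assuming a migrated Django project with `pipeline` and `django_celery_beat` installed; the values mirror the test fixtures further down):

```python
from pipeline.contrib.periodic_task.models import PeriodicTask

# template may be None and omitted cron keys default to "*",
# exactly as exercised in pipeline/contrib/periodic_task/tests.py
task = PeriodicTask.objects.create_task(
    name="example-task",
    template=None,
    cron={"minute": "*/5"},  # every five minutes, UTC unless a timezone is passed
    data={"constants": {}},
    creator="admin",
)
task.set_enabled(True)  # the beat entry is created with enabled=False
```

Note that `create_task` stores the crontab's string form on the task itself (`cron=schedule.__str__()`), so the schedule a task runs on is visible without joining through `celery_task`.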
+""" + +import ujson as json +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY +from pipeline.contrib.periodic_task.signals import periodic_task_start_failed +from pipeline.exceptions import InvalidOperationException +from pipeline.models import ( + CompressJSONField, + PipelineInstance, + PipelineTemplate, + Snapshot, +) +from pipeline.utils.uniqid import uniqid +from django_celery_beat.models import ( + PeriodicTask as DjangoCeleryBeatPeriodicTask, + CrontabSchedule as DjangoCeleryBeatCrontabSchedule, +) + +from pipeline.contrib.periodic_task.djcelery.models import * # noqa + +BAMBOO_ENGINE_TRIGGER_TASK = "pipeline.contrib.periodic_task.tasks.bamboo_engine_periodic_task_start" + + +class PeriodicTaskManager(models.Manager): + def create_task( + self, + name, + template, + cron, + data, + creator, + timezone=None, + extra_info=None, + spread=False, + priority=PIPELINE_DEFAULT_PRIORITY, + queue="", + trigger_task="", + ): + snapshot = Snapshot.objects.create_snapshot(data) + schedule, _ = DjangoCeleryBeatCrontabSchedule.objects.get_or_create( + minute=cron.get("minute", "*"), + hour=cron.get("hour", "*"), + day_of_week=cron.get("day_of_week", "*"), + day_of_month=cron.get("day_of_month", "*"), + month_of_year=cron.get("month_of_year", "*"), + timezone=timezone or "UTC", + ) + _ = schedule.schedule # noqa + + task = self.create( + name=name, + template=template, + snapshot=snapshot, + cron=schedule.__str__(), + creator=creator, + extra_info=extra_info, + priority=priority, + queue=queue, + ) + + kwargs = {"period_task_id": task.id, "spread": spread} + celery_task = DjangoCeleryBeatPeriodicTask.objects.create( + crontab=schedule, + name=uniqid(), + task=trigger_task or "pipeline.contrib.periodic_task.tasks.periodic_task_start", + enabled=False, + kwargs=json.dumps(kwargs), + ) + task.celery_task = celery_task + task.save() + return task + + +class PeriodicTask(models.Model): + name = models.CharField(_("周期任务名称"), max_length=64) + template = models.ForeignKey( + PipelineTemplate, + related_name="periodic_tasks", + to_field="template_id", + verbose_name=_("周期任务对应的模板"), + null=True, + on_delete=models.deletion.SET_NULL, + ) + cron = models.CharField(_("调度策略"), max_length=128) + celery_task = models.ForeignKey( + DjangoCeleryBeatPeriodicTask, verbose_name=_("celery 周期任务实例"), null=True, on_delete=models.SET_NULL, + ) + snapshot = models.ForeignKey( + Snapshot, related_name="periodic_tasks", verbose_name=_("用于创建流程实例的结构数据"), on_delete=models.DO_NOTHING, + ) + total_run_count = models.PositiveIntegerField(_("执行次数"), default=0) + last_run_at = models.DateTimeField(_("上次运行时间"), null=True) + creator = models.CharField(_("创建者"), max_length=32, default="") + priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) + queue = models.CharField(_("流程使用的队列名"), max_length=512, default="") + extra_info = CompressJSONField(verbose_name=_("额外信息"), null=True) + + objects = PeriodicTaskManager() + + def __unicode__(self): + return "{name}({id})".format(name=self.name, id=self.id) + + @property + def enabled(self): + return self.celery_task.enabled + + @property + def execution_data(self): + return self.snapshot.data + + @property + def form(self): + form = { + key: var_info + for key, var_info in list(self.execution_data["constants"].items()) + if var_info["show_type"] == "show" + } + return form + + def delete(self, using=None): + self.set_enabled(False) + self.celery_task.delete() + 
PeriodicTaskHistory.objects.filter(periodic_task=self).delete() + return super(PeriodicTask, self).delete(using) + + def set_enabled(self, enabled): + self.celery_task.enabled = enabled + self.celery_task.save() + + def modify_cron(self, cron, timezone=None): + if self.enabled: + raise InvalidOperationException("can not modify cron when task is enabled") + schedule, _ = DjangoCeleryBeatCrontabSchedule.objects.get_or_create( + minute=cron.get("minute", "*"), + hour=cron.get("hour", "*"), + day_of_week=cron.get("day_of_week", "*"), + day_of_month=cron.get("day_of_month", "*"), + month_of_year=cron.get("month_of_year", "*"), + timezone=timezone or "UTC", + ) + # try to initiate schedule object + _ = schedule.schedule # noqa + self.cron = schedule.__str__() + self.celery_task.crontab = schedule + self.celery_task.save() + self.save() + + def modify_constants(self, constants): + if self.enabled: + raise InvalidOperationException("can not modify constants when task is enabled") + exec_data = self.execution_data + for key, value in list(constants.items()): + if key in exec_data["constants"]: + exec_data["constants"][key]["value"] = value + self.snapshot.data = exec_data + self.snapshot.save() + return exec_data["constants"] + + +class PeriodicTaskHistoryManager(models.Manager): + def record_schedule(self, periodic_task, pipeline_instance, ex_data, start_success=True): + history = self.create( + periodic_task=periodic_task, + pipeline_instance=pipeline_instance, + ex_data=ex_data, + start_success=start_success, + priority=periodic_task.priority, + queue=periodic_task.queue, + ) + + if not start_success: + periodic_task_start_failed.send(sender=PeriodicTask, periodic_task=periodic_task, history=history) + + return history + + +class PeriodicTaskHistory(models.Model): + periodic_task = models.ForeignKey( + PeriodicTask, related_name="instance_rel", verbose_name=_("周期任务"), null=True, on_delete=models.DO_NOTHING, + ) + pipeline_instance = models.ForeignKey( + PipelineInstance, + related_name="periodic_task_rel", + verbose_name=_("Pipeline 实例"), + to_field="instance_id", + null=True, + on_delete=models.DO_NOTHING, + ) + ex_data = models.TextField(_("异常信息")) + start_at = models.DateTimeField(_("开始时间"), auto_now_add=True) + start_success = models.BooleanField(_("是否启动成功"), default=True) + priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) + queue = models.CharField(_("流程使用的队列名"), max_length=512, default="") + + objects = PeriodicTaskHistoryManager() diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/signals/__init__.py new file mode 100644 index 00000000..3aaf776b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/signals/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.dispatch import Signal + +pre_periodic_task_start = Signal(providing_args=["periodic_task", "pipeline_instance"]) +post_periodic_task_start = Signal(providing_args=["periodic_task", "pipeline_instance"]) +periodic_task_start_failed = Signal(providing_args=["periodic_task", "history"]) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tasks.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tasks.py new file mode 100644 index 00000000..4264a739 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tasks.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import datetime +import logging +import traceback + +import pytz +from celery import task +from django.utils import timezone +from django.utils.module_loading import import_string +from bamboo_engine import api as bamboo_engine_api + +from pipeline.contrib.periodic_task import signals +from pipeline.contrib.periodic_task.models import PeriodicTask, PeriodicTaskHistory +from pipeline.engine.models import FunctionSwitch +from pipeline.models import PipelineInstance +from pipeline.parser.context import get_pipeline_context +from pipeline.eri.runtime import BambooDjangoRuntime +from pipeline.contrib.periodic_task.context import ( + get_periodic_task_root_pipeline_context, + get_periodic_task_subprocess_context, +) + +logger = logging.getLogger("celery") + + +@task(ignore_result=True) +def periodic_task_start(*args, **kwargs): + try: + periodic_task = PeriodicTask.objects.get(id=kwargs["period_task_id"]) + except PeriodicTask.DoesNotExist: + # task has been deleted + return + + if FunctionSwitch.objects.is_frozen(): + PeriodicTaskHistory.objects.record_schedule( + periodic_task=periodic_task, + pipeline_instance=None, + ex_data="engine is frozen, can not start task", + start_success=False, + ) + return + + try: + tz = periodic_task.celery_task.crontab.timezone + now = datetime.datetime.now(tz=pytz.utc).astimezone(tz) + instance, _ = PipelineInstance.objects.create_instance( + template=periodic_task.template, + exec_data=periodic_task.execution_data, + spread=kwargs.get("spread", True), + name="{}_{}".format(periodic_task.name[:113], now.strftime("%Y%m%d%H%M%S")), + creator=periodic_task.creator, + description="periodic task instance", + ) + + signals.pre_periodic_task_start.send( + sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance + ) + + result = instance.start( + periodic_task.creator, check_workers=False, priority=periodic_task.priority, queue=periodic_task.queue, + ) + except Exception: + et = traceback.format_exc() + logger.error(et) + PeriodicTaskHistory.objects.record_schedule( + periodic_task=periodic_task, pipeline_instance=None, ex_data=et, 
start_success=False, + ) + return + + if not result.result: + PeriodicTaskHistory.objects.record_schedule( + periodic_task=periodic_task, pipeline_instance=None, ex_data=result.message, start_success=False, + ) + return + + periodic_task.total_run_count += 1 + periodic_task.last_run_at = timezone.now() + periodic_task.save() + signals.post_periodic_task_start.send(sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance) + + PeriodicTaskHistory.objects.record_schedule(periodic_task=periodic_task, pipeline_instance=instance, ex_data="") + + +@task(ignore_result=True) +def bamboo_engine_periodic_task_start(*args, **kwargs): + try: + periodic_task = PeriodicTask.objects.get(id=kwargs["period_task_id"]) + except PeriodicTask.DoesNotExist: + # task has been deleted + return + + try: + tz = periodic_task.celery_task.crontab.timezone + now = datetime.datetime.now(tz=pytz.utc).astimezone(tz) + instance, _ = PipelineInstance.objects.create_instance( + template=periodic_task.template, + exec_data=periodic_task.execution_data, + spread=kwargs.get("spread", True), + name="{}_{}".format(periodic_task.name[:113], now.strftime("%Y%m%d%H%M%S")), + creator=periodic_task.creator, + description="periodic task instance", + ) + + signals.pre_periodic_task_start.send( + sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance + ) + + # convert web pipeline to pipeline + pipeline_formator = import_string(periodic_task.extra_info["pipeline_formator"]) + pipeline = pipeline_formator(instance.execution_data) + + # run pipeline + instance.calculate_tree_info() + PipelineInstance.objects.filter(instance_id=instance.instance_id).update( + tree_info_id=instance.tree_info.id, + start_time=timezone.now(), + is_started=True, + executor=periodic_task.creator, + ) + root_pipeline_data = get_pipeline_context( + instance, obj_type="instance", data_type="data", username=periodic_task.creator + ) + root_pipeline_context = get_periodic_task_root_pipeline_context(root_pipeline_data) + subprocess_context = get_periodic_task_subprocess_context(root_pipeline_data) + result = bamboo_engine_api.run_pipeline( + runtime=BambooDjangoRuntime(), + pipeline=pipeline, + root_pipeline_data=root_pipeline_data, + root_pipeline_context=root_pipeline_context, + subprocess_context=subprocess_context, + queue=periodic_task.queue, + cycle_tolerate=True, + ) + except Exception: + et = traceback.format_exc() + logger.error(et) + PeriodicTaskHistory.objects.record_schedule( + periodic_task=periodic_task, pipeline_instance=None, ex_data=et, start_success=False, + ) + return + + if not result.result: + PipelineInstance.objects.filter(instance_id=instance.instance_id).update( + start_time=None, is_started=False, executor="", + ) + PeriodicTaskHistory.objects.record_schedule( + periodic_task=periodic_task, pipeline_instance=None, ex_data=result.message, start_success=False, + ) + return + + periodic_task.total_run_count += 1 + periodic_task.last_run_at = timezone.now() + periodic_task.save() + signals.post_periodic_task_start.send(sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance) + + PeriodicTaskHistory.objects.record_schedule(periodic_task=periodic_task, pipeline_instance=instance, ex_data="") diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tests.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tests.py new file mode 100644 index 00000000..baf1914a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/tests.py @@ -0,0 +1,96 @@ +# -*- coding: 
utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import copy + +from django.test import TestCase + +from django_celery_beat.models import PeriodicTask +from pipeline.contrib.periodic_task.models import PeriodicTask as PipelinePeriodicTask +from pipeline.exceptions import InvalidOperationException + + +class PeriodicTestCase(TestCase): + def setUp(self): + self.name = "test" + self.creator = "tester" + self.extra_info = {"extra_info": "val"} + self.data = { + "constants": { + "key_1": {"value": "val_1", "show_type": "show"}, + "key_2": {"value": "val_2", "show_type": "hide"}, + } + } + self.task = self.create_a_task() + + def tearDown(self): + if self.task: + self.task = self.task.delete() + + def create_a_task(self): + return PipelinePeriodicTask.objects.create_task( + name=self.name, template=None, cron={}, data=self.data, creator=self.creator, extra_info=self.extra_info, + ) + + def test_create_task(self): + self.assertIsInstance(self.task, PipelinePeriodicTask) + self.assertIsInstance(self.task.celery_task, PeriodicTask) + self.assertEqual(self.task.name, self.name) + self.assertEqual(self.task.template, None) + self.assertEqual(self.task.creator, self.creator) + self.assertEqual(self.task.extra_info, self.extra_info) + self.assertEqual(self.task.cron, self.task.celery_task.crontab.__str__()) + self.assertEqual(self.task.snapshot.data, self.data) + self.assertEqual(self.task.total_run_count, 0) + self.assertEqual(self.task.last_run_at, None) + + def test_enabled(self): + self.assertEqual(self.task.enabled, self.task.celery_task.enabled) + + def test_set_enabled(self): + self.task.set_enabled(True) + self.assertTrue(self.task.enabled) + self.assertTrue(self.task.celery_task.enabled) + self.task.set_enabled(False) + self.assertFalse(self.task.enabled) + self.assertFalse(self.task.celery_task.enabled) + + def test_execution_data(self): + self.assertEqual(self.task.execution_data, self.data) + + def test_delete(self): + celery_task_id = self.task.celery_task.id + self.task.delete() + self.assertRaises(PeriodicTask.DoesNotExist, PeriodicTask.objects.get, id=celery_task_id) + self.task = None + + def test_modify_cron(self): + self.task.set_enabled(True) + self.assertRaises(InvalidOperationException, self.task.modify_cron, {}) + self.task.set_enabled(False) + self.task.modify_cron({"minute": "*/1"}) + self.assertEqual(self.task.cron, self.task.celery_task.crontab.__str__()) + + def test_modify_constants(self): + expect_constants = copy.deepcopy(self.task.execution_data["constants"]) + expect_constants["key_1"]["value"] = "val_3" + new_constants = self.task.modify_constants({"key_1": "val_3"}) + self.assertEqual(self.task.execution_data["constants"], expect_constants) + self.assertEqual(new_constants, expect_constants) + + self.task.set_enabled(True) + self.assertRaises(InvalidOperationException, self.task.modify_constants, 
{}) + + def test_form(self): + expect_form = {k: v for k, v in list(self.data["constants"].items()) if v["show_type"] == "show"} + self.assertEqual(self.task.form, expect_form) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/views.py b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/views.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/periodic_task/views.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/__init__.py new file mode 100644 index 00000000..af18888a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +default_app_config = "pipeline.contrib.statistics.apps.StatisticsConfig" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/admin.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/admin.py new file mode 100644 index 00000000..a6239d93 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/admin.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
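As `test_form` above pins down, the `form` property on `PeriodicTask` exposes only the constants a user is meant to see, i.e. those whose `show_type` is `"show"`:

```python
data = {
    "constants": {
        "key_1": {"value": "val_1", "show_type": "show"},
        "key_2": {"value": "val_2", "show_type": "hide"},
    }
}

# for a task whose snapshot holds `data`, task.form evaluates to:
# {"key_1": {"value": "val_1", "show_type": "show"}}
```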
+""" + +from django.contrib import admin + +from .models import ComponentExecuteData, ComponentInTemplate, InstanceInPipeline, TemplateInPipeline + + +@admin.register(ComponentInTemplate) +class ComponentInTemplateAdmin(admin.ModelAdmin): + list_display = ("id", "component_code", "template_id", "node_id", "is_sub", "version") + search_fields = ( + "template_id", + "node_id", + ) + list_filter = ("component_code", "is_sub") + + +@admin.register(ComponentExecuteData) +class ComponentExecuteDataAdmin(admin.ModelAdmin): + list_display = ( + "id", + "component_code", + "instance_id", + "node_id", + "is_sub", + "started_time", + "archived_time", + "elapsed_time", + "status", + "is_skip", + "is_retry", + "version", + ) + search_fields = ( + "instance_id", + "node_id", + ) + list_filter = ( + "component_code", + "is_sub", + "status", + "is_skip", + ) + + +@admin.register(TemplateInPipeline) +class TemplateInPipelineAdmin(admin.ModelAdmin): + list_display = ("template_id", "atom_total", "subprocess_total", "gateways_total") + + search_fields = ("template_id",) + list_filter = ("template_id", "atom_total", "subprocess_total", "gateways_total") + + +@admin.register(InstanceInPipeline) +class InstanceInPipelineAdmin(admin.ModelAdmin): + list_display = ("instance_id", "atom_total", "subprocess_total", "gateways_total") + + search_fields = ("instance_id",) + list_filter = ("instance_id", "atom_total", "subprocess_total", "gateways_total") diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/apps.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/apps.py new file mode 100644 index 00000000..13e0f4a5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/apps.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.apps import AppConfig + + +class StatisticsConfig(AppConfig): + name = "pipeline.contrib.statistics" + verbose_name = "PipelineContribStatistics" + + def ready(self): + from pipeline.contrib.statistics.signals.handlers import ( # noqa + template_post_save_handler, + pipeline_post_save_handler, + ) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0001_initial.py new file mode 100644 index 00000000..04f8655c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0001_initial.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="ComponentExecuteData", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("tag_code", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), + ("instance_id", models.CharField(max_length=32, verbose_name="\u5b9e\u4f8bID")), + ("node_id", models.CharField(max_length=32, verbose_name="\u8282\u70b9ID")), + ( + "is_sub", + models.BooleanField(default=False, verbose_name="\u662f\u5426\u5b50\u6d41\u7a0b\u5f15\u7528"), + ), + ( + "subprocess_stack", + models.TextField( + default=b"[]", + help_text="JSON \u683c\u5f0f\u7684\u5217\u8868", + verbose_name="\u5b50\u6d41\u7a0b\u5806\u6808", + ), + ), + ("begin_time", models.DateTimeField(verbose_name="\u539f\u5b50\u6267\u884c\u5f00\u59cb\u65f6\u95f4")), + ( + "end_time", + models.DateTimeField( + null=True, verbose_name="\u539f\u5b50\u6267\u884c\u7ed3\u675f\u65f6\u95f4", blank=True + ), + ), + ( + "elapse_time", + models.IntegerField(null=True, verbose_name="\u539f\u5b50\u6267\u884c\u8017\u65f6(s)", blank=True), + ), + ("status", models.BooleanField(default=False, verbose_name="\u662f\u5426\u6267\u884c\u6210\u529f")), + ("is_skip", models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7")), + ], + options={ + "ordering": ["-id"], + "verbose_name": "Pipeline\u539f\u5b50\u6267\u884c\u6570\u636e", + "verbose_name_plural": "Pipeline\u539f\u5b50\u6267\u884c\u6570\u636e", + }, + ), + migrations.CreateModel( + name="ComponentInTemplate", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("component_code", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), + ("template_id", models.CharField(max_length=32, verbose_name="\u6a21\u677fID")), + ("node_id", models.CharField(max_length=32, verbose_name="\u8282\u70b9ID")), + ( + "is_sub", + models.BooleanField(default=False, verbose_name="\u662f\u5426\u5b50\u6d41\u7a0b\u5f15\u7528"), + ), + ( + "subprocess_stack", + models.TextField( + default=b"[]", + help_text="JSON \u683c\u5f0f\u7684\u5217\u8868", + verbose_name="\u5b50\u6d41\u7a0b\u5806\u6808", + ), + ), + ], + options={ + "verbose_name": "Pipeline\u539f\u5b50\u5f15\u7528\u6570\u636e", + "verbose_name_plural": "Pipeline\u539f\u5b50\u5f15\u7528\u6570\u636e", + }, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py new file mode 100644 index 00000000..26229f51 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0001_initial"), + ] + + operations = [ + migrations.RenameField(model_name="componentexecutedata", old_name="tag_code", new_name="component_code",), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py new file mode 100644 index 00000000..8f76c0f4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0002_auto_20180817_1212"), + ] + + operations = [ + migrations.RenameField(model_name="componentexecutedata", old_name="end_time", new_name="archived_time",), + migrations.RenameField(model_name="componentexecutedata", old_name="elapse_time", new_name="elapsed_time",), + migrations.RenameField(model_name="componentexecutedata", old_name="begin_time", new_name="started_time",), + migrations.AddField( + model_name="componentexecutedata", + name="is_retry", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u91cd\u8bd5\u8bb0\u5f55"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py new file mode 100644 index 00000000..5087b906 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0003_auto_20180821_2015"), + ] + + operations = [ + migrations.CreateModel( + name="InstanceInPipeline", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("instance_id", models.IntegerField(null=True, verbose_name="\u5b9e\u4f8bID", blank=True)), + ("atom_total", models.IntegerField(null=True, verbose_name="\u539f\u5b50\u603b\u6570", blank=True)), + ( + "subprocess_total", + models.IntegerField(null=True, verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570", blank=True), + ), + ("gateways_total", models.IntegerField(null=True, verbose_name="\u7f51\u5173\u603b\u6570", blank=True)), + ], + options={ + "verbose_name": "\u5b9e\u4f8b\u4f7f\u7528\u6570\u636e", + "verbose_name_plural": "\u5b9e\u4f8b\u4f7f\u7528\u6570\u636e", + }, + ), + migrations.CreateModel( + name="TemplateInPipeline", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("template_id", models.IntegerField(null=True, verbose_name="\u6a21\u677fID", blank=True)), + ("atom_total", models.IntegerField(null=True, verbose_name="\u539f\u5b50\u603b\u6570", blank=True)), + ( + "subprocess_total", + models.IntegerField(null=True, verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570", blank=True), + ), + ("gateways_total", models.IntegerField(null=True, verbose_name="\u7f51\u5173\u603b\u6570", blank=True)), + ], + options={ + "verbose_name": "\u6a21\u677f\u4f7f\u7528\u6570\u636e", + "verbose_name_plural": "\u6a21\u677f\u4f7f\u7528\u6570\u636e", + }, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py new file mode 100644 index 00000000..30654be7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [("statistics", "0004_instanceinpipeline_templateinpipeline")] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py new file mode 100644 index 00000000..ef322bf4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0005_init_pipeline_data"), + ] + + operations = [ + migrations.AlterField( + model_name="instanceinpipeline", + name="instance_id", + field=models.CharField(max_length=255, null=True, verbose_name="\u5b9e\u4f8bID", blank=True), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="template_id", + field=models.CharField(max_length=255, null=True, verbose_name="\u6a21\u677fID", blank=True), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py new file mode 100644 index 00000000..08915ded --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db import migrations + +from pipeline.models import PipelineInstance, PipelineTemplate +from pipeline.contrib.statistics.models import InstanceInPipeline, TemplateInPipeline + + +def load_data(apps, schema_editor): + # 清空数据 + TemplateInPipeline.objects.all().delete() + InstanceInPipeline.objects.all().delete() + template_list = PipelineTemplate.objects.filter(is_deleted=False) + template_data = [] + for template in template_list: + template_id = template.template_id + try: + result = statistics_total(template.data) + data = TemplateInPipeline( + template_id=template_id, + atom_total=result["atom_total"], + subprocess_total=result["subprocess_total"], + gateways_total=result["gateways_total"], + ) + template_data.append(data) + except Exception: + pass + TemplateInPipeline.objects.bulk_create(template_data) + + instance_list = PipelineInstance.objects.filter(is_deleted=False) + instance_data = [] + for instance in instance_list: + instance_id = instance.instance_id + try: + result = statistics_total(instance.execution_data) + data = InstanceInPipeline( + instance_id=instance_id, + atom_total=result["atom_total"], + subprocess_total=result["subprocess_total"], + gateways_total=result["gateways_total"], + ) + instance_data.append(data) + except Exception: + pass + InstanceInPipeline.objects.bulk_create(instance_data) + + +def statistics_total(pipeline_tree): + atom_total = 0 + subprocess_total = 0 + tree_activities = pipeline_tree["activities"] + # 获取网关数量 + gateways_total = len(pipeline_tree["gateways"]) + + # 遍历activities节点 + for activity in tree_activities: + activity_type = tree_activities[activity]["type"] + if activity_type == "ServiceActivity": + atom_total += 1 + elif activity_type == "SubProcess": + subprocess_total += 1 + return {"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total} + + +class Migration(migrations.Migration): + dependencies = [("statistics", "0006_auto_20181115_1208")] + operations = [migrations.RunPython(load_data)] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py new file mode 100644 index 00000000..34ff67bd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0007_init_pipeline_data"), + ] + + operations = [ + migrations.AlterField( + model_name="instanceinpipeline", + name="atom_total", + field=models.IntegerField(verbose_name="\u539f\u5b50\u603b\u6570"), + ), + migrations.AlterField( + model_name="instanceinpipeline", + name="gateways_total", + field=models.IntegerField(verbose_name="\u7f51\u5173\u603b\u6570"), + ), + migrations.AlterField( + model_name="instanceinpipeline", + name="instance_id", + field=models.CharField(max_length=255, verbose_name="\u5b9e\u4f8bID"), + ), + migrations.AlterField( + model_name="instanceinpipeline", + name="subprocess_total", + field=models.IntegerField(verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="atom_total", + field=models.IntegerField(verbose_name="\u539f\u5b50\u603b\u6570"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="gateways_total", + field=models.IntegerField(verbose_name="\u7f51\u5173\u603b\u6570"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="subprocess_total", + field=models.IntegerField(verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="template_id", + field=models.CharField(max_length=255, verbose_name="\u6a21\u677fID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py new file mode 100644 index 00000000..e5dae2d1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0008_auto_20181116_1448"), + ] + + operations = [ + migrations.AlterModelOptions( + name="componentintemplate", + options={ + "verbose_name": "Pipeline\u539f\u5b50\u88ab\u5f15\u7528\u6570\u636e", + "verbose_name_plural": "Pipeline\u539f\u5b50\u88ab\u5f15\u7528\u6570\u636e", + }, + ), + migrations.AlterModelOptions( + name="instanceinpipeline", + options={ + "verbose_name": "Pipeline\u5b9e\u4f8b\u5f15\u7528\u6570\u636e", + "verbose_name_plural": "Pipeline\u5b9e\u4f8b\u5f15\u7528\u6570\u636e", + }, + ), + migrations.AlterModelOptions( + name="templateinpipeline", + options={ + "verbose_name": "Pipeline\u6a21\u677f\u5f15\u7528\u6570\u636e", + "verbose_name_plural": "Pipeline\u6a21\u677f\u5f15\u7528\u6570\u636e", + }, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py new file mode 100644 index 00000000..60a8c8e9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0009_auto_20181116_1627"), + ] + + operations = [ + migrations.AlterModelOptions( + name="componentexecutedata", + options={ + "ordering": ["-id"], + "verbose_name": "Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e", + "verbose_name_plural": "Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e", + }, + ), + migrations.AlterModelOptions( + name="componentintemplate", + options={ + "verbose_name": "Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e", + "verbose_name_plural": "Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e", + }, + ), + migrations.AlterField( + model_name="componentexecutedata", + name="archived_time", + field=models.DateTimeField( + blank=True, null=True, verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u7ed3\u675f\u65f6\u95f4" + ), + ), + migrations.AlterField( + model_name="componentexecutedata", + name="elapsed_time", + field=models.IntegerField( + blank=True, null=True, verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u8017\u65f6(s)" + ), + ), + migrations.AlterField( + model_name="componentexecutedata", + name="started_time", + field=models.DateTimeField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u5f00\u59cb\u65f6\u95f4"), + ), + migrations.AlterField( + model_name="instanceinpipeline", + name="atom_total", + field=models.IntegerField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u603b\u6570"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="atom_total", + field=models.IntegerField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u603b\u6570"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py new file mode 100644 index 00000000..57b2a6cc --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-02-17 08:22 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0010_auto_20190304_1747"), + ] + + operations = [ + migrations.AddField( + model_name="componentexecutedata", + name="version", + field=models.CharField(default="legacy", max_length=255, verbose_name="插件版本"), + ), + migrations.AddField( + model_name="componentintemplate", + name="version", + field=models.CharField(default="legacy", max_length=255, verbose_name="插件版本"), + ), + migrations.AlterField( + model_name="componentexecutedata", + name="subprocess_stack", + field=models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈"), + ), + migrations.AlterField( + model_name="componentintemplate", + name="subprocess_stack", + field=models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py new file mode 100644 index 00000000..0869b873 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-11-23 07:52 +from __future__ import unicode_literals + +from django.db 
import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0011_auto_20200217_0822"), + ] + + operations = [ + migrations.AlterField( + model_name="instanceinpipeline", + name="instance_id", + field=models.CharField(db_index=True, max_length=255, verbose_name="实例ID"), + ), + migrations.AlterField( + model_name="templateinpipeline", + name="template_id", + field=models.CharField(db_index=True, max_length=255, verbose_name="模板ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py new file mode 100644 index 00000000..acd0544d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-12-01 07:06 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("statistics", "0012_auto_20201123_1552"), + ] + + operations = [ + migrations.AlterField( + model_name="componentexecutedata", + name="component_code", + field=models.CharField(db_index=True, max_length=255, verbose_name="组件编码"), + ), + migrations.AlterField( + model_name="componentexecutedata", + name="instance_id", + field=models.CharField(db_index=True, max_length=32, verbose_name="实例ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/models.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/models.py new file mode 100644 index 00000000..98d51ed6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/models.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + + +class ComponentInTemplate(models.Model): + component_code = models.CharField(_("组件编码"), max_length=255) + template_id = models.CharField(_("模板ID"), max_length=32) + node_id = models.CharField(_("节点ID"), max_length=32) + is_sub = models.BooleanField(_("是否子流程引用"), default=False) + subprocess_stack = models.TextField(_("子流程堆栈"), default="[]", help_text=_("JSON 格式的列表")) + version = models.CharField(_("插件版本"), max_length=255, default="legacy") + + class Meta: + verbose_name = _("Pipeline标准插件被引用数据") + verbose_name_plural = _("Pipeline标准插件被引用数据") + + def __unicode__(self): + return "{}_{}".format(self.component_code, self.template_id) + + +class ComponentExecuteData(models.Model): + component_code = models.CharField(_("组件编码"), max_length=255, db_index=True) + instance_id = models.CharField(_("实例ID"), max_length=32, db_index=True) + node_id = models.CharField(_("节点ID"), max_length=32) + is_sub = models.BooleanField(_("是否子流程引用"), default=False) + subprocess_stack = models.TextField(_("子流程堆栈"), default="[]", help_text=_("JSON 格式的列表")) + started_time = models.DateTimeField(_("标准插件执行开始时间")) + archived_time = models.DateTimeField(_("标准插件执行结束时间"), null=True, blank=True) + elapsed_time = models.IntegerField(_("标准插件执行耗时(s)"), null=True, blank=True) + status = models.BooleanField(_("是否执行成功"), default=False) + is_skip = models.BooleanField(_("是否跳过"), default=False) + is_retry = models.BooleanField(_("是否重试记录"), default=False) + version = models.CharField(_("插件版本"), max_length=255, default="legacy") + + class Meta: + verbose_name = _("Pipeline标准插件执行数据") + verbose_name_plural = _("Pipeline标准插件执行数据") + ordering = ["-id"] + + def __unicode__(self): + return "{}_{}".format(self.component_code, self.instance_id) + + +class TemplateInPipeline(models.Model): + template_id = models.CharField(_("模板ID"), max_length=255, db_index=True) + atom_total = models.IntegerField(_("标准插件总数")) + subprocess_total = models.IntegerField(_("子流程总数")) + gateways_total = models.IntegerField(_("网关总数")) + + class Meta: + verbose_name = _("Pipeline模板引用数据") + verbose_name_plural = _("Pipeline模板引用数据") + + def __unicode__(self): + return "{}_{}_{}_{}".format(self.template_id, self.atom_total, self.subprocess_total, self.gateways_total) + + +class InstanceInPipeline(models.Model): + instance_id = models.CharField(_("实例ID"), max_length=255, db_index=True) + atom_total = models.IntegerField(_("标准插件总数")) + subprocess_total = models.IntegerField(_("子流程总数")) + gateways_total = models.IntegerField(_("网关总数")) + + class Meta: + verbose_name = _("Pipeline实例引用数据") + verbose_name_plural = _("Pipeline实例引用数据") + + def __unicode__(self): + return "{}_{}_{}_{}".format(self.instance_id, self.atom_total, self.subprocess_total, self.gateways_total) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/handlers.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/handlers.py new file mode 100644 index 00000000..f7fa84d3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/signals/handlers.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import ujson as json + +from django.db.models.signals import post_save +from django.dispatch import receiver + +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.contrib.statistics.models import ( + ComponentInTemplate, + TemplateInPipeline, +) +from pipeline.contrib.statistics.tasks import pipeline_post_save_statistics_task, pipeline_archive_statistics_task +from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes +from pipeline.core.constants import PE +from pipeline.models import PipelineInstance, PipelineTemplate +from pipeline.signals import post_pipeline_finish, post_pipeline_revoke + +logger = logging.getLogger("root") + + +@receiver(post_save, sender=PipelineTemplate) +def template_post_save_handler(sender, instance, created, **kwargs): + """ + 模板执行保存处理 + :param sender: + :param instance: 任务实例 Instance.Object对象 + :param created: 是否是创建(可为更新) + :param kwargs: 参数序列 + :return: + """ + template = instance + template_id = template.template_id + # 删除原先该项模板数据(无论是更新还是创建,都需要重新创建统计数据) + ComponentInTemplate.objects.filter(template_id=template_id).delete() + data = template.data + component_list = [] + # 任务节点引用标准插件统计(包含间接通过子流程引用) + for act_id, act in data[PE.activities].items(): + # 标准插件节点直接引用 + if act["type"] == PE.ServiceActivity: + component = ComponentInTemplate( + component_code=act["component"]["code"], + template_id=template_id, + node_id=act_id, + version=act["component"].get("version", LEGACY_PLUGINS_VERSION), + ) + component_list.append(component) + # 子流程节点间接引用 + else: + components = ComponentInTemplate.objects.filter(template_id=act["template_id"]).values( + "subprocess_stack", "component_code", "node_id", "version" + ) + for component_sub in components: + # 子流程的执行堆栈(子流程的执行过程) + stack = json.loads(component_sub["subprocess_stack"]) + # 添加节点id + stack.insert(0, act_id) + component = ComponentInTemplate( + component_code=component_sub["component_code"], + template_id=template_id, 
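+                    # illustrative note: subprocess_stack records the reference path from this
+                    # template down to the referenced node, outermost reference first; after the
+                    # insert(0, act_id) above, a doubly nested node ends up with a stack like
+                    # ["<outer_subprocess_act_id>", "<inner_subprocess_act_id>"] (ids hypothetical)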
+ node_id=component_sub["node_id"], + is_sub=True, + subprocess_stack=json.dumps(stack), + version=component_sub["version"], + ) + component_list.append(component) + ComponentInTemplate.objects.bulk_create(component_list) + + # 统计流程标准插件个数,子流程个数,网关个数 + atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(template.data) + TemplateInPipeline.objects.update_or_create( + template_id=template_id, + defaults={"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total}, + ) + + +@receiver(post_save, sender=PipelineInstance) +def pipeline_post_save_handler(sender, instance, created, **kwargs): + try: + if created: + pipeline_post_save_statistics_task.delay(instance_id=instance.instance_id) + except Exception: + logger.exception("pipeline_post_save_handler[instance_id={}] send message error".format(instance.id)) + + +@receiver(post_pipeline_finish, sender=PipelineInstance) +def pipeline_post_finish_handler(sender, instance_id, **kwargs): + try: + pipeline_archive_statistics_task.delay(instance_id=instance_id) + except Exception: + logger.exception("pipeline_post_finish_handler[instance_id={}] send message error".format(instance_id)) + + +@receiver(post_pipeline_revoke, sender=PipelineInstance) +def pipeline_post_revoke_handler(sender, instance_id, **kwargs): + try: + pipeline_archive_statistics_task.delay(instance_id=instance_id) + except Exception: + logger.exception("pipeline_post_revoke_handler[instance_id={}] send message error".format(instance_id)) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/tasks.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/tasks.py new file mode 100644 index 00000000..46b02306 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/tasks.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +import logging +import ujson as json +from copy import deepcopy + +from celery import task +from bamboo_engine import api as bamboo_engine_api + +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.contrib.statistics.models import ( + ComponentExecuteData, + InstanceInPipeline, +) +from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes +from pipeline.core.constants import PE +from pipeline.engine import api as pipeline_api +from pipeline.engine import states +from pipeline.engine.exceptions import InvalidOperationException +from pipeline.engine.utils import calculate_elapsed_time +from pipeline.models import PipelineInstance +from pipeline.eri.runtime import BambooDjangoRuntime + +logger = logging.getLogger("celery") + + +def recursive_collect_components(activities, status_tree, instance_id, stack=None, engine_ver=1): + """ + @summary 递归流程树,获取所有执行成功/失败的插件 + @param activities: 当前流程树的任务节点信息 + @param status_tree: 当前流程树的任务节点状态 + @param instance_id: 根流程的示例 instance_id + @param stack: 子流程堆栈 + """ + if stack is None: + stack = [] + is_sub = False + else: + is_sub = True + component_list = [] + for act_id, act in activities.items(): + # 只有执行了才会查询到 status,兼容中途撤销的任务 + if act_id in status_tree: + exec_act = status_tree[act_id] + # 属于标准插件节点 + if act[PE.type] == PE.ServiceActivity: + if exec_act["state"] in states.ARCHIVED_STATES: + create_kwargs = { + "component_code": act["component"]["code"], + "instance_id": instance_id, + "is_sub": is_sub, + "node_id": act_id, + "subprocess_stack": json.dumps(stack), + "started_time": exec_act["started_time"], + "archived_time": exec_act["archived_time"], + "elapsed_time": exec_act.get( + "elapsed_time", calculate_elapsed_time(exec_act["started_time"], exec_act["archived_time"]) + ), + "is_skip": exec_act["skip"], + "is_retry": False, + "status": exec_act["state"] == "FINISHED", + "version": act["component"].get("version", LEGACY_PLUGINS_VERSION), + } + component_list.append(ComponentExecuteData(**create_kwargs)) + if exec_act["retry"] > 0: + # 需要通过执行历史获得 + if engine_ver == 1: + history_list = pipeline_api.get_activity_histories(act_id) + else: + history_list_result = bamboo_engine_api.get_node_short_histories( + runtime=BambooDjangoRuntime(), node_id=act_id + ) + history_list = history_list_result.data if history_list_result.result else [] + + for history in history_list: + create_kwargs.update( + { + "started_time": history["started_time"], + "archived_time": history["archived_time"], + "elapsed_time": history.get( + "elapsed_time", + calculate_elapsed_time(history["started_time"], history["archived_time"]), + ), + "is_retry": True, + "is_skip": False, + "status": False, + } + ) + component_list.append(ComponentExecuteData(**create_kwargs)) + # 子流程的执行堆栈(子流程的执行过程) + elif act[PE.type] == PE.SubProcess: + # 递归子流程树 + sub_activities = act[PE.pipeline][PE.activities] + # 防止stack共用 + copied_stack = deepcopy(stack) + copied_stack.insert(0, act_id) + component_list += recursive_collect_components( + sub_activities, exec_act["children"], instance_id, copied_stack + ) + return component_list + + +@task +def pipeline_post_save_statistics_task(instance_id): + instance = PipelineInstance.objects.get(instance_id=instance_id) + # 统计流程标准插件个数,子流程个数,网关个数 + try: + atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(instance.execution_data) + InstanceInPipeline.objects.update_or_create( + instance_id=instance_id, + defaults={ + "atom_total": atom_total, + "subprocess_total": subprocess_total, + 
"gateways_total": gateways_total, + }, + ) + except Exception as e: + logger.error( + ( + "pipeline_post_save_handler save InstanceInPipeline[instance_id={instance_id}] " "raise error: {error}" + ).format(instance_id=instance_id, error=e) + ) + + +@task +def pipeline_archive_statistics_task(instance_id): + instance = PipelineInstance.objects.get(instance_id=instance_id) + engine_ver = 1 + # 获得任务实例的执行树 + try: + status_tree = pipeline_api.get_status_tree(instance_id, 99) + except InvalidOperationException: + engine_ver = 2 + status_tree_result = bamboo_engine_api.get_pipeline_states( + runtime=BambooDjangoRuntime(), root_id=instance_id, flat_children=False + ) + if not status_tree_result.result: + logger.error( + "pipeline_archive_statistics_task bamboo_engine_api.get_pipeline_states fail: {}".format( + status_tree_result.result.exc_trace + ) + ) + return + status_tree = status_tree_result.data[instance_id] + + # 删除原有标准插件数据 + ComponentExecuteData.objects.filter(instance_id=instance_id).delete() + # 获得任务实例的执行数据 + data = instance.execution_data + try: + component_list = recursive_collect_components( + activities=data[PE.activities], + status_tree=status_tree["children"], + instance_id=instance_id, + engine_ver=engine_ver, + ) + ComponentExecuteData.objects.bulk_create(component_list) + except Exception: + logger.exception( + ("pipeline_post_save_handler save ComponentExecuteData[instance_id={instance_id}] raise error").format( + instance_id=instance_id + ) + ) diff --git a/runtime/bamboo-pipeline/pipeline/contrib/statistics/utils.py b/runtime/bamboo-pipeline/pipeline/contrib/statistics/utils.py new file mode 100644 index 00000000..2a9b48da --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/contrib/statistics/utils.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.core.constants import PE + + +def count_pipeline_tree_nodes(pipeline_tree): + gateways_total = len(pipeline_tree["gateways"]) + activities = pipeline_tree["activities"] + atom_total = len([act for act in activities.values() if act["type"] == PE.ServiceActivity]) + subprocess_total = len([act for act in activities.values() if act["type"] == PE.SubProcess]) + return atom_total, subprocess_total, gateways_total diff --git a/runtime/bamboo-pipeline/pipeline/core/__init__.py b/runtime/bamboo-pipeline/pipeline/core/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/core/constants.py b/runtime/bamboo-pipeline/pipeline/core/constants.py new file mode 100644 index 00000000..e90fe2f0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/constants.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class PipelineElement(object): + ServiceActivity = "ServiceActivity" + SubProcess = "SubProcess" + ExclusiveGateway = "ExclusiveGateway" + ParallelGateway = "ParallelGateway" + ConditionalParallelGateway = "ConditionalParallelGateway" + ConvergeGateway = "ConvergeGateway" + EmptyStartEvent = "EmptyStartEvent" + EmptyEndEvent = "EmptyEndEvent" + + Activities = {ServiceActivity} + TaskNodes = {ServiceActivity, SubProcess} + BranchGateways = {ExclusiveGateway, ParallelGateway, ConditionalParallelGateway} + Gateways = {ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway} + + pipeline = "pipeline" + id = "id" + type = "type" + start_event = "start_event" + end_event = "end_event" + activities = "activities" + flows = "flows" + gateways = "gateways" + constants = "constants" + conditions = "conditions" + incoming = "incoming" + outgoing = "outgoing" + source = "source" + target = "target" + data = "data" + component = "component" + evaluate = "evaluate" + name = "name" + stage_name = "stage_name" + failure_handler = "failure_handler" + inputs = "inputs" + outputs = "outputs" + pre_render_keys = "pre_render_keys" + source_act = "source_act" + source_key = "source_key" + code = "code" + error_ignorable = "error_ignorable" + skippable = "skippable" + # 兼容3.3.X不规范的命名 + skippable_old = "isSkipped" + retryable = "retryable" + # 兼容3.3.X不规范的命名 + retryable_old = "can_retry" + timeout = "timeout" + loop_times = "loop_times" + converge_gateway_id = "converge_gateway_id" + is_param = "is_param" + value = "value" + params = "params" + is_default = "is_default" + optional = "optional" + template_id = "template_id" + plain = "plain" + splice = "splice" + lazy = "lazy" + version = "version" + subprocess_detail = "subprocess_detail" + custom_type = "custom_type" + + +PE = PipelineElement() + +ESCAPED_CHARS = {"\n": r"\n", "\r": r"\r", "\t": r"\t"} diff --git a/runtime/bamboo-pipeline/pipeline/core/data/__init__.py b/runtime/bamboo-pipeline/pipeline/core/data/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: 
utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/core/data/base.py b/runtime/bamboo-pipeline/pipeline/core/data/base.py new file mode 100644 index 00000000..7056c41a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/base.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import copy + +import ujson as json + +from pipeline import exceptions +from pipeline.utils.collections import FancyDict +from pipeline.utils.utils import convert_bytes_to_str + + +class DataObject(object): + def __init__(self, inputs, outputs=None): + if not isinstance(inputs, dict): + raise exceptions.DataTypeErrorException("inputs is not dict") + self.inputs = FancyDict(inputs) + if outputs is None: + outputs = {} + if not isinstance(outputs, dict): + raise exceptions.DataTypeErrorException("outputs is not dict") + self.outputs = FancyDict(outputs) + + def get_inputs(self): + return self.inputs + + def get_outputs(self): + return self.outputs + + def get_one_of_inputs(self, key, default=None): + return self.inputs.get(key, default) + + def get_one_of_outputs(self, key, default=None): + return self.outputs.get(key, default) + + def set_outputs(self, key, value): + self.outputs.update({key: value}) + return True + + def reset_outputs(self, outputs): + if not isinstance(outputs, dict): + raise exceptions.DataTypeErrorException("outputs is not dict") + self.outputs = FancyDict(outputs) + return True + + def update_outputs(self, dic): + self.outputs.update(dic) + + def inputs_copy(self): + return copy.deepcopy(self.inputs) + + def outputs_copy(self): + return copy.deepcopy(self.outputs) + + def override_inputs(self, inputs): + if not isinstance(inputs, FancyDict): + inputs = FancyDict(inputs) + self.inputs = inputs + + def override_outputs(self, outputs): + if not isinstance(outputs, FancyDict): + outputs = FancyDict(outputs) + self.outputs = outputs + + def serializer(self): + result = {"inputs": self.inputs, "outputs": self.outputs} + return json.dumps(result) + + def __setstate__(self, state): + # py2 pickle dumps data compatible + input_key = b"inputs" if b"inputs" in state else "inputs" + 
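+        # compatibility sketch: a pickle written under python 2 may deserialize
+        # with bytes keys, e.g. {b"inputs": {...}, b"outputs": {...}}, so both
+        # the bytes and str key forms are probed here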
outputs_key = b"outputs" if b"outputs" in state else "outputs" + + self.inputs = FancyDict(convert_bytes_to_str(state[input_key])) + self.outputs = FancyDict(convert_bytes_to_str(state[outputs_key])) + + def __str__(self): + return "".format(self.inputs, self.outputs) diff --git a/runtime/bamboo-pipeline/pipeline/core/data/context.py b/runtime/bamboo-pipeline/pipeline/core/data/context.py new file mode 100644 index 00000000..949a0f6f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/context.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from copy import deepcopy +from pprint import pformat + +from pipeline.exceptions import InvalidOperationException, ReferenceNotExistError + + +class Context(object): + def __init__(self, act_outputs, output_key=None, scope=None): + self.variables = scope or {} + self.act_outputs = act_outputs + self._output_key = set(output_key or []) + self._change_keys = set() + self._raw_variables = None + + def extract_output(self, activity, set_miss=True): + self.extract_output_from_data(activity.id, activity.data, set_miss=set_miss) + + def extract_output_from_data(self, activity_id, data, set_miss=True): + if activity_id in self.act_outputs: + global_outputs = self.act_outputs[activity_id] + output = data.get_outputs() + for key in global_outputs: + # set value to key if can not find + # e.g. key: result + # e.g. global_outputs[key]: result_5hoi2 + if key not in output and not set_miss: + continue + + self.variables[global_outputs[key]] = output.get(key, global_outputs[key]) + self.change_keys.add(global_outputs[key]) + + def get(self, key): + try: + return self.variables[key] + except KeyError: + raise ReferenceNotExistError('reference "%s" does not exist.' 
% key) + + def set_global_var(self, key, val): + self.variables[key] = val + self.change_keys.add(key) + + def update_global_var(self, var_dict): + self.variables.update(var_dict) + self.change_keys.update(list(var_dict.keys())) + + def mark_as_output(self, key): + self._output_key.add(key) + + def write_output(self, pipeline): + from pipeline.core.data import var + + data = pipeline.data + for key in self._output_key: + try: + value = self.get(key) + except ReferenceNotExistError: + value = key + + if issubclass(value.__class__, var.Variable): + value = value.get() + # break circle + data.set_outputs(key, value) + + def duplicate_variables(self): + self._raw_variables = deepcopy(self.variables) + + def clear(self): + self.variables.clear() + if self.raw_variables: + self.raw_variables.clear() + + def recover_variable(self): + if self.raw_variables is None: + raise InvalidOperationException("make sure duplicate_variables() is called before do recover") + + # collect all act output key + act_outputs_keys = set() + for global_outputs in list(self.act_outputs.values()): + for output_key in list(global_outputs.values()): + act_outputs_keys.add(output_key) + + # recover to Variable for which key not in act output + for key, var in list(self.raw_variables.items()): + if key not in act_outputs_keys: + self.variables[key] = deepcopy(var) + + def clear_change_keys(self): + if hasattr(self, "_change_keys"): + self.change_keys.clear() + + def sync_change(self, context): + from pipeline.core.data.var import SpliceVariable + + # sync obvious change keys + for k in context.change_keys: + self.set_global_var(k, context.get(k)) + + # sync resolved splice value + for k, child_v in context.variables.items(): + parent_v = self.variables.get(k) + if isinstance(child_v, SpliceVariable) and isinstance(parent_v, SpliceVariable): + # if var is resolved in child + if parent_v._value is None and child_v._value is not None: + parent_v._value = child_v._value + + def __repr__(self): + return "variables:{}\nact_outputs:{}\n_output_key:{}".format( + pformat(self.variables), pformat(self.act_outputs), pformat(self._output_key) + ) + + def __str__(self): + return self.__repr__() + + def __unicode__(self): + return self.__repr__() + + @property + def change_keys(self): + if not hasattr(self, "_change_keys"): + self._change_keys = set() + + return self._change_keys + + @property + def raw_variables(self): + if not hasattr(self, "_raw_variables"): + self._raw_variables = None + + return self._raw_variables + + +class OutputRef(object): + def __init__(self, key, context): + self.key = key + self.context = context + + @property + def value(self): + return self.context.get(self.key) + + def __deepcopy__(self, memodict={}): + return self diff --git a/runtime/bamboo-pipeline/pipeline/core/data/converter.py b/runtime/bamboo-pipeline/pipeline/core/data/converter.py new file mode 100644 index 00000000..311624d2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/converter.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline import exceptions +from pipeline.core.data.var import PlainVariable, SpliceVariable, Variable +from pipeline.core.data import library + + +def get_variable(key, info, context, pipeline_data): + if isinstance(info["value"], Variable): + variable = info["value"] + else: + if info.get("type", "plain") == "plain": + variable = PlainVariable(key, info["value"]) + elif info["type"] == "splice": + variable = SpliceVariable(key, info["value"], context) + elif info["type"] == "lazy": + variable = library.VariableLibrary.get_var_class(info["custom_type"])( + key, info["value"], context, pipeline_data + ) + else: + raise exceptions.DataTypeErrorException( + "Unknown type: %s, which should be one of [plain, splice, lazy]" % info["type"] + ) + return variable diff --git a/runtime/bamboo-pipeline/pipeline/core/data/expression.py b/runtime/bamboo-pipeline/pipeline/core/data/expression.py new file mode 100644 index 00000000..c479dd7f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/expression.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import copy +import re +import logging + +from mako.template import Template +from mako import lexer, codegen +from mako.exceptions import MakoException + +from pipeline import exceptions +from pipeline.conf.default_settings import MAKO_SAFETY_CHECK +from pipeline.core.data.sandbox import SANDBOX +from pipeline.core.data import mako_safety +from pipeline.utils.mako_utils.checker import check_mako_template_safety +from pipeline.utils.mako_utils.exceptions import ForbiddenMakoTemplateException + + +logger = logging.getLogger("root") +# find mako template(format is ${xxx},and ${}# not in xxx, # may raise memory error) +TEMPLATE_PATTERN = re.compile(r"\${[^$#]+}") + + +def format_constant_key(key): + """ + @summary: format key to ${key} + @param key: + @return: + """ + return "${%s}" % key + + +def deformat_constant_key(key): + """ + @summary: deformat ${key} to key + @param key: + @return: + """ + return key[2:-1] + + +class ConstantTemplate(object): + def __init__(self, data): + self.data = data + + def get_reference(self): + reference = [] + templates = self.get_templates() + for tpl in templates: + reference += self.get_template_reference(tpl) + reference = list(set(reference)) + return reference + + def get_templates(self): + templates = [] + data = self.data + if isinstance(data, str): + templates += self.get_string_templates(data) + if isinstance(data, (list, tuple)): + for item in data: + templates += ConstantTemplate(item).get_templates() + if isinstance(data, dict): + for value in list(data.values()): + templates += ConstantTemplate(value).get_templates() + return list(set(templates)) + + def resolve_data(self, value_maps): + data = self.data + if isinstance(data, str): + return self.resolve_string(data, value_maps) + if isinstance(data, list): + ldata = [""] * len(data) + for index, item in enumerate(data): + ldata[index] = ConstantTemplate(copy.deepcopy(item)).resolve_data(value_maps) + return ldata + if isinstance(data, tuple): + ldata = [""] * len(data) + for index, item in enumerate(data): + ldata[index] = ConstantTemplate(copy.deepcopy(item)).resolve_data(value_maps) + return tuple(ldata) + if isinstance(data, dict): + for key, value in list(data.items()): + data[key] = ConstantTemplate(copy.deepcopy(value)).resolve_data(value_maps) + return data + return data + + @staticmethod + def get_string_templates(string): + return list(set(TEMPLATE_PATTERN.findall(string))) + + @staticmethod + def get_template_reference(template): + lex = lexer.Lexer(template) + + try: + node = lex.parse() + except MakoException as e: + logger.warning("pipeline get template[{}] reference error[{}]".format(template, e)) + return [] + + # Dummy compiler. 
_Identifiers class requires one + # but only interested in the reserved_names field + def compiler(): + return None + + compiler.reserved_names = set() + identifiers = codegen._Identifiers(compiler, node) + + return list(identifiers.undeclared) + + @staticmethod + def resolve_string(string, value_maps): + if not isinstance(string, str): + return string + templates = ConstantTemplate.get_string_templates(string) + + # TODO keep render return object, here only process simple situation + if len(templates) == 1 and templates[0] == string and deformat_constant_key(string) in value_maps: + return value_maps[deformat_constant_key(string)] + + for tpl in templates: + if MAKO_SAFETY_CHECK: + try: + check_mako_template_safety( + tpl, mako_safety.SingleLineNodeVisitor(), mako_safety.SingleLinCodeExtractor() + ) + except ForbiddenMakoTemplateException as e: + logger.warning("forbidden template: {}, exception: {}".format(tpl, e)) + continue + except Exception: + logger.exception("{} safety check error.".format(tpl)) + continue + resolved = ConstantTemplate.resolve_template(tpl, value_maps) + string = string.replace(tpl, resolved) + return string + + @staticmethod + def resolve_template(template, value_maps): + data = {} + data.update(SANDBOX) + data.update(value_maps) + if not isinstance(template, str): + raise exceptions.ConstantTypeException("constant resolve error, template[%s] is not a string" % template) + try: + tm = Template(template) + except (MakoException, SyntaxError) as e: + logger.error("pipeline resolve template[{}] error[{}]".format(template, e)) + return template + try: + resolved = tm.render_unicode(**data) + except Exception as e: + logger.warning("constant content({}) is invalid, data({}), error: {}".format(template, data, e)) + return template + else: + return resolved diff --git a/runtime/bamboo-pipeline/pipeline/core/data/hydration.py b/runtime/bamboo-pipeline/pipeline/core/data/hydration.py new file mode 100644 index 00000000..8c86b6f3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/hydration.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +def hydrate_node_data(node): + """ + 替换当前节点的 data 中的变量 + :param node: + :return: + """ + data = node.data + hydrated = hydrate_data(data.get_inputs()) + data.get_inputs().update(hydrated) + + +def hydrate_data(data): + hydrated = {} + for k, v in list(data.items()): + from pipeline.core.data import var + + if issubclass(v.__class__, var.Variable): + hydrated[k] = v.get() + else: + hydrated[k] = v + return hydrated + + +def hydrate_subprocess_context(subprocess_act): + # hydrate data + hydrate_node_data(subprocess_act) + + # context injection + data = subprocess_act.pipeline.data + context = subprocess_act.pipeline.context + for k, v in list(data.get_inputs().items()): + context.set_global_var(k, v) + + hydrated = hydrate_data(context.variables) + context.update_global_var(hydrated) diff --git a/runtime/bamboo-pipeline/pipeline/core/data/library.py b/runtime/bamboo-pipeline/pipeline/core/data/library.py new file mode 100644 index 00000000..f53941df --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/library.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class VariableLibrary(object): + variables = {} + + @classmethod + def get_var_class(cls, code): + return cls.variables.get(code) + + @classmethod + def get_var(cls, code, name, data, context=None, pipeline_data=None): + if not context: + return cls.variables[code](name, data) + return cls.variables[code](name, data, context, pipeline_data) diff --git a/runtime/bamboo-pipeline/pipeline/core/data/mako_safety.py b/runtime/bamboo-pipeline/pipeline/core/data/mako_safety.py new file mode 100644 index 00000000..db5f75fa --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/mako_safety.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from ast import NodeVisitor + +from mako import parsetree + +from pipeline.utils.mako_utils.code_extract import MakoNodeCodeExtractor +from pipeline.utils.mako_utils.exceptions import ForbiddenMakoTemplateException + + +class SingleLineNodeVisitor(NodeVisitor): + """ + 遍历语法树节点,遇到魔术方法使用或 import 时,抛出异常 + """ + + def __init__(self, *args, **kwargs): + super(SingleLineNodeVisitor, self).__init__(*args, **kwargs) + + def visit_Attribute(self, node): + if node.attr.startswith("__"): + raise ForbiddenMakoTemplateException("can not access private attribute") + + def visit_Name(self, node): + if node.id.startswith("__"): + raise ForbiddenMakoTemplateException("can not access private method") + + def visit_Import(self, node): + raise ForbiddenMakoTemplateException("can not use import statement") + + def visit_ImportFrom(self, node): + self.visit_Import(node) + + +class SingleLinCodeExtractor(MakoNodeCodeExtractor): + def extract(self, node): + if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): + return node.text + elif isinstance(node, parsetree.Text): + return None + else: + raise ForbiddenMakoTemplateException("Unsupported node: [{}]".format(node.__class__.__name__)) diff --git a/runtime/bamboo-pipeline/pipeline/core/data/sandbox.py b/runtime/bamboo-pipeline/pipeline/core/data/sandbox.py new file mode 100644 index 00000000..7e47e933 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/sandbox.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +# mock str return value of Built-in Functions,make str(func) return "func" rather than "" + +import builtins +import importlib + +from pipeline.conf import default_settings + +SANDBOX = {} + + +class MockStrMeta(type): + def __new__(cls, name, bases, attrs): + new_cls = super(MockStrMeta, cls).__new__(cls, name, bases, attrs) + SANDBOX.update({new_cls.str_return: new_cls}) + return new_cls + + def __str__(cls): + return cls.str_return + + def __call__(cls, *args, **kwargs): + return cls.call(*args, **kwargs) + + +def _shield_words(sandbox, words): + for shield_word in words: + sandbox[shield_word] = None + + +class ModuleObject: + def __init__(self, sub_paths, module): + if len(sub_paths) == 1: + setattr(self, sub_paths[0], module) + return + setattr(self, sub_paths[0], ModuleObject(sub_paths[1:], module)) + + +def _import_modules(sandbox, modules): + for mod_path, alias in modules.items(): + mod = importlib.import_module(mod_path) + sub_paths = alias.split(".") + if len(sub_paths) == 1: + sandbox[alias] = mod + else: + sandbox[sub_paths[0]] = ModuleObject(sub_paths[1:], mod) + + +def _mock_builtins(): + """ + @summary: generate mock class of built-in functions like id,int + """ + for func_name in dir(builtins): + if func_name.lower() == func_name and not func_name.startswith("_"): + new_func_name = "Mock{}".format(func_name.capitalize()) + MockStrMeta(new_func_name, (object,), {"call": getattr(builtins, func_name), "str_return": func_name}) + + +_mock_builtins() + +_shield_words(SANDBOX, default_settings.MAKO_SANDBOX_SHIELD_WORDS) + +_import_modules(SANDBOX, default_settings.MAKO_SANDBOX_IMPORT_MODULES) diff --git a/runtime/bamboo-pipeline/pipeline/core/data/schemas.py b/runtime/bamboo-pipeline/pipeline/core/data/schemas.py new file mode 100644 index 00000000..27c6cda9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/schemas.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +BASE_PARAM = { + "type": "object", + "required": ["inputs", "outputs"], + "properties": {"inputs": {"type": "object"}, "outputs": {"type": "object"}}, +} diff --git a/runtime/bamboo-pipeline/pipeline/core/data/var.py b/runtime/bamboo-pipeline/pipeline/core/data/var.py new file mode 100644 index 00000000..0cb8ece5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/data/var.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +from abc import abstractmethod + +from pipeline import exceptions +from pipeline.conf import settings +from pipeline.core.data import library +from pipeline.core.data.context import OutputRef +from pipeline.core.data.expression import ConstantTemplate, format_constant_key +from pipeline.core.signals import pre_variable_register + +logger = logging.getLogger("root") + + +class Variable(object): + def __init__(self, name, value): + self.name = name + self.value = value + + @abstractmethod + def get(self): + pass + + +class PlainVariable(Variable): + def __init__(self, name, value): + super(PlainVariable, self).__init__(name, value) + self.name = name + self.value = value + + def get(self): + return self.value + + def __repr__(self): + return "[plain_var] {}".format(self.name) + + def __str__(self): + return self.__repr__() + + def __unicode__(self): + return self.__repr__() + + +class SpliceVariable(Variable): + def __init__(self, name, value, context): + super(SpliceVariable, self).__init__(name, value) + self._value = None + self._build_reference(context) + + def get(self): + if not self._value: + try: + self._resolve() + except settings.VARIABLE_SPECIFIC_EXCEPTIONS as e: + logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) + return "Error: {}".format(e) + except Exception as e: + logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) + return self.value + return self._value + + def _build_reference(self, context): + keys = ConstantTemplate(self.value).get_reference() + refs = {} + for key in keys: + refs[key] = OutputRef(format_constant_key(key), context) + self._refs = refs + + def _resolve(self): + maps = {} + for key in self._refs: + try: + ref_val = self._refs[key].value + if issubclass(ref_val.__class__, Variable): + ref_val = ref_val.get() + except exceptions.ReferenceNotExistError: + continue + maps[key] = ref_val + val = ConstantTemplate(self.value).resolve_data(maps) + + self._value = val + + def __repr__(self): + return "[splice_var] {}".format(self.name) + + def __str__(self): + return self.__repr__() + + def __unicode__(self): + return self.__repr__() + + +class RegisterVariableMeta(type): + def __new__(cls, name, bases, attrs): + super_new = super(RegisterVariableMeta, cls).__new__ + + # Also ensure initialization is only performed for subclasses of Model + # (excluding Model class itself). + parents = [b for b in bases if isinstance(b, RegisterVariableMeta)] + if not parents: + return super_new(cls, name, bases, attrs) + + # Create the class + new_class = super_new(cls, name, bases, attrs) + + if not new_class.code: + raise exceptions.ConstantReferenceException("LazyVariable %s: code can't be empty." 
% new_class.__name__) + + pre_variable_register.send(sender=LazyVariable, variable_cls=new_class) + + library.VariableLibrary.variables[new_class.code] = new_class + + return new_class + + +class LazyVariable(SpliceVariable, metaclass=RegisterVariableMeta): + def __init__(self, name, value, context, pipeline_data): + super(LazyVariable, self).__init__(name, value, context) + self.context = context + self.pipeline_data = pipeline_data + + # variable reference resolve + def get(self): + self.value = super(LazyVariable, self).get() + try: + return self.get_value() + except settings.VARIABLE_SPECIFIC_EXCEPTIONS as e: + logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) + return "Error: {}".format(e) + except Exception as e: + logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) + return self.value + + # get real value by user code + @abstractmethod + def get_value(self): + pass diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/__init__.py b/runtime/bamboo-pipeline/pipeline/core/flow/__init__.py new file mode 100644 index 00000000..157b9aca --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/__init__.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
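A sketch of what the metaclass buys you: any `LazyVariable` subclass with a non-empty `code` lands in `VariableLibrary.variables` as a side effect of class creation (`UpperVariable` is illustrative):

```python
from pipeline.core.data.library import VariableLibrary
from pipeline.core.data.var import LazyVariable


class UpperVariable(LazyVariable):
    code = "upper_var"

    def get_value(self):
        # self.value has already been splice-resolved by SpliceVariable.get()
        return str(self.value).upper()


# no explicit registration call is needed
assert VariableLibrary.get_var_class("upper_var") is UpperVariable
```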
+""" + +from .activity import SubProcess # noqa +from .activity import AbstractIntervalGenerator # noqa + +from .activity import ( # noqa + DefaultIntervalGenerator, + LinearIntervalGenerator, + NullIntervalGenerator, + Service, + ServiceActivity, + SquareIntervalGenerator, + StaticIntervalGenerator, +) +from .base import SequenceFlow # noqa +from .event import ( # noqa + EmptyEndEvent, + EmptyStartEvent, + EndEvent, + ExecutableEndEvent, + StartEvent, +) +from .gateway import ( # noqa + Condition, + ConditionalParallelGateway, + ConvergeGateway, + ExclusiveGateway, + ParallelGateway, +) +from .signals import post_new_end_event_register + + +class FlowNodeClsFactory(object): + nodes_cls = { + ServiceActivity.__name__: ServiceActivity, + SubProcess.__name__: SubProcess, + EmptyEndEvent.__name__: EmptyEndEvent, + EmptyStartEvent.__name__: EmptyStartEvent, + ParallelGateway.__name__: ParallelGateway, + ConditionalParallelGateway.__name__: ConditionalParallelGateway, + ExclusiveGateway.__name__: ExclusiveGateway, + ConvergeGateway.__name__: ConvergeGateway, + } + + @classmethod + def _nodes_types_filter(cls, cls_filter): + types = [] + for node_type, node_cls in list(cls.nodes_cls.items()): + if not cls_filter(node_cls): + types.append(node_type) + + return types + + @classmethod + def node_types_without_start_event(cls): + return cls._nodes_types_filter(cls_filter=lambda node_cls: issubclass(node_cls, StartEvent)) + + @classmethod + def node_types_without_start_end_event(cls): + return cls._nodes_types_filter( + cls_filter=lambda node_cls: issubclass(node_cls, EndEvent) or issubclass(node_cls, StartEvent) + ) + + @classmethod + def get_node_cls(cls, key): + return cls.nodes_cls.get(key) + + @classmethod + def register_node(cls, key, node_cls): + if key in cls.nodes_cls: + raise KeyError("node with key({key}) is already exist: {node}".format(key=key, node=cls.nodes_cls[key])) + + cls.nodes_cls[key] = node_cls + + if issubclass(node_cls, EndEvent): + post_new_end_event_register.send(sender=EndEvent, node_type=key, node_cls=node_cls) diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/activity/__init__.py b/runtime/bamboo-pipeline/pipeline/core/flow/activity/__init__.py new file mode 100644 index 00000000..8fb98dff --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/activity/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.core.flow.activity.base import * # noqa +from pipeline.core.flow.activity.base import _empty_method # noqa +from pipeline.core.flow.activity.service_activity import * # noqa +from pipeline.core.flow.activity.subprocess import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/activity/base.py b/runtime/bamboo-pipeline/pipeline/core/flow/activity/base.py new file mode 100644 index 00000000..d853659b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/activity/base.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from abc import ABCMeta + +from pipeline.core.flow.base import FlowNode + + +def _empty_method(data, parent_data): + return + + +class Activity(FlowNode, metaclass=ABCMeta): + def __init__(self, id, name=None, data=None, failure_handler=None): + super(Activity, self).__init__(id, name, data) + self._failure_handler = failure_handler or _empty_method + + def next(self): + return self.outgoing.unique_one().target + + def failure_handler(self, parent_data): + return self._failure_handler(data=self.data, parent_data=parent_data) + + def skip(self): + raise NotImplementedError() + + def prepare_rerun_data(self): + raise NotImplementedError() diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/activity/service_activity.py b/runtime/bamboo-pipeline/pipeline/core/flow/activity/service_activity.py new file mode 100644 index 00000000..b55405d8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/activity/service_activity.py @@ -0,0 +1,292 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from abc import ABCMeta, abstractmethod +from copy import deepcopy + +from django.utils.translation import ugettext_lazy as _ + +from pipeline.conf import settings +from pipeline.core.flow.activity.base import Activity +from pipeline.core.flow.io import BooleanItemSchema, InputItem, IntItemSchema, OutputItem +from pipeline.utils.utils import convert_bytes_to_str + + +class Service(object, metaclass=ABCMeta): + schedule_result_attr = "__schedule_finish__" + schedule_determine_attr = "__need_schedule__" + multi_callback_determine_attr = "__multi_callback_enabled__" + InputItem = InputItem + OutputItem = OutputItem + interval = None + default_outputs = [ + OutputItem( + name=_("执行结果"), + key="_result", + type="boolean", + schema=BooleanItemSchema(description=_("执行结果的布尔值,True or False")), + ), + OutputItem(name=_("循环次数"), key="_loop", type="int", schema=IntItemSchema(description=_("循环执行次数"))), + OutputItem( + name=_("当前流程循环次数"), + key="_inner_loop", + type="int", + schema=IntItemSchema(description=_("在当前流程节点循环执行次数,由父流程重新进入时会重置(仅支持新版引擎)")), + ), + ] + + def __init__(self, name=None): + self.name = name + self.interval = deepcopy(self.interval) + self._runtime_attrs = {} + + def __getattr__(self, name): + if name not in self.__dict__.get("_runtime_attrs", {}): + raise AttributeError() + + return self._runtime_attrs[name] + + def __getstate__(self): + if "logger" in self.__dict__: + del self.__dict__["logger"] + # compatible with old version pickle obj + if "_runtime_attrs" in self.__dict__: + if "logger" in self._runtime_attrs: + del self._runtime_attrs["logger"] + + return self.__dict__ + + @abstractmethod + def execute(self, data, parent_data): + # get params from data + pass + + def outputs_format(self): + return [] + + def inputs_format(self): + return [] + + def inputs(self): + return self.inputs_format() + + def outputs(self): + custom_format = self.outputs_format() + assert isinstance(custom_format, list) + custom_format += self.default_outputs + return custom_format + + def need_schedule(self): + return getattr(self, Service.schedule_determine_attr, False) + + def schedule(self, data, parent_data, callback_data=None): + return True + + def finish_schedule(self): + setattr(self, self.schedule_result_attr, True) + + def is_schedule_finished(self): + return getattr(self, self.schedule_result_attr, False) + + def multi_callback_enabled(self): + return getattr(self, self.multi_callback_determine_attr, False) + + def clean_status(self): + setattr(self, self.schedule_result_attr, False) + + def setup_runtime_attrs(self, **kwargs): + # compatible with old version pickle obj + if "_runtime_attrs" not in self.__dict__: + self._runtime_attrs = {} + self._runtime_attrs.update(**kwargs) + + +class ServiceActivity(Activity): + result_bit = "_result" + loop = "_loop" + ON_RETRY = "_on_retry" + + def __init__( + self, + id, + service, + name=None, + data=None, + error_ignorable=False, + failure_handler=None, + skippable=True, + retryable=True, + timeout=None, + ): + super(ServiceActivity, self).__init__(id, name, data, failure_handler) + self.service = service + self.error_ignorable = error_ignorable + self.skippable = skippable + self.retryable = retryable + self.timeout = timeout + + if data: + self._prepared_inputs = self.data.inputs_copy() + self._prepared_outputs = self.data.outputs_copy() + + def __setstate__(self, state): + + for attr, obj in list(state.items()): + # py2 pickle dumps data compatible + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + obj = 
convert_bytes_to_str(obj) + + setattr(self, attr, obj) + + if "timeout" not in state: + self.timeout = None + + def execute_pre_process(self, parent_data): + # return True if the plugin does not complete execute_pre_process function + if not (hasattr(self.service, "execute_pre_process") and callable(self.service.execute_pre_process)): + return True + + result = self.service.execute_pre_process(self.data, parent_data) + + # set result + self.set_result_bit(result) + + if self.error_ignorable: + return True + return result + + def execute(self, parent_data): + self.setup_logger() + try: + result = self.service.execute(self.data, parent_data) + except settings.PLUGIN_SPECIFIC_EXCEPTIONS as e: + self.data.set_outputs("ex_data", e) + result = False + + # set result + self.set_result_bit(result) + + if self.error_ignorable: + return True + return result + + def set_result_bit(self, result): + if result is False: + self.data.set_outputs(self.result_bit, False) + else: + self.data.set_outputs(self.result_bit, True) + + def get_result_bit(self): + return self.data.get_one_of_outputs(self.result_bit, False) + + def skip(self): + self.set_result_bit(True) + return True + + def ignore_error(self): + self.set_result_bit(False) + return True + + def clear_outputs(self): + self.data.reset_outputs({}) + + def need_schedule(self): + return self.service.need_schedule() + + def schedule(self, parent_data, callback_data=None): + self.setup_logger() + try: + result = self.service.schedule(self.data, parent_data, callback_data) + except settings.PLUGIN_SPECIFIC_EXCEPTIONS as e: + self.data.set_outputs("ex_data", e) + result = False + self.set_result_bit(result) + + if result is False: + if self.error_ignorable: + self.service.finish_schedule() + return True + + return result + + def is_schedule_done(self): + return self.service.is_schedule_finished() + + def finish_schedule(self): + self.service.finish_schedule() + + def shell(self): + shell = ServiceActivity( + id=self.id, + service=self.service, + name=self.name, + data=self.data, + error_ignorable=self.error_ignorable, + timeout=self.timeout, + ) + return shell + + def schedule_fail(self): + return + + def schedule_success(self): + return + + def prepare_rerun_data(self): + self.data.override_inputs(deepcopy(self._prepared_inputs)) + self.data.override_outputs(deepcopy(self._prepared_outputs)) + + def setup_runtime_attrs(self, **kwargs): + self.service.setup_runtime_attrs(**kwargs) + + def setup_logger(self): + self.service.setup_runtime_attrs(logger=self.logger) + + +class AbstractIntervalGenerator(object, metaclass=ABCMeta): + def __init__(self): + self.count = 0 + + def next(self): + self.count += 1 + + +class DefaultIntervalGenerator(AbstractIntervalGenerator): + def next(self): + super(DefaultIntervalGenerator, self).next() + return self.count ** 2 + + +class SquareIntervalGenerator(AbstractIntervalGenerator): + def next(self): + super(SquareIntervalGenerator, self).next() + return self.count ** 2 + + +class NullIntervalGenerator(AbstractIntervalGenerator): + pass + + +class LinearIntervalGenerator(AbstractIntervalGenerator): + pass + + +class StaticIntervalGenerator(AbstractIntervalGenerator): + def __init__(self, interval): + super(StaticIntervalGenerator, self).__init__() + self.interval = interval + + def next(self): + super(StaticIntervalGenerator, self).next() + return self.interval diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/activity/subprocess.py b/runtime/bamboo-pipeline/pipeline/core/flow/activity/subprocess.py new file mode 100644 
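To make the hooks above concrete, a minimal sketch of a plain (non-scheduled) service plus a fixed-interval generator; `EchoService` is hypothetical, and at runtime `data`/`parent_data` would be pipeline data objects:

```python
from pipeline.core.flow.activity import Service, StaticIntervalGenerator


class EchoService(Service):  # hypothetical service: no scheduling, one-shot execute
    def execute(self, data, parent_data):
        data.set_outputs("echo", data.get_one_of_inputs("msg"))
        return True


svc = EchoService(name="echo")
assert svc.need_schedule() is False  # __need_schedule__ defaults to False
assert [item.key for item in svc.outputs()] == ["_result", "_loop", "_inner_loop"]

gen = StaticIntervalGenerator(5)
assert (gen.next(), gen.next()) == (5, 5)  # a constant polling interval
assert gen.count == 2
```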
index 00000000..14ff3aed --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/activity/subprocess.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from copy import deepcopy + +from pipeline.core.flow.activity.base import Activity +from pipeline.utils.utils import convert_bytes_to_str + + +class SubProcess(Activity): + def __init__(self, id, pipeline, name=None): + super(SubProcess, self).__init__(id, name, pipeline.data) + self.pipeline = pipeline + self._prepared_inputs = self.pipeline.data.inputs_copy() + self._prepared_outputs = self.pipeline.data.outputs_copy() + + def prepare_rerun_data(self): + self.data.override_inputs(deepcopy(self._prepared_inputs)) + self.data.override_outputs(deepcopy(self._prepared_outputs)) + + def __setstate__(self, state): + for attr, obj in list(state.items()): + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + obj = convert_bytes_to_str(obj) + setattr(self, attr, obj) + + if "_prepared_inputs" not in state: + self._prepared_inputs = self.pipeline.data.inputs_copy() + + if "_prepared_outputs" not in state: + self._prepared_outputs = self.pipeline.data.outputs_copy() diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/base.py b/runtime/bamboo-pipeline/pipeline/core/flow/base.py new file mode 100644 index 00000000..5d62ae15 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/base.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import weakref +from abc import ABCMeta, abstractmethod +from functools import wraps + +from pipeline.exceptions import InvalidOperationException + + +def extra_inject(func): + @wraps(func) + def wrapper(*args, **kwargs): + if "extra" not in kwargs: + kwargs["extra"] = {} + kwargs["extra"]["_id"] = args[0].id + return func(*args, **kwargs) + + return wrapper + + +class FlowElement(object, metaclass=ABCMeta): + def __init__(self, id, name=None): + self.id = id + self.name = name + + +class FlowNode(FlowElement, metaclass=ABCMeta): + ON_RETRY = "_on_retry" + + def __init__(self, id, name=None, data=None): + super(FlowNode, self).__init__(id, name) + self.incoming = SequenceFlowCollection() + self.outgoing = SequenceFlowCollection() + self.data = data + + def on_retry(self): + return hasattr(self, self.ON_RETRY) + + def next_exec_is_retry(self): + setattr(self, self.ON_RETRY, True) + + def retry_at_current_exec(self): + delattr(self, self.ON_RETRY) + + @abstractmethod + def next(self): + """ + 该节点的下一个节点,由子类来实现 + :return: + """ + raise NotImplementedError() + + class FlowNodeLogger: + def __init__(self, id): + self.id = id + self._logger = logging.getLogger("pipeline.logging") + + @extra_inject + def info(self, *args, **kwargs): + self._logger.info(*args, **kwargs) + + @extra_inject + def warning(self, *args, **kwargs): + self._logger.warning(*args, **kwargs) + + @extra_inject + def error(self, *args, **kwargs): + self._logger.error(*args, **kwargs) + + @extra_inject + def critical(self, *args, **kwargs): + self._logger.critical(*args, **kwargs) + + @property + def logger(self): + _logger = getattr(self, "_logger", None) + if not _logger: + _logger = self.FlowNodeLogger(self.id) + setattr(self, "_logger", _logger) + return _logger + + def __getstate__(self): + if "_logger" in self.__dict__: + del self.__dict__["_logger"] + return self.__dict__ + + +class SequenceFlow(FlowElement): + def __init__(self, id, source, target, is_default=False, name=None): + super(SequenceFlow, self).__init__(id, name) + self.source = weakref.proxy(source) if source is not None else source + self.target = weakref.proxy(target) if target is not None else target + self.is_default = is_default + + +class SequenceFlowCollection(object): + def __init__(self, *flows): + flow_dict = {} + for flow in flows: + flow_dict[flow.id] = flow + + self.flows = list(flows) + self.flow_dict = flow_dict + + def get_flow(self, id): + """ + 获取 flow.id = id 的某个 flow + :param id: flow id + :return: + """ + return self.flow_dict.get(id) + + def unique_one(self): + """ + 获取唯一的一个 flow,若当前集合内 flow 不只一条则抛出异常 + :return: + """ + if len(self.flows) != 1: + raise InvalidOperationException("this collection contains multiple flow, can not get unique one.") + return self.flows[0] + + def is_empty(self): + """ + 当前集合是否为空 + :return: + """ + return len(self.flows) == 0 + + def default_flow(self): + """ + 获取当前集合中默认的 flow + :return: 若存在默认的 flow 则返回,否则返回 None + """ + for flow in self.flows: + if flow.is_default: + return flow + return None + + def add_flow(self, flow): + """ + 向当前结合中添加一条 flow + :param flow: 待添加的 flow + :return: + """ + self.flows.append(flow) + self.flow_dict[flow.id] = flow + + def all_target_node(self): + """ + 返回当前集合中所有 flow 的 target + :return: + """ + nodes = [] + for flow in self.flows: + nodes.append(flow.target) + return nodes + + def all_source_node(self): + """ + 返回当前集合中所有 flow 的 source + :return: + """ + nodes = [] + for flow in self.flows: + nodes.append(flow.source) + return nodes + + def __iter__(self): 
+ return iter(self.flows) diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/event.py b/runtime/bamboo-pipeline/pipeline/core/flow/event.py new file mode 100644 index 00000000..a5394e48 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/event.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import traceback +from abc import ABCMeta, abstractmethod + +from pipeline.core.flow.base import FlowNode +from pipeline.core.pipeline import Pipeline +from pipeline.engine.signals import pipeline_end + +logger = logging.getLogger("celery") + + +class Event(FlowNode, metaclass=ABCMeta): + def __init__(self, id, name=None, data=None): + super(Event, self).__init__(id, name, data) + + def next(self): + return self.outgoing.unique_one().target + + +class ThrowEvent(Event, metaclass=ABCMeta): + pass + + +class CatchEvent(Event, metaclass=ABCMeta): + pass + + +class EndEvent(ThrowEvent, metaclass=ABCMeta): + def pipeline_finish(self, root_pipeline_id): + try: + pipeline_end.send(sender=Pipeline, root_pipeline_id=root_pipeline_id) + except Exception: + logger.error("pipeline end handler error %s" % traceback.format_exc()) + + +class StartEvent(CatchEvent, metaclass=ABCMeta): + pass + + +class EmptyStartEvent(StartEvent): + pass + + +class EmptyEndEvent(EndEvent): + pass + + +class ExecutableEndEvent(EndEvent, metaclass=ABCMeta): + @abstractmethod + def execute(self, in_subprocess, root_pipeline_id, current_pipeline_id): + raise NotImplementedError() diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/gateway.py b/runtime/bamboo-pipeline/pipeline/core/flow/gateway.py new file mode 100644 index 00000000..9659deb2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/gateway.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
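A sketch of wiring two of these nodes together by hand; note that `SequenceFlow` stores weak proxies, so compare by `id` rather than identity:

```python
from pipeline.core.flow.base import SequenceFlow
from pipeline.core.flow.event import EmptyEndEvent, EmptyStartEvent

start = EmptyStartEvent(id="start")
end = EmptyEndEvent(id="end")

flow = SequenceFlow(id="f1", source=start, target=end)
start.outgoing.add_flow(flow)
end.incoming.add_flow(flow)

assert start.next().id == "end"  # outgoing.unique_one().target, via a weak proxy
assert end.incoming.is_empty() is False
```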
+""" +import logging +from abc import ABCMeta + +import ujson as json + +from pipeline.core.constants import ESCAPED_CHARS +from pipeline.core.data.expression import ConstantTemplate, deformat_constant_key +from pipeline.core.flow.base import FlowNode +from pipeline.exceptions import ConditionExhaustedException, EvaluationException, InvalidOperationException +from pipeline.utils.boolrule import BoolRule + +logger = logging.getLogger("pipeline_engine") + + +class Gateway(FlowNode, metaclass=ABCMeta): + pass + + +class ExclusiveGateway(Gateway): + def __init__(self, id, conditions=None, name=None, data=None): + super(ExclusiveGateway, self).__init__(id, name, data) + self.conditions = conditions or [] + + def add_condition(self, condition): + self.conditions.append(condition) + + def next(self, data=None): + default_flow = self.outgoing.default_flow() + next_flow = self._determine_next_flow_with_boolrule(data) + + if not next_flow: # determine fail + if not default_flow: # try to use default flow + raise ConditionExhaustedException( + "all conditions of branches are False " "while default flow is not appointed" + ) + return default_flow.target + + return next_flow.target + + def target_for_sequence_flow(self, flow_id): + flow_to_target = {c.sequence_flow.id: c.sequence_flow.target for c in self.conditions} + if flow_id not in flow_to_target: + raise InvalidOperationException("sequence flow(%s) does not exist." % flow_id) + return flow_to_target[flow_id] + + @staticmethod + def _transform_escape_char(string): + """ + 对未转义的字符串进行转义,现有的转义字符包括\n, \r, \t + """ + if not isinstance(string, str): + return string + # 已转义的情况 + if len([c for c in ESCAPED_CHARS.values() if c in string]) > 0: + return string + for key, value in ESCAPED_CHARS.items(): + if key in string: + string = string.replace(key, value) + return string + + def _determine_next_flow_with_boolrule(self, data): + """ + 根据当前传入的数据判断下一个应该流向的 flow ( 不使用 eval 的版本) + :param data: + :return: + """ + for key, value in data.items(): + data[key] = self._transform_escape_char(value) + logger.info("[{}] ready to resolve conditions: {}".format(self.id, [c.evaluate for c in self.conditions])) + for condition in self.conditions: + deformatted_data = {deformat_constant_key(key): value for key, value in list(data.items())} + try: + logger.info("[{}] before resolve condition: {}".format(self.id, condition.evaluate)) + resolved_evaluate = ConstantTemplate(condition.evaluate).resolve_data(deformatted_data) + logger.info("[{}] test {} with data {}".format(self.id, resolved_evaluate, data)) + result = BoolRule(resolved_evaluate).test(data) + logger.info("[{}] {} test result: {}".format(self.id, resolved_evaluate, result)) + except Exception as e: + raise EvaluationException( + "evaluate[%s] fail with data[%s] message: %s" + % (condition.evaluate, json.dumps(deformatted_data), e) + ) + if result: + return condition.sequence_flow + + return None + + def skip(self): + return True + + +class ParallelGateway(Gateway): + def __init__(self, id, converge_gateway_id, name=None, data=None): + super(ParallelGateway, self).__init__(id, name, data) + self.converge_gateway_id = converge_gateway_id + + def next(self): + raise InvalidOperationException("can not determine next node for parallel gateway.") + + +class ConditionalParallelGateway(Gateway): + def __init__(self, id, converge_gateway_id, conditions=None, name=None, data=None): + super(ConditionalParallelGateway, self).__init__(id, name, data) + self.converge_gateway_id = converge_gateway_id + self.conditions = 
conditions or [] + + def add_condition(self, condition): + self.conditions.append(condition) + + def targets_meet_condition(self, data): + + targets = [] + + logger.info("[{}] ready to resolve conditions: {}".format(self.id, [c.evaluate for c in self.conditions])) + for condition in self.conditions: + deformatted_data = {deformat_constant_key(key): value for key, value in list(data.items())} + try: + logger.info("[{}] before resolve condition: {}".format(self.id, condition.evaluate)) + resolved_evaluate = ConstantTemplate(condition.evaluate).resolve_data(deformatted_data) + logger.info("[{}] test {} with data {}".format(self.id, resolved_evaluate, data)) + result = BoolRule(resolved_evaluate).test(data) + logger.info("[{}] {} test result: {}".format(self.id, resolved_evaluate, result)) + except Exception as e: + raise EvaluationException( + "evaluate[%s] fail with data[%s] message: %s" + % (condition.evaluate, json.dumps(deformatted_data), e) + ) + if result: + targets.append(condition.sequence_flow.target) + + if not targets: + raise ConditionExhaustedException("all conditions of branches are False") + + return targets + + def next(self): + raise InvalidOperationException("can not determine next node for conditional parallel gateway.") + + def skip(self): + raise InvalidOperationException("can not skip conditional parallel gateway.") + + +class ConvergeGateway(Gateway): + def next(self): + return self.outgoing.unique_one().target + + def skip(self): + raise InvalidOperationException("can not skip conditional converge gateway.") + + +class Condition(object): + def __init__(self, evaluate, sequence_flow): + self.evaluate = evaluate + self.sequence_flow = sequence_flow diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/io.py b/runtime/bamboo-pipeline/pipeline/core/flow/io.py new file mode 100644 index 00000000..f9829a87 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/io.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
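A hedged sketch of branch selection, assuming constant-style data keys such as `${x}`: the key is deformatted, the condition string is rendered through `ConstantTemplate`, and the rendered expression is tested with `BoolRule`:

```python
from pipeline.core.flow.base import SequenceFlow
from pipeline.core.flow.event import EmptyEndEvent
from pipeline.core.flow.gateway import Condition, ExclusiveGateway

branch = EmptyEndEvent(id="branch_a")
flow_a = SequenceFlow(id="to_a", source=None, target=branch)

gateway = ExclusiveGateway(id="gw")
gateway.add_condition(Condition(evaluate="${x} > 2", sequence_flow=flow_a))

# "${x} > 2" renders to "3 > 2", which BoolRule evaluates to True
assert gateway.next({"${x}": 3}).id == "branch_a"
```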
+""" + +import abc +from collections import Mapping + + +class DataItem(object, metaclass=abc.ABCMeta): + def __init__(self, name, key, type, schema=None): + self.name = name + self.key = key + self.type = type + self.schema = schema + + def as_dict(self): + return { + "name": self.name, + "key": self.key, + "type": self.type, + "schema": self.schema.as_dict() if self.schema else {}, + } + + +class InputItem(DataItem): + def __init__(self, required=True, *args, **kwargs): + self.required = required + super(InputItem, self).__init__(*args, **kwargs) + + def as_dict(self): + base = super(InputItem, self).as_dict() + base["required"] = self.required + return base + + +class OutputItem(DataItem): + pass + + +class ItemSchema(object, metaclass=abc.ABCMeta): + def __init__(self, description, enum=None): + self.type = self._type() + self.description = description + self.enum = enum or [] + + def as_dict(self): + return {"type": self.type, "description": self.description, "enum": self.enum} + + @abc.abstractmethod + def _type(self): + raise NotImplementedError() + + +class SimpleItemSchema(ItemSchema, metaclass=abc.ABCMeta): + pass + + +class IntItemSchema(SimpleItemSchema): + @classmethod + def _type(cls): + return "int" + + +class StringItemSchema(SimpleItemSchema): + @classmethod + def _type(cls): + return "string" + + +class FloatItemSchema(SimpleItemSchema): + @classmethod + def _type(cls): + return "float" + + +class BooleanItemSchema(SimpleItemSchema): + @classmethod + def _type(cls): + return "boolean" + + +class ArrayItemSchema(ItemSchema): + def __init__(self, item_schema, *args, **kwargs): + if not isinstance(item_schema, ItemSchema): + raise TypeError("item_schema of ArrayItemSchema must be subclass of ItemSchema") + self.item_schema = item_schema + super(ArrayItemSchema, self).__init__(*args, **kwargs) + + def as_dict(self): + base = super(ArrayItemSchema, self).as_dict() + base["items"] = self.item_schema.as_dict() + return base + + @classmethod + def _type(cls): + return "array" + + +class ObjectItemSchema(ItemSchema): + def __init__(self, property_schemas, *args, **kwargs): + if not isinstance(property_schemas, Mapping): + raise TypeError("property_schemas of ObjectItemSchema must be Mapping type") + + if not all([isinstance(value, ItemSchema) for value in list(property_schemas.values())]): + raise TypeError("value in property_schemas of ObjectItemSchema must be subclass of ItemSchema") + + self.property_schemas = property_schemas + super(ObjectItemSchema, self).__init__(*args, **kwargs) + + def as_dict(self): + base = super(ObjectItemSchema, self).as_dict() + properties = {prop: schema.as_dict() for prop, schema in list(self.property_schemas.items())} + base["properties"] = properties + return base + + @classmethod + def _type(cls): + return "object" diff --git a/runtime/bamboo-pipeline/pipeline/core/flow/signals.py b/runtime/bamboo-pipeline/pipeline/core/flow/signals.py new file mode 100644 index 00000000..4c23fed6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/flow/signals.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.dispatch import Signal + +post_new_end_event_register = Signal(providing_args=["node_type", "node_cls"]) diff --git a/runtime/bamboo-pipeline/pipeline/core/pipeline.py b/runtime/bamboo-pipeline/pipeline/core/pipeline.py new file mode 100644 index 00000000..4fa75933 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/pipeline.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from queue import Queue + +from pipeline.core.flow.activity import Activity +from pipeline.core.flow.gateway import Gateway +from pipeline.exceptions import PipelineException + + +class PipelineSpec(object): + def __init__(self, start_event, end_event, flows, activities, gateways, data, context): + objects = {start_event.id: start_event, end_event.id: end_event} + for act in activities: + objects[act.id] = act + for gw in gateways: + objects[gw.id] = gw + + self.start_event = start_event + self.end_event = end_event + self.flows = flows + self.activities = activities + self.gateways = gateways + self.data = data + self.objects = objects + self.context = context + + def prune(self, keep_from, keep_to): + if keep_from != self.start_event.id: + self.start_event.outgoing = None + + if keep_to != self.end_event.id: + self.end_event.incoming = None + + self.activities = [] + self.gateways = [] + self.flows = [] + + keep_from_node = self.objects[keep_from] + keep_to_node = self.objects[keep_to] + + keep_from_node.incoming = None + keep_to_node.outgoing = None + + to_be_process = Queue() + to_be_process.put(keep_from_node) + + new_objects = {} + keep_to_incoming_flows = [] + + while not to_be_process.empty(): + node = to_be_process.get() + + if issubclass(node.__class__, Activity): + self.activities.append(node) + elif issubclass(node.__class__, Gateway): + self.gateways.append(node) + + new_objects[node.id] = node + + if node.id == keep_to_node.id: + continue + + for out in node.outgoing: + + self.flows.append(out) + + if out.target.id not in new_objects: + next_node = out.target + if next_node.id == keep_to_node.id: + keep_to_incoming_flows.append(out) + to_be_process.put(next_node) + + keep_to_node.incoming.flows = keep_to_incoming_flows + keep_to_node.incoming.flow_dict = {} + for flow in keep_to_incoming_flows: + keep_to_node.incoming.flow_dict[flow.id] = flow + + self.objects = new_objects + + +class PipelineShell(object): + def __init__(self, id, data): + self.id = id + self.data = data + + def shell(self): + 
return PipelineShell(id=self.id, data=self.data) + + +class Pipeline(object): + def __init__(self, id, pipeline_spec, parent=None): + self.id = id + self.spec = pipeline_spec + self.parent = parent + + @property + def data(self): + return self.spec.data + + @property + def context(self): + return self.spec.context + + @property + def start_event(self): + return self.spec.start_event + + @property + def end_event(self): + return self.spec.end_event + + @property + def all_nodes(self): + return self.spec.objects + + def data_for_node(self, node): + node = self.spec.objects.get(node.id) + if not node: + raise PipelineException("Can not find node %s in this pipeline." % node.id) + return node.data + + def node(self, id): + return self.spec.objects.get(id) + + def prune(self, keep_from, keep_to): + self.spec.prune(keep_from=keep_from, keep_to=keep_to) + + def shell(self): + return PipelineShell(id=self.id, data=self.data) diff --git a/runtime/bamboo-pipeline/pipeline/core/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/core/signals/__init__.py new file mode 100644 index 00000000..22849a49 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/core/signals/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.dispatch import Signal + +pre_variable_register = Signal(providing_args=["variable_code"]) diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/__init__.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/admin.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/admin.py new file mode 100644 index 00000000..be469dfd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/admin.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
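A minimal hand-wired spec to show the `Pipeline` facade; real specs are produced by the parser, so the bare `data=None, context=None` here is only for illustration:

```python
from pipeline.core.flow.base import SequenceFlow
from pipeline.core.flow.event import EmptyEndEvent, EmptyStartEvent
from pipeline.core.pipeline import Pipeline, PipelineSpec

start, end = EmptyStartEvent(id="s"), EmptyEndEvent(id="e")
flow = SequenceFlow(id="f", source=start, target=end)
start.outgoing.add_flow(flow)
end.incoming.add_flow(flow)

spec = PipelineSpec(start, end, [flow], [], [], data=None, context=None)
pipeline = Pipeline(id="p", pipeline_spec=spec)

assert pipeline.node("s") is start  # looked up via spec.objects
assert pipeline.start_event is start and pipeline.end_event is end
```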
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline.django_signal_valve.models import Signal + + +@admin.register(Signal) +class SignalAdmin(admin.ModelAdmin): + list_display = ["id", "module_path", "name", "kwargs"] + search_fields = ["id", "module_path", "name"] diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/0001_initial.py new file mode 100644 index 00000000..4afb9047 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/0001_initial.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models +import pipeline.django_signal_valve.models + + +class Migration(migrations.Migration): + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Signal", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("module_path", models.TextField(verbose_name="\u4fe1\u53f7\u6a21\u5757\u540d")), + ("name", models.CharField(max_length=64, verbose_name="\u4fe1\u53f7\u5c5e\u6027\u540d")), + ("kwargs", pipeline.django_signal_valve.models.IOField(verbose_name="\u4fe1\u53f7\u53c2\u6570")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/models.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/models.py new file mode 100644 index 00000000..d648849e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/models.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import zlib +import pickle + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + + +class IOField(models.BinaryField): + def __init__(self, compress_level=6, *args, **kwargs): + super(IOField, self).__init__(*args, **kwargs) + self.compress_level = compress_level + + def get_prep_value(self, value): + value = super(IOField, self).get_prep_value(value) + return zlib.compress(pickle.dumps(value), self.compress_level) + + def to_python(self, value): + value = super(IOField, self).to_python(value) + return pickle.loads(zlib.decompress(value)) + + def from_db_value(self, value, expression, connection, context): + return self.to_python(value) + + +class SignalManager(models.Manager): + def dump(self, module_path, signal_name, kwargs): + self.create(module_path=module_path, name=signal_name, kwargs=kwargs) + + +class Signal(models.Model): + module_path = models.TextField(_("信号模块名")) + name = models.CharField(_("信号属性名"), max_length=64) + kwargs = IOField(verbose_name=_("信号参数")) + + objects = SignalManager() diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/__init__.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
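A sketch of the `IOField` round trip (assuming a configured Django environment): values are pickled and zlib-compressed on the way into the database, and reversed on the way out:

```python
from pipeline.django_signal_valve.models import IOField

field = IOField(compress_level=6)
payload = {"callback": "node_1", "args": [1, 2, 3]}

blob = field.get_prep_value(payload)     # pickle.dumps, then zlib.compress
assert field.to_python(blob) == payload  # zlib.decompress, then pickle.loads
```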
+""" diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/mock_signal/__init__.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/mock_signal/__init__.py new file mode 100644 index 00000000..ed84f2a8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/mock_signal/__init__.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class MockSignal(object): + def __init__(self): + self.history = [] + + def send(self, **kwargs): + self.history.append(kwargs) + + +signal_1 = MockSignal() + + +def clear(): + global signal_1 + signal_1 = MockSignal() diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_models.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_models.py new file mode 100644 index 00000000..a52d8699 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_models.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.django_signal_valve.models import Signal + + +class TestModels(TestCase): + def tearDown(self): + Signal.objects.all().delete() + + def test_manager_dump(self): + kwargs = {"key1": "value1", "key2": [1, 2, 3], "key3": {"key4": "value4"}} + Signal.objects.dump(module_path="path", signal_name="name", kwargs=kwargs) + signal = Signal.objects.all()[0] + self.assertEqual(signal.module_path, "path") + self.assertEqual(signal.name, "name") + self.assertEqual(signal.kwargs, kwargs) diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_valve.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_valve.py new file mode 100644 index 00000000..a0cda55e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/tests/test_valve.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.test import TestCase
+
+from pipeline.django_signal_valve import valve
+from pipeline.django_signal_valve.models import Signal
+from pipeline.django_signal_valve.tests import mock_signal
+
+
+class TestValve(TestCase):
+    def setUp(self):
+        valve.unload_valve_function()
+
+    def test_set_valve_function(self):
+        # a non-callable valve function must be rejected
+        self.assertRaises(Exception, valve.set_valve_function, 1)
+
+        def func():
+            return True
+
+        valve.unload_valve_function()
+        valve.set_valve_function(func)
+        self.assertEqual(valve.valve_function(), func)
+        # the valve function can only be set once
+        self.assertRaises(Exception, valve.set_valve_function, func)
+
+        # reset through the module API; assigning valve.__valve_function here would be
+        # name-mangled to valve._TestValve__valve_function and would not reset anything
+        valve.unload_valve_function()
+
+    def test_send_on_valve_is_none(self):
+        kwargs_1 = {"1": 1}
+        kwargs_2 = {"2": 2}
+
+        valve.unload_valve_function()
+        valve.send(mock_signal, "signal_1", **kwargs_1)
+        valve.send(mock_signal, "signal_1", **kwargs_2)
+        self.assertEqual(mock_signal.signal_1.history[0], kwargs_1)
+        self.assertEqual(mock_signal.signal_1.history[1], kwargs_2)
+
+        mock_signal.clear()
+
+    def test_send_on_valve_opened(self):
+        kwargs_1 = {"1": 1}
+        kwargs_2 = {"2": 2}
+
+        def is_valve_closed():
+            return False
+
+        valve.unload_valve_function()
+        valve.set_valve_function(is_valve_closed)
+        valve.send(mock_signal, "signal_1", **kwargs_1)
+        valve.send(mock_signal, "signal_1", **kwargs_2)
+        self.assertEqual(mock_signal.signal_1.history[0], kwargs_1)
+        self.assertEqual(mock_signal.signal_1.history[1], kwargs_2)
+
+        mock_signal.clear()
+
+    def test_send_on_closed(self):
+        kwargs_1 = {"1": 1}
+        kwargs_2 = {"2": 2}
+
+        def is_valve_closed():
+            return True
+
+        valve.unload_valve_function()
+        valve.set_valve_function(is_valve_closed)
+        valve.send(mock_signal, "signal_1", **kwargs_1)
+        valve.send(mock_signal, "signal_1", **kwargs_2)
+        self.assertEqual(len(mock_signal.signal_1.history), 0)
+
+        mock_signal.clear()
+        Signal.objects.all().delete()
+
+    def test_open_valve(self):
+        kwargs_1 = {"1": 1}
+        kwargs_2 = {"2": 2}
+
+        def valve_closed():
+            return True
+
+        valve.unload_valve_function()
+        valve.set_valve_function(valve_closed)
+        valve.send(mock_signal, "signal_1", **kwargs_1)
+        valve.send(mock_signal, "signal_1", **kwargs_2)
+        self.assertEqual(len(mock_signal.signal_1.history), 0)
+        valve.open_valve(mock_signal)
+        self.assertEqual(mock_signal.signal_1.history[0], kwargs_1)
+        self.assertEqual(mock_signal.signal_1.history[1], kwargs_2)
+
+        mock_signal.clear()
diff --git a/runtime/bamboo-pipeline/pipeline/django_signal_valve/valve.py b/runtime/bamboo-pipeline/pipeline/django_signal_valve/valve.py
new file mode 100644
index 00000000..215165f4
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/django_signal_valve/valve.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+import traceback
+
+from .models import Signal
+
+logger = logging.getLogger(__name__)
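+
+# the "valve" is a callable set once at startup (e.g. the engine frozen check); while it
+# returns True, outgoing signals are persisted via the Signal model instead of being sent,
+# and open_valve() replays them in their original order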
+__valve_function = None
+
+
+def set_valve_function(func):
+    global __valve_function
+    if __valve_function is not None:
+        raise Exception("valve function can only be set once.")
+    if not callable(func):
+        raise Exception("valve function must be a callable object")
+
+    __valve_function = func
+
+
+def send(signal_mod, signal_name, **kwargs):
+    # valve open (no valve function set, or it returns False): forward the signal immediately
+    if not __valve_function or not __valve_function():
+        return getattr(signal_mod, signal_name).send(**kwargs)
+    # valve closed: persist the signal so open_valve() can replay it later
+    Signal.objects.dump(signal_mod.__path__, signal_name, kwargs)
+    return None
+
+
+def open_valve(signal_mod):
+    # replay persisted signals in the order they were dumped; a signal is only
+    # deleted after it has been resent successfully
+    signal_list = Signal.objects.filter(module_path=signal_mod.__path__).order_by("id")
+    response = []
+    for signal in signal_list:
+        try:
+            response.append(getattr(signal_mod, signal.name).send(**signal.kwargs))
+            signal.delete()
+        except Exception:
+            logger.error(
+                "signal({} - {}) resend failed: {}".format(signal.module_path, signal.name, traceback.format_exc())
+            )
+    return response
+
+
+def unload_valve_function():
+    global __valve_function
+    __valve_function = None
+
+
+def valve_function():
+    return __valve_function
diff --git a/runtime/bamboo-pipeline/pipeline/engine/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/__init__.py
new file mode 100644
index 00000000..dc9ff245
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/__init__.py
@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+default_app_config = "pipeline.engine.apps.EngineConfig"
diff --git a/runtime/bamboo-pipeline/pipeline/engine/admin.py b/runtime/bamboo-pipeline/pipeline/engine/admin.py
new file mode 100644
index 00000000..023be3c0
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/admin.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin +from django.utils.translation import ugettext_lazy as _ + +from pipeline.engine import models +from pipeline.engine.conf.function_switch import FREEZE_ENGINE +from pipeline.engine.core import api +from pipeline.service import task_service + + +@admin.register(models.PipelineModel) +class PipelineModelAdmin(admin.ModelAdmin): + list_display = ["id", "process"] + search_fields = ["id__exact", "process__id__exact"] + raw_id_fields = ["process"] + + +@admin.register(models.PipelineProcess) +class PipelineProcessAdmin(admin.ModelAdmin): + list_display = [ + "id", + "root_pipeline_id", + "current_node_id", + "destination_id", + "parent_id", + "need_ack", + "ack_num", + "is_alive", + "is_sleep", + "is_frozen", + ] + search_fields = ["id__exact", "root_pipeline_id__exact", "current_node_id__exact"] + list_filter = ["is_alive", "is_sleep"] + raw_id_fields = ["snapshot"] + + +def force_fail_node(modeladmin, request, queryset): + for item in queryset: + task_service.forced_fail(item.id) + + +@admin.register(models.Status) +class StatusAdmin(admin.ModelAdmin): + list_display = [ + "id", + "name", + "state", + "retry", + "skip", + "loop", + "created_time", + "started_time", + "archived_time", + ] + search_fields = ["=id"] + actions = [force_fail_node] + + +@admin.register(models.ScheduleService) +class ScheduleServiceAdmin(admin.ModelAdmin): + list_display = [ + "id", + "activity_id", + "process_id", + "schedule_times", + "wait_callback", + "is_finished", + ] + search_fields = ["id__exact"] + list_filter = ["wait_callback", "is_finished"] + + +@admin.register(models.ProcessCeleryTask) +class ProcessCeleryTaskAdmin(admin.ModelAdmin): + list_display = ["id", "process_id", "celery_task_id"] + search_fields = ["id__exact", "process_id__exact"] + + +@admin.register(models.Data) +class DataAdmin(admin.ModelAdmin): + list_display = ["id", "inputs", "outputs", "ex_data"] + search_fields = ["id__exact"] + + +@admin.register(models.HistoryData) +class HistoryDataAdmin(admin.ModelAdmin): + list_display = ["id", "inputs", "outputs", "ex_data"] + search_fields = ["id__exact"] + + +@admin.register(models.History) +class HistoryAdmin(admin.ModelAdmin): + list_display = ["identifier", "started_time", "archived_time"] + search_fields = ["identifier__exact"] + raw_id_fields = ["data"] + + +@admin.register(models.ScheduleCeleryTask) +class ScheduleCeleryTaskAdmin(admin.ModelAdmin): + list_display = ["schedule_id", "celery_task_id"] + search_fields = ["schedule_id__exact"] + + +@admin.register(models.NodeCeleryTask) +class NodeCeleryTaskAdmin(admin.ModelAdmin): + list_display = ["node_id", "celery_task_id"] + search_fields = ["node_id__exact"] + + +on = True +off = False + +switch_hook = {FREEZE_ENGINE: {on: api.freeze, off: api.unfreeze}} + + +def turn_on_function(modeladmin, request, queryset): + for item in queryset: + if not item.is_active: + switch_hook[item.name][on]() + + +def turn_off_function(modeladmin, request, queryset): + for item in queryset: + if item.is_active: + switch_hook[item.name][off]() + + +turn_on_function.short_description = _("打开所选的功能") +turn_off_function.short_description = 
_("关闭所选的功能") + + +@admin.register(models.FunctionSwitch) +class FunctionAdmin(admin.ModelAdmin): + list_display = ["name", "description", "is_active"] + search_fields = ["name", "description"] + actions = [turn_on_function, turn_off_function] + + def has_delete_permission(self, request, obj=None): + return False + + def get_actions(self, request): + actions = super(FunctionAdmin, self).get_actions(request) + if "delete_selected" in actions: + del actions["delete_selected"] + return actions + + def get_readonly_fields(self, request, obj=None): + if obj: # obj is not None, so this is an edit + return [ + "name", + "is_active", + ] # Return a list or tuple of readonly fields' names + else: # This is an addition + return [] + + +def resend_task(modeladmin, request, queryset): + for item in queryset: + item.resend() + + +@admin.register(models.SendFailedCeleryTask) +class SendFailedCeleryTaskAdmin(admin.ModelAdmin): + list_display = [ + "id", + "name", + "kwargs", + "type", + "extra_kwargs", + "exec_trace", + "created_at", + ] + search_fields = ["id__exact", "name"] + actions = [resend_task] diff --git a/runtime/bamboo-pipeline/pipeline/engine/api.py b/runtime/bamboo-pipeline/pipeline/engine/api.py new file mode 100644 index 00000000..813144ce --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/api.py @@ -0,0 +1,531 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import functools +import logging +import time + +from celery import current_app +from django.db import transaction +from redis.exceptions import ConnectionError as RedisConnectionError + +from pipeline.celery.queues import ScalableQueues +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY, PIPELINE_MAX_PRIORITY, PIPELINE_MIN_PRIORITY +from pipeline.core.flow.activity import ServiceActivity +from pipeline.core.flow.gateway import ExclusiveGateway, ParallelGateway +from pipeline.engine import exceptions, states +from pipeline.engine.core.api import workers +from pipeline.engine.models import ( + Data, + FunctionSwitch, + History, + NodeRelationship, + Pipeline, + PipelineModel, + PipelineProcess, + ProcessCeleryTask, + ScheduleService, + Status, + SubProcessRelationship, +) +from pipeline.engine.signals import pipeline_revoke +from pipeline.engine.utils import ActionResult, calculate_elapsed_time +from pipeline.utils import uniqid + +logger = logging.getLogger("celery") + + +def _node_existence_check(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + id_from_kwargs = kwargs.get("node_id") + node_id = id_from_kwargs if id_from_kwargs else args[0] + try: + Status.objects.get(id=node_id) + except Status.DoesNotExist: + return ActionResult(result=False, message="node not exists or not be executed yet") + return func(*args, **kwargs) + + return wrapper + + +def _frozen_check(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if FunctionSwitch.objects.is_frozen(): + return ActionResult(result=False, message="engine is frozen, can not perform operation") + + return func(*args, **kwargs) + + return wrapper + + +def _worker_check(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + def on_connection_error(exc, interval): + logger.warning("Connection Error: {!r}. 
Retry in {}s.".format(exc, interval))
+
+        if kwargs.get("check_workers", True):
+            try:
+                with current_app.connection() as conn:
+                    try:
+                        conn.ensure_connection(on_connection_error, current_app.conf.BROKER_CONNECTION_MAX_RETRIES)
+                    except conn.connection_errors + conn.channel_errors as exc:
+                        logger.warning("Connection lost: {!r}".format(exc))
+                    if not workers(conn):
+                        return ActionResult(
+                            result=False, message="can not find celery workers, please check worker status"
+                        )
+            except exceptions.RabbitMQConnectionError as e:
+                return ActionResult(
+                    result=False,
+                    message="celery worker status check failed with message: %s, check rabbitmq status please" % e,
+                )
+            except RedisConnectionError:
+                return ActionResult(result=False, message="redis connection error, check redis status please")
+
+        return func(*args, **kwargs)
+
+    return wrapper
+
+
+@_worker_check
+@_frozen_check
+def start_pipeline(pipeline_instance, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""):
+    """
+    start a pipeline
+    :param pipeline_instance: pipeline instance to be started
+    :param check_workers: whether to check celery worker status before starting
+    :param priority: celery task priority for this pipeline
+    :param queue: celery queue the pipeline tasks should be routed to
+    :return:
+    """
+
+    if priority > PIPELINE_MAX_PRIORITY or priority < PIPELINE_MIN_PRIORITY:
+        raise exceptions.InvalidOperationException(
+            "pipeline priority must be between [{min}, {max}]".format(min=PIPELINE_MIN_PRIORITY, max=PIPELINE_MAX_PRIORITY)
+        )
+
+    if queue and not ScalableQueues.has_queue(queue):
+        return ActionResult(result=False, message="can not find queue({}) in any configured queues.".format(queue))
+
+    Status.objects.prepare_for_pipeline(pipeline_instance)
+    process = PipelineProcess.objects.prepare_for_pipeline(pipeline_instance)
+    PipelineModel.objects.prepare_for_pipeline(pipeline_instance, process, priority, queue=queue)
+
+    PipelineModel.objects.pipeline_ready(process_id=process.id)
+
+    return ActionResult(result=True, message="success")
+
+
+@_frozen_check
+def pause_pipeline(pipeline_id):
+    """
+    pause a running pipeline
+    :param pipeline_id:
+    :return:
+    """
+
+    return Status.objects.transit(id=pipeline_id, to_state=states.SUSPENDED, is_pipeline=True, appoint=True)
+
+
+@_worker_check
+@_frozen_check
+def resume_pipeline(pipeline_id):
+    """
+    resume a suspended pipeline
+    :param pipeline_id:
+    :return:
+    """
+    if not Status.objects.filter(id=pipeline_id).exists():
+        return ActionResult(result=False, message="only a started pipeline can be resumed.")
+
+    action_result = Status.objects.transit(id=pipeline_id, to_state=states.READY, is_pipeline=True, appoint=True)
+    if not action_result.result:
+        return action_result
+
+    process = PipelineModel.objects.get(id=pipeline_id).process
+    to_be_waked = []
+    _get_process_to_be_waked(process, to_be_waked)
+    PipelineProcess.objects.batch_process_ready(process_id_list=to_be_waked, pipeline_id=pipeline_id)
+
+    return action_result
+
+
+@_frozen_check
+def revoke_pipeline(pipeline_id):
+    """
+    revoke a pipeline
+    :param pipeline_id:
+    :return:
+    """
+
+    try:
+        pipeline_model = PipelineModel.objects.get(id=pipeline_id)
+    except PipelineModel.DoesNotExist:
+        return ActionResult(result=False, message="pipeline to be revoked does not exist.")
+
+    action_result = Status.objects.transit(id=pipeline_id, to_state=states.REVOKED, is_pipeline=True, appoint=True)
+    if not action_result.result:
+        return action_result
+
+    process = pipeline_model.process
+
+    if not process:
+        return ActionResult(result=False, message="related process is none, this pipeline may have been revoked.")
+
+    with transaction.atomic():
+        PipelineProcess.objects.select_for_update().get(id=process.id)
+        process.revoke_subprocess()
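+        # tear down this process together with all of its child processes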
+        process.destroy_all()
+
+    pipeline_revoke.send(sender=Pipeline, root_pipeline_id=pipeline_id)
+
+    return action_result
+
+
+@_frozen_check
+def pause_node_appointment(node_id):
+    """
+    make an appointment to pause a node
+    :param node_id:
+    :return:
+    """
+
+    return Status.objects.transit(id=node_id, to_state=states.SUSPENDED, appoint=True)
+
+
+@_worker_check
+@_frozen_check
+@_node_existence_check
+def resume_node_appointment(node_id):
+    """
+    make an appointment to resume a node
+    :param node_id:
+    :return:
+    """
+
+    qs = PipelineProcess.objects.filter(current_node_id=node_id, is_sleep=True)
+    if qs.exists():
+        # the process went to sleep because of a pause reservation on this node
+        action_result = Status.objects.transit(id=node_id, to_state=states.READY, appoint=True)
+        if not action_result.result:
+            return action_result
+
+        process = qs.first()
+        Status.objects.recover_from_block(process.root_pipeline.id, process.subprocess_stack)
+        PipelineProcess.objects.process_ready(process_id=process.id)
+        return ActionResult(result=True, message="success")
+
+    processing_sleep = SubProcessRelationship.objects.get_relate_process(subprocess_id=node_id)
+    if processing_sleep.exists():
+        action_result = Status.objects.transit(id=node_id, to_state=states.RUNNING, appoint=True, is_pipeline=True)
+        if not action_result.result:
+            return action_result
+        # these processes went to sleep because a subprocess was paused
+        root_pipeline_id = processing_sleep.first().root_pipeline_id
+
+        process_can_be_waked = [p for p in processing_sleep if p.can_be_waked()]
+        can_be_waked_ids = [p.id for p in process_can_be_waked]
+
+        # get subprocess id which should be transited
+        subprocess_to_be_transit = set()
+        for process in process_can_be_waked:
+            _, subproc_above = process.subproc_sleep_check()
+            for subproc in subproc_above:
+                subprocess_to_be_transit.add(subproc)
+
+        Status.objects.recover_from_block(root_pipeline_id, subprocess_to_be_transit)
+        PipelineProcess.objects.batch_process_ready(process_id_list=can_be_waked_ids, pipeline_id=root_pipeline_id)
+        return ActionResult(result=True, message="success")
+
+    return ActionResult(result=False, message="node does not exist or has not been executed yet")
+
+
+@_worker_check
+@_frozen_check
+@_node_existence_check
+def retry_node(node_id, inputs=None):
+    """
+    retry a node
+    :param node_id:
+    :param inputs:
+    :return:
+    """
+
+    try:
+        process = PipelineProcess.objects.get(current_node_id=node_id)
+    except PipelineProcess.DoesNotExist:  # can not retry subprocess
+        return ActionResult(result=False, message="can not retry a subprocess or this process has been revoked")
+
+    # resolve the node to be retried in the top pipeline
+    node = process.top_pipeline.node(node_id)
+    if not isinstance(node, (ServiceActivity, ParallelGateway)):
+        return ActionResult(result=False, message="can't retry this type of node")
+
+    if hasattr(node, "retryable") and not node.retryable:
+        return ActionResult(result=False, message="the node is set to not be retryable, try to skip it instead.")
+
+    action_result = Status.objects.retry(process, node, inputs)
+    if not action_result.result:
+        return action_result
+
+    # wake up process
+    PipelineProcess.objects.process_ready(process_id=process.id)
+
+    return action_result
+
+
+@_worker_check
+@_frozen_check
+@_node_existence_check
+def skip_node(node_id):
+    """
+    skip a node
+    :param node_id:
+    :return:
+    """
+
+    try:
+        process = PipelineProcess.objects.get(current_node_id=node_id)
+    except PipelineProcess.DoesNotExist:  # can not skip subprocess
+        return ActionResult(result=False, message="can not skip a subprocess or this process has been revoked")
+
+    # resolve the node to be skipped in the top pipeline
+    node = process.top_pipeline.node(node_id)
+    if not isinstance(node, ServiceActivity):
+        return ActionResult(result=False, message="can't skip this type of node")
+
+    if hasattr(node, "skippable") and not node.skippable:
+        return ActionResult(result=False, message="this node is set to not be skippable, try to retry it instead.")
+
+    # skip and write result bit
+    action_result = Status.objects.skip(process, node)
+    if not action_result.result:
+        return action_result
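+
+    # the process will be resumed at the skipped node's successor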
+    next_node_id = node.next().id
+
+    # extract outputs and wake up process
+    process.top_pipeline.context.extract_output(node)
+    process.save()
+    PipelineProcess.objects.process_ready(process_id=process.id, current_node_id=next_node_id)
+
+    return action_result
+
+
+@_worker_check
+@_frozen_check
+@_node_existence_check
+def skip_exclusive_gateway(node_id, flow_id):
+    """
+    skip a failed exclusive gateway and appoint the flow to be pushed
+    :param node_id:
+    :param flow_id:
+    :return:
+    """
+
+    try:
+        process = PipelineProcess.objects.get(current_node_id=node_id)
+    except PipelineProcess.DoesNotExist:
+        return ActionResult(
+            result=False, message="invalid operation, this gateway is finished or the pipeline has been revoked"
+        )
+
+    exclusive_gateway = process.top_pipeline.node(node_id)
+
+    if not isinstance(exclusive_gateway, ExclusiveGateway):
+        return ActionResult(result=False, message="invalid operation, this node is not an exclusive gateway")
+
+    next_node_id = exclusive_gateway.target_for_sequence_flow(flow_id).id
+
+    action_result = Status.objects.skip(process, exclusive_gateway)
+    if not action_result.result:
+        return action_result
+
+    # wake up process
+    PipelineProcess.objects.process_ready(process_id=process.id, current_node_id=next_node_id)
+
+    return action_result
+
+
+def get_status_tree(node_id, max_depth=1):
+    """
+    get state and children states for a node
+    :param node_id:
+    :param max_depth:
+    :return:
+    """
+    rel_qs = NodeRelationship.objects.filter(ancestor_id=node_id, distance__lte=max_depth)
+    if not rel_qs.exists():
+        raise exceptions.InvalidOperationException(
+            "node(%s) does not exist, it may have not been executed yet or has expired" % node_id
+        )
+    descendants = [rel.descendant_id for rel in rel_qs]
+    # remove root node
+    descendants.remove(node_id)
+
+    rel_qs = NodeRelationship.objects.filter(descendant_id__in=descendants, distance=1)
+    targets = [rel.descendant_id for rel in rel_qs]
+
+    root_status = Status.objects.filter(id=node_id).values().first()
+    root_status["elapsed_time"] = calculate_elapsed_time(root_status["started_time"], root_status["archived_time"])
+    status_map = {node_id: root_status}
+    status_qs = Status.objects.filter(id__in=targets).values()
+    for status in status_qs:
+        status["elapsed_time"] = calculate_elapsed_time(status["started_time"], status["archived_time"])
+        status_map[status["id"]] = status
+
+    relationships = [(s.ancestor_id, s.descendant_id) for s in rel_qs]
+    for (parent_id, child_id) in relationships:
+        if parent_id not in status_map:
+            return
+
+        parent_status = status_map[parent_id]
+        child_status = status_map[child_id]
+        child_status.setdefault("children", {})
+
+        parent_status.setdefault("children", {}).setdefault(child_id, child_status)
+
+    return status_map[node_id]
+
+
+@_worker_check
+@_frozen_check
+def activity_callback(activity_id, callback_data):
+    """
+    callback a schedule node
+    :param activity_id:
+    :param callback_data:
+    :return:
+    """
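+
+    # bind the callback to the current status version so it can only match the schedule
+    # object created for this execution of the node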
+    version = Status.objects.version_for(activity_id)
+    times = 0
+
+    # the ScheduleService object may not have been created yet when the callback arrives,
+    # so retry the lookup a few times with a growing backoff before giving up
+    while times < 3:
+        try:
+            service = ScheduleService.objects.schedule_for(activity_id, version)
+            break
+        except ScheduleService.DoesNotExist as e:
+            times += 1
+            time.sleep(times)
+            if times >= 3:
+                raise e
+
+    try:
+        process_id = PipelineProcess.objects.get(current_node_id=activity_id).id
+    except PipelineProcess.DoesNotExist:
+        return ActionResult(
+            result=False, message="invalid operation, this node is finished or the pipeline has been revoked"
+        )
+
+    if service.is_finished:
+        raise exceptions.InvalidOperationException("activity(%s) callback already finished" % activity_id)
+    service.callback(callback_data, process_id)
+    return ActionResult(result=True, message="success")
+
+
+def get_inputs(node_id):
+    """
+    get inputs data for a node
+    :param node_id:
+    :return:
+    """
+    return Data.objects.get(id=node_id).inputs
+
+
+def get_outputs(node_id):
+    """
+    get outputs data for a node
+    :param node_id:
+    :return:
+    """
+    data = Data.objects.get(id=node_id)
+    return {"outputs": data.outputs, "ex_data": data.ex_data}
+
+
+def get_batch_outputs(node_ids):
+    """
+    get outputs data for a batch of nodes
+    :param node_ids: a list of node_id
+    :return:
+    """
+    nodes_data = Data.objects.filter(id__in=node_ids)
+    return {node_data.id: {"outputs": node_data.outputs, "ex_data": node_data.ex_data} for node_data in nodes_data}
+
+
+def get_activity_histories(node_id, loop=None):
+    """
+    get execution histories for a node
+    :param node_id: node ID
+    :param loop: loop index
+    :return:
+    """
+    return History.objects.get_histories(node_id, loop)
+
+
+@_frozen_check
+@_node_existence_check
+def forced_fail(node_id, kill=False, ex_data=""):
+    """
+    force a node to fail
+    :param node_id:
+    :param kill:
+    :param ex_data:
+    :return:
+    """
+
+    try:
+        process = PipelineProcess.objects.get(current_node_id=node_id)
+    except PipelineProcess.DoesNotExist:
+        return ActionResult(
+            result=False, message="invalid operation, this node is finished or the pipeline has been revoked"
+        )
+
+    node = process.top_pipeline.node(node_id)
+    if not isinstance(node, ServiceActivity):
+        return ActionResult(result=False, message="can not force fail this type of node")
+
+    action_result = Status.objects.transit(node_id, to_state=states.FAILED)
+    if not action_result.result:
+        return action_result
+
+    # a failure handler error must not block the forced failure itself
+    try:
+        node.failure_handler(process.root_pipeline.data)
+    except Exception:
+        pass
+
+    with transaction.atomic():
+        s = Status.objects.get(id=node.id)
+        ScheduleService.objects.delete_schedule(s.id, s.version)
+        Data.objects.forced_fail(node_id, ex_data)
+        ProcessCeleryTask.objects.revoke(process.id, kill)
+        process.adjust_status()
+        process.is_sleep = True
+        process.save()
+        s.version = uniqid.uniqid()
+        s.save()
+
+    return ActionResult(result=True, message="success")
+
+
+def _get_process_to_be_waked(process, to_be_waked):
+    if process.can_be_waked():
+        to_be_waked.append(process.id)
+    elif process.children:
+        for child_id in process.children:
+            child = PipelineProcess.objects.get(id=child_id)
+            _get_process_to_be_waked(child, to_be_waked)
diff --git a/runtime/bamboo-pipeline/pipeline/engine/apps.py b/runtime/bamboo-pipeline/pipeline/engine/apps.py
new file mode 100644
index 00000000..9b8a4aed
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/apps.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.apps import AppConfig + + +class EngineConfig(AppConfig): + name = "pipeline.engine" + verbose_name = "PipelineEngine" + + def ready(self): + from pipeline.engine.signals import dispatch + + dispatch.dispatch() + + from pipeline.django_signal_valve import valve + from pipeline.engine.models import FunctionSwitch + + valve.set_valve_function(FunctionSwitch.objects.is_frozen) + FunctionSwitch.objects.init_db() diff --git a/runtime/bamboo-pipeline/pipeline/engine/conf/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/conf/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/conf/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/engine/conf/function_switch.py b/runtime/bamboo-pipeline/pipeline/engine/conf/function_switch.py new file mode 100644 index 00000000..1de5435c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/conf/function_switch.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.utils.translation import ugettext_lazy as _ + +FREEZE_ENGINE = "FREEZE_ENGINE" + +switch_list = [ + {"name": FREEZE_ENGINE, "description": _("用于冻结引擎, 冻结期间会屏蔽所有内部信号及暂停所有进程,同时拒绝所有流程控制请求"), "is_active": False} +] diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/core/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/api.py b/runtime/bamboo-pipeline/pipeline/engine/core/api.py new file mode 100644 index 00000000..9e602fe3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/api.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import socket + +import kombu +from celery import current_app +from django.conf import settings as django_settings +from redis.exceptions import ConnectionError + +from pipeline.celery.settings import CELERY_QUEUES +from pipeline.conf import settings +from pipeline.django_signal_valve import valve +from pipeline.engine import signals +from pipeline.engine.core import data +from pipeline.engine.exceptions import RabbitMQConnectionError +from pipeline.engine.models import FunctionSwitch, PipelineProcess + +logger = logging.getLogger("root") +WORKER_PING_TIMES = 2 + + +def freeze(): + # turn on switch + FunctionSwitch.objects.freeze_engine() + + +def unfreeze(): + # turn off switch + FunctionSwitch.objects.unfreeze_engine() + + # resend signal + valve.open_valve(signals) + + # unfreeze process + frozen_process_list = PipelineProcess.objects.filter(is_frozen=True) + for process in frozen_process_list: + process.unfreeze() + + +def workers(connection=None): + try: + worker_list = data.cache_for("__pipeline__workers__") + except ConnectionError as e: + logger.exception("pipeline cache_for __pipeline__workers__ raise error: %s" % e) + raise e + + if not worker_list: + tries = 0 + while tries < WORKER_PING_TIMES: + kwargs = {"timeout": tries + 1} + if connection is not None: + kwargs["connection"] = connection + try: + worker_list = current_app.control.ping(**kwargs) + except socket.error as err: + logger.exception("pipeline current_app.control.ping raise error: %s" % err) + # raise error at last loop + if tries >= WORKER_PING_TIMES - 1: + raise RabbitMQConnectionError(err) + + if worker_list: + break + + tries += 1 + + if worker_list: + data.expire_cache("__pipeline__workers__", worker_list, settings.PIPELINE_WORKER_STATUS_CACHE_EXPIRES) + + return worker_list + + +def stats(): + inspect = current_app.control.inspect() + + stats = {"workers": {}, "queues": {}} + + worker_stats = inspect.stats() + active_queues = inspect.active_queues() + + if worker_stats: + + for name, stat in worker_stats.items(): + stats["workers"].setdefault(name, {"stat": {}, "queues": {}})["stat"] = stat + + if active_queues: + + for name, queues in active_queues.items(): + stats["workers"].setdefault(name, {"stat": {}, "queues": {}})["queues"] = queues + + if not hasattr(django_settings, "BROKER_VHOST"): + stats["queues"] = "can not find BROKER_VHOST in django settings" + + return stats + + with kombu.Connection(django_settings.BROKER_URL) as conn: + client = conn.get_manager() + + if not hasattr(client, "get_queue"): + stats["queues"] = "broker does not support queues info query" + + return stats + + for queue in CELERY_QUEUES: + stats["queues"][queue.name] = client.get_queue(django_settings.BROKER_VHOST, queue.name) + + return stats diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/context.py b/runtime/bamboo-pipeline/pipeline/engine/core/context.py new file mode 100644 index 00000000..329cd47b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/context.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+from werkzeug.local import Local
+
+logger = logging.getLogger("celery")
+local = Local()
+
+
+def set_node_id(node_id):
+    try:
+        local.current_node_id = node_id
+    except Exception:
+        logger.exception("[engine context] set current_node_id for node({}) error.".format(node_id))
+
+
+def get_node_id():
+    return getattr(local, "current_node_id", None)
diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/data/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/core/data/__init__.py
new file mode 100644
index 00000000..34ebae8c
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/core/data/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from pipeline.engine.core.data.api import *  # noqa
diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/data/api.py b/runtime/bamboo-pipeline/pipeline/engine/core/data/api.py
new file mode 100644
index 00000000..9f72086e
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/core/data/api.py
@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import contextlib
+import logging
+import traceback
+
+from django.utils.module_loading import import_string
+
+from pipeline.conf import settings
+from pipeline.engine.exceptions import InvalidDataBackendError
+
+logger = logging.getLogger("celery")
+
+_backend = None
+_candidate_backend = None
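+
+# dual-backend design: writes go to the primary backend and, when one is configured, to the
+# candidate backend as well; reads fall back to the candidate backend when the primary
+# misses or raises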
+""" + +import contextlib +import logging +import traceback + +from django.utils.module_loading import import_string + +from pipeline.conf import settings +from pipeline.engine.exceptions import InvalidDataBackendError + +logger = logging.getLogger("celery") + +_backend = None +_candidate_backend = None + + +def _import_backend(backend_cls_path): + try: + backend_cls = import_string(backend_cls_path) + return backend_cls() + except ImportError: + raise InvalidDataBackendError( + "data backend({}) import error with exception: {}".format( + settings.PIPELINE_DATA_BACKEND, traceback.format_exc() + ) + ) + + +@contextlib.contextmanager +def _candidate_exc_ensure(propagate): + try: + yield + except Exception: + logger.error("candidate data backend operate error: {}".format(traceback.format_exc())) + + if propagate: + raise + + +if not _backend: + _backend = _import_backend(settings.PIPELINE_DATA_BACKEND) + +if not _candidate_backend and settings.PIPELINE_DATA_CANDIDATE_BACKEND: + _candidate_backend = _import_backend(settings.PIPELINE_DATA_CANDIDATE_BACKEND) + + +if settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE and not (_backend and _candidate_backend): + raise RuntimeError( + "PIPELINE_DATA_BACKEND and PIPELINE_DATA_CANDIDATE_BACKEND can't both be empty when PIPELINE_DATA_BACKEND_AUTO_EXPIRE is set." # noqa + ) + + +def _write_operation(method, *args, **kwargs): + propagate = False + + try: + + if settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE and method == "set_object": + # change set_object to expire_cache + getattr(_backend, "expire_cache")( + *args, **kwargs, expires=settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS + ) + else: + getattr(_backend, method)(*args, **kwargs) + + except Exception: + logger.error("data backend operate error: {}".format(traceback.format_exc())) + + if not _candidate_backend: + raise + + propagate = True + + if _candidate_backend: + with _candidate_exc_ensure(propagate): + getattr(_candidate_backend, method)(*args, **kwargs) + + +def _read_operation(method, *args, **kwargs): + result = None + propagate = False + + try: + result = getattr(_backend, method)(*args, **kwargs) + except Exception: + logger.error("data backend operate error: {}".format(traceback.format_exc())) + + if not _candidate_backend: + raise + + propagate = True + + if result is None and _candidate_backend: + with _candidate_exc_ensure(propagate): + result = getattr(_candidate_backend, method)(*args, **kwargs) + + return result + + +def set_object(key, obj): + _write_operation("set_object", key, obj) + + +def del_object(key): + _write_operation("del_object", key) + + +def expire_cache(key, obj, expires): + _write_operation("expire_cache", key, obj, expires) + + +def get_object(key): + return _read_operation("get_object", key) + + +def cache_for(key): + return _read_operation("cache_for", key) + + +def set_schedule_data(schedule_id, parent_data): + return set_object("%s_schedule_parent_data" % schedule_id, parent_data) + + +def get_schedule_parent_data(schedule_id): + return get_object("%s_schedule_parent_data" % schedule_id) + + +def delete_parent_data(schedule_id): + return del_object("%s_schedule_parent_data" % schedule_id) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/data/base_backend.py b/runtime/bamboo-pipeline/pipeline/engine/core/data/base_backend.py new file mode 100644 index 00000000..143b6d93 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/data/base_backend.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community 
by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from abc import abstractmethod, ABCMeta + + +class BaseDataBackend(object, metaclass=ABCMeta): + @abstractmethod + def set_object(self, key, obj): + raise NotImplementedError() + + @abstractmethod + def get_object(self, key): + raise NotImplementedError() + + @abstractmethod + def del_object(self, key): + raise NotImplementedError() + + @abstractmethod + def expire_cache(self, key, value, expires): + raise NotImplementedError() + + @abstractmethod + def cache_for(self, key): + raise NotImplementedError() diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/data/mysql_backend.py b/runtime/bamboo-pipeline/pipeline/engine/core/data/mysql_backend.py new file mode 100644 index 00000000..64d894c7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/data/mysql_backend.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.core.cache import cache + +from pipeline.engine.core.data.base_backend import BaseDataBackend +from pipeline.engine.models.data import DataSnapshot + + +class MySQLDataBackend(BaseDataBackend): + def set_object(self, key, obj): + return DataSnapshot.objects.set_object(key, obj) + + def get_object(self, key): + return DataSnapshot.objects.get_object(key) + + def del_object(self, key): + return DataSnapshot.objects.del_object(key) + + def expire_cache(self, key, value, expires): + return cache.set(key, value, expires) + + def cache_for(self, key): + return cache.get(key) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/data/redis_backend.py b/runtime/bamboo-pipeline/pipeline/engine/core/data/redis_backend.py new file mode 100644 index 00000000..f4118461 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/data/redis_backend.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import pickle + +from pipeline.conf import settings +from pipeline.engine.core.data.base_backend import BaseDataBackend + + +class RedisDataBackend(BaseDataBackend): + def set_object(self, key, obj): + return settings.redis_inst.set(key, pickle.dumps(obj)) + + def get_object(self, key): + pickle_str = settings.redis_inst.get(key) + if not pickle_str: + return None + return pickle.loads(pickle_str) + + def del_object(self, key): + return settings.redis_inst.delete(key) + + def expire_cache(self, key, value, expires): + settings.redis_inst.set(key, pickle.dumps(value)) + settings.redis_inst.expire(key, expires) + return True + + def cache_for(self, key): + cache = settings.redis_inst.get(key) + return pickle.loads(cache) if cache else cache diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/__init__.py new file mode 100644 index 00000000..62eb73ff --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/__init__.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from .conditional_parallel import ConditionalParallelGatewayHandler +from .converge_gateway import ConvergeGatewayHandler +from .empty_start_event import EmptyStartEventHandler +from .endevent import EmptyEndEventHandler, ExecutableEndEventHandler +from .exclusive_gateway import ExclusiveGatewayHandler +from .parallel_gateway import ParallelGatewayHandler +from .service_activity import ServiceActivityHandler +from .subprocess import SubprocessHandler + + +class HandlersFactory(object): + _handlers = { + EmptyStartEventHandler.element_cls(): EmptyStartEventHandler(), + EmptyEndEventHandler.element_cls(): EmptyEndEventHandler(), + ServiceActivityHandler.element_cls(): ServiceActivityHandler(), + SubprocessHandler.element_cls(): SubprocessHandler(), + ExclusiveGatewayHandler.element_cls(): ExclusiveGatewayHandler(), + ParallelGatewayHandler.element_cls(): ParallelGatewayHandler(), + ConditionalParallelGatewayHandler.element_cls(): ConditionalParallelGatewayHandler(), + ConvergeGatewayHandler.element_cls(): ConvergeGatewayHandler(), + ExecutableEndEventHandler.element_cls(): ExecutableEndEventHandler(), + } + + _cluster_roots = [ExecutableEndEventHandler.element_cls()] + + @classmethod + def find_cluster_root_cls(cls, element): + for root in cls._cluster_roots: + if issubclass(type(element), root): + return root + + return type(element) + + @classmethod + def handlers_for(cls, element): + handler = cls._handlers.get(cls.find_cluster_root_cls(element)) + if not handler: + raise KeyError("handler for element({element}) not found.".format(element=element)) + + return handler diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/base.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/base.py new file mode 100644 index 00000000..944b9722 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/base.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from abc import abstractmethod + + +class FlowElementHandler(object): + class HandleResult(object): + def __init__(self, next_node, should_return, should_sleep, after_sleep_call=None, args=[], kwargs={}): + self.next_node = next_node + self.should_return = should_return + self.should_sleep = should_sleep + self.after_sleep_call = after_sleep_call + self.args = args + self.kwargs = kwargs + + @staticmethod + @abstractmethod + def element_cls(): + raise NotImplementedError() + + @abstractmethod + def handle(self, process, element, status): + raise NotImplementedError() + + def __call__(self, *args, **kwargs): + return self.handle(*args, **kwargs) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/conditional_parallel.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/conditional_parallel.py new file mode 100644 index 00000000..f28897a6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/conditional_parallel.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import traceback + +from pipeline.core.data.hydration import hydrate_data +from pipeline.core.flow.gateway import ConditionalParallelGateway +from pipeline.engine.models import PipelineProcess, Status +from pipeline.exceptions import PipelineException + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["ConditionalParallelGatewayHandler"] + + +class ConditionalParallelGatewayHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return ConditionalParallelGateway + + def handle(self, process, element, status): + if status.loop > 1: + process.top_pipeline.context.recover_variable() + + try: + hydrate_context = hydrate_data(process.top_pipeline.context.variables) + targets = element.targets_meet_condition(hydrate_context) + except PipelineException as e: + logger.error(traceback.format_exc()) + Status.objects.fail(element, ex_data=str(e)) + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) + + children = [] + + for target in targets: + try: + child = PipelineProcess.objects.fork_child( + parent=process, current_node_id=target.id, destination_id=element.converge_gateway_id + ) + except PipelineException as e: + logger.error(traceback.format_exc()) + Status.objects.fail(element, ex_data=str(e)) + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) + + children.append(child) + + process.join(children) + + Status.objects.finish(element) + + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/converge_gateway.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/converge_gateway.py new file mode 100644 index 00000000..ea59889d --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/converge_gateway.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import traceback + +from pipeline.core.flow.gateway import ConvergeGateway +from pipeline.engine import exceptions +from pipeline.engine.models import Status + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["ConvergeGatewayHandler"] + + +class ConvergeGatewayHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return ConvergeGateway + + def handle(self, process, element, status): + # try to sync data if current process has children + if process.children: + try: + process.sync_with_children() + except exceptions.ChildDataSyncError: + logger.error(traceback.format_exc()) + # clean children and update current_node to prevent re execute child process + process.clean_children() + Status.objects.fail(element, ex_data="Sync branch context error, check data backend status please.") + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) + + Status.objects.finish(element) + return self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/empty_start_event.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/empty_start_event.py new file mode 100644 index 00000000..4f4e936a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/empty_start_event.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging + +from pipeline.core.data import var +from pipeline.core.flow.event import EmptyStartEvent +from pipeline.engine.models import Status + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["EmptyStartEventHandler"] + + +class EmptyStartEventHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return EmptyStartEvent + + @staticmethod + def _hydrate(value): + return value.get() if issubclass(value.__class__, var.Variable) else value + + def handle(self, process, element, status): + # 进行变量预渲染 + if hasattr(element.data, "inputs"): + for pre_render_key in element.data.inputs.get("pre_render_keys", []): + context_variables = process.top_pipeline.context.variables + if pre_render_key in context_variables: + context_variables[pre_render_key] = self._hydrate(context_variables[pre_render_key]) + + Status.objects.finish(element) + return self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/__init__.py new file mode 100644 index 00000000..9111c447 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from .empty_end_event import EmptyEndEventHandler # noqa +from .executable_end_event import ExecutableEndEventHandler # noqa diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/base.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/base.py new file mode 100644 index 00000000..30644fbe --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/base.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging + +from pipeline.core.flow import activity +from pipeline.engine import states +from pipeline.engine.models import Data, Status + +from ..base import FlowElementHandler + +logger = logging.getLogger("celery") + + +class EndEventHandler(FlowElementHandler): + @staticmethod + def element_cls(): + raise NotImplementedError() + + def handle(self, process, element, status): + pipeline = process.pop_pipeline() + if process.pipeline_stack: + # pop subprocess and return to top of stack + pipeline.context.write_output(pipeline) + Status.objects.finish(element) + sub_process_node = process.top_pipeline.node(pipeline.id) + Status.objects.finish(sub_process_node) + # extract subprocess output + process.top_pipeline.context.extract_output(sub_process_node) + return self.HandleResult(next_node=sub_process_node.next(), should_return=False, should_sleep=False) + else: + with Status.objects.lock(pipeline.id): + # save data and destroy process + pipeline.context.write_output(pipeline) + Data.objects.write_node_data(pipeline) + Status.objects.finish(element) + + Status.objects.transit(pipeline.id, to_state=states.FINISHED, is_pipeline=True) + # PipelineInstance.objects.set_finished(process.root_pipeline.id) + element.pipeline_finish(process.root_pipeline.id) + for act in pipeline.spec.activities: + if isinstance(act, activity.SubProcess): + act.pipeline.context.clear() + pipeline.context.clear() + process.destroy() + return self.HandleResult(next_node=None, should_return=True, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/empty_end_event.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/empty_end_event.py new file mode 100644 index 00000000..959d7a6f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/empty_end_event.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.core.flow.event import EmptyEndEvent + +from .base import EndEventHandler + + +class EmptyEndEventHandler(EndEventHandler): + @staticmethod + def element_cls(): + return EmptyEndEvent diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/executable_end_event.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/executable_end_event.py new file mode 100644 index 00000000..bd684b94 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/endevent/executable_end_event.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import traceback + +from pipeline.core.flow.event import ExecutableEndEvent +from pipeline.engine.models import Status + +from .base import EndEventHandler + +logger = logging.getLogger("celery") + + +class ExecutableEndEventHandler(EndEventHandler): + @staticmethod + def element_cls(): + return ExecutableEndEvent + + def handle(self, process, element, status): + try: + element.execute( + in_subprocess=process.in_subprocess, + root_pipeline_id=process.root_pipeline.id, + current_pipeline_id=process.top_pipeline.id, + ) + except Exception: + ex_data = traceback.format_exc() + element.data.outputs.ex_data = ex_data + logger.error(ex_data) + + Status.objects.fail(element, ex_data) + return self.HandleResult(next_node=None, should_return=False, should_sleep=True) + + return super(ExecutableEndEventHandler, self).handle(process, element, status) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/exclusive_gateway.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/exclusive_gateway.py new file mode 100644 index 00000000..0e66df13 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/exclusive_gateway.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import traceback +from copy import deepcopy + +from pipeline.core.data.hydration import hydrate_data +from pipeline.core.flow.gateway import ExclusiveGateway +from pipeline.engine.models import Status +from pipeline.exceptions import PipelineException + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["ExclusiveGatewayHandler"] + + +class ExclusiveGatewayHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return ExclusiveGateway + + def handle(self, process, element, status): + if status.loop > 1: + process.top_pipeline.context.recover_variable() + try: + # use temp variables instead of real variables to prevent output pre extract error + temp_variables = deepcopy(process.top_pipeline.context.variables) + hydrate_context = hydrate_data(temp_variables) + logger.info("[{}] hydrate_context: {}".format(element.id, hydrate_context)) + next_node = element.next(hydrate_context) + except PipelineException as e: + logger.error(traceback.format_exc()) + Status.objects.fail(element, ex_data=str(e)) + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) + Status.objects.finish(element) + return self.HandleResult(next_node=next_node, should_return=False, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/parallel_gateway.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/parallel_gateway.py new file mode 100644 index 00000000..0ca88e6b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/parallel_gateway.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import traceback + +from pipeline.core.flow.gateway import ParallelGateway +from pipeline.engine.models import PipelineProcess, Status +from pipeline.exceptions import PipelineException + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["ParallelGatewayHandler"] + + +class ParallelGatewayHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return ParallelGateway + + def handle(self, process, element, status): + targets = element.outgoing.all_target_node() + children = [] + + for target in targets: + try: + child = PipelineProcess.objects.fork_child( + parent=process, current_node_id=target.id, destination_id=element.converge_gateway_id + ) + except PipelineException as e: + logger.error(traceback.format_exc()) + Status.objects.fail(element, str(e)) + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) + + children.append(child) + + process.join(children) + + Status.objects.finish(element) + + return self.HandleResult(next_node=None, should_return=True, should_sleep=True) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/service_activity.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/service_activity.py new file mode 100644 index 00000000..c711fe2b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/service_activity.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import traceback + +from pipeline.conf import default_settings +from pipeline.core.data.hydration import hydrate_node_data +from pipeline.core.flow.activity import ServiceActivity +from pipeline.django_signal_valve import valve +from pipeline.engine import signals +from pipeline.engine.models import Data, ScheduleService, Status + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["ServiceActivityHandler"] + + +class ServiceActivityHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return ServiceActivity + + def handle(self, process, element, status): + pre_execute_success = False + success = False + exception_occurred = False + monitoring = False + version = status.version + root_pipeline = process.root_pipeline + + # rerun mode + if status.loop > 1 and not element.on_retry(): + element.prepare_rerun_data() + process.top_pipeline.context.recover_variable() + + elif element.on_retry(): + element.retry_at_current_exec() + + # set loop to data + element.data.inputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + element.data.outputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + + # pre output extract + process.top_pipeline.context.extract_output(element, set_miss=False) + + # hydrate inputs + hydrate_node_data(element) + + if element.timeout: + logger.info("node {} {} start timeout monitor, timeout: {}".format(element.id, version, element.timeout)) + signals.service_activity_timeout_monitor_start.send( + sender=element.__class__, + node_id=element.id, + version=version, + root_pipeline_id=root_pipeline.id, + countdown=element.timeout, + ) + monitoring = True + + element.setup_runtime_attrs( + id=element.id, root_pipeline_id=root_pipeline.id, + ) + + # pre_process inputs and execute service + try: + pre_execute_success = element.execute_pre_process(root_pipeline.data) + if pre_execute_success: + success = element.execute(root_pipeline.data) + except Exception: + if element.error_ignorable: + # ignore exception + pre_execute_success = True + success = True + exception_occurred = True + element.ignore_error() + ex_data = traceback.format_exc() + element.data.outputs.ex_data = ex_data + logger.error(ex_data) + + # process result + if pre_execute_success is False or success is False: + ex_data = element.data.get_one_of_outputs("ex_data") + Status.objects.fail(element, ex_data) + try: + element.failure_handler(root_pipeline.data) + except Exception: + logger.error("failure_handler({}) failed: {}".format(element.id, traceback.format_exc())) + + if monitoring: + signals.service_activity_timeout_monitor_end.send( + sender=element.__class__, node_id=element.id, version=version + ) + logger.info("node {} {} timeout monitor revoke".format(element.id, version)) + + # send activity error signal + valve.send( + signals, + "activity_failed", + sender=root_pipeline, + pipeline_id=root_pipeline.id, + pipeline_activity_id=element.id, + subprocess_id_stack=process.subprocess_stack, + ) + + return self.HandleResult(next_node=None, should_return=False, should_sleep=True) + else: + is_error_ignored = element.error_ignorable and not element.get_result_bit() + if element.need_schedule() and not exception_occurred and not is_error_ignored: + # write data before schedule + Data.objects.write_node_data(element) + return self.HandleResult( + next_node=None, + should_return=True, + should_sleep=True, + after_sleep_call=ScheduleService.objects.set_schedule, + args=[], + kwargs=dict( + 
activity_id=element.id, + service_act=element.shell(), + process_id=process.id, + version=version, + parent_data=process.top_pipeline.data, + ), + ) + + process.top_pipeline.context.extract_output(element) + error_ignorable = not element.get_result_bit() + + if monitoring: + signals.service_activity_timeout_monitor_end.send( + sender=element.__class__, node_id=element.id, version=version + ) + logger.info("node {} {} timeout monitor revoke".format(element.id, version)) + + if not Status.objects.finish(element, error_ignorable): + # has been forced failed + return self.HandleResult(next_node=None, should_return=False, should_sleep=True) + return self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/handlers/subprocess.py b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/subprocess.py new file mode 100644 index 00000000..4f14921e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/handlers/subprocess.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + +from pipeline.conf import default_settings +from pipeline.core.data.hydration import hydrate_node_data +from pipeline.core.flow.activity import SubProcess + +from .base import FlowElementHandler + +logger = logging.getLogger("pipeline_engine") + +__all__ = ["SubprocessHandler"] + + +class SubprocessHandler(FlowElementHandler): + @staticmethod + def element_cls(): + return SubProcess + + def handle(self, process, element, status): + # rerun mode + if status.loop > 1: + element.prepare_rerun_data() + element.pipeline.context.recover_variable() + process.top_pipeline.context.recover_variable() + + # set loop count + element.data.outputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + + # pre output extract + process.top_pipeline.context.extract_output(element, set_miss=False) + + # hydrate data + hydrate_node_data(element) + + # context injection + data = element.pipeline.data + context = element.pipeline.context + for k, v in list(data.get_inputs().items()): + context.set_global_var(k, v) + + sub_pipeline = element.pipeline + process.push_pipeline(sub_pipeline, is_subprocess=True) + process.take_snapshot() + return self.HandleResult(next_node=sub_pipeline.start_event, should_return=False, should_sleep=False) diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/runtime.py b/runtime/bamboo-pipeline/pipeline/engine/core/runtime.py new file mode 100644 index 00000000..44947244 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/core/runtime.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. 
All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import contextlib
+import logging
+import traceback
+
+from pipeline.conf import settings as pipeline_settings
+from pipeline.core.flow.activity import SubProcess
+from pipeline.engine import states
+from pipeline.engine.core import context
+from pipeline.engine.core.handlers import HandlersFactory
+from pipeline.engine.models import NAME_MAX_LENGTH, FunctionSwitch, NodeRelationship, Status
+
+logger = logging.getLogger("pipeline_engine")
+
+RERUN_MAX_LIMIT = pipeline_settings.PIPELINE_RERUN_MAX_TIMES
+
+
+@contextlib.contextmanager
+def runtime_exception_handler(process):
+    try:
+        yield
+    except Exception as e:
+        logger.error(traceback.format_exc())
+        process.exit_gracefully(e)
+
+
+def run_loop(process):
+    """
+    main loop that advances the pipeline
+    :param process: the process being advanced
+    :return:
+    """
+    with runtime_exception_handler(process):
+        while True:
+            current_node = process.top_pipeline.node(process.current_node_id)
+
+            # check child process destination
+            if process.destination_id == current_node.id:
+                try:
+                    process.destroy_and_wake_up_parent(current_node.id)
+                except Exception:
+                    logger.error(traceback.format_exc())
+                logger.info("child process(%s) finished." % process.id)
+                return
+
+            # check root pipeline status
+            need_sleep, pipeline_state = process.root_sleep_check()
+            if need_sleep:
+                logger.info("pipeline(%s) turn to sleep." % process.root_pipeline.id)
+                process.sleep(do_not_save=(pipeline_state == states.REVOKED))
+                return
+
+            # check subprocess status
+            need_sleep, subproc_above = process.subproc_sleep_check()
+            if need_sleep:
+                logger.info("process(%s) turn to sleep." % process.root_pipeline.id)
+                process.sleep(adjust_status=True, adjust_scope=subproc_above)
+                return
+
+            # check engine status
+            if FunctionSwitch.objects.is_frozen():
+                logger.info("pipeline(%s) has been frozen." % process.id)
+                process.freeze()
+                return
+
+            # try to transit current node to running state
+            name = (current_node.name or str(current_node.__class__))[:NAME_MAX_LENGTH]
+            action = Status.objects.transit(id=current_node.id, to_state=states.RUNNING, start=True, name=name)
+
+            # check rerun limit
+            if (
+                not isinstance(current_node, SubProcess)
+                and RERUN_MAX_LIMIT != 0
+                and action.extra.loop > RERUN_MAX_LIMIT
+            ):
+                logger.info(
+                    "node({nid}) rerun times exceed max limit: {limit}".format(
+                        nid=current_node.id, limit=RERUN_MAX_LIMIT
+                    )
+                )
+
+                # fail
+                action = Status.objects.fail(
+                    current_node, "rerun times exceed max limit: {limit}".format(limit=RERUN_MAX_LIMIT)
+                )
+
+                if not action.result:
+                    logger.warning(
+                        "cannot transit node({}) to running, pipeline({}) turn to sleep. "
+                        "message: {}".format(current_node.id, process.root_pipeline.id, action.message)
+                    )
+
+                process.sleep(adjust_status=True)
+                return
+
+            if not action.result:
+                logger.warning(
+                    "cannot transit node({}) to running, pipeline({}) turn to sleep. "
+                    "message: {}".format(
+                        current_node.id, process.root_pipeline.id, action.message
+                    )
+                )
+                process.sleep(adjust_status=True)
+                return
+
+            # refresh current node
+            process.refresh_current_node(current_node.id)
+
+            # build relationship
+            NodeRelationship.objects.build_relationship(process.top_pipeline.id, current_node.id)
+            # set up context
+            context.set_node_id(current_node.id)
+
+            result = HandlersFactory.handlers_for(current_node)(process, current_node, action.extra)
+
+            if result.should_return or result.should_sleep:
+                if result.should_sleep:
+                    process.sleep(adjust_status=True)
+                    if result.after_sleep_call:
+                        result.after_sleep_call(*result.args, **result.kwargs)
+                return
+
+            # store current node id
+            process.current_node_id = result.next_node.id
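+
+# Note: RERUN_MAX_LIMIT is read from the PIPELINE_RERUN_MAX_TIMES setting above,
+# and the `RERUN_MAX_LIMIT != 0` guard means 0 disables the check entirely. An
+# illustrative project-level setting (example value, not a shipped default):
+#
+#     PIPELINE_RERUN_MAX_TIMES = 10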
diff --git a/runtime/bamboo-pipeline/pipeline/engine/core/schedule.py b/runtime/bamboo-pipeline/pipeline/engine/core/schedule.py
new file mode 100644
index 00000000..5c47f073
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/core/schedule.py
@@ -0,0 +1,260 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import contextlib
+import logging
+import traceback
+
+from django.db import transaction
+
+from pipeline.django_signal_valve import valve
+from pipeline.engine.core import context
+from pipeline.engine import exceptions, signals, states
+from pipeline.engine.core.data import delete_parent_data, get_schedule_parent_data, set_schedule_data
+from pipeline.engine.models import Data, MultiCallbackData, PipelineProcess, ScheduleService, Status
+
+logger = logging.getLogger("pipeline_engine")
+
+
+@contextlib.contextmanager
+def schedule_exception_handler(process_id, schedule_id):
+    try:
+        yield
+    except Exception as e:
+        activity_id = schedule_id[: ScheduleService.SCHEDULE_ID_SPLIT_DIVISION]
+        version = schedule_id[ScheduleService.SCHEDULE_ID_SPLIT_DIVISION :]
+        if Status.objects.filter(id=activity_id, version=version).exists():
+            logger.error(traceback.format_exc())
+            process = PipelineProcess.objects.get(id=process_id)
+            process.exit_gracefully(e)
+        else:
+            logger.warning("schedule({} - {}) forced exit.".format(activity_id, version))
+
+        delete_parent_data(schedule_id)
+
+
+@contextlib.contextmanager
+def auto_release_schedule_lock(schedule_id):
+    yield
+    # release the schedule lock before exiting schedule
+    ScheduleService.objects.filter(id=schedule_id, is_scheduling=True).update(is_scheduling=False)
+    logger.warning("schedule({}) unlocked successfully.".format(schedule_id))
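+
+# Note: the lock released above is acquired in schedule() below through an atomic
+# compare-and-swap style UPDATE, roughly:
+#
+#     ScheduleService.objects.filter(id=schedule_id, is_scheduling=False).update(is_scheduling=True)
+#
+# which returns 0 updated rows when another worker already holds the lock.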
+
+
+def schedule(process_id, schedule_id, data_id=None):
+    """
+    main entry of the schedule service
+    :param process_id: the PipelineProcess that the scheduled node belongs to
+    :param schedule_id: schedule ID
+    :param data_id: callback data ID
+    :return:
+    """
+    with schedule_exception_handler(process_id, schedule_id):
+        # set up context
+        context.set_node_id(schedule_id[: ScheduleService.SCHEDULE_ID_SPLIT_DIVISION])
+
+        # the schedule may have been destroyed by another schedule
+        try:
+            sched_service = ScheduleService.objects.get(id=schedule_id)
+            # stop if the schedule is already finished
+            if sched_service.is_finished:
+                logger.warning("schedule already finished, give up, sched_id: {}".format(schedule_id))
+                return
+        except ScheduleService.DoesNotExist:
+            logger.warning("schedule does not exist, give up, sched_id: {}".format(schedule_id))
+            return
+
+        # check whether the node is in a state waiting for scheduling
+        service_act = sched_service.service_act
+        act_id = sched_service.activity_id
+        version = sched_service.version
+
+        if not Status.objects.filter(id=act_id, version=version, state=states.RUNNING).exists():
+            # forced failed
+            logger.warning(
+                "schedule service failed, schedule({} - {}) node state is not running or version does not match.".format(
+                    act_id, version
+                )
+            )
+            sched_service.destroy()
+            return
+
+        # try to acquire the schedule lock
+        is_updated = ScheduleService.objects.filter(id=schedule_id, is_scheduling=False).update(is_scheduling=True)
+
+        # lock failed, another worker may be holding it
+        if is_updated == 0:
+            # only retry when multi callback is enabled
+            if not sched_service.multi_callback_enabled:
+                logger.warning(
+                    "invalid schedule request, schedule({} - {}) node is not multi callback enabled.".format(
+                        act_id, version
+                    )
+                )
+                return
+
+            # retry the lock after a short delay
+            logger.warning("schedule service lock-{} failed, will retry shortly".format(schedule_id))
+            valve.send(
+                signals,
+                "schedule_ready",
+                sender=ScheduleService,
+                process_id=process_id,
+                schedule_id=schedule_id,
+                data_id=data_id,
+                countdown=2,
+            )
+            return
+
+        with auto_release_schedule_lock(schedule_id):
+            # get data
+            parent_data = get_schedule_parent_data(sched_service.id)
+            if parent_data is None:
+                raise exceptions.DataRetrieveError(
+                    "child process({}) failed to retrieve parent_data, sched_id: {}".format(process_id, schedule_id)
+                )
+
+            # get schedule data
+            if sched_service.multi_callback_enabled and data_id:
+                try:
+                    callback_data = MultiCallbackData.objects.get(id=data_id)
+                    schedule_data = callback_data.data
+                except MultiCallbackData.DoesNotExist:
+                    logger.warning(
+                        "schedule failed to get callback_data, give up schedule, sched_id: {}".format(schedule_id)
+                    )
+                    return
+            else:
+                schedule_data = sched_service.callback_data
+
+            # schedule
+            ex_data, success = None, False
+            try:
+                success = service_act.schedule(parent_data, schedule_data)
+                if success is None:
+                    success = True
+            except Exception:
+                if service_act.error_ignorable:
+                    success = True
+                    service_act.ignore_error()
+                    service_act.finish_schedule()
+
+                ex_data = traceback.format_exc()
+                logger.error(ex_data)
+
+            sched_service.schedule_times += 1
+            set_schedule_data(sched_service.id, parent_data)
+
+            # schedule failed
+            if not success:
+                if not Status.objects.transit(id=act_id, version=version, to_state=states.FAILED).result:
+                    # forced failed
+                    logger.warning(
+                        "FAILED transit failed, schedule({} - {}) has been forced to exit.".format(act_id, version)
+                    )
+                    sched_service.destroy()
+                    return
+
+                if service_act.timeout:
+                    signals.service_activity_timeout_monitor_end.send(
+                        sender=service_act.__class__, node_id=service_act.id, version=version
+                    )
+                    logger.info("node {} {} timeout monitor revoke".format(service_act.id, version))
+
+                Data.objects.write_node_data(service_act, ex_data=ex_data)
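+
+                # (a reading of the intent, not documented upstream: select_for_update
+                # in the transaction below locks the process row so that the status
+                # adjustment cannot race with a concurrent wake-up of this process)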
+
+                with transaction.atomic():
+                    process = PipelineProcess.objects.select_for_update().get(id=sched_service.process_id)
+                    if not process.is_alive:
+                        logger.info("pipeline %s has been revoked, status adjust failed." % process.root_pipeline_id)
+                        return
+
+                    process.adjust_status()
+
+                # send activity error signal
+                try:
+                    service_act.schedule_fail()
+                except Exception:
+                    logger.error("schedule_fail handler failed: %s" % traceback.format_exc())
+
+                signals.service_schedule_fail.send(
+                    sender=ScheduleService, activity_shell=service_act, schedule_service=sched_service, ex_data=ex_data
+                )
+
+                valve.send(
+                    signals,
+                    "activity_failed",
+                    sender=process.root_pipeline,
+                    pipeline_id=process.root_pipeline_id,
+                    pipeline_activity_id=service_act.id,
+                    subprocess_id_stack=process.subprocess_stack,
+                )
+                return
+
+            # schedule execution finished or one-time callback finished
+            if service_act.is_schedule_done() or sched_service.is_one_time_callback():
+                error_ignorable = not service_act.get_result_bit()
+                if not Status.objects.transit(id=act_id, version=version, to_state=states.FINISHED).result:
+                    # forced failed
+                    logger.warning(
+                        "FINISHED transit failed, schedule({} - {}) has been forced to exit.".format(act_id, version)
+                    )
+                    sched_service.destroy()
+                    return
+
+                if service_act.timeout:
+                    signals.service_activity_timeout_monitor_end.send(
+                        sender=service_act.__class__, node_id=service_act.id, version=version
+                    )
+                    logger.info("node {} {} timeout monitor revoke".format(service_act.id, version))
+
+                Data.objects.write_node_data(service_act)
+                if error_ignorable:
+                    s = Status.objects.get(id=act_id)
+                    s.error_ignorable = True
+                    s.save()
+
+                # sync parent data
+                process = PipelineProcess.objects.get(id=sched_service.process_id)
+                if not process.is_alive:
+                    logger.warning("schedule({} - {}) revoked.".format(act_id, version))
+                    sched_service.destroy()
+                    return
+
+                process.top_pipeline.data.update_outputs(parent_data.get_outputs())
+                # extract outputs
+                process.top_pipeline.context.extract_output(service_act)
+                process.save(save_snapshot=True)
+
+                # clear temp data
+                delete_parent_data(sched_service.id)
+                # save schedule service
+                sched_service.finish()
+
+                signals.service_schedule_success.send(
+                    sender=ScheduleService, activity_shell=service_act, schedule_service=sched_service
+                )
+
+                valve.send(
+                    signals,
+                    "wake_from_schedule",
+                    sender=ScheduleService,
+                    process_id=sched_service.process_id,
+                    activity_id=sched_service.activity_id,
+                )
+            else:
+                Data.objects.write_node_data(service_act)
+                if sched_service.multi_callback_enabled:
+                    sched_service.save()
+                else:
+                    sched_service.set_next_schedule()
diff --git a/runtime/bamboo-pipeline/pipeline/engine/exceptions.py b/runtime/bamboo-pipeline/pipeline/engine/exceptions.py
new file mode 100644
index 00000000..10fc35dd
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/exceptions.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +from pipeline.exceptions import PipelineException + + +class PipelineEngineException(PipelineException): + pass + + +class NodeNotExistException(PipelineEngineException): + pass + + +class InvalidOperationException(PipelineEngineException): + pass + + +class RabbitMQConnectionError(PipelineEngineException): + pass + + +class ChildDataSyncError(PipelineEngineException): + pass + + +class DataRetrieveError(PipelineEngineException): + pass + + +class InvalidDataBackendError(PipelineEngineException): + pass + + +class InvalidPipelineEndHandleError(PipelineEngineException): + pass + + +class CeleryFailedTaskCatchException(PipelineEngineException): + def __init__(self, task_name): + self.task_name = task_name diff --git a/runtime/bamboo-pipeline/pipeline/engine/health/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/health/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/health/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/engine/health/zombie/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/__init__.py new file mode 100644 index 00000000..15fe6271 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.engine.health.zombie.heal import get_healer # noqa diff --git a/runtime/bamboo-pipeline/pipeline/engine/health/zombie/doctors.py b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/doctors.py new file mode 100644 index 00000000..9306884e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/doctors.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+import abc
+import logging
+
+from django.utils import timezone
+
+from pipeline.core.pipeline import Pipeline
+from pipeline.engine import signals, states
+from pipeline.engine.models import ProcessCeleryTask, ScheduleService, Status
+from pipeline.utils import uniqid
+
+logger = logging.getLogger("celery")
+
+
+class ZombieProcDoctor(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def confirm(self, proc):
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def cure(self, proc):
+        raise NotImplementedError()
+
+
+class RunningNodeZombieDoctor(ZombieProcDoctor):
+    def __init__(self, max_stuck_time: float, detect_wait_callback_proc: bool = False):
+        """
+        :param max_stuck_time: maximum time (in seconds) a node is allowed to stay stuck
+        :param detect_wait_callback_proc: whether to also detect processes waiting for callbacks
+        """
+        self.max_stuck_time = max_stuck_time
+        self.detect_wait_callback_proc = detect_wait_callback_proc
+
+    def confirm(self, proc):
+
+        # skip processes without a current node
+        if not proc.current_node_id:
+            logger.warning("Process({}) has no current_node({}), skip".format(proc.id, proc.current_node_id))
+            return False
+
+        # skip when the node status does not exist
+        try:
+            status = Status.objects.get(id=proc.current_node_id)
+        except Status.DoesNotExist:
+            logger.warning("Process({})'s current_node({}) does not exist, skip".format(proc.id, proc.current_node_id))
+            return False
+
+        # do not process legacy status data
+        if not status.state_refresh_at:
+            logger.warning(
+                "Process({})'s current_node({}) state_refresh_at({}) is invalid, skip".format(
+                    proc.id, proc.current_node_id, status.state_refresh_at
+                )
+            )
+            return False
+
+        # only process RUNNING node
+        if status.state != states.RUNNING:
+            return False
+
+        try:
+            schedule = ScheduleService.objects.schedule_for(status.id, status.version)
+        except ScheduleService.DoesNotExist:
+            pass
+        else:
+            if schedule.wait_callback and not self.detect_wait_callback_proc:
+                return False
+
+        stuck_time = (timezone.now() - status.state_refresh_at).total_seconds()
+        if float(stuck_time) > float(self.max_stuck_time):
+            logger.info(
+                "Process({}) with current_node({}) stuck_time({}) exceeds max_stuck_time({}), "
+                "mark as zombie".format(proc.id, proc.current_node_id, stuck_time, self.max_stuck_time)
+            )
+            return True
+
+        return False
+
+    def cure(self, proc):
+
+        current_node_id = proc.current_node_id
+
+        # try to transit current node to FAILURE
+        try:
+            result = Status.objects.raw_fail(
+                node_id=current_node_id,
+                ex_data="This node has been failed because its process was diagnosed as a zombie process",
+            )
+        except Exception:
+            logger.exception(
+                "An error occurred when transiting node({}) for zombie process({}).".format(current_node_id, proc.id)
+            )
+        else:
+            if not result.result:
+                logger.error(
+                    "cannot transit node({}) for zombie process({}), message: {}".format(
+                        current_node_id, proc.id, result.message
+                    )
+                )
+            else:
+                status = result.extra
+                status.version = uniqid.uniqid()
+                status.save()
+                ProcessCeleryTask.objects.revoke(proc.id, kill=True)
+
+                # adjust pipeline state
+                proc.adjust_status()
+                proc.is_sleep = True
+                proc.save()
+                logger.info(
+                    "Zombie process({}) with node({}) has been cured by {}".format(
+                        proc.id, current_node_id, self.__class__.__name__
+                    )
+                )
+                try:
+                    signals.activity_failed.send(
+                        sender=Pipeline, pipeline_id=proc.root_pipeline_id, pipeline_activity_id=current_node_id
+                    )
+                except Exception as e:
+                    logger.exception(
+                        "An error({}) occurred when sending the activity_failed signal for node({}) "
+                        "of zombie process({}).".format(e, current_node_id, proc.id)
+                    )
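+
+# A doctor like the one above is typically registered through the
+# ENGINE_ZOMBIE_PROCESS_DOCTORS setting consumed by heal.get_healer() below; an
+# illustrative configuration (example values, not shipped defaults):
+#
+#     ENGINE_ZOMBIE_PROCESS_DOCTORS = [
+#         {
+#             "class": "pipeline.engine.health.zombie.doctors.RunningNodeZombieDoctor",
+#             "config": {"max_stuck_time": 30, "detect_wait_callback_proc": False},
+#         }
+#     ]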
diff --git a/runtime/bamboo-pipeline/pipeline/engine/health/zombie/heal.py b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/heal.py
new file mode 100644
index 00000000..75af7d50
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/health/zombie/heal.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+
+from django.utils.module_loading import import_string
+
+from pipeline.conf import default_settings
+from pipeline.engine.models import PipelineProcess
+
+logger = logging.getLogger("celery")
+
+
+def get_healer():
+    if not default_settings.ENGINE_ZOMBIE_PROCESS_DOCTORS:
+        logger.info("ENGINE_ZOMBIE_PROCESS_DOCTORS setting is empty, use dummy healer")
+        return DummyZombieProcHealer()
+
+    doctors = []
+
+    for dr_setting in default_settings.ENGINE_ZOMBIE_PROCESS_DOCTORS:
+        try:
+            doctors.append(import_string(dr_setting["class"])(**dr_setting["config"]))
+        except Exception:
+            logger.exception("Error occurred when initializing doctor({}), skip".format(dr_setting))
+
+    if not doctors:
+        logger.info("All doctors failed to initialize, use dummy healer")
+        return DummyZombieProcHealer()
+
+    return ZombieProcHealer(doctors=doctors)
+
+
+class DummyZombieProcHealer(object):
+    def heal(self):
+        pass
+
+
+class ZombieProcHealer(object):
+    def __init__(self, doctors):
+        self.doctors = doctors
+
+    def heal(self):
+
+        if not self.doctors:
+            return
+
+        proc_ids = self._get_process_ids()
+
+        for proc_id in proc_ids:
+
+            # get proc every time for latest state
+            proc = PipelineProcess.objects.get(id=proc_id)
+
+            if not proc.is_alive or proc.is_frozen:
+                continue
+
+            for dr in self.doctors:
+                if dr.confirm(proc):
+                    dr.cure(proc)
+                    break
+
+    def _get_process_ids(self):
+        return PipelineProcess.objects.filter(is_alive=True, is_frozen=False).values_list("id", flat=True)
diff --git a/runtime/bamboo-pipeline/pipeline/engine/logging.py b/runtime/bamboo-pipeline/pipeline/engine/logging.py
new file mode 100644
index 00000000..e845852e
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/logging.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + + +def get_logger(): + return logging.getLogger(__name__) + + +logger = get_logger() diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0001_initial.py new file mode 100644 index 00000000..8fbaab4e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0001_initial.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +# Generated by Django 1.11.2 on 2017-11-24 10:43 + + +from django.db import migrations, models +import django.db.models.deletion +import pipeline.engine.models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Data", + fields=[ + ( + "id", + models.CharField( + max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="\u8282\u70b9 ID" + ), + ), + ("inputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u5165\u6570\u636e")), + ("outputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u51fa\u6570\u636e")), + ("ex_data", pipeline.engine.models.IOField(verbose_name="\u5f02\u5e38\u6570\u636e")), + ], + ), + migrations.CreateModel( + name="History", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("identifier", models.CharField(db_index=True, max_length=32, verbose_name="\u8282\u70b9 id")), + ("started_time", models.DateTimeField(verbose_name="\u5f00\u59cb\u65f6\u95f4")), + ("archived_time", models.DateTimeField(verbose_name="\u7ed3\u675f\u65f6\u95f4")), + ], + ), + migrations.CreateModel( + name="HistoryData", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("inputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u5165\u6570\u636e")), + ("outputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u51fa\u6570\u636e")), + ("ex_data", pipeline.engine.models.IOField(verbose_name="\u5f02\u5e38\u6570\u636e")), + ], + ), + migrations.CreateModel( + name="NodeRelationship", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("ancestor_id", models.CharField(db_index=True, max_length=32, verbose_name="\u7956\u5148 ID")), + ("descendant_id", models.CharField(db_index=True, max_length=32, verbose_name="\u540e\u4ee3 ID")), + ("distance", 
models.IntegerField(verbose_name="\u8ddd\u79bb")), + ], + ), + migrations.CreateModel( + name="PipelineModel", + fields=[ + ( + "id", + models.CharField( + max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="pipeline ID" + ), + ), + ], + ), + migrations.CreateModel( + name="PipelineProcess", + fields=[ + ( + "id", + models.CharField( + max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="Process ID" + ), + ), + ("root_pipeline_id", models.CharField(max_length=32, verbose_name="\u6839 pipeline \u7684 ID")), + ( + "current_node_id", + models.CharField( + db_index=True, + default=b"", + max_length=32, + verbose_name="\u5f53\u524d\u63a8\u8fdb\u5230\u7684\u8282\u70b9\u7684 ID", + ), + ), + ( + "destination_id", + models.CharField( + default=b"", + max_length=32, + verbose_name="\u9047\u5230\u8be5 ID \u7684\u8282\u70b9\u5c31\u505c\u6b62\u63a8\u8fdb", + ), + ), + ("parent_id", models.CharField(default=b"", max_length=32, verbose_name="\u7236 process \u7684 ID")), + ( + "ack_num", + models.IntegerField( + default=0, verbose_name="\u6536\u5230\u5b50\u8282\u70b9 ACK \u7684\u6570\u91cf" + ), + ), + ( + "need_ack", + models.IntegerField( + default=-1, + verbose_name="\u9700\u8981\u6536\u5230\u7684\u5b50\u8282\u70b9 ACK \u7684\u6570\u91cf", + ), + ), + ( + "is_alive", + models.BooleanField(default=True, verbose_name="\u8be5 process \u662f\u5426\u8fd8\u6709\u6548"), + ), + ( + "is_sleep", + models.BooleanField( + default=False, verbose_name="\u8be5 process \u662f\u5426\u6b63\u5728\u4f11\u7720" + ), + ), + ], + ), + migrations.CreateModel( + name="ProcessCeleryTask", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "process_id", + models.CharField( + db_index=True, max_length=32, unique=True, verbose_name="pipeline \u8fdb\u7a0b ID" + ), + ), + ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), + ], + ), + migrations.CreateModel( + name="ProcessSnapshot", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("data", pipeline.engine.models.IOField(verbose_name="\u5b50 process ID \u4e0e pipeline_stack")), + ], + ), + migrations.CreateModel( + name="ScheduleService", + fields=[ + ( + "id", + models.CharField( + max_length=64, + primary_key=True, + serialize=False, + unique=True, + verbose_name="ID \u8282\u70b9ID+version", + ), + ), + ("activity_id", models.CharField(db_index=True, max_length=32, verbose_name="\u8282\u70b9 ID")), + ("process_id", models.CharField(max_length=32, verbose_name="Pipeline \u8fdb\u7a0b ID")), + ("schedule_times", models.IntegerField(default=0, verbose_name="\u88ab\u8c03\u5ea6\u6b21\u6570")), + ( + "wait_callback", + models.BooleanField(default=False, verbose_name="\u662f\u5426\u662f\u56de\u8c03\u578b\u8c03\u5ea6"), + ), + ( + "callback_data", + pipeline.engine.models.IOField(default=None, verbose_name="\u56de\u8c03\u6570\u636e"), + ), + ("service_act", pipeline.engine.models.IOField(verbose_name="\u5f85\u8c03\u5ea6\u670d\u52a1")), + ("is_finished", models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u5b8c\u6210")), + ("version", models.CharField(db_index=True, max_length=32, verbose_name="Activity \u7684\u7248\u672c")), + ], + ), + migrations.CreateModel( + name="Status", + fields=[ + ( + "id", + models.CharField( + max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="\u8282\u70b9 ID" + ), + ), + ("state", 
models.CharField(max_length=10, verbose_name="\u72b6\u6001")), + ("name", models.CharField(default=b"", max_length=64, verbose_name="\u8282\u70b9\u540d\u79f0")), + ("retry", models.IntegerField(default=0, verbose_name="\u91cd\u8bd5\u6b21\u6570")), + ("loop", models.IntegerField(default=1, verbose_name="\u5faa\u73af\u6b21\u6570")), + ("skip", models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7")), + ("created_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), + ("started_time", models.DateTimeField(null=True, verbose_name="\u5f00\u59cb\u65f6\u95f4")), + ("archived_time", models.DateTimeField(null=True, verbose_name="\u5f52\u6863\u65f6\u95f4")), + ("version", models.CharField(max_length=32, verbose_name="\u7248\u672c")), + ], + options={"ordering": ["-created_time"]}, + ), + migrations.CreateModel( + name="SubProcessRelationship", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("subprocess_id", models.CharField(db_index=True, max_length=32, verbose_name="\u5b50\u6d41\u7a0b ID")), + ("process_id", models.CharField(max_length=32, verbose_name="\u5bf9\u5e94\u7684\u8fdb\u7a0b ID")), + ], + ), + migrations.AddField( + model_name="pipelineprocess", + name="snapshot", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" + ), + ), + migrations.AddField( + model_name="pipelinemodel", + name="process", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.PipelineProcess" + ), + ), + migrations.AddField( + model_name="history", + name="data", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0002_auto_20180109_1825.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0002_auto_20180109_1825.py new file mode 100644 index 00000000..d42e6f6d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0002_auto_20180109_1825.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +# Generated by Django 1.11.2 on 2018-01-09 18:25 + + +from django.db import migrations +import pipeline.engine.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="processsnapshot", + name="data", + field=pipeline.engine.models.IOField(verbose_name="pipeline \u8fd0\u884c\u65f6\u6570\u636e"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0003_auto_20180717_1148.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0003_auto_20180717_1148.py new file mode 100644 index 00000000..9b95c16f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0003_auto_20180717_1148.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("engine", "0002_auto_20180109_1825"), + ] + + operations = [ + migrations.CreateModel( + name="FunctionSwitch", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("name", models.CharField(max_length=32, verbose_name="\u529f\u80fd\u540d\u79f0")), + ("description", models.TextField(default=b"", verbose_name="\u529f\u80fd\u63cf\u8ff0")), + ("is_active", models.BooleanField(default=False, verbose_name="\u662f\u5426\u6fc0\u6d3b")), + ], + ), + migrations.AddField( + model_name="pipelineprocess", + name="is_froze", + field=models.BooleanField(default=False, verbose_name="\u8be5 process \u662f\u5426\u88ab\u51bb\u7ed3"), + ), + migrations.AddField( + model_name="scheduleservice", + name="celery_id", + field=models.CharField(default=b"", max_length=36, verbose_name="celery \u4efb\u52a1ID"), + ), + migrations.AddField( + model_name="scheduleservice", + name="celery_info_lock", + field=models.IntegerField(default=0, verbose_name="celery \u4fe1\u606f\u66f4\u65b0\u9501"), + ), + migrations.AddField( + model_name="scheduleservice", + name="is_frozen", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u88ab\u51bb\u7ed3"), + ), + migrations.AddField( + model_name="scheduleservice", + name="is_scheduling", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u6b63\u5728\u88ab\u8c03\u5ea6"), + ), + migrations.AddField( + model_name="scheduleservice", + name="schedule_date", + field=models.DateTimeField( + null=True, verbose_name="\u4e0b\u4e00\u6b21\u88ab\u8c03\u5ea6\u7684\u65f6\u95f4" + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0004_auto_20180717_1411.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0004_auto_20180717_1411.py new file mode 100644 index 00000000..1903fc86 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0004_auto_20180717_1411.py @@ -0,0 +1,26 @@ +# -*- 
coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0003_auto_20180717_1148"), + ] + + operations = [ + migrations.RenameField(model_name="pipelineprocess", old_name="is_froze", new_name="is_frozen",), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0005_auto_20180717_1433.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0005_auto_20180717_1433.py new file mode 100644 index 00000000..6ea13784 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0005_auto_20180717_1433.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0004_auto_20180717_1411"), + ] + + operations = [ + migrations.AlterField( + model_name="functionswitch", + name="name", + field=models.CharField(unique=True, max_length=32, verbose_name="\u529f\u80fd\u540d\u79f0"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0006_auto_20180717_1543.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0006_auto_20180717_1543.py new file mode 100644 index 00000000..6df40ecf --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0006_auto_20180717_1543.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0005_auto_20180717_1433"), + ] + + operations = [ + migrations.AlterField( + model_name="scheduleservice", + name="celery_id", + field=models.CharField(max_length=36, null=True, verbose_name="celery \u4efb\u52a1ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0007_auto_20180717_2022.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0007_auto_20180717_2022.py new file mode 100644 index 00000000..e6f859f8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0007_auto_20180717_2022.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0006_auto_20180717_1543"), + ] + + operations = [ + migrations.RemoveField(model_name="scheduleservice", name="celery_id",), + migrations.RemoveField(model_name="scheduleservice", name="celery_info_lock",), + migrations.RemoveField(model_name="scheduleservice", name="is_frozen",), + migrations.RemoveField(model_name="scheduleservice", name="schedule_date",), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0008_schedulecelerytask.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0008_schedulecelerytask.py new file mode 100644 index 00000000..f9d2d801 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0008_schedulecelerytask.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0007_auto_20180717_2022"), + ] + + operations = [ + migrations.CreateModel( + name="ScheduleCeleryTask", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ( + "schedule_id", + models.CharField(unique=True, max_length=64, verbose_name="schedule ID", db_index=True), + ), + ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0009_status_error_ignorable.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0009_status_error_ignorable.py new file mode 100644 index 00000000..7f7aa5d9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0009_status_error_ignorable.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0008_schedulecelerytask"), + ] + + operations = [ + migrations.AddField( + model_name="status", + name="error_ignorable", + field=models.BooleanField( + default=False, verbose_name="\u662f\u5426\u51fa\u9519\u540e\u81ea\u52a8\u5ffd\u7565" + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_auto_20180830_1203.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_auto_20180830_1203.py new file mode 100644 index 00000000..083f2902 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_auto_20180830_1203.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0009_status_error_ignorable"), + ] + + operations = [ + migrations.AlterField( + model_name="scheduleservice", + name="id", + field=models.CharField( + max_length=96, unique=True, serialize=False, verbose_name="ID \u8282\u70b9ID+version", primary_key=True + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_nodecelerytask.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_nodecelerytask.py new file mode 100644 index 00000000..fe9044c1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0010_nodecelerytask.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0010_auto_20180830_1203"), + ] + + operations = [ + migrations.CreateModel( + name="NodeCeleryTask", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ( + "node_id", + models.CharField(unique=True, max_length=32, verbose_name="\u8282\u70b9 ID", db_index=True), + ), + ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0011_auto_20180830_1205.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0011_auto_20180830_1205.py new file mode 100644 index 00000000..305052f3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0011_auto_20180830_1205.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0010_nodecelerytask"), + ] + + operations = [ + migrations.AlterField( + model_name="scheduleservice", + name="id", + field=models.CharField( + max_length=64, unique=True, serialize=False, verbose_name="ID \u8282\u70b9ID+version", primary_key=True + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0015_datasnapshot.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0015_datasnapshot.py new file mode 100644 index 00000000..48bd7704 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0015_datasnapshot.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models +import pipeline.engine.models.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0011_auto_20180830_1205"), + ] + + operations = [ + migrations.CreateModel( + name="DataSnapshot", + fields=[ + ( + "key", + models.CharField( + max_length=255, serialize=False, verbose_name="\u5bf9\u8c61\u552f\u4e00\u952e", primary_key=True + ), + ), + ("obj", pipeline.engine.models.fields.IOField(verbose_name="\u5bf9\u8c61\u5b58\u50a8\u5b57\u6bb5")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0016_auto_20181228_0345.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0016_auto_20181228_0345.py new file mode 100644 index 00000000..3fcaa000 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0016_auto_20181228_0345.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0015_datasnapshot"), + ] + + operations = [ + migrations.AddField( + model_name="history", + name="loop", + field=models.IntegerField(default=1, verbose_name="\u5faa\u73af\u6b21\u6570"), + ), + migrations.AddField( + model_name="history", + name="skip", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0017_auto_20190719_1010.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0017_auto_20190719_1010.py new file mode 100644 index 00000000..b8589b46 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0017_auto_20190719_1010.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0016_auto_20181228_0345"), + ] + + operations = [ + migrations.AlterField( + model_name="status", + name="created_time", + field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="\u521b\u5efa\u65f6\u95f4"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0018_auto_20190729_1041.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0018_auto_20190729_1041.py new file mode 100644 index 00000000..757dad71 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0018_auto_20190729_1041.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0017_auto_20190719_1010"), + ] + + operations = [ + migrations.AlterField( + model_name="history", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="history", + name="data", + field=models.ForeignKey( + db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData" + ), + ), + migrations.AlterField( + model_name="historydata", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="nodecelerytask", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="noderelationship", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="processcelerytask", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="snapshot", + field=models.ForeignKey( + db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" + ), + ), + migrations.AlterField( + model_name="processsnapshot", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="schedulecelerytask", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + migrations.AlterField( + model_name="subprocessrelationship", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0019_auto_20190729_1044.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0019_auto_20190729_1044.py new file mode 100644 index 00000000..265dcb68 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0019_auto_20190729_1044.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0018_auto_20190729_1041"), + ] + + operations = [ + migrations.AlterField( + model_name="history", + name="data", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="snapshot", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0020_pipelinemodel_priority.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0020_pipelinemodel_priority.py new file mode 100644 index 00000000..0e3a012c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0020_pipelinemodel_priority.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.11 on 2019-09-12 07:31 + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0019_auto_20190729_1044"), + ] + + operations = [ + migrations.AddField( + model_name="pipelinemodel", + name="priority", + field=models.IntegerField(default=100, verbose_name="\u6d41\u7a0b\u4f18\u5148\u7ea7"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0021_auto_20191213_0725.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0021_auto_20191213_0725.py new file mode 100644 index 00000000..f3c08be3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0021_auto_20191213_0725.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2019-12-13 07:25 +from __future__ import unicode_literals + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0020_pipelinemodel_priority"), + ] + + operations = [ + migrations.AddField( + model_name="pipelinemodel", + name="queue", + field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), + ), + migrations.AlterField( + model_name="functionswitch", name="description", field=models.TextField(default="", verbose_name="功能描述"), + ), + migrations.AlterField( + model_name="history", + name="data", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.HistoryData"), + ), + migrations.AlterField( + model_name="nodecelerytask", + name="celery_task_id", + field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="current_node_id", + field=models.CharField(db_index=True, default="", max_length=32, verbose_name="当前推进到的节点的 ID"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="destination_id", + field=models.CharField(default="", max_length=32, verbose_name="遇到该 ID 的节点就停止推进"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="parent_id", + field=models.CharField(default="", max_length=32, verbose_name="父 process 的 ID"), + ), + migrations.AlterField( + model_name="pipelineprocess", + name="snapshot", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.ProcessSnapshot" + ), + ), + migrations.AlterField( + model_name="processcelerytask", + name="celery_task_id", + field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), + ), + 
migrations.AlterField( + model_name="schedulecelerytask", + name="celery_task_id", + field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), + ), + migrations.AlterField( + model_name="status", name="name", field=models.CharField(default="", max_length=64, verbose_name="节点名称"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py new file mode 100644 index 00000000..314ccd48 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-01-15 02:55 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0021_auto_20191213_0725"), + ] + + operations = [ + migrations.AddField( + model_name="scheduleservice", + name="multi_callback_enabled", + field=models.BooleanField(default=False, verbose_name="是否支持多次回调"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0023_status_state_refresh_at.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0023_status_state_refresh_at.py new file mode 100644 index 00000000..1e472655 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0023_status_state_refresh_at.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-02-13 07:38 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0022_scheduleservice_multi_callback_enabled"), + ] + + operations = [ + migrations.AddField( + model_name="status", + name="state_refresh_at", + field=models.DateTimeField(null=True, verbose_name="上次状态更新的时间"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0024_auto_20200224_0308.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0024_auto_20200224_0308.py new file mode 100644 index 00000000..fff49fee --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0024_auto_20200224_0308.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-02-24 03:08 +from __future__ import unicode_literals + +from django.db import migrations + +import pipeline.engine.models.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0023_status_state_refresh_at"), + ] + + operations = [ + migrations.AlterField( + model_name="data", + name="ex_data", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="异常数据"), + ), + migrations.AlterField( + model_name="data", + name="inputs", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输入数据"), + ), + migrations.AlterField( + model_name="data", + name="outputs", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输出数据"), + ), + migrations.AlterField( + model_name="historydata", + name="ex_data", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="异常数据"), + ), + migrations.AlterField( + model_name="historydata", + name="inputs", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输入数据"), + ), + migrations.AlterField( + model_name="historydata", + name="outputs", + field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输出数据"), + ), + ] 
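+ # IOField (pipeline.engine.models.fields) appears to serialize arbitrary Python objects into the column; default=None marks "never written" rather than an empty value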
diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0025_multicallbackdata.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0025_multicallbackdata.py new file mode 100644 index 00000000..baef270f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0025_multicallbackdata.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-03-03 07:30 +from __future__ import unicode_literals + +from django.db import migrations, models +import pipeline.engine.models.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0024_auto_20200224_0308"), + ] + + operations = [ + migrations.CreateModel( + name="MultiCallbackData", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="自增ID")), + ("schedule_id", models.CharField(max_length=64, verbose_name="回调服务ID")), + ("data", pipeline.engine.models.fields.IOField(verbose_name="回调数据")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0026_auto_20200610_1442.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0026_auto_20200610_1442.py new file mode 100644 index 00000000..dd55f124 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0026_auto_20200610_1442.py @@ -0,0 +1,43 @@ +# Generated by Django 2.2.8 on 2020-06-10 06:42 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('engine', '0025_multicallbackdata'), + ] + + operations = [ + migrations.AlterField( + model_name='pipelineprocess', + name='is_alive', + field=models.BooleanField(db_index=True, default=True, verbose_name='该 process 是否还有效'), + ), + migrations.AlterField( + model_name='pipelineprocess', + name='is_frozen', + field=models.BooleanField(db_index=True, default=False, verbose_name='该 process 是否被冻结'), + ), + migrations.AlterField( + model_name='pipelineprocess', + name='is_sleep', + field=models.BooleanField(db_index=True, default=False, verbose_name='该 process 是否正在休眠'), + ), + migrations.AlterField( + model_name='pipelineprocess', + name='root_pipeline_id', + field=models.CharField(db_index=True, max_length=32, verbose_name='根 pipeline 的 ID'), + ), + migrations.AlterField( + model_name='scheduleservice', + name='is_scheduling', + field=models.BooleanField(db_index=True, default=False, verbose_name='是否正在被调度'), + ), + migrations.AlterField( + model_name='noderelationship', + name='distance', + field=models.IntegerField(db_index=True, verbose_name='距离'), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0027_sendfailedcelerytask.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0027_sendfailedcelerytask.py new file mode 100644 index 00000000..917a8482 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0027_sendfailedcelerytask.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.28 on 2020-07-01 11:54 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0026_auto_20200610_1442"), + ] + + operations = [ + migrations.CreateModel( + name="SendFailedCeleryTask", + fields=[ + ( + "id", + models.BigAutoField( + primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("name", models.CharField(max_length=1024, verbose_name="任务名")), + ("kwargs", models.TextField(verbose_name="任务参数")), + ( + "type", + models.IntegerField( + choices=[ + (0, "empty"), + (1, "process"), + (2, "node"), + (3, 
"schedule"), + ], + verbose_name="任务类型", + ), + ), + ("extra_kwargs", models.TextField(verbose_name="额外参数")), + ("exec_trace", models.TextField(verbose_name="错误信息")), + ( + "created_at", + models.DateTimeField(auto_now_add=True, verbose_name="创建时间"), + ), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/0028_auto_20210812_0906.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/0028_auto_20210812_0906.py new file mode 100644 index 00000000..f68c2df9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/0028_auto_20210812_0906.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.19 on 2021-08-12 09:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("engine", "0027_sendfailedcelerytask"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineprocess", + name="parent_id", + field=models.CharField(db_index=True, default="", max_length=32, verbose_name="父 process 的 ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/engine/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/models/__init__.py new file mode 100644 index 00000000..7d0a3600 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.engine.models.core import * # noqa +from pipeline.engine.models.function import * # noqa +from pipeline.engine.models.data import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/core.py b/runtime/bamboo-pipeline/pipeline/engine/models/core.py new file mode 100644 index 00000000..3c1c65fc --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/core.py @@ -0,0 +1,1372 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import ujson as json +import contextlib +import logging +import traceback + +from celery import current_app +from celery.task.control import revoke +from django.db import models, transaction +from django.utils import timezone +from django.utils.translation import ugettext_lazy as _ + +from pipeline.conf import settings as pipeline_settings +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY +from pipeline.core.data.base import DataObject +from pipeline.core.pipeline import Pipeline +from pipeline.django_signal_valve import valve +from pipeline.engine import exceptions, signals, states, utils +from pipeline.engine.core import data as data_service +from pipeline.engine.models.fields import IOField +from pipeline.engine.utils import ActionResult, Stack, calculate_elapsed_time +from pipeline.log.models import LogEntry +from pipeline.utils.uniqid import node_uniqid, uniqid + +logger = logging.getLogger("celery") + +RERUN_MAX_LIMIT = pipeline_settings.PIPELINE_RERUN_MAX_TIMES +NAME_MAX_LENGTH = 64 + + +class ProcessSnapshotManager(models.Manager): + def create_snapshot(self, pipeline_stack, children, root_pipeline, subprocess_stack): + data = { + "_pipeline_stack": pipeline_stack, + "_subprocess_stack": subprocess_stack, + "_children": children, + "_root_pipeline": root_pipeline, + } + return self.create(data=data) + + +class ProcessSnapshot(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + data = IOField(verbose_name=_("pipeline 运行时数据")) + + objects = ProcessSnapshotManager() + + @property + def pipeline_stack(self): + return self.data["_pipeline_stack"] + + @property + def children(self): + return self.data["_children"] + + @property + def root_pipeline(self): + return self.data["_root_pipeline"] + + @property + def subprocess_stack(self): + return self.data["_subprocess_stack"] + + def clean_children(self): + self.data["_children"] = [] + + def prune_top_pipeline(self, keep_from, keep_to): + self.data["_pipeline_stack"].top().prune(keep_from, keep_to) + + +class ProcessManager(models.Manager): + def prepare_for_pipeline(self, pipeline): + """ + 为 pipeline 创建相应的 process 并进行一系列初始化 + :param pipeline: + :return: + """ + # init runtime info + snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=utils.Stack(), children=[], root_pipeline=pipeline, subprocess_stack=utils.Stack(), + ) + process = self.create( 
+ id=node_uniqid(), root_pipeline_id=pipeline.id, current_node_id=pipeline.start_event.id, snapshot=snapshot, + ) + process.push_pipeline(pipeline) + process.save() + return process + + def fork_child(self, parent, current_node_id, destination_id): + """ + 创建一个上下文信息与当前 parent 一致的 child process + :param parent: + :param current_node_id: + :param destination_id: + :return: + """ + # init runtime info + pipeline_stack = Stack([parent.top_pipeline]) + root_pipeline_shell = parent.root_pipeline.shell() + snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=pipeline_stack, + children=[], + root_pipeline=root_pipeline_shell, + subprocess_stack=parent.subprocess_stack, + ) + # refresh first, avoid keep the same ref to parent.top_pipeline + snapshot.refresh_from_db() + snapshot.prune_top_pipeline(current_node_id, destination_id) + snapshot.save() + # clear parent's change + snapshot.pipeline_stack.top().context.clear_change_keys() + + child = self.create( + id=node_uniqid(), + root_pipeline_id=parent.root_pipeline.id, + current_node_id=current_node_id, + destination_id=destination_id, + parent_id=parent.id, + snapshot=snapshot, + ) + for subproc_id in parent.subprocess_stack: + SubProcessRelationship.objects.add_relation(subproc_id, child.id) + + return child + + def process_ready(self, process_id, current_node_id=None, call_from_child=False): + """ + 发送一个进程已经准备好被调度的信号 + :param process_id: 已经准备好的进程 ID + :param current_node_id: 下一个执行的节点的 ID(可用于失败跳过) + :param call_from_child: 该信号是否由子进程发出 + :return: + """ + valve.send( + signals, + "process_ready", + sender=PipelineProcess, + process_id=process_id, + current_node_id=current_node_id, + call_from_child=call_from_child, + ) + + def batch_process_ready(self, process_id_list, pipeline_id): + """ + 发送批量唤醒进程的信号 + :param process_id_list: 需要被唤醒的进程 ID 列表 + :param pipeline_id: 这些进程相关的 root pipeline + :return: + """ + valve.send( + signals, + "batch_process_ready", + sender=PipelineProcess, + process_id_list=process_id_list, + pipeline_id=pipeline_id, + ) + + def child_process_ready(self, child_id): + """ + 发送子进程已经准备好被调度的信号 + :param child_id: 子进程 ID + :return: + """ + valve.send(signals, "child_process_ready", sender=PipelineProcess, child_id=child_id) + + def priority_for_process(self, process_id): + """ + 查询进程对应的 pipeline 的优先级 + :param process_id: 进程 ID + :return: + """ + return PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id).priority + + def queue_for_process(self, process_id): + """ + 查询进程对应的 pipeline 所使用的队列 + :param process_id: 进程 ID + :return: + """ + return PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id).queue + + def task_args_for_process(self, process_id): + pipeline_model = PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id) + + return {"priority": pipeline_model.priority, "queue": pipeline_model.queue} + + +class PipelineProcess(models.Model): + """ + @relationship with other models + 1. 
PipelineInstance + process = PipelineProcess.objects.get(root_pipeline_id=pipeline_inst.instance_id) + pipeline_inst = PipelineInstance.objects.get(instance_id=process.root_pipeline_id) + """ + + id = models.CharField(_("Process ID"), unique=True, primary_key=True, max_length=32) + root_pipeline_id = models.CharField(_("根 pipeline 的 ID"), max_length=32, db_index=True) + current_node_id = models.CharField(_("当前推进到的节点的 ID"), max_length=32, default="", db_index=True) + destination_id = models.CharField(_("遇到该 ID 的节点就停止推进"), max_length=32, default="") + parent_id = models.CharField(_("父 process 的 ID"), max_length=32, default="", db_index=True) + ack_num = models.IntegerField(_("收到子节点 ACK 的数量"), default=0) + need_ack = models.IntegerField(_("需要收到的子节点 ACK 的数量"), default=-1) + is_alive = models.BooleanField(_("该 process 是否还有效"), default=True, db_index=True) + is_sleep = models.BooleanField(_("该 process 是否正在休眠"), default=False, db_index=True) + is_frozen = models.BooleanField(_("该 process 是否被冻结"), default=False, db_index=True) + snapshot = models.ForeignKey(ProcessSnapshot, null=True, on_delete=models.SET_NULL) + + objects = ProcessManager() + + @property + def pipeline_stack(self): + return self.snapshot.pipeline_stack if self.snapshot else None + + @property + def children(self): + return self.snapshot.children if self.snapshot else None + + @property + def root_pipeline(self): + return self.snapshot.root_pipeline if self.snapshot else None + + @property + def top_pipeline(self): + return self.pipeline_stack.top() + + @property + def subprocess_stack(self): + return self.snapshot.subprocess_stack if self.snapshot else None + + @property + def in_subprocess(self): + return len(self.snapshot.pipeline_stack) > 1 if self.snapshot else False + + def push_pipeline(self, pipeline, is_subprocess=False): + """ + 将 pipeline 压入运行时栈中 + :param pipeline: 需要被压入栈中的 pipeline 对象 + :param is_subprocess: 该 pipeline 是否是子流程 + :return: + """ + self.pipeline_stack.push(pipeline) + if is_subprocess: + self.subprocess_stack.push(pipeline.id) + SubProcessRelationship.objects.add_relation(pipeline.id, self.id) + + def pop_pipeline(self): + """ + 从运行时栈中弹出一个 pipeline + :return: + """ + pipeline = self.pipeline_stack.pop() + if self.subprocess_stack: + subproc_id = self.subprocess_stack.pop() + SubProcessRelationship.objects.delete_relation(subproc_id, self.id) + return pipeline + + def join(self, children): + """ + 令父进程等待子进程 + :param children: 需要等待的子进程列表 + :return: + """ + self.need_ack = len(children) + for child in children: + self.children.append(child.id) + self.save() + + def root_sleep_check(self): + """ + 检测 root pipeline 的状态判断当前进程是否需要休眠 + :return: + """ + root_state = Status.objects.state_for(self.root_pipeline.id) + if root_state in states.SLEEP_STATES: + return True, root_state + if root_state == states.BLOCKED: + # 子进程不会因为父进程进入了 BLOCKED 状态就进入睡眠 + return not self.parent_id, root_state + return False, root_state + + def subproc_sleep_check(self): + """ + 检测当前子流程栈中所有子流程的状态判断当前进程是否需要休眠 + :return: + """ + status = Status.objects.filter(id__in=self.subprocess_stack) + status_map = {s.id: s.state for s in status} + # 记录第一个处于暂停状态之前的所有子流程,用于子流程状态的修改 + before_suspended = [] + for subproc_id in self.subprocess_stack: + if status_map[subproc_id] == states.SUSPENDED: + return True, before_suspended + else: + before_suspended.append(subproc_id) + return False, before_suspended + + def freeze(self): + """ + 冻结当前进程 + :return: + """ + with transaction.atomic(): + self.__class__.objects.select_for_update().get(id=self.id) + 
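# the row lock taken above serializes concurrent freeze/unfreeze calls on this process +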
self.is_frozen = True + self.save() + ProcessCeleryTask.objects.unbind(self.id) + + def unfreeze(self): + """ + 解冻当前进程 + :return: + """ + with transaction.atomic(): + self.__class__.objects.select_for_update().get(id=self.id) + self.is_frozen = False + self.save(save_snapshot=False) + valve.send(signals, "process_unfreeze", sender=PipelineProcess, process_id=self.id) + + def sleep(self, do_not_save=False, adjust_status=False, adjust_scope=None): + """ + 休眠当前进程 + :param do_not_save: 是否需要保存进程信息 + :param adjust_status: 是否需要调整 pipeline 中当前节点父级节点的状态 + :param adjust_scope: 状态调整的范围 + :return: + """ + if adjust_status: + self.adjust_status(adjust_scope) + if do_not_save: + return + with transaction.atomic(): + self.__class__.objects.select_for_update().get(id=self.id) + self.is_sleep = True + self.save() + ProcessCeleryTask.objects.unbind(self.id) + # dispatch children + for child_id in self.children: + PipelineProcess.objects.child_process_ready(child_id) + + def adjust_status(self, adjust_scope=None): + """ + 根据当前节点和子流程的状态来调整父级节点的状态 + :param adjust_scope: 子流程状态调整范围 + :return: + """ + node_state = Status.objects.state_for(self.current_node_id, may_not_exist=True) + pipeline_state = Status.objects.state_for(self.root_pipeline.id, may_not_exist=True) + subproc_states = Status.objects.states_for(self.subprocess_stack) + + if node_state in {states.FAILED, states.SUSPENDED}: + # if current node failed or suspended + Status.objects.batch_transit( + id_list=self.subprocess_stack, state=states.BLOCKED, from_state=states.RUNNING, + ) + Status.objects.transit(self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True) + elif states.SUSPENDED in set(subproc_states): + # if any subprocess suspended + Status.objects.batch_transit(id_list=adjust_scope, state=states.BLOCKED, from_state=states.RUNNING) + Status.objects.transit(self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True) + elif pipeline_state == states.SUSPENDED: + # if root pipeline suspended + Status.objects.batch_transit( + id_list=self.subprocess_stack, state=pipeline_state, from_state=states.RUNNING, + ) + + def wake_up(self): + """ + 唤醒当前进程 + :return: + """ + with transaction.atomic(): + self.__class__.objects.select_for_update().get(id=self.id) + self.is_sleep = False + self.save(save_snapshot=False) + + def destroy(self): + """ + 销毁当前进程及其上下文数据 + :return: + """ + self.is_alive = False + self.current_node_id = "" + snapshot = self.snapshot + self.snapshot = None + + self.save() + snapshot.delete() + ProcessCeleryTask.objects.destroy(self.id) + + def destroy_all(self): + """ + 销毁当前进程并递归销毁其所有子进程 + :return: + """ + _destroy_recursively(self) + + def save(self, save_snapshot=True, **kwargs): + if save_snapshot and self.snapshot: + self.snapshot.save() + return super(PipelineProcess, self).save(**kwargs) + + def blocked_by_failure_or_suspended(self): + """ + 检测当前进程是否因为节点失败而休眠 + :return: + """ + if not self.is_sleep: + return False + if Status.objects.state_for(self.current_node_id, may_not_exist=True) in { + states.FAILED, + states.SUSPENDED, + }: + return True + if not self.children: + return False + children = self.__class__.objects.filter(id__in=self.children) + result = [] + for child in children: + result.append(child.blocked_by_failure_or_suspended()) + return True in result + + def sync_with_children(self): + """ + 与子进程同步数据 + :return: + """ + for child_id in self.children: + context = data_service.get_object(self._context_key(child_id)) + parent_data = data_service.get_object(self._data_key(child_id)) + if context is None or 
parent_data is None: + raise exceptions.ChildDataSyncError( + "sync data with children {} failed, context({}) or parent_data({}) is None".format( + child_id, context, parent_data + ) + ) + self.top_pipeline.context.sync_change(context) + # self.top_pipeline.context.update_global_var(context.variables) + self.top_pipeline.data.update_outputs(parent_data.get_outputs()) + self.clean_children() # remove all children + + def destroy_and_wake_up_parent(self, destination_id): + """ + 销毁当前进程并尝试唤醒父进程 + :param destination_id: 当前进程终点节点 ID + :return: + """ + # save sync data + data_service.set_object(self._context_key(), self.top_pipeline.context) + data_service.set_object(self._data_key(), self.top_pipeline.data) + + self.__class__.objects.filter(id=self.parent_id).update(ack_num=models.F("ack_num") + 1) + can_wake_up = False + + with transaction.atomic(): + parent = self.__class__.objects.select_for_update().get(id=self.parent_id) + + if parent.need_ack != -1: + if parent.ack_num == parent.need_ack: + # try to wake up parent + parent.need_ack = -1 + parent.ack_num = 0 + can_wake_up = True + else: + if parent.blocked_by_failure_or_suspended(): + Status.objects.batch_transit( + id_list=self.subprocess_stack, state=states.BLOCKED, from_state=states.RUNNING, + ) + Status.objects.transit( + id=self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True, + ) + + parent.save(save_snapshot=False) + + if can_wake_up: + self.__class__.objects.process_ready(parent.id, current_node_id=destination_id, call_from_child=True) + + SubProcessRelationship.objects.delete_relation(None, self.id) + self.destroy() + + def _context_key(self, process_id=None): + return "%s_context" % (process_id if process_id else self.id) + + def _data_key(self, process_id=None): + return "%s_data" % (process_id if process_id else self.id) + + def can_be_waked(self): + """ + 检测当前进程是否能够被唤醒 + :return: + """ + if not self.is_sleep or not self.is_alive: + return False + if self.need_ack != -1 and self.need_ack != self.ack_num: + return False + return True + + def clean_children(self): + """ + 清空当前进程的 children + :return: + """ + for child_id in self.children: + # 删除子进程的数据 + data_service.del_object(self._context_key(child_id)) + data_service.del_object(self._data_key(child_id)) + + self.snapshot.clean_children() + self.snapshot.save() + + def exit_gracefully(self, e): + """ + 在遇到无法处理的异常时优雅的退出当前进程 + :param e: + :return: + """ + ex_data = traceback.format_exc() + try: + current_node = self.top_pipeline.node(self.current_node_id) + except IndexError: + current_node = self.root_pipeline.node(self.current_node_id) + + # current_node may be a end_event in pipeline which had been popped + if current_node is not None: + result = Status.objects.fail(current_node, ex_data=ex_data) + else: + result = Status.objects.raw_fail(self.current_node_id, ex_data=ex_data) + + if not result.result: + logger.error( + "process({process_id}) exit_gracefully status transit failed, current_node :{node_id}".format( + process_id=self.id, node_id=current_node.id if current_node else self.current_node_id, + ) + ) + self.sleep(adjust_status=True) + + def refresh_current_node(self, current_node_id): + """ + 刷新当前节点的 ID + :param current_node_id: + :return: + """ + self.__class__.objects.filter(id=self.id).update(current_node_id=current_node_id) + + def revoke_subprocess(self): + if self.subprocess_stack: + Status.objects.batch_transit(id_list=list(self.subprocess_stack), state=states.REVOKED) + + if self.children: + for child_id in self.children: + 
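# recurse so that subprocesses spawned by child processes are revoked as well +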
PipelineProcess.objects.get(id=child_id).revoke_subprocess() + + def take_snapshot(self): + """ + 保存当前进程的快照对象 + :return: + """ + self.snapshot.save() + + +def _destroy_recursively(process): + if not process.is_alive: + return + if process.children: + for child_id in process.children: + child = PipelineProcess.objects.get(id=child_id) + _destroy_recursively(child) + process.destroy() + else: + process.destroy() + + +class PipelineModelManager(models.Manager): + def prepare_for_pipeline(self, pipeline, process, priority, queue=""): + return self.create(id=pipeline.id, process=process, priority=priority, queue=queue) + + def pipeline_ready(self, process_id): + valve.send(signals, "pipeline_ready", sender=Pipeline, process_id=process_id) + + def priority_for_pipeline(self, pipeline_id): + return self.get(id=pipeline_id).priority + + def task_args_for_pipeline(self, pipeline_id): + model = self.get(id=pipeline_id) + + return {"priority": model.priority, "queue": model.queue} + + +class PipelineModel(models.Model): + id = models.CharField("pipeline ID", unique=True, primary_key=True, max_length=32) + process = models.ForeignKey(PipelineProcess, null=True, on_delete=models.SET_NULL) + priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) + queue = models.CharField(_("流程使用的队列名"), max_length=512, default="") + + objects = PipelineModelManager() + + +class RelationshipManager(models.Manager): + def build_relationship(self, ancestor_id, descendant_id): + if self.filter(ancestor_id=ancestor_id, descendant_id=descendant_id).exists(): + # already build + return + ancestors = self.filter(descendant_id=ancestor_id) + relationships = [NodeRelationship(ancestor_id=descendant_id, descendant_id=descendant_id, distance=0)] + for ancestor in ancestors: + rel = NodeRelationship( + ancestor_id=ancestor.ancestor_id, descendant_id=descendant_id, distance=ancestor.distance + 1, + ) + relationships.append(rel) + self.bulk_create(relationships) + + +class NodeRelationship(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + ancestor_id = models.CharField(_("祖先 ID"), max_length=32, db_index=True) + descendant_id = models.CharField(_("后代 ID"), max_length=32, db_index=True) + distance = models.IntegerField(_("距离"), db_index=True) + + objects = RelationshipManager() + + def __unicode__(self): + return str("#{} -({})-> #{}".format(self.ancestor_id, self.distance, self.descendant_id,)) + + +class StatusManager(models.Manager): + def transit( + self, id, to_state, is_pipeline=False, appoint=False, start=False, name="", version=None, unchanged_pass=False, + ): + """ + 尝试改变某个节点的状态 + :param id: 节点 ID + :param to_state: 目标状态 + :param is_pipeline: 该节点是否是 pipeline + :param appoint: 该动作是否由用户发起(非引擎内部操作) + :param start: 是否刷新其开始时间 + :param name: 节点名称 + :param version: 节点版本 + :param unchanged_pass: 当 to_state 与当前节点状态相同时则视为操作成功 + :return: + """ + defaults = { + "name": name, + "state": to_state, + "version": uniqid(), + } + if start: + now = timezone.now() + defaults["started_time"] = now + defaults["state_refresh_at"] = now + status, created = self.get_or_create(id=id, defaults=defaults) + + # reservation or first creation + if created: + return ActionResult(result=True, message="success", extra=status) + + with transaction.atomic(): + kwargs = {"id": id} + if version: + kwargs["version"] = version + + try: + status = self.select_for_update().get(**kwargs) + + except Status.DoesNotExist: + return ActionResult(result=False, message="node not exists or not be executed yet") + + if unchanged_pass 
and status.state == to_state: + return ActionResult(result=True, message="success", extra=status) + + if states.can_transit( + from_state=status.state, to_state=to_state, is_pipeline=is_pipeline, appoint=appoint, + ): + + # 在冻结状态下不能改变 pipeline 的状态 + if is_pipeline: + subprocess_rel = SubProcessRelationship.objects.filter(subprocess_id=id) + if subprocess_rel: + process = PipelineProcess.objects.get(id=subprocess_rel[0].process_id) + if process.is_frozen: + return ActionResult(result=False, message="engine is frozen, can not perform operation",) + + processes = PipelineProcess.objects.filter(root_pipeline_id=id) + if processes and processes[0].is_frozen: + return ActionResult(result=False, message="engine is frozen, can not perform operation",) + + if name: + status.name = name + if to_state in states.ARCHIVED_STATES: + status.archived_time = timezone.now() + + # from FINISHED to RUNNING + if states.is_rerunning(from_state=status.state, to_state=to_state): + history = History.objects.record(status, is_rerunning=True) + if history: + LogEntry.objects.link_history(node_id=status.id, history_id=history.id) + status.loop += 1 + status.skip = False + status.version = uniqid() + + # reset started_time after record last status + if start: + status.started_time = timezone.now() + status.state = to_state + status.state_refresh_at = timezone.now() + status.save() + return ActionResult(result=True, message="success", extra=status) + else: + return ActionResult( + result=False, + message="can't transit state({}) from {} to {}".format(id, status.state, to_state), + extra=status, + ) + + def batch_transit(self, id_list, state, from_state=None, exclude=None): + """ + 批量改变节点状态,仅用于子流程的状态修改 + :param id_list: 子流程 ID 列表 + :param state: 目标状态 + :param from_state: 起始状态 + :param exclude: 不需要改变状态的子流程 ID 列表 + :return: + """ + if not id_list: + return + if not exclude: + exclude = [] + + id_list = set(id_list) + exclude = set(exclude) + kwargs = {"id__in": [i for i in id_list if i not in exclude]} + if from_state: + kwargs["state"] = from_state + with transaction.atomic(): + self.select_for_update().filter(**kwargs).update(state=state) + + def state_for(self, id, may_not_exist=False, version=None): + """ + 获取某个节点的状态 + :param id: 节点 ID + :param may_not_exist: 该节点是否可能不存在(未执行到) + :param version: 节点版本 + :return: + """ + kwargs = {"id": id} + if version: + kwargs["version"] = version + if may_not_exist: + try: + return self.get(**kwargs).state + except Status.DoesNotExist: + return None + return self.get(**kwargs).state + + def version_for(self, id): + return self.get(id=id).version + + def states_for(self, id_list): + return [s.state for s in self.filter(id__in=id_list)] + + def prepare_for_pipeline(self, pipeline): + cls_str = str(pipeline.__class__) + cls_name = pipeline.__class__.__name__[:NAME_MAX_LENGTH] + self.create( + id=pipeline.id, state=states.READY, name=cls_str if len(cls_str) <= NAME_MAX_LENGTH else cls_name, + ) + + def fail(self, node, ex_data): + action_res = self.transit(node.id, states.FAILED) + + if not action_res.result: + return action_res + + Data.objects.write_node_data(node, ex_data) + return action_res + + def raw_fail(self, node_id, ex_data): + action_res = self.transit(node_id, states.FAILED) + + if not action_res.result: + return action_res + + Data.objects.write_ex_data(node_id, ex_data) + return action_res + + def finish(self, node, error_ignorable=False): + action_res = self.transit(node.id, states.FINISHED) + + if not action_res.result: + return action_res + + 
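# the transition succeeded: persist the node's data, then record whether this FINISHED state came from an ignored error +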
Data.objects.write_node_data(node) + if error_ignorable: + s = Status.objects.get(id=node.id) + s.error_ignorable = True + s.save() + + return action_res + + def skip(self, process, node): + s = Status.objects.get(id=node.id) # 一定要先取出来,不然 archive time 会被覆盖 + if RERUN_MAX_LIMIT != 0 and s.loop > RERUN_MAX_LIMIT: + return ActionResult( + result=False, + message="rerun times exceed max limit: {limit}, can not skip".format(limit=RERUN_MAX_LIMIT), + ) + + action_res = self.transit(id=node.id, to_state=states.FINISHED, appoint=True) + if not action_res.result: + return action_res + + history = History.objects.record(s) + LogEntry.objects.link_history(node_id=node.id, history_id=history.id) + + s.refresh_from_db() + s.started_time = s.archived_time + + s.skip = True + s.save() + + # 由于节点执行失败,数据可能尚未写入当前的 Data 对象,所以要在这里做一次写入操作 + node.skip() + Data.objects.write_node_data(node) + + self.recover_from_block(process.root_pipeline.id, process.subprocess_stack) + signals.node_skip_call.send(sender=Status, process=process, node=node) + + return action_res + + def retry(self, process, node, inputs): + if RERUN_MAX_LIMIT != 0 and self.get(id=node.id).loop > RERUN_MAX_LIMIT: + return ActionResult( + result=False, + message="rerun times exceed max limit: {limit}, can not retry".format(limit=RERUN_MAX_LIMIT), + ) + + action_res = self.transit(id=node.id, to_state=states.READY, appoint=True) + if not action_res.result: + return action_res + + # add retry times + s = Status.objects.get(id=node.id) + s.version = uniqid() + history = History.objects.record(s) + LogEntry.objects.link_history(node_id=node.id, history_id=history.id) + s.retry += 1 + s.started_time = None + s.archived_time = None + s.save() + + # update inputs + if inputs: + new_data = DataObject(inputs=inputs, outputs={}) + node.data = new_data + Data.objects.write_node_data(node) + + # mark + node.next_exec_is_retry() + + self.recover_from_block(process.root_pipeline.id, process.subprocess_stack) + signals.node_retry_ready.send(sender=Status, process=process, node=node) + + # because node may be updated + process.save() + + return action_res + + def recover_from_block(self, root_pipeline_id, subprocess_stack): + Status.objects.batch_transit(id_list=subprocess_stack, state=states.RUNNING, from_state=states.BLOCKED) + Status.objects.transit(id=root_pipeline_id, to_state=states.READY, is_pipeline=True) + + @contextlib.contextmanager + def lock(self, id): + with transaction.atomic(): + self.select_for_update().get(id=id) + yield + + +class Status(models.Model): + id = models.CharField(_("节点 ID"), unique=True, primary_key=True, max_length=32) + state = models.CharField(_("状态"), max_length=10) + name = models.CharField(_("节点名称"), max_length=NAME_MAX_LENGTH, default="") + retry = models.IntegerField(_("重试次数"), default=0) + loop = models.IntegerField(_("循环次数"), default=1) + skip = models.BooleanField(_("是否跳过"), default=False) + error_ignorable = models.BooleanField(_("是否出错后自动忽略"), default=False) + created_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) + started_time = models.DateTimeField(_("开始时间"), null=True) + archived_time = models.DateTimeField(_("归档时间"), null=True) + version = models.CharField(_("版本"), max_length=32) + state_refresh_at = models.DateTimeField(_("上次状态更新的时间"), null=True) + + objects = StatusManager() + + class Meta: + ordering = ["-created_time"] + + def is_state_for_subproc(self): + return self.name.endswith("SubProcess") + + +class DataManager(models.Manager): + def write_node_data(self, node, ex_data=None): + 
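+        # get_or_create guarantees a Data row exists for this node before inputs/outputs are written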
data, created = self.get_or_create(id=node.id) + if hasattr(node, "data") and node.data: + data.inputs = node.data.get_inputs() + outputs = node.data.get_outputs() + ex_data = outputs.pop("ex_data", ex_data) + data.outputs = outputs + data.ex_data = ex_data + data.save() + + def write_ex_data(self, node_id, ex_data=None): + data, created = self.get_or_create(id=node_id) + data.ex_data = ex_data + data.save() + + def forced_fail(self, node_id, ex_data=""): + data, created = self.get_or_create(id=node_id) + data.outputs = { + "_forced_failed": True, + } + data.ex_data = ex_data + data.save() + + +class Data(models.Model): + id = models.CharField(_("节点 ID"), unique=True, primary_key=True, max_length=32) + inputs = IOField(verbose_name=_("输入数据"), default=None) + outputs = IOField(verbose_name=_("输出数据"), default=None) + ex_data = IOField(verbose_name=_("异常数据"), default=None) + + objects = DataManager() + + +class HistoryData(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + inputs = IOField(verbose_name=_("输入数据"), default=None) + outputs = IOField(verbose_name=_("输出数据"), default=None) + ex_data = IOField(verbose_name=_("异常数据"), default=None) + + objects = DataManager() + + +class MultiCallbackData(models.Model): + id = models.BigAutoField(_("自增ID"), primary_key=True) + schedule_id = models.CharField(_("回调服务ID"), max_length=NAME_MAX_LENGTH) + data = IOField(verbose_name=_("回调数据")) + + +DO_NOT_RECORD_WHEN_RERUN = frozenset({""}) + + +class HistoryManager(models.Manager): + def record(self, status, is_rerunning=False): + if is_rerunning and status.name in DO_NOT_RECORD_WHEN_RERUN: + return None + + data = Data.objects.get(id=status.id) + history_data = HistoryData.objects.create(inputs=data.inputs, outputs=data.outputs, ex_data=data.ex_data) + return self.create( + identifier=status.id, + started_time=status.started_time, + archived_time=status.archived_time, + data=history_data, + loop=status.loop, + skip=status.skip, + ) + + def get_histories(self, identifier, loop=None): + filters = {"identifier": identifier} + if loop is not None: + filters["loop"] = loop + histories = self.filter(**filters).order_by("started_time") + data = [ + { + "history_id": item.id, + "started_time": item.started_time, + "archived_time": item.archived_time, + "elapsed_time": calculate_elapsed_time(item.started_time, item.archived_time), + "inputs": item.data.inputs, + "outputs": item.data.outputs, + "ex_data": item.data.ex_data, + "loop": item.loop, + "skip": item.skip, + } + for item in histories + ] + return data + + +class History(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + identifier = models.CharField(_("节点 id"), max_length=32, db_index=True) + started_time = models.DateTimeField(_("开始时间")) + archived_time = models.DateTimeField(_("结束时间")) + loop = models.IntegerField(_("循环次数"), default=1) + skip = models.BooleanField(_("是否跳过"), default=False) + + data = models.ForeignKey(HistoryData, null=True, on_delete=models.SET_NULL) + + objects = HistoryManager() + + +class ScheduleServiceManager(models.Manager): + def set_schedule(self, activity_id, service_act, process_id, version, parent_data): + wait_callback = service_act.service.interval is None + multi_callback_enabled = service_act.service.multi_callback_enabled() + schedule = self.create( + id="{}{}".format(activity_id, version), + activity_id=activity_id, + service_act=service_act, + process_id=process_id, + wait_callback=wait_callback, + multi_callback_enabled=multi_callback_enabled, + version=version, + ) + 
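+        # persist parent_data under the schedule id so it can be fetched again at schedule time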
data_service.set_schedule_data(schedule.id, parent_data) + + if not wait_callback: + count_down = service_act.service.interval.next() + valve.send( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=process_id, + schedule_id=schedule.id, + countdown=count_down, + ) + + return schedule + + def schedule_for(self, activity_id, version): + return self.get(id="{}{}".format(activity_id, version)) + + def delete_schedule(self, activity_id, version): + return self.filter(activity_id=activity_id, version=version).delete() + + def update_celery_info(self, id, lock, celery_id, schedule_date, is_scheduling=False): + return self.filter(id=id, celery_info_lock=lock).update( + celery_info_lock=models.F("celery_info_lock") + 1, + celery_id=celery_id, + schedule_date=schedule_date, + is_scheduling=is_scheduling, + ) + + +class ScheduleService(models.Model): + SCHEDULE_ID_SPLIT_DIVISION = 32 + + id = models.CharField(_("ID 节点ID+version"), max_length=NAME_MAX_LENGTH, unique=True, primary_key=True) + activity_id = models.CharField(_("节点 ID"), max_length=32, db_index=True) + process_id = models.CharField(_("Pipeline 进程 ID"), max_length=32) + schedule_times = models.IntegerField(_("被调度次数"), default=0) + wait_callback = models.BooleanField(_("是否是回调型调度"), default=False) + multi_callback_enabled = models.BooleanField(_("是否支持多次回调"), default=False) + callback_data = IOField(verbose_name=_("回调数据"), default=None) + service_act = IOField(verbose_name=_("待调度服务")) + is_finished = models.BooleanField(_("是否已完成"), default=False) + version = models.CharField(_("Activity 的版本"), max_length=32, db_index=True) + is_scheduling = models.BooleanField(_("是否正在被调度"), default=False, db_index=True) + + objects = ScheduleServiceManager() + + def set_next_schedule(self): + if self.wait_callback: + raise exceptions.InvalidOperationException("can't set next schedule on callback schedule.") + count_down = self.service_act.service.interval.next() + self.is_scheduling = False + self.save() + ScheduleCeleryTask.objects.unbind(self.id) + + valve.send( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=self.process_id, + schedule_id=self.id, + countdown=count_down, + ) + + def destroy(self): + schedule_id = self.id + self.delete() + data_service.delete_parent_data(schedule_id) + ScheduleCeleryTask.objects.destroy(schedule_id) + + def finish(self): + self.is_finished = True + self.service_act = None + self.is_scheduling = False + self.save() + ScheduleCeleryTask.objects.destroy(self.id) + + def callback(self, callback_data, process_id): + if not self.wait_callback: + raise exceptions.InvalidOperationException("can't callback a poll schedule.") + + if self.multi_callback_enabled: + callback_data = MultiCallbackData.objects.create(schedule_id=self.id, data=callback_data) + valve.send( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=process_id, + schedule_id=self.id, + data_id=callback_data.id, + countdown=0, + ) + else: + self.callback_data = callback_data + self.save() + valve.send( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=process_id, + schedule_id=self.id, + countdown=0, + ) + + def is_one_time_callback(self): + return self.wait_callback and not self.multi_callback_enabled + + +class SubProcessRelationshipManager(models.Manager): + def add_relation(self, subprocess_id, process_id): + return self.create(subprocess_id=subprocess_id, process_id=process_id) + + def delete_relation(self, subprocess_id, process_id): + kwargs = {} + if subprocess_id: + 
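+            # filter only by the ids that were actually provided; either argument may be None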
kwargs["subprocess_id"] = subprocess_id + if process_id: + kwargs["process_id"] = process_id + self.filter(**kwargs).delete() + + def get_relate_process(self, subprocess_id): + qs = self.filter(subprocess_id=subprocess_id) + proc_ids = [i.process_id for i in qs] + return PipelineProcess.objects.filter(id__in=proc_ids) + + +class SubProcessRelationship(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + subprocess_id = models.CharField(_("子流程 ID"), max_length=32, db_index=True) + process_id = models.CharField(_("对应的进程 ID"), max_length=32) + + objects = SubProcessRelationshipManager() + + +class ProcessCeleryTaskManager(models.Manager): + def bind(self, process_id, celery_task_id): + rel, created = self.get_or_create(process_id=process_id, defaults={"celery_task_id": celery_task_id}) + if not created: + rel.celery_task_id = celery_task_id + rel.save() + + def unbind(self, process_id): + self.filter(process_id=process_id).update(celery_task_id="") + + def destroy(self, process_id): + self.filter(process_id=process_id).delete() + + def start_task(self, process_id, task, kwargs, record_error=True): + if record_error: + with SendFailedCeleryTask.watch( + name=task.name, + kwargs=kwargs, + type=SendFailedCeleryTask.TASK_TYPE_PROCESS, + extra_kwargs={"process_id": process_id}, + ): + task_id = task.apply_async(**kwargs) + else: + task_id = task.apply_async(**kwargs) + self.bind(process_id, task_id) + + def revoke(self, process_id, kill=False): + task = self.get(process_id=process_id) + kwargs = {} if not kill else {"signal": "SIGKILL"} + revoke(task.celery_task_id, terminate=True, **kwargs) + self.destroy(process_id) + + +class ProcessCeleryTask(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + process_id = models.CharField(_("pipeline 进程 ID"), max_length=32, unique=True, db_index=True) + celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="") + + objects = ProcessCeleryTaskManager() + + +class ScheduleCeleryTaskManager(models.Manager): + def bind(self, schedule_id, celery_task_id): + rel, created = self.get_or_create(schedule_id=schedule_id, defaults={"celery_task_id": celery_task_id}) + if not created: + rel.celery_task_id = celery_task_id + rel.save() + + def unbind(self, schedule_id): + self.filter(schedule_id=schedule_id).update(celery_task_id="") + + def destroy(self, schedule_id): + self.filter(schedule_id=schedule_id).delete() + + def start_task(self, schedule_id, task, kwargs, record_error=True): + if record_error: + with SendFailedCeleryTask.watch( + name=task.name, + kwargs=kwargs, + type=SendFailedCeleryTask.TASK_TYPE_SCHEDULE, + extra_kwargs={"schedule_id": schedule_id}, + ): + task_id = task.apply_async(**kwargs) + else: + task_id = task.apply_async(**kwargs) + self.bind(schedule_id, task_id) + + +class ScheduleCeleryTask(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + schedule_id = models.CharField(_("schedule ID"), max_length=NAME_MAX_LENGTH, unique=True, db_index=True) + celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="") + + objects = ScheduleCeleryTaskManager() + + +class NodeCeleryTaskManager(models.Manager): + def bind(self, node_id, celery_task_id): + rel, created = self.update_or_create(node_id=node_id, defaults={"celery_task_id": celery_task_id}) + if not created: + rel.celery_task_id = celery_task_id + rel.save() + + def unbind(self, node_id): + self.filter(node_id=node_id).update(celery_task_id="") + + def destroy(self, node_id): + 
+        self.filter(node_id=node_id).delete()
+
+    def start_task(self, node_id, task, kwargs, record_error=True):
+        if record_error:
+            with SendFailedCeleryTask.watch(
+                name=task.name,
+                kwargs=kwargs,
+                type=SendFailedCeleryTask.TASK_TYPE_NODE,
+                extra_kwargs={"node_id": node_id},
+            ):
+                task_id = task.apply_async(**kwargs)
+        else:
+            task_id = task.apply_async(**kwargs)
+        self.bind(node_id, task_id)
+
+    def revoke(self, node_id):
+        task = self.get(node_id=node_id)
+        revoke(task.celery_task_id)
+        self.destroy(node_id)
+
+
+class NodeCeleryTask(models.Model):
+    id = models.BigAutoField(_("ID"), primary_key=True)
+    node_id = models.CharField(_("节点 ID"), max_length=32, unique=True, db_index=True)
+    celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="")
+
+    objects = NodeCeleryTaskManager()
+
+
+class SendFailedCeleryTaskManager(models.Manager):
+    def record(self, name, kwargs, type, extra_kwargs, exec_trace):
+        save_extra_kwargs = extra_kwargs
+        save_kwargs = kwargs
+
+        if not isinstance(save_extra_kwargs, str):
+            save_extra_kwargs = json.dumps(save_extra_kwargs)
+
+        if not isinstance(save_kwargs, str):
+            save_kwargs = json.dumps(save_kwargs)
+
+        return self.create(
+            name=name, kwargs=save_kwargs, type=type, extra_kwargs=save_extra_kwargs, exec_trace=exec_trace,
+        )
+
+    def resend(self, id):
+        self.get(id=id).resend()
+
+
+class SendFailedCeleryTask(models.Model):
+
+    TASK_TYPE_EMPTY = 0
+    TASK_TYPE_PROCESS = 1
+    TASK_TYPE_NODE = 2
+    TASK_TYPE_SCHEDULE = 3
+
+    TASK_TYPE_CHOICES = (
+        (TASK_TYPE_EMPTY, "empty"),
+        (TASK_TYPE_PROCESS, "process"),
+        (TASK_TYPE_NODE, "node"),
+        (TASK_TYPE_SCHEDULE, "schedule"),
+    )
+
+    id = models.BigAutoField(_("ID"), primary_key=True)
+    name = models.CharField(_("任务名"), max_length=1024)
+    kwargs = models.TextField(_("任务参数"))
+    type = models.IntegerField(_("任务类型"), choices=TASK_TYPE_CHOICES)
+    extra_kwargs = models.TextField(_("额外参数"))
+    exec_trace = models.TextField(_("错误信息"))
+    created_at = models.DateTimeField(_("创建时间"), auto_now_add=True)
+
+    objects = SendFailedCeleryTaskManager()
+
+    @property
+    def kwargs_dict(self):
+        return json.loads(self.kwargs)
+
+    @property
+    def extra_kwargs_dict(self):
+        return json.loads(self.extra_kwargs)
+
+    def resend(self):
+        try:
+            task = current_app.tasks[self.name]
+
+            if self.type == self.TASK_TYPE_EMPTY:
+                task.apply_async(**self.kwargs_dict)
+            elif self.type == self.TASK_TYPE_PROCESS:
+                ProcessCeleryTask.objects.start_task(
+                    process_id=self.extra_kwargs_dict["process_id"],
+                    task=task,
+                    kwargs=self.kwargs_dict,
+                    record_error=False,
+                )
+            elif self.type == self.TASK_TYPE_NODE:
+                NodeCeleryTask.objects.start_task(
+                    node_id=self.extra_kwargs_dict["node_id"], task=task, kwargs=self.kwargs_dict, record_error=False,
+                )
+            elif self.type == self.TASK_TYPE_SCHEDULE:
+                ScheduleCeleryTask.objects.start_task(
+                    schedule_id=self.extra_kwargs_dict["schedule_id"],
+                    task=task,
+                    kwargs=self.kwargs_dict,
+                    record_error=False,
+                )
+            else:
+                raise TypeError("unsupported type: {}.".format(self.type))
+        except Exception as e:
+            logger.exception("failed celery task resend error.")
+            raise e
+        else:
+            self.delete()
+
+    @classmethod
+    @contextlib.contextmanager
+    def watch(cls, name, kwargs, type, extra_kwargs):
+        try:
+            yield
+        except Exception:
+            logger.exception("celery task({}) watcher caught an error.".format(name))
+            cls.objects.record(
+                name=name, kwargs=kwargs, type=type, extra_kwargs=extra_kwargs, exec_trace=traceback.format_exc(),
+            )
+            # raise a specific exception to indicate that the failed task send has been caught
raise exceptions.CeleryFailedTaskCatchException(name) diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/data.py b/runtime/bamboo-pipeline/pipeline/engine/models/data.py new file mode 100644 index 00000000..13f1b418 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/data.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db import models, transaction +from django.utils.translation import ugettext_lazy as _ + +from pipeline.engine.models.fields import IOField + + +class DataSnapshotManager(models.Manager): + def set_object(self, key, obj): + # do not use update_or_create, prevent of deadlock + with transaction.atomic(): + if self.get_object(key): + self.filter(key=key).update(obj=obj) + else: + self.create(key=key, obj=obj) + return True + + def get_object(self, key): + try: + return self.get(key=key).obj + except DataSnapshot.DoesNotExist: + return None + + def del_object(self, key): + try: + self.get(key=key).delete() + return True + except DataSnapshot.DoesNotExist: + return False + + +class DataSnapshot(models.Model): + key = models.CharField(_("对象唯一键"), max_length=255, primary_key=True) + obj = IOField(verbose_name=_("对象存储字段")) + + objects = DataSnapshotManager() diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/fields.py b/runtime/bamboo-pipeline/pipeline/engine/models/fields.py new file mode 100644 index 00000000..7d263cfa --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/fields.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import pickle +import traceback +import zlib + +from django.db import models + +from pipeline.utils.utils import convert_bytes_to_str +from . 
import nr_pickle + + +class IOField(models.BinaryField): + def __init__(self, compress_level=6, *args, **kwargs): + super(IOField, self).__init__(*args, **kwargs) + self.compress_level = compress_level + + def get_prep_value(self, value): + value = super(IOField, self).get_prep_value(value) + try: + serialized = zlib.compress(pickle.dumps(value), self.compress_level) + except RecursionError: + serialized = zlib.compress(nr_pickle.dumps(value), self.compress_level) + return serialized + + def to_python(self, value): + try: + value = super(IOField, self).to_python(value) + return pickle.loads(zlib.decompress(value)) + except UnicodeDecodeError: + # py2 pickle data process + return convert_bytes_to_str(pickle.loads(zlib.decompress(value), encoding="bytes")) + except Exception: + return "IOField to_python raise error: {}".format(traceback.format_exc()) + + def from_db_value(self, value, expression, connection, context): + return self.to_python(value) diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/function.py b/runtime/bamboo-pipeline/pipeline/engine/models/function.py new file mode 100644 index 00000000..0bf6b767 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/function.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import traceback + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.engine.conf import function_switch + +logger = logging.getLogger("celery") + + +class FunctionSwitchManager(models.Manager): + def init_db(self): + try: + name_set = {s.name for s in self.all()} + s_to_be_created = [] + for switch in function_switch.switch_list: + if switch["name"] not in name_set: + s_to_be_created.append( + FunctionSwitch( + name=switch["name"], description=switch["description"], is_active=switch["is_active"] + ) + ) + else: + self.filter(name=switch["name"]).update(description=switch["description"]) + self.bulk_create(s_to_be_created) + except Exception: + logger.error("function switch init failed: %s" % traceback.format_exc()) + + def is_frozen(self): + return self.get(name=function_switch.FREEZE_ENGINE).is_active + + def freeze_engine(self): + self.filter(name=function_switch.FREEZE_ENGINE).update(is_active=True) + + def unfreeze_engine(self): + self.filter(name=function_switch.FREEZE_ENGINE).update(is_active=False) + + +class FunctionSwitch(models.Model): + name = models.CharField(_("功能名称"), max_length=32, null=False, unique=True) + description = models.TextField(_("功能描述"), default="") + is_active = models.BooleanField(_("是否激活"), default=False) + + objects = FunctionSwitchManager() diff --git a/runtime/bamboo-pipeline/pipeline/engine/models/nr_pickle.py b/runtime/bamboo-pipeline/pipeline/engine/models/nr_pickle.py new file mode 100644 index 00000000..0ab1cd48 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/models/nr_pickle.py @@ -0,0 +1,1681 @@ +"""Create portable serialized representations of Python objects. + +See module copyreg for a mechanism for registering custom picklers. +See module pickletools source for extensive comments. + +Classes: + + Pickler + Unpickler + +Functions: + + dump(object, file) + dumps(object) -> string + load(file) -> object + loads(string) -> object + +Misc variables: + + __version__ + format_version + compatible_formats + +""" + +from types import FunctionType +from copyreg import dispatch_table +from copyreg import _extension_registry, _inverted_registry, _extension_cache +from itertools import islice +from functools import partial +import sys +from sys import maxsize +from struct import pack, unpack +import re +import io +import codecs +import _compat_pickle + +__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler", "Unpickler", "dump", "dumps", "load", "loads"] + +# Shortcut for use in isinstance testing +bytes_types = (bytes, bytearray) + +# These are purely informational; no code uses these. +format_version = "4.0" # File format version we write +compatible_formats = [ + "1.0", # Original protocol 0 + "1.1", # Protocol 0 with INST added + "1.2", # Original protocol 1 + "1.3", # Protocol 1 with BINFLOAT added + "2.0", # Protocol 2 + "3.0", # Protocol 3 + "4.0", # Protocol 4 +] # Old format versions we can read + +# This is the highest protocol number we know how to read. +HIGHEST_PROTOCOL = 4 + +# The protocol we write by default. May be less than HIGHEST_PROTOCOL. +# We intentionally write a protocol that Python 2.x cannot read; +# there are too many issues with that. +DEFAULT_PROTOCOL = 3 + + +class PickleError(Exception): + """A common base class for the other pickling exceptions.""" + + pass + + +class PicklingError(PickleError): + """This exception is raised when an unpicklable object is passed to the + dump() method. 
+ + """ + + pass + + +class UnpicklingError(PickleError): + """This exception is raised when there is a problem unpickling an object, + such as a security violation. + + Note that other exceptions may also be raised during unpickling, including + (but not necessarily limited to) AttributeError, EOFError, ImportError, + and IndexError. + + """ + + pass + + +# An instance of _Stop is raised by Unpickler.load_stop() in response to +# the STOP opcode, passing the object that is the result of unpickling. + + +class _Stop(Exception): + def __init__(self, value): + self.value = value + + +# Jython has PyStringMap; it's a dict subclass with string keys +try: + from org.python.core import PyStringMap +except ImportError: + PyStringMap = None + +# Pickle opcodes. See pickletools.py for extensive docs. The listing +# here is in kind-of alphabetical order of 1-character pickle code. +# pickletools groups them by purpose. + +MARK = b"(" # push special markobject on stack +STOP = b"." # every pickle ends with STOP +POP = b"0" # discard topmost stack item +POP_MARK = b"1" # discard stack top through topmost markobject +DUP = b"2" # duplicate top stack item +FLOAT = b"F" # push float object; decimal string argument +INT = b"I" # push integer or bool; decimal string argument +BININT = b"J" # push four-byte signed int +BININT1 = b"K" # push 1-byte unsigned int +LONG = b"L" # push long; decimal string argument +BININT2 = b"M" # push 2-byte unsigned int +NONE = b"N" # push None +PERSID = b"P" # push persistent object; id is taken from string arg +BINPERSID = b"Q" # " " " ; " " " " stack +REDUCE = b"R" # apply callable to argtuple, both on stack +STRING = b"S" # push string; NL-terminated string argument +BINSTRING = b"T" # push string; counted binary string argument +SHORT_BINSTRING = b"U" # " " ; " " " " < 256 bytes +UNICODE = b"V" # push Unicode string; raw-unicode-escaped'd argument +BINUNICODE = b"X" # " " " ; counted UTF-8 string argument +APPEND = b"a" # append stack top to list below it +BUILD = b"b" # call __setstate__ or __dict__.update() +GLOBAL = b"c" # push self.find_class(modname, name); 2 string args +DICT = b"d" # build a dict from stack items +EMPTY_DICT = b"}" # push empty dict +APPENDS = b"e" # extend list on stack by topmost stack slice +GET = b"g" # push item from memo on stack; index is string arg +BINGET = b"h" # " " " " " " ; " " 1-byte arg +INST = b"i" # build & push class instance +LONG_BINGET = b"j" # push item from memo on stack; index is 4-byte arg +LIST = b"l" # build list from topmost stack items +EMPTY_LIST = b"]" # push empty list +OBJ = b"o" # build & push class instance +PUT = b"p" # store stack top in memo; index is string arg +BINPUT = b"q" # " " " " " ; " " 1-byte arg +LONG_BINPUT = b"r" # " " " " " ; " " 4-byte arg +SETITEM = b"s" # add key+value pair to dict +TUPLE = b"t" # build tuple from topmost stack items +EMPTY_TUPLE = b")" # push empty tuple +SETITEMS = b"u" # modify dict by adding topmost key+value pairs +BINFLOAT = b"G" # push float; arg is 8-byte float encoding + +TRUE = b"I01\n" # not an opcode; see INT docs in pickletools.py +FALSE = b"I00\n" # not an opcode; see INT docs in pickletools.py + +# Protocol 2 + +PROTO = b"\x80" # identify pickle protocol +NEWOBJ = b"\x81" # build object by applying cls.__new__ to argtuple +EXT1 = b"\x82" # push object from extension registry; 1-byte index +EXT2 = b"\x83" # ditto, but 2-byte index +EXT4 = b"\x84" # ditto, but 4-byte index +TUPLE1 = b"\x85" # build 1-tuple from stack top +TUPLE2 = b"\x86" # build 2-tuple from two 
topmost stack items +TUPLE3 = b"\x87" # build 3-tuple from three topmost stack items +NEWTRUE = b"\x88" # push True +NEWFALSE = b"\x89" # push False +LONG1 = b"\x8a" # push long from < 256 bytes +LONG4 = b"\x8b" # push really big long + +_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3] + +# Protocol 3 (Python 3.x) + +BINBYTES = b"B" # push bytes; counted binary string argument +SHORT_BINBYTES = b"C" # " " ; " " " " < 256 bytes + +# Protocol 4 +SHORT_BINUNICODE = b"\x8c" # push short string; UTF-8 length < 256 bytes +BINUNICODE8 = b"\x8d" # push very long string +BINBYTES8 = b"\x8e" # push very long bytes string +EMPTY_SET = b"\x8f" # push empty set on the stack +ADDITEMS = b"\x90" # modify set by adding topmost stack items +FROZENSET = b"\x91" # build frozenset from topmost stack items +NEWOBJ_EX = b"\x92" # like NEWOBJ but work with keyword only arguments +STACK_GLOBAL = b"\x93" # same as GLOBAL but using names on the stacks +MEMOIZE = b"\x94" # store top of the stack in memo +FRAME = b"\x95" # indicate the beginning of a new frame + +__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)]) + + +class _Framer: + + _FRAME_SIZE_TARGET = 64 * 1024 + + def __init__(self, file_write): + self.file_write = file_write + self.current_frame = None + + def start_framing(self): + self.current_frame = io.BytesIO() + + def end_framing(self): + if self.current_frame and self.current_frame.tell() > 0: + self.commit_frame(force=True) + self.current_frame = None + + def commit_frame(self, force=False): + if self.current_frame: + f = self.current_frame + if f.tell() >= self._FRAME_SIZE_TARGET or force: + with f.getbuffer() as data: + n = len(data) + write = self.file_write + write(FRAME) + write(pack("": + raise AttributeError("Can't get local attribute {!r} on {!r}".format(name, obj)) + try: + parent = obj + obj = getattr(obj, subpath) + except AttributeError: + raise AttributeError("Can't get attribute {!r} on {!r}".format(name, obj)) + return obj, parent + + +def whichmodule(obj, name): + """Find the module an object belong to.""" + module_name = getattr(obj, "__module__", None) + if module_name is not None: + return module_name + # Protect the iteration by using a list copy of sys.modules against dynamic + # modules that trigger imports of other modules upon calls to getattr. + for module_name, module in list(sys.modules.items()): + if module_name == "__main__" or module is None: + continue + try: + if _getattribute(module, name)[0] is obj: + return module_name + except AttributeError: + pass + return "__main__" + + +def encode_long(x): + r"""Encode a long to a two's complement little-endian binary string. + Note that 0 is a special case, returning an empty string, to save a + byte in the LONG1 pickling context. + + >>> encode_long(0) + b'' + >>> encode_long(255) + b'\xff\x00' + >>> encode_long(32767) + b'\xff\x7f' + >>> encode_long(-256) + b'\x00\xff' + >>> encode_long(-32768) + b'\x00\x80' + >>> encode_long(-128) + b'\x80' + >>> encode_long(127) + b'\x7f' + >>> + """ + if x == 0: + return b"" + nbytes = (x.bit_length() >> 3) + 1 + result = x.to_bytes(nbytes, byteorder="little", signed=True) + if x < 0 and nbytes > 1: + if result[-1] == 0xFF and (result[-2] & 0x80) != 0: + result = result[:-1] + return result + + +def decode_long(data): + r"""Decode a long from a two's complement little-endian binary string. 
+ + >>> decode_long(b'') + 0 + >>> decode_long(b"\xff\x00") + 255 + >>> decode_long(b"\xff\x7f") + 32767 + >>> decode_long(b"\x00\xff") + -256 + >>> decode_long(b"\x00\x80") + -32768 + >>> decode_long(b"\x80") + -128 + >>> decode_long(b"\x7f") + 127 + """ + return int.from_bytes(data, byteorder="little", signed=True) + + +# Pickling machinery + + +class _Pickler: + def __init__(self, file, protocol=None, *, fix_imports=True): + """This takes a binary file for writing a pickle data stream. + + The optional *protocol* argument tells the pickler to use the + given protocol; supported protocols are 0, 1, 2, 3 and 4. The + default protocol is 3; a backward-incompatible protocol designed + for Python 3. + + Specifying a negative protocol version selects the highest + protocol version supported. The higher the protocol used, the + more recent the version of Python needed to read the pickle + produced. + + The *file* argument must have a write() method that accepts a + single bytes argument. It can thus be a file object opened for + binary writing, an io.BytesIO instance, or any other custom + object that meets this interface. + + If *fix_imports* is True and *protocol* is less than 3, pickle + will try to map the new Python 3 names to the old module names + used in Python 2, so that the pickle data stream is readable + with Python 2. + """ + if protocol is None: + protocol = DEFAULT_PROTOCOL + if protocol < 0: + protocol = HIGHEST_PROTOCOL + elif not 0 <= protocol <= HIGHEST_PROTOCOL: + raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL) + try: + self._file_write = file.write + except AttributeError: + raise TypeError("file must have a 'write' attribute") + self.framer = _Framer(self._file_write) + self.write = self.framer.write + self.memo = {} + self.proto = int(protocol) + self.bin = protocol >= 1 + self.fast = 0 + self.fix_imports = fix_imports and protocol < 3 + + def clear_memo(self): + """Clears the pickler's "memo". + + The memo is the data structure that remembers which objects the + pickler has already seen, so that shared or recursive objects + are pickled by reference and not by value. This method is + useful when re-using picklers. + """ + self.memo.clear() + + def dump(self, obj): + """Write a pickled representation of obj to the open file.""" + # Check whether Pickler was initialized correctly. This is + # only needed to mimic the behavior of _pickle.Pickler.dump(). + if not hasattr(self, "_file_write"): + raise PicklingError("Pickler.__init__() was not called by " "%s.__init__()" % (self.__class__.__name__,)) + if self.proto >= 2: + self.write(PROTO + pack("= 4: + self.framer.start_framing() + + # By faking recursion using generators, pickle is no longer dependent + # on python's recursion limit. This means that hugely recursive data + # structures can be pickled without a problem! It's also still just + # about as fast as it was for simple structures, albeit slower for + # large structures. + callstack = [self.save(obj)] + while callstack: + try: + result = next(callstack[-1]) + except StopIteration: + callstack.pop() + else: + if result is not None: + callstack.append(result) + + self.write(STOP) + self.framer.end_framing() + + def memoize(self, obj): + """Store an object in the memo.""" + + # The Pickler memo is a dictionary mapping object ids to 2-tuples + # that contain the Unpickler memo key and the object being memoized. + # The memo key is written to the pickle and will become + # the key in the Unpickler's memo. 
The object is stored in the + # Pickler memo so that transient objects are kept alive during + # pickling. + + # The use of the Unpickler memo length as the memo key is just a + # convention. The only requirement is that the memo values be unique. + # But there appears no advantage to any other scheme, and this + # scheme allows the Unpickler memo to be implemented as a plain (but + # growable) array, indexed by memo key. + if self.fast: + return + assert id(obj) not in self.memo + idx = len(self.memo) + self.write(self.put(idx)) + self.memo[id(obj)] = idx, obj + + # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i. + def put(self, idx): + if self.proto >= 4: + return MEMOIZE + elif self.bin: + if idx < 256: + return BINPUT + pack("= 2 and func_name == "__newobj_ex__": + cls, args, kwargs = args + if not hasattr(cls, "__new__"): + raise PicklingError("args[0] from {} args has no __new__".format(func_name)) + if obj is not None and cls is not obj.__class__: + raise PicklingError("args[0] from {} args has the wrong class".format(func_name)) + if self.proto >= 4: + yield save(cls) + yield save(args) + yield save(kwargs) + write(NEWOBJ_EX) + else: + func = partial(cls.__new__, cls, *args, **kwargs) + yield save(func) + yield save(()) + write(REDUCE) + elif self.proto >= 2 and func_name == "__newobj__": + # A __reduce__ implementation can direct protocol 2 or newer to + # use the more efficient NEWOBJ opcode, while still + # allowing protocol 0 and 1 to work normally. For this to + # work, the function returned by __reduce__ should be + # called __newobj__, and its first argument should be a + # class. The implementation for __newobj__ + # should be as follows, although pickle has no way to + # verify this: + # + # def __newobj__(cls, *args): + # return cls.__new__(cls, *args) + # + # Protocols 0 and 1 will pickle a reference to __newobj__, + # while protocol 2 (and above) will pickle a reference to + # cls, the remaining args tuple, and the NEWOBJ code, + # which calls cls.__new__(cls, *args) at unpickling time + # (see load_newobj below). If __reduce__ returns a + # three-tuple, the state from the third tuple item will be + # pickled regardless of the protocol, calling __setstate__ + # at unpickling time (see load_build below). + # + # Note that no standard __newobj__ implementation exists; + # you have to provide your own. This is to enforce + # compatibility with Python 2.2 (pickles written using + # protocol 0 or 1 in Python 2.3 should be unpicklable by + # Python 2.2). + cls = args[0] + if not hasattr(cls, "__new__"): + raise PicklingError("args[0] from __newobj__ args has no __new__") + if obj is not None and cls is not obj.__class__: + raise PicklingError("args[0] from __newobj__ args has the wrong class") + args = args[1:] + yield save(cls) + yield save(args) + write(NEWOBJ) + else: + yield save(func) + yield save(args) + write(REDUCE) + + if obj is not None: + # If the object is already in the memo, this means it is + # recursive. In this case, throw away everything we put on the + # stack, and fetch the object back from the memo. + if id(obj) in self.memo: + write(POP + self.get(self.memo[id(obj)][0])) + else: + self.memoize(obj) + + # More new special cases (that work with older protocols as + # well): when __reduce__ returns a tuple with 4 or 5 items, + # the 4th and 5th item should be iterators that provide list + # items and dict items (as (key, value) tuples), or None. 
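+        # each yield below hands a sub-generator back to dump()'s trampoline loop instead of recursing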
+ + if listitems is not None: + yield self._batch_appends(listitems) + + if dictitems is not None: + yield self._batch_setitems(dictitems) + + if state is not None: + yield save(state) + write(BUILD) + + # Methods below this point are dispatched through the dispatch table + + dispatch = {} + + def save_none(self, obj): + self.write(NONE) + + dispatch[type(None)] = save_none + + def save_bool(self, obj): + if self.proto >= 2: + self.write(NEWTRUE if obj else NEWFALSE) + else: + self.write(TRUE if obj else FALSE) + + dispatch[bool] = save_bool + + def save_long(self, obj): + if self.bin: + # If the int is small enough to fit in a signed 4-byte 2's-comp + # format, we can store it more efficiently than the general + # case. + # First one- and two-byte unsigned ints: + if obj >= 0: + if obj <= 0xFF: + self.write(BININT1 + pack("= 2: + encoded = encode_long(obj) + n = len(encoded) + if n < 256: + self.write(LONG1 + pack("d", obj)) + else: + self.write(FLOAT + repr(obj).encode("ascii") + b"\n") + + dispatch[float] = save_float + + def save_bytes(self, obj): + if self.proto < 3: + if not obj: # bytes object is empty + yield self.save_reduce(bytes, (), obj=obj) + else: + yield self.save_reduce(codecs.encode, (str(obj, "latin1"), "latin1"), obj=obj) + return + n = len(obj) + if n <= 0xFF: + self.write(SHORT_BINBYTES + pack(" 0xFFFFFFFF and self.proto >= 4: + self.write(BINBYTES8 + pack("= 4: + self.write(SHORT_BINUNICODE + pack(" 0xFFFFFFFF and self.proto >= 4: + self.write(BINUNICODE8 + pack("= 2: + for element in obj: + yield save(element) + # Subtle. Same as in the big comment below. + if id(obj) in memo: + get = self.get(memo[id(obj)][0]) + self.write(POP * n + get) + else: + self.write(_tuplesize2code[n]) + self.memoize(obj) + return + + # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple + # has more than 3 elements. + write = self.write + write(MARK) + for element in obj: + yield save(element) + + if id(obj) in memo: + # Subtle. d was not in memo when we entered save_tuple(), so + # the process of saving the tuple's elements must have saved + # the tuple itself: the tuple is recursive. The proper action + # now is to throw away everything we put on the stack, and + # simply GET the tuple (it's already constructed). This check + # could have been done in the "for element" loop instead, but + # recursive tuples are a rare thing. + get = self.get(memo[id(obj)][0]) + if self.bin: + write(POP_MARK + get) + else: # proto 0 -- POP_MARK not available + write(POP * (n + 1) + get) + return + + # No recursion. 
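+        # the tuple is not self-referential, so emit TUPLE over the MARK-ed elements and memoize it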
+ write(TUPLE) + self.memoize(obj) + + dispatch[tuple] = save_tuple + + def save_list(self, obj): + if self.bin: + self.write(EMPTY_LIST) + else: # proto 0 -- can't use EMPTY_LIST + self.write(MARK + LIST) + + self.memoize(obj) + yield self._batch_appends(obj) + + dispatch[list] = save_list + + _BATCHSIZE = 1000 + + def _batch_appends(self, items): + # Helper to batch up APPENDS sequences + save = self.save + write = self.write + + if not self.bin: + for x in items: + yield save(x) + write(APPEND) + return + + it = iter(items) + while True: + tmp = list(islice(it, self._BATCHSIZE)) + n = len(tmp) + if n > 1: + write(MARK) + for x in tmp: + yield save(x) + write(APPENDS) + elif n: + yield save(tmp[0]) + write(APPEND) + # else tmp is empty, and we're done + if n < self._BATCHSIZE: + return + + def save_dict(self, obj): + if self.bin: + self.write(EMPTY_DICT) + else: # proto 0 -- can't use EMPTY_DICT + self.write(MARK + DICT) + + self.memoize(obj) + yield self._batch_setitems(obj.items()) + + dispatch[dict] = save_dict + if PyStringMap is not None: + dispatch[PyStringMap] = save_dict + + def _batch_setitems(self, items): + # Helper to batch up SETITEMS sequences; proto >= 1 only + save = self.save + write = self.write + + if not self.bin: + for k, v in items: + yield save(k) + yield save(v) + write(SETITEM) + return + + it = iter(items) + while True: + tmp = list(islice(it, self._BATCHSIZE)) + n = len(tmp) + if n > 1: + write(MARK) + for k, v in tmp: + yield save(k) + yield save(v) + write(SETITEMS) + elif n: + k, v = tmp[0] + yield save(k) + yield save(v) + write(SETITEM) + # else tmp is empty, and we're done + if n < self._BATCHSIZE: + return + + def save_set(self, obj): + save = self.save + write = self.write + + if self.proto < 4: + self.save_reduce(set, (list(obj),), obj=obj) + return + + write(EMPTY_SET) + self.memoize(obj) + + it = iter(obj) + while True: + batch = list(islice(it, self._BATCHSIZE)) + n = len(batch) + if n > 0: + write(MARK) + for item in batch: + yield save(item) + write(ADDITEMS) + if n < self._BATCHSIZE: + return + + dispatch[set] = save_set + + def save_frozenset(self, obj): + save = self.save + write = self.write + + if self.proto < 4: + self.save_reduce(frozenset, (list(obj),), obj=obj) + return + + write(MARK) + for item in obj: + yield save(item) + + if id(obj) in self.memo: + # If the object is already in the memo, this means it is + # recursive. In this case, throw away everything we put on the + # stack, and fetch the object back from the memo. + write(POP_MARK + self.get(self.memo[id(obj)][0])) + return + + write(FROZENSET) + self.memoize(obj) + + dispatch[frozenset] = save_frozenset + + def save_global(self, obj, name=None): + write = self.write + memo = self.memo # noqa + + if name is None: + name = getattr(obj, "__qualname__", None) + if name is None: + name = obj.__name__ + + module_name = whichmodule(obj, name) + try: + __import__(module_name, level=0) + module = sys.modules[module_name] + obj2, parent = _getattribute(module, name) + except (ImportError, KeyError, AttributeError): + raise PicklingError("Can't pickle %r: it's not found as %s.%s" % (obj, module_name, name)) + else: + if obj2 is not obj: + raise PicklingError("Can't pickle %r: it's not the same object as %s.%s" % (obj, module_name, name)) + + if self.proto >= 2: + code = _extension_registry.get((module_name, name)) + if code: + assert code > 0 + if code <= 0xFF: + write(EXT1 + pack("= 3. 
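+        # protocol 4 resolves dotted qualnames with STACK_GLOBAL; older protocols fall back to text-based GLOBAL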
+ if self.proto >= 4: + self.save(module_name) + self.save(name) + write(STACK_GLOBAL) + elif parent is not module: + self.save_reduce(getattr, (parent, lastname)) + elif self.proto >= 3: + write(GLOBAL + bytes(module_name, "utf-8") + b"\n" + bytes(name, "utf-8") + b"\n") + else: + if self.fix_imports: + r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING + r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING + if (module_name, name) in r_name_mapping: + module_name, name = r_name_mapping[(module_name, name)] + elif module_name in r_import_mapping: + module_name = r_import_mapping[module_name] + try: + write(GLOBAL + bytes(module_name, "ascii") + b"\n" + bytes(name, "ascii") + b"\n") + except UnicodeEncodeError: + raise PicklingError( + "can't pickle global identifier '%s.%s' using " "pickle protocol %i" % (module, name, self.proto) + ) + + self.memoize(obj) + + def save_type(self, obj): + if obj is type(None): # noqa + return self.save_reduce(type, (None,), obj=obj) + elif obj is type(NotImplemented): + return self.save_reduce(type, (NotImplemented,), obj=obj) + elif obj is type(...): # noqa + return self.save_reduce(type, (...,), obj=obj) + return self.save_global(obj) + + dispatch[FunctionType] = save_global + dispatch[type] = save_type + + +# Unpickling machinery + + +class _Unpickler: + def __init__(self, file, *, fix_imports=True, encoding="ASCII", errors="strict"): + """This takes a binary file for reading a pickle data stream. + + The protocol version of the pickle is detected automatically, so + no proto argument is needed. + + The argument *file* must have two methods, a read() method that + takes an integer argument, and a readline() method that requires + no arguments. Both methods should return bytes. Thus *file* + can be a binary file object opened for reading, an io.BytesIO + object, or any other custom object that meets this interface. + + The file-like object must have two methods, a read() method + that takes an integer argument, and a readline() method that + requires no arguments. Both methods should return bytes. + Thus file-like object can be a binary file object opened for + reading, a BytesIO object, or any other custom object that + meets this interface. + + Optional keyword arguments are *fix_imports*, *encoding* and + *errors*, which are used to control compatibility support for + pickle stream generated by Python 2. If *fix_imports* is True, + pickle will try to map the old Python 2 names to the new names + used in Python 3. The *encoding* and *errors* tell pickle how + to decode 8-bit string instances pickled by Python 2; these + default to 'ASCII' and 'strict', respectively. *encoding* can be + 'bytes' to read theses 8-bit string instances as bytes objects. + """ + self._file_readline = file.readline + self._file_read = file.read + self.memo = {} + self.encoding = encoding + self.errors = errors + self.proto = 0 + self.fix_imports = fix_imports + + def load(self): + """Read a pickled object representation from the open file. + + Return the reconstituted object hierarchy specified in the file. + """ + # Check whether Unpickler was initialized correctly. This is + # only needed to mimic the behavior of _pickle.Unpickler.dump(). 
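+        # load() runs a one-byte opcode dispatch loop; the STOP opcode raises _Stop to return the result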
+ if not hasattr(self, "_file_read"): + raise UnpicklingError( + "Unpickler.__init__() was not called by " "%s.__init__()" % (self.__class__.__name__,) + ) + self._unframer = _Unframer(self._file_read, self._file_readline) + self.read = self._unframer.read + self.readline = self._unframer.readline + self.metastack = [] + self.stack = [] + self.append = self.stack.append + self.proto = 0 + read = self.read + dispatch = self.dispatch + try: + while True: + key = read(1) + if not key: + raise EOFError + assert isinstance(key, bytes_types) + dispatch[key[0]](self) + except _Stop as stopinst: + return stopinst.value + + # Return a list of items pushed in the stack after last MARK instruction. + def pop_mark(self): + items = self.stack + self.stack = self.metastack.pop() + self.append = self.stack.append + return items + + def persistent_load(self, pid): + raise UnpicklingError("unsupported persistent id encountered") + + dispatch = {} + + def load_proto(self): + proto = self.read(1)[0] + if not 0 <= proto <= HIGHEST_PROTOCOL: + raise ValueError("unsupported pickle protocol: %d" % proto) + self.proto = proto + + dispatch[PROTO[0]] = load_proto + + def load_frame(self): + (frame_size,) = unpack(" sys.maxsize: + raise ValueError("frame size > sys.maxsize: %d" % frame_size) + self._unframer.load_frame(frame_size) + + dispatch[FRAME[0]] = load_frame + + def load_persid(self): + try: + pid = self.readline()[:-1].decode("ascii") + except UnicodeDecodeError: + raise UnpicklingError("persistent IDs in protocol 0 must be ASCII strings") + self.append(self.persistent_load(pid)) + + dispatch[PERSID[0]] = load_persid + + def load_binpersid(self): + pid = self.stack.pop() + self.append(self.persistent_load(pid)) + + dispatch[BINPERSID[0]] = load_binpersid + + def load_none(self): + self.append(None) + + dispatch[NONE[0]] = load_none + + def load_false(self): + self.append(False) + + dispatch[NEWFALSE[0]] = load_false + + def load_true(self): + self.append(True) + + dispatch[NEWTRUE[0]] = load_true + + def load_int(self): + data = self.readline() + if data == FALSE[1:]: + val = False + elif data == TRUE[1:]: + val = True + else: + val = int(data, 0) + self.append(val) + + dispatch[INT[0]] = load_int + + def load_binint(self): + self.append(unpack("d", self.read(8))[0]) + + dispatch[BINFLOAT[0]] = load_binfloat + + def _decode_string(self, value): + # Used to allow strings from Python 2 to be decoded either as + # bytes or Unicode strings. This should be used only with the + # STRING, BINSTRING and SHORT_BINSTRING opcodes. 
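+        # encoding="bytes" returns Python 2 str payloads untouched; any other encoding decodes them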
+ if self.encoding == "bytes": + return value + else: + return value.decode(self.encoding, self.errors) + + def load_string(self): + data = self.readline()[:-1] + # Strip outermost quotes + if len(data) >= 2 and data[0] == data[-1] and data[0] in b"\"'": + data = data[1:-1] + else: + raise UnpicklingError("the STRING opcode argument must be quoted") + self.append(self._decode_string(codecs.escape_decode(data)[0])) + + dispatch[STRING[0]] = load_string + + def load_binstring(self): + # Deprecated BINSTRING uses signed 32-bit length + (len,) = unpack(" maxsize: + raise UnpicklingError("BINBYTES exceeds system's maximum size " "of %d bytes" % maxsize) + self.append(self.read(len)) + + dispatch[BINBYTES[0]] = load_binbytes + + def load_unicode(self): + self.append(str(self.readline()[:-1], "raw-unicode-escape")) + + dispatch[UNICODE[0]] = load_unicode + + def load_binunicode(self): + (len,) = unpack(" maxsize: + raise UnpicklingError("BINUNICODE exceeds system's maximum size " "of %d bytes" % maxsize) + self.append(str(self.read(len), "utf-8", "surrogatepass")) + + dispatch[BINUNICODE[0]] = load_binunicode + + def load_binunicode8(self): + (len,) = unpack(" maxsize: + raise UnpicklingError("BINUNICODE8 exceeds system's maximum size " "of %d bytes" % maxsize) + self.append(str(self.read(len), "utf-8", "surrogatepass")) + + dispatch[BINUNICODE8[0]] = load_binunicode8 + + def load_binbytes8(self): + (len,) = unpack(" maxsize: + raise UnpicklingError("BINBYTES8 exceeds system's maximum size " "of %d bytes" % maxsize) + self.append(self.read(len)) + + dispatch[BINBYTES8[0]] = load_binbytes8 + + def load_short_binstring(self): + len = self.read(1)[0] + data = self.read(len) + self.append(self._decode_string(data)) + + dispatch[SHORT_BINSTRING[0]] = load_short_binstring + + def load_short_binbytes(self): + len = self.read(1)[0] + self.append(self.read(len)) + + dispatch[SHORT_BINBYTES[0]] = load_short_binbytes + + def load_short_binunicode(self): + len = self.read(1)[0] + self.append(str(self.read(len), "utf-8", "surrogatepass")) + + dispatch[SHORT_BINUNICODE[0]] = load_short_binunicode + + def load_tuple(self): + items = self.pop_mark() + self.append(tuple(items)) + + dispatch[TUPLE[0]] = load_tuple + + def load_empty_tuple(self): + self.append(()) + + dispatch[EMPTY_TUPLE[0]] = load_empty_tuple + + def load_tuple1(self): + self.stack[-1] = (self.stack[-1],) + + dispatch[TUPLE1[0]] = load_tuple1 + + def load_tuple2(self): + self.stack[-2:] = [(self.stack[-2], self.stack[-1])] + + dispatch[TUPLE2[0]] = load_tuple2 + + def load_tuple3(self): + self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])] + + dispatch[TUPLE3[0]] = load_tuple3 + + def load_empty_list(self): + self.append([]) + + dispatch[EMPTY_LIST[0]] = load_empty_list + + def load_empty_dictionary(self): + self.append({}) + + dispatch[EMPTY_DICT[0]] = load_empty_dictionary + + def load_empty_set(self): + self.append(set()) + + dispatch[EMPTY_SET[0]] = load_empty_set + + def load_frozenset(self): + items = self.pop_mark() + self.append(frozenset(items)) + + dispatch[FROZENSET[0]] = load_frozenset + + def load_list(self): + items = self.pop_mark() + self.append(items) + + dispatch[LIST[0]] = load_list + + def load_dict(self): + items = self.pop_mark() + d = {items[i]: items[i + 1] for i in range(0, len(items), 2)} + self.append(d) + + dispatch[DICT[0]] = load_dict + + # INST and OBJ differ only in how they get a class object. 
It's not + # only sensible to do the rest in a common routine, the two routines + # previously diverged and grew different bugs. + # klass is the class to instantiate, and k points to the topmost mark + # object, following which are the arguments for klass.__init__. + def _instantiate(self, klass, args): + if args or not isinstance(klass, type) or hasattr(klass, "__getinitargs__"): + try: + value = klass(*args) + except TypeError as err: + raise TypeError("in constructor for %s: %s" % (klass.__name__, str(err)), sys.exc_info()[2]) + else: + value = klass.__new__(klass) + self.append(value) + + def load_inst(self): + module = self.readline()[:-1].decode("ascii") + name = self.readline()[:-1].decode("ascii") + klass = self.find_class(module, name) + self._instantiate(klass, self.pop_mark()) + + dispatch[INST[0]] = load_inst + + def load_obj(self): + # Stack is ... markobject classobject arg1 arg2 ... + args = self.pop_mark() + cls = args.pop(0) + self._instantiate(cls, args) + + dispatch[OBJ[0]] = load_obj + + def load_newobj(self): + args = self.stack.pop() + cls = self.stack.pop() + obj = cls.__new__(cls, *args) + self.append(obj) + + dispatch[NEWOBJ[0]] = load_newobj + + def load_newobj_ex(self): + kwargs = self.stack.pop() + args = self.stack.pop() + cls = self.stack.pop() + obj = cls.__new__(cls, *args, **kwargs) + self.append(obj) + + dispatch[NEWOBJ_EX[0]] = load_newobj_ex + + def load_global(self): + module = self.readline()[:-1].decode("utf-8") + name = self.readline()[:-1].decode("utf-8") + klass = self.find_class(module, name) + self.append(klass) + + dispatch[GLOBAL[0]] = load_global + + def load_stack_global(self): + name = self.stack.pop() + module = self.stack.pop() + if type(name) is not str or type(module) is not str: + raise UnpicklingError("STACK_GLOBAL requires str") + self.append(self.find_class(module, name)) + + dispatch[STACK_GLOBAL[0]] = load_stack_global + + def load_ext1(self): + code = self.read(1)[0] + self.get_extension(code) + + dispatch[EXT1[0]] = load_ext1 + + def load_ext2(self): + (code,) = unpack("= 4: + return _getattribute(sys.modules[module], name)[0] + else: + return getattr(sys.modules[module], name) + + def load_reduce(self): + stack = self.stack + args = stack.pop() + func = stack[-1] + stack[-1] = func(*args) + + dispatch[REDUCE[0]] = load_reduce + + def load_pop(self): + if self.stack: + del self.stack[-1] + else: + self.pop_mark() + + dispatch[POP[0]] = load_pop + + def load_pop_mark(self): + self.pop_mark() + + dispatch[POP_MARK[0]] = load_pop_mark + + def load_dup(self): + self.append(self.stack[-1]) + + dispatch[DUP[0]] = load_dup + + def load_get(self): + i = int(self.readline()[:-1]) + self.append(self.memo[i]) + + dispatch[GET[0]] = load_get + + def load_binget(self): + i = self.read(1)[0] + self.append(self.memo[i]) + + dispatch[BINGET[0]] = load_binget + + def load_long_binget(self): + (i,) = unpack(" maxsize: + raise ValueError("negative LONG_BINPUT argument") + self.memo[i] = self.stack[-1] + + dispatch[LONG_BINPUT[0]] = load_long_binput + + def load_memoize(self): + memo = self.memo + memo[len(memo)] = self.stack[-1] + + dispatch[MEMOIZE[0]] = load_memoize + + def load_append(self): + stack = self.stack + value = stack.pop() + list = stack[-1] + list.append(value) + + dispatch[APPEND[0]] = load_append + + def load_appends(self): + items = self.pop_mark() + list_obj = self.stack[-1] + if isinstance(list_obj, list): + list_obj.extend(items) + else: + append = list_obj.append + for item in items: + append(item) + + dispatch[APPENDS[0]] = 
+
+    def load_setitem(self):
+        stack = self.stack
+        value = stack.pop()
+        key = stack.pop()
+        dict = stack[-1]
+        dict[key] = value
+
+    dispatch[SETITEM[0]] = load_setitem
+
+    def load_setitems(self):
+        items = self.pop_mark()
+        dict = self.stack[-1]
+        for i in range(0, len(items), 2):
+            dict[items[i]] = items[i + 1]
+
+    dispatch[SETITEMS[0]] = load_setitems
+
+    def load_additems(self):
+        items = self.pop_mark()
+        set_obj = self.stack[-1]
+        if isinstance(set_obj, set):
+            set_obj.update(items)
+        else:
+            add = set_obj.add
+            for item in items:
+                add(item)
+
+    dispatch[ADDITEMS[0]] = load_additems
+
+    def load_build(self):
+        stack = self.stack
+        state = stack.pop()
+        inst = stack[-1]
+        setstate = getattr(inst, "__setstate__", None)
+        if setstate is not None:
+            setstate(state)
+            return
+        slotstate = None
+        if isinstance(state, tuple) and len(state) == 2:
+            state, slotstate = state
+        if state:
+            inst_dict = inst.__dict__
+            intern = sys.intern
+            for k, v in state.items():
+                if type(k) is str:
+                    inst_dict[intern(k)] = v
+                else:
+                    inst_dict[k] = v
+        if slotstate:
+            for k, v in slotstate.items():
+                setattr(inst, k, v)
+
+    dispatch[BUILD[0]] = load_build
+
+    def load_mark(self):
+        self.metastack.append(self.stack)
+        self.stack = []
+        self.append = self.stack.append
+
+    dispatch[MARK[0]] = load_mark
+
+    def load_stop(self):
+        value = self.stack.pop()
+        raise _Stop(value)
+
+    dispatch[STOP[0]] = load_stop
+
+
+# Shorthands
+
+
+def _dump(obj, file, protocol=None, *, fix_imports=True):
+    _Pickler(file, protocol, fix_imports=fix_imports).dump(obj)
+
+
+def _dumps(obj, protocol=None, *, fix_imports=True):
+    f = io.BytesIO()
+    _Pickler(f, protocol, fix_imports=fix_imports).dump(obj)
+    res = f.getvalue()
+    assert isinstance(res, bytes_types)
+    return res
+
+
+def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict"):
+    return _Unpickler(file, fix_imports=fix_imports, encoding=encoding, errors=errors).load()
+
+
+def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict"):
+    if isinstance(s, str):
+        raise TypeError("Can't load pickle from unicode string")
+    file = io.BytesIO(s)
+    return _Unpickler(file, fix_imports=fix_imports, encoding=encoding, errors=errors).load()
+
+
+# # Use the faster _pickle if possible
+# try:
+#     from _pickle import (
+#         PickleError,
+#         PicklingError,
+#         UnpicklingError,
+#         Pickler,
+#         Unpickler,
+#         dump,
+#         dumps,
+#         load,
+#         loads
+#     )
+# except ImportError:
+#     Pickler, Unpickler = _Pickler, _Unpickler
+#     dump, dumps, load, loads = _dump, _dumps, _load, _loads
+
+Pickler, Unpickler = _Pickler, _Unpickler
+dump, dumps, load, loads = _dump, _dumps, _load, _loads
+
+# Doctest
+
+
+def _test():
+    import doctest
+
+    return doctest.testmod()
+
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser(description="display contents of the pickle files")
+    parser.add_argument("pickle_file", type=argparse.FileType("br"), nargs="*", help="the pickle file")
+    parser.add_argument("-t", "--test", action="store_true", help="run self-test suite")
+    parser.add_argument("-v", action="store_true", help="run verbosely; only affects self-test run")
+    args = parser.parse_args()
+    if args.test:
+        _test()
+    else:
+        if not args.pickle_file:
+            parser.print_help()
+        else:
+            import pprint
+
+            for f in args.pickle_file:
+                obj = load(f)
+                pprint.pprint(obj)
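
Note that this vendored module deliberately skips the C `_pickle` fast path (the import block is commented out) and binds the public `dump`/`dumps`/`load`/`loads` names to the pure-Python `_Pickler`/`_Unpickler`, so every call exercises the Python implementation. A minimal round-trip sketch; the import name `py_pickle` is illustrative and should be replaced with this file's actual module path:

```python
# Round-trip sketch for the vendored pure-Python pickle above.
# `py_pickle` is an illustrative name for this module, not its real path.
import py_pickle

payload = {"node_id": "n1", "loop": 1, "skip": False}

blob = py_pickle.dumps(payload, protocol=4)  # dumps is bound to _dumps above
assert isinstance(blob, bytes)

restored = py_pickle.loads(blob)  # loads is bound to _loads above
assert restored == payload
```
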
diff --git a/runtime/bamboo-pipeline/pipeline/engine/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/engine/signals/__init__.py
new file mode 100644
index 00000000..6d1b8764
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/engine/signals/__init__.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.dispatch import Signal
+
+pipeline_ready = Signal(providing_args=["process_id"])
+pipeline_end = Signal(providing_args=["root_pipeline_id"])
+pipeline_revoke = Signal(providing_args=["root_pipeline_id"])
+child_process_ready = Signal(providing_args=["child_id"])
+process_ready = Signal(providing_args=["parent_id", "current_node_id", "call_from_child"])
+batch_process_ready = Signal(providing_args=["process_id_list", "pipeline_id"])
+wake_from_schedule = Signal(providing_args=["process_id", "activity_id"])
+schedule_ready = Signal(providing_args=["schedule_id", "countdown", "process_id", "data_id"])
+process_unfreeze = Signal(providing_args=["process_id"])
+# activity failed signal
+activity_failed = Signal(providing_args=["pipeline_id", "pipeline_activity_id", "subprocess_id_stack"])
+
+# signal for developer (do not use valve to pass them!)
+service_schedule_fail = Signal(providing_args=["activity_shell", "schedule_service", "ex_data"])
+service_schedule_success = Signal(providing_args=["activity_shell", "schedule_service"])
+node_skip_call = Signal(providing_args=["process", "node"])
+node_retry_ready = Signal(providing_args=["process", "node"])
+
+service_activity_timeout_monitor_start = Signal(providing_args=["node_id", "version", "root_pipeline_id", "countdown"])
+service_activity_timeout_monitor_end = Signal(providing_args=["node_id", "version"])
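
These are plain `django.dispatch.Signal` objects, so any application code can subscribe to engine events with `Signal.connect`. A minimal sketch (the receiver name and `dispatch_uid` are illustrative); `dispatch.py` below connects the configured end handler with `sender=Pipeline`, so a receiver can filter on the same sender:

```python
# Sketch: listening for pipeline completion (receiver name is illustrative).
from pipeline.core.pipeline import Pipeline
from pipeline.engine import signals


def on_pipeline_end(sender, root_pipeline_id, **kwargs):
    # pipeline_end is sent with root_pipeline_id (see providing_args above)
    print("pipeline %s finished" % root_pipeline_id)


signals.pipeline_end.connect(on_pipeline_end, sender=Pipeline, dispatch_uid="demo_pipeline_end")
```
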
+""" + +import traceback + +from django.utils.module_loading import import_string + +from pipeline.conf import settings +from pipeline.core.flow.activity import ServiceActivity +from pipeline.core.pipeline import Pipeline +from pipeline.engine import models, signals +from pipeline.engine.exceptions import InvalidPipelineEndHandleError +from pipeline.engine.signals import handlers + +try: + end_handler = import_string(settings.PIPELINE_END_HANDLER) +except ImportError: + raise InvalidPipelineEndHandleError( + "pipeline end handler ({}) import error with exception: {}".format( + settings.PIPELINE_END_HANDLER, traceback.format_exc() + ) + ) + + +# DISPATCH_UID = __name__.replace('.', '_') + + +def dispatch_pipeline_ready(): + signals.pipeline_ready.connect(handlers.pipeline_ready_handler, sender=Pipeline, dispatch_uid="_pipeline_ready") + + +def dispatch_pipeline_end(): + signals.pipeline_end.connect(end_handler, sender=Pipeline, dispatch_uid="_pipeline_end") + + +def dispatch_child_process_ready(): + signals.child_process_ready.connect( + handlers.child_process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_child_process_ready" + ) + + +def dispatch_process_ready(): + signals.process_ready.connect( + handlers.process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_process_ready" + ) + + +def dispatch_batch_process_ready(): + signals.batch_process_ready.connect( + handlers.batch_process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_batch_process_ready" + ) + + +def dispatch_wake_from_schedule(): + signals.wake_from_schedule.connect( + handlers.wake_from_schedule_handler, sender=models.ScheduleService, dispatch_uid="_wake_from_schedule" + ) + + +def dispatch_schedule_ready(): + signals.schedule_ready.connect( + handlers.schedule_ready_handler, sender=models.ScheduleService, dispatch_uid="_schedule_ready" + ) + + +def dispatch_process_unfreeze(): + signals.process_unfreeze.connect( + handlers.process_unfreeze_handler, sender=models.PipelineProcess, dispatch_uid="_process_unfreeze" + ) + + +def dispatch_service_activity_timeout_monitor_start(): + signals.service_activity_timeout_monitor_start.connect( + handlers.service_activity_timeout_monitor_start_handler, + sender=ServiceActivity, + dispatch_uid="_service_activity_timeout_monitor_start", + ) + + +def dispatch_service_activity_timeout_monitor_end(): + signals.service_activity_timeout_monitor_end.connect( + handlers.service_activity_timeout_monitor_end_handler, + sender=ServiceActivity, + dispatch_uid="__service_activity_timeout_monitor_end", + ) + + +def dispatch(): + dispatch_pipeline_ready() + dispatch_pipeline_end() + dispatch_child_process_ready() + dispatch_process_ready() + dispatch_batch_process_ready() + dispatch_wake_from_schedule() + dispatch_schedule_ready() + dispatch_process_unfreeze() + dispatch_service_activity_timeout_monitor_start() + dispatch_service_activity_timeout_monitor_end() diff --git a/runtime/bamboo-pipeline/pipeline/engine/signals/handlers.py b/runtime/bamboo-pipeline/pipeline/engine/signals/handlers.py new file mode 100644 index 00000000..6daab8b2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/signals/handlers.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging +import contextlib + +from pipeline.celery.settings import QueueResolver +from pipeline.engine import tasks, exceptions +from pipeline.engine.models import ( + NodeCeleryTask, + PipelineModel, + PipelineProcess, + ProcessCeleryTask, + ScheduleCeleryTask, + SendFailedCeleryTask, +) + +logger = logging.getLogger("root") + + +@contextlib.contextmanager +def celery_task_send_fail_pass(): + try: + yield + except exceptions.CeleryFailedTaskCatchException as e: + # we catch CeleryFailedTaskCatchException here and ignore it. + # so we can process the fail task in SendFailedCeleryTask + logger.exception("{} task send error.".format(e.task_name)) + + +class CeleryTaskArgsResolver(object): + def __init__(self, process_id): + self.process_id = process_id + + def resolve_args(self, task): + args = {} + task_args = PipelineProcess.objects.task_args_for_process(self.process_id) + + queue = task_args["queue"] + priority = task_args["priority"] + + args["priority"] = priority + + if queue: + queue_resolver = QueueResolver(queue) + args["routing_key"] = queue_resolver.resolve_task_routing_key(task) + args["queue"] = queue_resolver.resolve_task_queue_name(task) + + return args + + +def pipeline_ready_handler(sender, process_id, **kwargs): + task = tasks.start + args_resolver = CeleryTaskArgsResolver(process_id) + + with celery_task_send_fail_pass(): + ProcessCeleryTask.objects.start_task( + process_id=process_id, task=task, kwargs={"args": [process_id], **args_resolver.resolve_args(task)}, + ) + + +def pipeline_end_handler(sender, root_pipeline_id, **kwargs): + pass + + +def child_process_ready_handler(sender, child_id, **kwargs): + task = tasks.dispatch + args_resolver = CeleryTaskArgsResolver(child_id) + + with celery_task_send_fail_pass(): + ProcessCeleryTask.objects.start_task( + process_id=child_id, task=task, kwargs={"args": [child_id], **args_resolver.resolve_args(task)}, + ) + + +def process_ready_handler(sender, process_id, current_node_id=None, call_from_child=False, **kwargs): + + task = tasks.process_wake_up + args_resolver = CeleryTaskArgsResolver(process_id) + + with celery_task_send_fail_pass(): + ProcessCeleryTask.objects.start_task( + process_id=process_id, + task=task, + kwargs={"args": [process_id, current_node_id, call_from_child], **args_resolver.resolve_args(task)}, + ) + + +def batch_process_ready_handler(sender, process_id_list, pipeline_id, **kwargs): + + task = tasks.batch_wake_up + task_args = PipelineModel.objects.task_args_for_pipeline(pipeline_id) + priority = task_args["priority"] + queue = task_args["queue"] + + kwargs = { + "args": [process_id_list, pipeline_id], + "priority": priority, + } + if queue: + kwargs["routing_key"] = QueueResolver(queue).resolve_task_routing_key(task) + + with celery_task_send_fail_pass(): + with SendFailedCeleryTask.watch( + name=task.name, kwargs=kwargs, type=SendFailedCeleryTask.TASK_TYPE_EMPTY, extra_kwargs={}, + ): + task.apply_async(**kwargs) + + +def wake_from_schedule_handler(sender, process_id, activity_id, **kwargs): + + task 
= tasks.wake_from_schedule + args_resolver = CeleryTaskArgsResolver(process_id) + + with celery_task_send_fail_pass(): + ProcessCeleryTask.objects.start_task( + process_id=process_id, + task=task, + kwargs={"args": [process_id, activity_id], **args_resolver.resolve_args(task)}, + ) + + +def process_unfreeze_handler(sender, process_id, **kwargs): + task = tasks.process_unfreeze + args_resolver = CeleryTaskArgsResolver(process_id) + + with celery_task_send_fail_pass(): + ProcessCeleryTask.objects.start_task( + process_id=process_id, task=task, kwargs={"args": [process_id], **args_resolver.resolve_args(task)}, + ) + + +def schedule_ready_handler(sender, process_id, schedule_id, countdown, data_id=None, **kwargs): + task = tasks.service_schedule + args_resolver = CeleryTaskArgsResolver(process_id) + + with celery_task_send_fail_pass(): + ScheduleCeleryTask.objects.start_task( + schedule_id=schedule_id, + task=task, + kwargs={ + "args": [process_id, schedule_id, data_id], + "countdown": countdown, + **args_resolver.resolve_args(task), + }, + ) + + +def service_activity_timeout_monitor_start_handler(sender, node_id, version, root_pipeline_id, countdown, **kwargs): + NodeCeleryTask.objects.start_task( + node_id=node_id, + task=tasks.node_timeout_check, + kwargs={ + "args": [node_id, version, root_pipeline_id], + "countdown": countdown, + "priority": PipelineModel.objects.priority_for_pipeline(root_pipeline_id), + }, + ) + + +def service_activity_timeout_monitor_end_handler(sender, node_id, version, **kwargs): + NodeCeleryTask.objects.revoke(node_id) diff --git a/runtime/bamboo-pipeline/pipeline/engine/states.py b/runtime/bamboo-pipeline/pipeline/engine/states.py new file mode 100644 index 00000000..5d6d1ca3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/states.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.engine.utils import ConstantDict + +CREATED = "CREATED" +READY = "READY" +RUNNING = "RUNNING" +SUSPENDED = "SUSPENDED" +BLOCKED = "BLOCKED" +FINISHED = "FINISHED" +FAILED = "FAILED" +REVOKED = "REVOKED" +EXPIRED = "EXPIRED" + +ALL_STATES = frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED]) + +ARCHIVED_STATES = frozenset([FINISHED, FAILED, REVOKED]) +SLEEP_STATES = frozenset([SUSPENDED, REVOKED]) +CHILDREN_IGNORE_STATES = frozenset([BLOCKED]) + +_NODE_TRANSITION = ConstantDict( + { + READY: frozenset([RUNNING, SUSPENDED]), + RUNNING: frozenset([FINISHED, FAILED]), + SUSPENDED: frozenset([READY, REVOKED]), + BLOCKED: frozenset([]), + FINISHED: frozenset([RUNNING, FAILED]), + FAILED: frozenset([]), + REVOKED: frozenset([]), + } +) + +_PIPELINE_TRANSITION = ConstantDict( + { + READY: frozenset([RUNNING, SUSPENDED, BLOCKED]), + RUNNING: frozenset([SUSPENDED, BLOCKED, FINISHED, FAILED]), + SUSPENDED: frozenset([READY, REVOKED, BLOCKED]), + BLOCKED: frozenset([READY, REVOKED]), + FINISHED: frozenset([RUNNING]), + FAILED: frozenset([]), + REVOKED: frozenset([]), + } +) + +_APPOINT_PIPELINE_TRANSITION = ConstantDict( + { + READY: frozenset([SUSPENDED, REVOKED]), + RUNNING: frozenset([SUSPENDED, REVOKED]), + SUSPENDED: frozenset([READY, REVOKED, RUNNING]), + BLOCKED: frozenset([REVOKED]), + FINISHED: frozenset([]), + FAILED: frozenset([REVOKED]), + REVOKED: frozenset([]), + } +) + +_APPOINT_NODE_TRANSITION = ConstantDict( + { + READY: frozenset([SUSPENDED]), + RUNNING: frozenset([]), + SUSPENDED: frozenset([READY]), + BLOCKED: frozenset([]), + FINISHED: frozenset([]), + FAILED: frozenset([READY, FINISHED]), + REVOKED: frozenset([]), + } +) + +TRANSITION_MAP = { + # first level: is_pipeline + True: { + # second level: appoint + True: _APPOINT_PIPELINE_TRANSITION, + False: _PIPELINE_TRANSITION, + }, + False: {True: _APPOINT_NODE_TRANSITION, False: _NODE_TRANSITION}, +} + + +def can_transit(from_state, to_state, is_pipeline=False, appoint=False): + transition = TRANSITION_MAP[is_pipeline][appoint] + + if from_state in transition: + if to_state in transition[from_state]: + return True + return False + + +def is_rerunning(from_state, to_state): + return from_state == FINISHED and to_state == RUNNING diff --git a/runtime/bamboo-pipeline/pipeline/engine/tasks.py b/runtime/bamboo-pipeline/pipeline/engine/tasks.py new file mode 100644 index 00000000..c01511f7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/tasks.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +import datetime +from dateutil.relativedelta import relativedelta +from celery import task +from celery.schedules import crontab +from celery.task import periodic_task +from django.db import transaction, connection + +from pipeline.conf import default_settings +from pipeline.core.pipeline import Pipeline +from pipeline.engine import api, signals, states +from pipeline.engine.core import runtime, schedule +from pipeline.engine.health import zombie +from pipeline.engine.models import ( + NodeCeleryTask, + NodeRelationship, + PipelineProcess, + ProcessCeleryTask, + Status, + ScheduleService, + History, +) +from pipeline.models import PipelineInstance + +logger = logging.getLogger("celery") + + +@task(ignore_result=True) +def process_unfreeze(process_id): + process = PipelineProcess.objects.get(id=process_id) + if not process.is_alive: + logger.warning("process(%s) is not alive, mission cancel." % process_id) + return + + runtime.run_loop(process) + + +@task(ignore_result=True) +def start(process_id): + process = PipelineProcess.objects.get(id=process_id) + if not process.is_alive: + logger.warning("process(%s) is not alive, mission cancel." % process_id) + return + + pipeline_id = process.root_pipeline.id + # try to run + action_result = Status.objects.transit(pipeline_id, states.RUNNING, is_pipeline=True, start=True) + if not action_result.result: + logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) + return + + NodeRelationship.objects.build_relationship(pipeline_id, pipeline_id) + + runtime.run_loop(process) + + +@task(ignore_result=True) +def dispatch(child_id): + process = PipelineProcess.objects.get(id=child_id) + if not process.is_alive: + logger.info("process(%s) is not alive, mission cancel." % child_id) + return + + runtime.run_loop(process) + + +@task(ignore_result=True) +def process_wake_up(process_id, current_node_id=None, call_from_child=False): + process = PipelineProcess.objects.get(id=process_id) + if not process.is_alive: + logger.warning("process(%s) is not alive, mission cancel." % process_id) + return + + pipeline_id = process.root_pipeline.id + if not call_from_child: + # success_when_unchanged to deal with parallel wake up + action_result = Status.objects.transit( + pipeline_id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True + ) + if not action_result.result: + # BLOCKED is a tolerant running state + if action_result.extra.state != states.BLOCKED: + logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) + return + + process.wake_up() + if current_node_id: + process.current_node_id = current_node_id + + runtime.run_loop(process) + + +@task(ignore_result=True) +def wake_up(process_id): + process = PipelineProcess.objects.get(id=process_id) + if not process.is_alive: + logger.warning("process(%s) is not alive, mission cancel." 
% process_id) + return + + process.wake_up() + runtime.run_loop(process) + + +@task(ignore_result=True) +def batch_wake_up(process_id_list, pipeline_id): + action_result = Status.objects.transit(pipeline_id, to_state=states.RUNNING, is_pipeline=True) + if not action_result.result: + logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) + return + for process_id in process_id_list: + task_id = wake_up.apply_async(args=[process_id]).id + ProcessCeleryTask.objects.bind(process_id, task_id) + + +@task(ignore_result=True) +def wake_from_schedule(process_id, service_act_id): + process = PipelineProcess.objects.get(id=process_id) + process.wake_up() + + service_act = process.top_pipeline.node(service_act_id) + process.current_node_id = service_act.next().id + runtime.run_loop(process) + + +@task(ignore_result=True) +def service_schedule(process_id, schedule_id, data_id=None): + schedule.schedule(process_id, schedule_id, data_id) + + +@task(ignore_result=True) +def node_timeout_check(node_id, version, root_pipeline_id): + NodeCeleryTask.objects.destroy(node_id) + state = Status.objects.state_for(node_id, version=version, may_not_exist=True) + if not state or state != states.RUNNING: + logger.warning("node {} {} timeout kill failed, node not exist or not in running".format(node_id, version)) + return + + action_result = api.forced_fail(node_id, kill=True, ex_data="node execution timeout") + if action_result.result: + signals.activity_failed.send(sender=Pipeline, pipeline_id=root_pipeline_id, pipeline_activity_id=node_id) + else: + logger.warning("node {} - {} timeout kill failed".format(node_id, version)) + + +@periodic_task(run_every=(crontab(**default_settings.ENGINE_ZOMBIE_PROCESS_HEAL_CRON)), ignore_result=True) +def heal_zombie_process(): + logger.info("Zombie process heal start") + + healer = zombie.get_healer() + + try: + healer.heal() + except Exception: + logger.exception("An error occurred when healing zombies") + + logger.info("Zombie process heal finish") + + +@periodic_task(run_every=(crontab(**default_settings.EXPIRED_TASK_CLEAN_CRON)), ignore_result=True) +def expired_tasks_clean(): + if not default_settings.EXPIRED_TASK_CLEAN: + logger.info("EXPIRED_TASK_CLEAN switch off, won't clean expired tasks.") + return + timestamp = datetime.datetime.now().timestamp() + logger.info("Expired tasks clean start, timestamp: {}".format(timestamp)) + + expired_create_time = datetime.date.today() - relativedelta(months=default_settings.TASK_EXPIRED_MONTH) + pipeline_instance_ids = list( + PipelineInstance.objects.filter( + create_time__lte=expired_create_time, is_finished=True, is_revoked=False, is_expired=False + ) + .order_by("create_time") + .values_list("instance_id", flat=True)[: default_settings.EXPIRED_TASK_CLEAN_NUM_LIMIT] + ) + logger.info( + "Clean expired tasks before {} with tasks number: {}, instance ids: {}, timestamp: {}".format( + expired_create_time, len(pipeline_instance_ids), ",".join(pipeline_instance_ids), timestamp + ) + ) + + for instance_id in pipeline_instance_ids: + try: + logger.info("Clean expired task: {}, timestamp: {}".format(instance_id, timestamp)) + _clean_pipeline_instance_data(instance_id, timestamp) + except Exception as e: + logger.exception( + "An error occurred when clean expired task instance {}: {}, {}".format(instance_id, e, timestamp) + ) + + logger.info("Expired tasks clean finish, timestamp: {}".format(timestamp)) + + +def _clean_pipeline_instance_data(instance_id, timestamp): + """ + 根据instance_id删除对应的任务数据 
+ """ + process_nodes = list( + set(NodeRelationship.objects.filter(ancestor_id=instance_id).values_list("descendant_id", flat=True)) + ) + process_nodes = [process_node for process_node in process_nodes if process_node] + process_nodes_regex = "^" + "|^".join(process_nodes) if process_nodes else "" + pipeline_processes = PipelineProcess.objects.filter(root_pipeline_id=instance_id).values_list("id", "snapshot__id") + pipeline_process_ids, process_snapshot_ids = [], [] + for process_id, snapshot_id in pipeline_processes: + if process_id: + pipeline_process_ids.append(process_id) + if snapshot_id: + process_snapshot_ids.append(snapshot_id) + + delete_subprocess_relationship = ( + "DELETE FROM `engine_subprocessrelationship` WHERE `engine_subprocessrelationship`.`process_id` IN (%s)" + ) + delete_process_snapshot = "DELETE FROM `engine_processsnapshot` WHERE `engine_processsnapshot`.`id` IN (%s)" + delete_pipeline_model = "DELETE FROM `engine_pipelinemodel` WHERE `engine_pipelinemodel`.`process_id` IN (%s)" + delete_process_celery_task = ( + "DELETE FROM `engine_processcelerytask` WHERE `engine_processcelerytask`.`process_id` IN (%s)" + ) + schedule_service_ids = list( + ScheduleService.objects.filter(process_id__in=pipeline_process_ids).values_list("id", flat=True) + ) + schedule_service_ids = [schedule_service_id for schedule_service_id in schedule_service_ids if schedule_service_id] + delete_schedule_service = "DELETE FROM `engine_scheduleservice` WHERE `engine_scheduleservice`.`process_id` IN (%s)" + delete_multi_callback_data = ( + "DELETE FROM `engine_multicallbackdata` WHERE `engine_multicallbackdata`.`schedule_id` IN (%s)" + ) + delete_node_relationship = ( + "DELETE FROM `engine_noderelationship` " + "WHERE (`engine_noderelationship`.`ancestor_id` IN (%s) " + "OR `engine_noderelationship`.`descendant_id` IN (%s)) " + ) + delete_node_celery_tasks = "DELETE FROM `engine_nodecelerytask` " "WHERE `engine_nodecelerytask`.`node_id` IN (%s)" + delete_status = "DELETE FROM `engine_status` WHERE `engine_status`.`id` IN (%s)" + delete_data = "DELETE FROM `engine_data` WHERE `engine_data`.`id` IN (%s)" + delete_datasnapshot = "DELETE FROM `engine_datasnapshot` WHERE `engine_datasnapshot`.`key` REGEXP %s" + delete_schedule_celery_task = ( + "DELETE FROM `engine_schedulecelerytask`" "WHERE `engine_schedulecelerytask`.`schedule_id` REGEXP %s" + ) + history_data_ids = list( + History.objects.filter(identifier__in=process_nodes).only("data").values_list("data__id", flat=True) + ) + delete_history = "DELETE FROM `engine_history` WHERE `engine_history`.`identifier` IN (%s)" + delete_history_data = "DELETE FROM `engine_historydata` WHERE `engine_historydata`.`id` IN (%s)" + delete_pipeline_process = ( + "DELETE FROM `engine_pipelineprocess` " "WHERE `engine_pipelineprocess`.`root_pipeline_id` = %s" + ) + with transaction.atomic(): + with connection.cursor() as cursor: + if pipeline_process_ids: + process_fs = _sql_format_strings(pipeline_process_ids) + _raw_sql_execute(cursor, delete_subprocess_relationship % process_fs, pipeline_process_ids, timestamp) + _raw_sql_execute(cursor, delete_pipeline_model % process_fs, pipeline_process_ids, timestamp) + _raw_sql_execute(cursor, delete_process_celery_task % process_fs, pipeline_process_ids, timestamp) + _raw_sql_execute(cursor, delete_schedule_service % process_fs, pipeline_process_ids, timestamp) + if process_snapshot_ids: + snapshot_fd = _sql_format_strings(process_snapshot_ids) + _raw_sql_execute(cursor, delete_process_snapshot % snapshot_fd, 
process_snapshot_ids, timestamp) + if schedule_service_ids: + service_fd = _sql_format_strings(schedule_service_ids) + _raw_sql_execute(cursor, delete_multi_callback_data % service_fd, schedule_service_ids, timestamp) + if process_nodes: + node_fs = _sql_format_strings(process_nodes) + _raw_sql_execute( + cursor, delete_node_relationship % (node_fs, node_fs), process_nodes + process_nodes, timestamp + ) + _raw_sql_execute(cursor, delete_node_celery_tasks % node_fs, process_nodes, timestamp) + _raw_sql_execute(cursor, delete_status % node_fs, process_nodes, timestamp) + _raw_sql_execute(cursor, delete_data % node_fs, process_nodes, timestamp) + _raw_sql_execute(cursor, delete_history % node_fs, process_nodes, timestamp) + if process_nodes_regex: + _raw_sql_execute(cursor, delete_datasnapshot, [process_nodes_regex], timestamp) + _raw_sql_execute(cursor, delete_schedule_celery_task, [process_nodes_regex], timestamp) + if history_data_ids: + history_fs = _sql_format_strings(history_data_ids) + _raw_sql_execute(cursor, delete_history_data % history_fs, history_data_ids, timestamp) + _raw_sql_execute(cursor, delete_pipeline_process, [instance_id], timestamp) + PipelineInstance.objects.filter(instance_id=instance_id).update(is_expired=True) + + +def _sql_log(sql, params, timestamp): + if isinstance(params, list): + logger.info("[execute raw sql]: {}, timestamp: {}".format(sql % tuple(params), timestamp)) + else: + logger.info("[execute raw sql]: {}, timestamp: {}".format(sql % params, timestamp)) + + +def _sql_format_strings(list_data): + return ",".join(["%s"] * len(list_data)) + + +def _raw_sql_execute(cursor, sql, params, timestamp): + _sql_log(sql, params, timestamp) + cursor.execute(sql, params) diff --git a/runtime/bamboo-pipeline/pipeline/engine/utils.py b/runtime/bamboo-pipeline/pipeline/engine/utils.py new file mode 100644 index 00000000..daec04f8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/engine/utils.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.utils import timezone + + +class Stack(list): + def top(self): + return self[len(self) - 1] + + def push(self, item): + self.append(item) + + +class ConstantDict(dict): + """ConstantDict is a subclass of :class:`dict`, implementing __setitem__ + method to avoid item assignment:: + + >>> d = ConstantDict({'key': 'value'}) + >>> d['key'] = 'value' + Traceback (most recent call last): + ... 
+ TypeError: 'ConstantDict' object does not support item assignment + """ + + def __setitem__(self, key, value): + raise TypeError("'%s' object does not support item assignment" % self.__class__.__name__) + + +def calculate_elapsed_time(started_time, archived_time): + """ + @summary: 计算节点耗时 + @param started_time: 执行开始时间 + @param archived_time: 执行结束时间 + @return: + """ + if archived_time and started_time: + elapsed_time = (archived_time - started_time).total_seconds() + elif started_time: + elapsed_time = (timezone.now() - started_time).total_seconds() + else: + elapsed_time = 0 + return round(elapsed_time) + + +class ActionResult(object): + def __init__(self, result, message, extra=None): + self.result = result + self.message = message + self.extra = extra diff --git a/runtime/bamboo-pipeline/pipeline/eri/__init__.py b/runtime/bamboo-pipeline/pipeline/eri/__init__.py new file mode 100644 index 00000000..a95c0e4a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/__init__.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +default_app_config = "pipeline.eri.apps.ERIConfig" diff --git a/runtime/bamboo-pipeline/pipeline/eri/admin.py b/runtime/bamboo-pipeline/pipeline/eri/admin.py new file mode 100644 index 00000000..f93d93a8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/admin.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.contrib import admin + +from pipeline.eri import models + + +@admin.register(models.Process) +class ProcessAdmin(admin.ModelAdmin): + list_display = ["id", "parent_id", "ack_num", "need_ack", "asleep", "suspended", "frozen", "dead"] + search_fields = ["id___exact", "parent_id____exact", "current_node_id____exact", "suspended_by____exact"] + + +@admin.register(models.Node) +class NodeAdmin(admin.ModelAdmin): + list_display = ["id", "node_id"] + search_fields = ["node_id__exact"] + + +@admin.register(models.State) +class StateAdmin(admin.ModelAdmin): + list_display = [ + "id", + "node_id", + "root_id", + "parent_id", + "name", + "version", + "loop", + "created_time", + "started_time", + "archived_time", + ] + search_fields = ["node_id__exact", "root_id__exact", "parent_id__exact"] + + +@admin.register(models.Schedule) +class ScheduleAdmin(admin.ModelAdmin): + list_display = ["id", "type", "process_id", "node_id", "finished", "expired", "version", "schedule_times"] + search_fields = ["id__exact", "node_id__exact"] + + +@admin.register(models.Data) +class DataAdmin(admin.ModelAdmin): + list_display = ["id", "node_id"] + search_fields = ["node_id__exact"] + + +@admin.register(models.ExecutionData) +class ExecutionDataAdmin(admin.ModelAdmin): + list_display = ["id", "node_id"] + search_fields = ["node_id__exact"] + + +@admin.register(models.CallbackData) +class CallbackDataAdmin(admin.ModelAdmin): + list_display = ["id", "node_id", "version", "data"] + search_fields = ["id__exact"] + + +@admin.register(models.ContextValue) +class ContextValueAdmin(admin.ModelAdmin): + list_display = ["id", "pipeline_id", "key", "type", "serializer", "value"] + search_fields = ["pipeline_id__exact"] + + +@admin.register(models.ContextOutputs) +class ContextOutputsAdmin(admin.ModelAdmin): + list_display = ["id", "pipeline_id", "outputs"] + search_fields = ["pipeline_id__exact"] + + +@admin.register(models.ExecutionHistory) +class ExecutionHistoryAdmin(admin.ModelAdmin): + list_display = ["id", "node_id", "loop", "started_time", "archived_time"] + search_fields = ["node_id__exact"] + + +@admin.register(models.LogEntry) +class LogEntryAdmin(admin.ModelAdmin): + list_display = ["id", "node_id", "version", "level_name", "message", "logged_at"] + search_fields = ["node_id__exact"] diff --git a/runtime/bamboo-pipeline/pipeline/eri/apps.py b/runtime/bamboo-pipeline/pipeline/eri/apps.py new file mode 100644 index 00000000..fa2c2a37 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/apps.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.apps import AppConfig + +from bamboo_engine.handlers import register + + +class ERIConfig(AppConfig): + name = "pipeline.eri" + verbose_name = "PipelineEngineRuntimeInterface" + + def ready(self): + from .celery.tasks import execute, schedule, timeout_check # noqa + + register() diff --git a/runtime/bamboo-pipeline/pipeline/eri/celery/__init__.py b/runtime/bamboo-pipeline/pipeline/eri/celery/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/celery/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/eri/celery/queues.py b/runtime/bamboo-pipeline/pipeline/eri/celery/queues.py new file mode 100644 index 00000000..261a4bfc --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/celery/queues.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from typing import Any, List + +from kombu import Exchange, Queue + + +class QueueResolver: + def __init__(self, queue: str): + self.queue = queue + + def resolve_task_queue_and_routing_key(self, task: Any) -> (str, str): + task_name = task + if not isinstance(task_name, str): + task_name = task.name + + queue_config = self.routes_config() + return queue_config[task_name]["queue"], queue_config[task_name]["routing_key"] + + def routes_config(self) -> dict: + suffix = "_%s" % self.queue if self.queue else "" + return { + "pipeline.eri.celery.tasks.execute": { + "queue": "er_execute%s" % suffix, + "routing_key": "er_execute%s" % suffix, + }, + "pipeline.eri.celery.tasks.schedule": { + "queue": "er_schedule%s" % suffix, + "routing_key": "er_schedule%s" % suffix, + }, + "pipeline.eri.celery.tasks.timeout_check": { + "queue": "er_timeout%s" % suffix, + "routing_key": "er_timeout%s" % suffix, + }, + } + + def queues(self) -> List[Queue]: + exchange = Exchange("default", type="direct") + return [ + Queue(queue_config["queue"], exchange, routing_key=queue_config["routing_key"], max_priority=255) + for queue_config in self.routes_config().values() + ] + + +CELERY_QUEUES = QueueResolver("").queues() diff --git a/runtime/bamboo-pipeline/pipeline/eri/celery/step.py b/runtime/bamboo-pipeline/pipeline/eri/celery/step.py new file mode 100644 index 00000000..976949be --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/celery/step.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from celery.bootsteps import StartStopStep +from prometheus_client import start_http_server + + +class PromServerStep(StartStopStep): + requires = {"celery.worker.components:Timer"} + port = 8001 # default port + + def start(self, worker): + start_http_server(self.port) diff --git a/runtime/bamboo-pipeline/pipeline/eri/celery/tasks.py b/runtime/bamboo-pipeline/pipeline/eri/celery/tasks.py new file mode 100644 index 00000000..529809ea --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/celery/tasks.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +from typing import Optional + +from celery import task + +from bamboo_engine import states +from bamboo_engine.engine import Engine + +from pipeline.eri.runtime import BambooDjangoRuntime + + +@task(ignore_result=True) +def execute(process_id: int, node_id: str): + runtime = BambooDjangoRuntime() + Engine(runtime).execute(process_id=process_id, node_id=node_id) + + +@task(ignore_result=True) +def schedule(process_id: int, node_id: str, schedule_id: str, callback_data_id: Optional[int]): + runtime = BambooDjangoRuntime() + Engine(runtime).schedule( + process_id=process_id, node_id=node_id, schedule_id=schedule_id, callback_data_id=callback_data_id + ) + + +@task(ignore_result=True) +def timeout_check(self, process_id: int, node_id: str, version: str): + runtime = BambooDjangoRuntime() + state = runtime.get_state(node_id=node_id) + if state.name == states.RUNNING and state.version == version: + Engine(runtime).forced_fail_activity(node_id=node_id, ex_data="timeout kill") diff --git a/runtime/bamboo-pipeline/pipeline/eri/codec.py b/runtime/bamboo-pipeline/pipeline/eri/codec.py new file mode 100644 index 00000000..ebb87a42 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/codec.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import json + +from typing import Any + +from django.utils.module_loading import import_string + +DATA_JSON_ENCODER_PATH = None +DATA_JSON_OBJECT_HOOK_PATH = None + +_LOCAL = {} + + +def _get_local(key, path): + if key in _LOCAL: + return _LOCAL[key] + + try: + _LOCAL[key] = import_string(path) + except ImportError: + _LOCAL[key] = None + + return _LOCAL[key] + + +def _get_data_json_encoder(): + if not DATA_JSON_ENCODER_PATH: + return None + return _get_local("data_json_encoder", DATA_JSON_ENCODER_PATH) + + +def _get_data_json_object_hook(): + if not DATA_JSON_OBJECT_HOOK_PATH: + return None + return _get_local("data_json_object_hook", DATA_JSON_OBJECT_HOOK_PATH) + + +def data_json_loads(data: str) -> Any: + return json.loads(data, object_hook=_get_data_json_object_hook()) + + +def data_json_dumps(data: Any) -> str: + return json.dumps(data, cls=_get_data_json_encoder()) diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py b/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py b/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py
new file mode 100644
index 00000000..40097292
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/__init__.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/context.py b/runtime/bamboo-pipeline/pipeline/eri/imp/context.py
new file mode 100644
index 00000000..c13a687e
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/context.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import json
+from typing import Dict, List, Set
+
+from django.db import transaction
+
+from bamboo_engine import metrics
+from bamboo_engine.eri import ContextValue, ContextValueType
+
+from pipeline.eri.models import ContextValue as DBContextValue
+from pipeline.eri.models import ContextOutputs
+from pipeline.eri.imp.serializer import SerializerMixin
+
+
+class ContextMixin(SerializerMixin):
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME)
+    def get_context_values(self, pipeline_id: str, keys: set) -> List[ContextValue]:
+        """
+        获取某个流程上下文中的 keys 所指定的键对应变量的值
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        :param keys: 变量键
+        :type keys: set
+        :return: 变量值信息
+        :rtype: List[ContextValue]
+        """
+        qs = DBContextValue.objects.filter(pipeline_id=pipeline_id, key__in=keys).only(
+            "key", "type", "serializer", "value", "code"
+        )
+
+        return [
+            ContextValue(
+                key=cv_model.key,
+                type=ContextValueType(cv_model.type),
+                value=self._deserialize(cv_model.value, cv_model.serializer),
+                code=cv_model.code or None,
+            )
+            for cv_model in qs
+        ]
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_REF_READ_TIME)
+    def get_context_key_references(self, pipeline_id: str, keys: set) -> set:
+        """
+        获取某个流程上下文中 keys 所指定的变量直接和间接引用的其他所有变量的键
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        :param keys: 变量 key 列表
+        :type keys: set
+        :return: keys 所指定的变量直接和间接引用的其他所有变量的键
+        :rtype: set
+        """
+        qs = DBContextValue.objects.filter(pipeline_id=pipeline_id, key__in=keys).only("references")
+
+        references = []
+        for cv_model in qs:
+            references.extend(json.loads(cv_model.references))
+
+        return set(references)
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_UPSERT_TIME)
+    @transaction.atomic
+    def upsert_plain_context_values(self, pipeline_id: str, update: Dict[str, ContextValue]):
+        """
+        更新或创建新的普通上下文数据
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        :param update: 更新数据
+        :type update: Dict[str, ContextValue]
+        """
+        exist_keys = DBContextValue.objects.filter(pipeline_id=pipeline_id).values_list("key", flat=True)
+        update_keys = set(update.keys()).intersection(exist_keys)
+
+        # update
+        for k in update_keys:
+            context_value = update[k]
+            value, serializer = self._serialize(context_value.value)
+
+            DBContextValue.objects.filter(pipeline_id=pipeline_id, key=k).update(
+                type=ContextValueType.PLAIN.value, value=value, serializer=serializer, code="", references="[]",
+            )
+
+        # insert
+        insert_keys = set(update.keys()).difference(exist_keys)
+        context_value_models = []
+        for k in insert_keys:
+            context_value = update[k]
+            value, serializer = self._serialize(context_value.value)
+
+            context_value_models.append(
+                DBContextValue(
+                    pipeline_id=pipeline_id,
+                    key=context_value.key,
+                    type=ContextValueType.PLAIN.value,
+                    serializer=serializer,
+                    value=value,
+                    code="",
+                    references="[]",
+                )
+            )
+
+        DBContextValue.objects.bulk_create(context_value_models, batch_size=500)
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME)
+    def get_context(self, pipeline_id: str) -> List[ContextValue]:
+        """
+        获取某个流程的所有上下文数据
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        :return: 上下文数据列表
+        :rtype: List[ContextValue]
+        """
+        qs = DBContextValue.objects.filter(pipeline_id=pipeline_id).only("key", "type", "serializer", "value", "code")
+
+        return [
+            ContextValue(
+                key=cv_model.key,
+                type=ContextValueType(cv_model.type),
+                value=self._deserialize(cv_model.value, cv_model.serializer),
+                code=cv_model.code or None,
+            )
+            for cv_model in qs
+        ]
+
+    def get_context_outputs(self, pipeline_id: str) -> Set[str]:
+        """
+        获取流程上下文需要输出的数据
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        :return: 输出数据 key
+        :rtype: Set[str]
+        """
+        co_model = ContextOutputs.objects.get(pipeline_id=pipeline_id)
+        return set(json.loads(co_model.outputs))
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/data.py b/runtime/bamboo-pipeline/pipeline/eri/imp/data.py
new file mode 100644
index 00000000..a3da59bf
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/data.py
@@ -0,0 +1,254 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +import json +from typing import Dict + +from bamboo_engine import metrics, exceptions +from bamboo_engine.eri import Data, DataInput, ExecutionData, CallbackData + +from pipeline.eri import codec +from pipeline.eri.models import Data as DBData +from pipeline.eri.models import ExecutionData as DBExecutionData +from pipeline.eri.models import CallbackData as DBCallbackData +from pipeline.eri.imp.serializer import SerializerMixin + + +class DataMixin(SerializerMixin): + def _get_data_inputs(self, inputs: dict): + return {k: DataInput(need_render=v["need_render"], value=v["value"]) for k, v in inputs.items()} + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_READ_TIME) + def get_data(self, node_id: str) -> Data: + """ + 获取某个节点的数据对象 + + :param node_id: 节点 ID + :type node_id: str + :return: 数据对象实例 + :rtype: Data + """ + try: + data_model = DBData.objects.get(node_id=node_id) + except DBData.DoesNotExist: + raise exceptions.NotFoundError + return Data( + inputs=self._get_data_inputs(codec.data_json_loads(data_model.inputs)), + outputs=json.loads(data_model.outputs), + ) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_INPUTS_READ_TIME) + def get_data_inputs(self, node_id: str) -> Dict[str, DataInput]: + """ + 获取某个节点的输入数据 + + :param node_id: 节点 ID + :type node_id: str + :return: 输入数据字典 + :rtype: dict + """ + qs = DBData.objects.filter(node_id=node_id).only("inputs") + + if not qs: + raise exceptions.NotFoundError + + return self._get_data_inputs(codec.data_json_loads(qs[0].inputs)) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_OUTPUTS_READ_TIME) + def get_data_outputs(self, node_id: str) -> dict: + """ + 获取某个节点的输出数据 + + :param node_id: 节点 ID + :type node_id: str + :return: 输入数据字典 + :rtype: dict + """ + qs = DBData.objects.filter(node_id=node_id).only("outputs") + + if not qs: + raise exceptions.NotFoundError + + return json.loads(qs[0].outputs) + + def set_data_inputs(self, node_id: str, data: Dict[str, DataInput]): + """ + 将节点数据对象的 inputs 设置为 data + + : param node_id: 节点 ID + : type node_id: str + : param data: 目标数据 + : type data: dict + """ + inputs = codec.data_json_dumps({k: {"need_render": v.need_render, "value": v.value} for k, v in data.items()}) + if DBData.objects.filter(node_id=node_id).exists(): + DBData.objects.filter(node_id=node_id).update(inputs=inputs) + else: + DBData.objects.create(node_id=node_id, inputs=inputs, outputs="{}") + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_READ_TIME) + def get_execution_data(self, node_id: str) -> ExecutionData: + """ + 获取某个节点的执行数据 + + : param node_id: 节点 ID + : type node_id: str + : return: 执行数据实例 + : rtype: ExecutionData + """ + try: + data_model = DBExecutionData.objects.get(node_id=node_id) + except DBExecutionData.DoesNotExist: + raise exceptions.NotFoundError + return ExecutionData( + inputs=self._deserialize(data_model.inputs, data_model.inputs_serializer), + outputs=self._deserialize(data_model.outputs, data_model.outputs_serializer), + ) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_INPUTS_READ_TIME) + def get_execution_data_inputs(self, node_id: str) -> dict: + """ + 获取某个节点的执行数据输入 + + :param node_id: 节点 ID + :type node_id: str + :return: 执行数据输入 + :rtype: dict + """ + qs = DBExecutionData.objects.filter(node_id=node_id).only("inputs_serializer", "inputs") + + if not qs: + return {} + + return self._deserialize(qs[0].inputs, qs[0].inputs_serializer) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME) + def get_execution_data_outputs(self, 
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME)
+    def get_execution_data_outputs(self, node_id: str) -> dict:
+        """
+        获取某个节点的执行数据输出
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :return: 执行数据输出
+        :rtype: dict
+        """
+        qs = DBExecutionData.objects.filter(node_id=node_id).only("outputs_serializer", "outputs")
+
+        if not qs:
+            return {}
+
+        return self._deserialize(qs[0].outputs, qs[0].outputs_serializer)
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_WRITE_TIME)
+    def set_execution_data(self, node_id: str, data: ExecutionData):
+        """
+        设置某个节点的执行数据
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 执行数据实例
+        :type data: ExecutionData
+        """
+        inputs, inputs_serializer = self._serialize(data.inputs)
+        outputs, outputs_serializer = self._serialize(data.outputs)
+        if DBExecutionData.objects.filter(node_id=node_id).exists():
+            DBExecutionData.objects.filter(node_id=node_id).update(
+                inputs=inputs,
+                inputs_serializer=inputs_serializer,
+                outputs=outputs,
+                outputs_serializer=outputs_serializer,
+            )
+        else:
+            DBExecutionData.objects.create(
+                node_id=node_id,
+                inputs=inputs,
+                inputs_serializer=inputs_serializer,
+                outputs=outputs,
+                outputs_serializer=outputs_serializer,
+            )
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_INPUTS_WRITE_TIME)
+    def set_execution_data_inputs(self, node_id: str, inputs: dict):
+        """
+        设置某个节点的执行数据输入
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param inputs: 输入数据
+        :type inputs: dict
+        """
+        inputs, inputs_serializer = self._serialize(inputs)
+        if DBExecutionData.objects.filter(node_id=node_id).exists():
+            DBExecutionData.objects.filter(node_id=node_id).update(inputs=inputs, inputs_serializer=inputs_serializer)
+        else:
+            DBExecutionData.objects.create(
+                node_id=node_id,
+                inputs=inputs,
+                inputs_serializer=inputs_serializer,
+                outputs="{}",
+                outputs_serializer=self.JSON_SERIALIZER,
+            )
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_WRITE_TIME)
+    def set_execution_data_outputs(self, node_id: str, outputs: dict):
+        """
+        设置某个节点的执行数据输出
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param outputs: 输出数据
+        :type outputs: dict
+        """
+        outputs, outputs_serializer = self._serialize(outputs)
+        if DBExecutionData.objects.filter(node_id=node_id).exists():
+            DBExecutionData.objects.filter(node_id=node_id).update(
+                outputs=outputs, outputs_serializer=outputs_serializer
+            )
+        else:
+            DBExecutionData.objects.create(
+                node_id=node_id,
+                inputs="{}",
+                inputs_serializer=self.JSON_SERIALIZER,
+                outputs=outputs,
+                outputs_serializer=outputs_serializer,
+            )
+
+    def set_callback_data(self, node_id: str, version: str, data: dict) -> int:
+        """
+        设置某个节点执行数据的回调数据
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 节点执行版本
+        :type version: str
+        :param data: 回调数据
+        :type data: dict
+        :return: 回调数据 ID
+        :rtype: int
+        """
+        return DBCallbackData.objects.create(node_id=node_id, version=version, data=json.dumps(data)).id
+
+    @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CALLBACK_DATA_READ_TIME)
+    def get_callback_data(self, data_id: int) -> CallbackData:
+        """
+        获取回调数据
+
+        :param data_id: 回调数据 ID
+        :type data_id: int
+        :return: 回调数据实例
+        :rtype: CallbackData
+        """
+        data_model = DBCallbackData.objects.get(id=data_id)
+        return CallbackData(
+            id=data_model.id, node_id=data_model.node_id, version=data_model.version, data=json.loads(data_model.data)
+        )
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/executable_event.py b/runtime/bamboo-pipeline/pipeline/eri/imp/executable_event.py
new file mode 100644
index 00000000..b39411fb
--- /dev/null
+++ 
b/runtime/bamboo-pipeline/pipeline/eri/imp/executable_event.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from typing import List + +from bamboo_engine.eri import ExecutableEvent + +from pipeline.core.flow.event import ExecutableEndEvent + + +class ExecutableEndEventWrapper(ExecutableEvent): + def __init__(self, end_event: ExecutableEndEvent): + self.end_event = end_event + + def execute(self, pipeline_stack: List[str], root_pipeline_id: str): + """ + execute 逻辑 + + :param pipeline_stack: 流程栈 + :type pipeline_stack: List[str] + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + """ + in_subprocess = len(pipeline_stack) > 1 + current_pipeline_id = pipeline_stack[-1] + + return self.end_event.execute(in_subprocess, root_pipeline_id, current_pipeline_id) diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/execution_history.py b/runtime/bamboo-pipeline/pipeline/eri/imp/execution_history.py new file mode 100644 index 00000000..12988dee --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/execution_history.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from typing import List +from datetime import datetime + +from bamboo_engine.eri import ExecutionHistory, ExecutionShortHistory + +from pipeline.eri.models import ExecutionHistory as DBExecutionHistory +from pipeline.eri.imp.serializer import SerializerMixin + + +class ExecutionHistoryMixin(SerializerMixin): + def add_history( + self, + node_id: str, + started_time: datetime, + archived_time: datetime, + loop: int, + skip: bool, + retry: int, + version: str, + inputs: dict, + outputs: dict, + ) -> int: + """ + 为某个节点记录一次执行历史 + + : param node_id: 节点 ID + : type node_id: str + : param started_time: 开始时间 + : type started_time: datetime + : param archived_time: 归档时间 + : type archived_time: datetime + : param loop: 重入计数 + : type loop: int + : param skip: 是否跳过 + : type skip: bool + : param retry: 重试次数 + : type retry: int + : param version: 节点执行版本号 + : type version: str + : param inputs: 输入数据 + : type inputs: dict + : param outputs: 输出数据 + : type outputs: dict + """ + inputs, inputs_serializer = self._serialize(inputs) + outputs, outputs_serializer = self._serialize(outputs) + return DBExecutionHistory.objects.create( + node_id=node_id, + loop=loop, + retry=retry, + skip=skip, + version=version, + started_time=started_time, + archived_time=archived_time, + inputs=inputs, + inputs_serializer=inputs_serializer, + outputs=outputs, + outputs_serializer=outputs_serializer, + ).id + + def get_histories(self, node_id: str, loop: int = -1) -> List[ExecutionHistory]: + """ + 返回某个节点的历史记录 + + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 历史记录列表 + :rtype: List[History] + """ + fields = {"node_id": node_id} + if loop != -1: + fields["loop"] = loop + qs = DBExecutionHistory.objects.filter(**fields) + + return [ + ExecutionHistory( + id=model.id, + node_id=model.node_id, + started_time=model.started_time, + archived_time=model.archived_time, + loop=model.loop, + skip=model.skip, + retry=model.retry, + version=model.version, + inputs=self._deserialize(model.inputs, model.inputs_serializer), + outputs=self._deserialize(model.outputs, model.outputs_serializer), + ) + for model in qs + ] + + def get_short_histories(self, node_id: str, loop: int = -1) -> List[ExecutionShortHistory]: + """ + 返回某个节点的简要历史记录 + + :param node_id: 节点 ID + :type node_id: str + :param loop: 重入次数, -1 表示不过滤重入次数 + :type loop: int, optional + :return: 历史记录列表 + :rtype: List[ExecutionShortHistory] + """ + fields = {"node_id": node_id} + if loop != -1: + fields["loop"] = loop + qs = DBExecutionHistory.objects.filter(**fields).defer( + "inputs", "inputs_serializer", "outputs", "outputs_serializer" + ) + + return [ + ExecutionShortHistory( + id=model.id, + node_id=model.node_id, + started_time=model.started_time, + archived_time=model.archived_time, + loop=model.loop, + skip=model.skip, + retry=model.retry, + version=model.version, + ) + for model in qs + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py b/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py new file mode 100644 index 00000000..fcad885f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py b/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py
new file mode 100644
index 00000000..fcad885f
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/hooks.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from typing import Optional
+
+from pipeline.eri.models import LogEntry
+
+
+class HooksMixin:
+    def pre_prepare_run_pipeline(
+        self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options
+    ):
+        """
+        调用 prepare_run_pipeline 前执行的钩子
+
+        :param pipeline: 流程描述对象
+        :type pipeline: dict
+        :param root_pipeline_data: 根流程数据
+        :type root_pipeline_data: dict
+        :param root_pipeline_context: 根流程上下文
+        :type root_pipeline_context: dict
+        :param subprocess_context: 子流程预置流程上下文
+        :type subprocess_context: dict
+        """
+
+    def post_prepare_run_pipeline(
+        self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options
+    ):
+        """
+        调用 prepare_run_pipeline 后执行的钩子
+
+        :param pipeline: 流程描述对象
+        :type pipeline: dict
+        :param root_pipeline_data: 根流程数据
+        :type root_pipeline_data: dict
+        :param root_pipeline_context: 根流程上下文
+        :type root_pipeline_context: dict
+        :param subprocess_context: 子流程预置流程上下文
+        :type subprocess_context: dict
+        """
+
+    def pre_pause_pipeline(self, pipeline_id: str):
+        """
+        暂停 pipeline 前执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def post_pause_pipeline(self, pipeline_id: str):
+        """
+        暂停 pipeline 后执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def pre_revoke_pipeline(self, pipeline_id: str):
+        """
+        撤销 pipeline 前执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def post_revoke_pipeline(self, pipeline_id: str):
+        """
+        撤销 pipeline 后执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def pre_resume_pipeline(self, pipeline_id: str):
+        """
+        继续 pipeline 前执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def post_resume_pipeline(self, pipeline_id: str):
+        """
+        继续 pipeline 后执行的钩子
+
+        :param pipeline_id: 流程 ID
+        :type pipeline_id: str
+        """
+
+    def pre_resume_node(self, node_id: str):
+        """
+        继续节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def post_resume_node(self, node_id: str):
+        """
+        继续节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def pre_pause_node(self, node_id: str):
+        """
+        暂停节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def post_pause_node(self, node_id: str):
+        """
+        暂停节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def pre_retry_node(self, node_id: str, data: Optional[dict]):
+        """
+        重试节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 重试时使用的节点执行输入
+        :type data: Optional[dict]
+        """
+
+    def post_retry_node(self, node_id: str, data: Optional[dict]):
+        """
+        重试节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param data: 重试时使用的节点执行输入
+        :type data: Optional[dict]
+        """
+
+    def pre_skip_node(self, node_id: str):
+        """
+        跳过节点前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def post_skip_node(self, node_id: str):
+        """
+        跳过节点后执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        """
+
+    def pre_skip_exclusive_gateway(self, node_id: str, flow_id: str):
+        """
+        跳过分支网关前执行的钩子
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param flow_id: 跳过后选择的目标流 ID
+ :type flow_id: str + """ + + def post_skip_exclusive_gateway(self, node_id: str, flow_id: str): + """ + 跳过分支网关后执行的钩子 + + :param node_id: 节点 ID + :type node_id: str + :param flow_id: 跳过后选择的目标流 ID + :type flow_id: str + """ + + def pre_forced_fail_activity(self, node_id: str, ex_data: str): + """ + 强制失败节点前执行的钩子 + + :param node_id: 节点 ID + :type node_id: str + :param ex_data: 写入节点执行数据的失败信息 + :type ex_data: str + """ + + def post_forced_fail_activity(self, node_id: str, ex_data: str, old_version: str, new_version: str): + """ + 强制失败节点后执行的钩子 + + :param node_id: 节点 ID + :type node_id: str + :param ex_data: 写入节点执行数据的失败信息 + :type ex_data: str + :param old_version: 强制失败前的节点版本 + :type old_version: str + :param new_version: 强制失败后的节点版本 + :type new_version: str + """ + # 在强制失败刷新版本后更新已经记录的日志的版本 + LogEntry.objects.filter(node_id=node_id, version=old_version).update(version=new_version) + + def pre_callback(self, node_id: str, version: str, data: str): + """ + 回调节点前执行的钩子 + + :param node_id: 节点 ID + :type node_id: str + :param version: 节点执行版本 + :type version: str + :param data: 回调数据 + :type data: str + """ + + def post_callback(self, node_id: str, version: str, data: str): + """ + 回调节点后执行的钩子 + + :param node_id: 节点 ID + :type node_id: str + :param version: 节点执行版本 + :type version: str + :param data: 回调数据 + :type data: str + """ + + def pre_retry_subprocess(self, node_id: str): + """ + 子流程重试前执行的钩子 + + :param node_id: 子流程节点 ID + :type node_id: str + """ + + def post_retry_subprocess(self, node_id: str): + """ + 子流程重试后执行的钩子 + + :param node_id: 子流程节点 ID + :type node_id: str + """ diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/node.py b/runtime/bamboo-pipeline/pipeline/eri/imp/node.py new file mode 100644 index 00000000..8d2b5fbd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/node.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import json + +from bamboo_engine import metrics +from bamboo_engine.eri import ( + Node, + NodeType, + ServiceActivity, + SubProcess, + ExclusiveGateway, + ParallelGateway, + ConditionalParallelGateway, + ConvergeGateway, + EmptyStartEvent, + EmptyEndEvent, + ExecutableEndEvent, + Condition, +) + +from pipeline.eri.models import Node as DBNode + + +class NodeMixin: + def _get_node(self, node: DBNode): + node_detail = json.loads(node.detail) + node_type = node_detail["type"] + targets = node_detail["targets"] + common_args = dict( + id=node.node_id, + target_flows=list(targets.keys()), + target_nodes=list(targets.values()), + targets=node_detail["targets"], + root_pipeline_id=node_detail["root_pipeline_id"], + parent_pipeline_id=node_detail["parent_pipeline_id"], + can_skip=node_detail["can_skip"], + can_retry=node_detail["can_retry"], + ) + + if node_type == NodeType.ServiceActivity.value: + return ServiceActivity( + type=NodeType.ServiceActivity, + code=node_detail["code"], + version=node_detail["version"], + timeout=node_detail["timeout"], + error_ignorable=node_detail["error_ignorable"], + **common_args + ) + + elif node_type == NodeType.SubProcess.value: + return SubProcess(type=NodeType.SubProcess, start_event_id=node_detail["start_event_id"], **common_args) + + elif node_type == NodeType.ExclusiveGateway.value: + return ExclusiveGateway( + type=NodeType.ExclusiveGateway, + conditions=[Condition(**c) for c in node_detail["conditions"]], + **common_args + ) + + elif node_type == NodeType.ParallelGateway.value: + return ParallelGateway( + type=NodeType.ParallelGateway, converge_gateway_id=node_detail["converge_gateway_id"], **common_args + ) + + elif node_type == NodeType.ConditionalParallelGateway.value: + return ConditionalParallelGateway( + type=NodeType.ConditionalParallelGateway, + converge_gateway_id=node_detail["converge_gateway_id"], + conditions=[Condition(**c) for c in node_detail["conditions"]], + **common_args + ) + + elif node_type == NodeType.ConvergeGateway.value: + return ConvergeGateway(type=NodeType.ConvergeGateway, **common_args) + + elif node_type == NodeType.EmptyStartEvent.value: + return EmptyStartEvent(type=NodeType.EmptyStartEvent, **common_args) + + elif node_type == NodeType.EmptyEndEvent.value: + return EmptyEndEvent(type=NodeType.EmptyEndEvent, **common_args) + + elif node_type == NodeType.ExecutableEndEvent.value: + return ExecutableEndEvent(type=NodeType.ExecutableEndEvent, code=node_detail["code"], **common_args) + + else: + raise ValueError("unknown node type: {}".format(node_type)) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_NODE_READ_TIME) + def get_node(self, node_id: str) -> Node: + """ + 获取某个节点的详细信息 + + :param node_id: 节点 ID + :type node_id: str + :return: Node 实例 + :rtype: Node + """ + node = DBNode.objects.get(node_id=node_id) + return self._get_node(node) diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py b/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py new file mode 100644 index 00000000..0114691b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py b/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py
new file mode 100644
index 00000000..0114691b
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/plugin_manager.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from bamboo_engine.eri import Service, ExecutableEvent, Variable
+
+from pipeline.component_framework.library import ComponentLibrary
+from pipeline.core.flow import FlowNodeClsFactory
+from pipeline.core.data.library import VariableLibrary
+
+from pipeline.eri.imp.service import ServiceWrapper
+from pipeline.eri.imp.executable_event import ExecutableEndEventWrapper
+from pipeline.eri.imp.variable import VariableWrapper
+
+
+class PipelinePluginManagerMixin:
+    def get_service(self, code: str, version: str) -> Service:
+        """
+        根据代号与版本获取特定服务对象实例
+
+        :param code: 服务唯一代号
+        :type code: str
+        :param version: 服务版本
+        :type version: str
+        :return: 服务对象实例
+        :rtype: Service
+        """
+        comp_cls = ComponentLibrary.get_component_class(code, version)
+        service = comp_cls.bound_service()
+        return ServiceWrapper(service)
+
+    def get_executable_end_event(self, code: str) -> ExecutableEvent:
+        """
+        根据代号获取特定可执行结束事件实例
+
+        :param code: 可执行结束事件唯一代号
+        :type code: str
+        :return: 可执行结束事件实例
+        :rtype: ExecutableEvent
+        """
+        event_cls = FlowNodeClsFactory.get_node_cls(code)
+        event = event_cls(id=None)
+        return ExecutableEndEventWrapper(event)
+
+    def get_compute_variable(self, code: str, key: str, value: Variable, additional_data: dict) -> Variable:
+        """
+        根据代号获取变量实例
+
+        :param code: 唯一代号
+        :type code: str
+        :param key: 变量 key
+        :type key: str
+        :param value: 变量配置
+        :type value: Variable
+        :param additional_data: 额外数据字典
+        :type additional_data: dict
+        :return: 变量实例
+        :rtype: Variable
+        """
+        var_cls = VariableLibrary.get_var_class(code=code)
+        return VariableWrapper(original_value=value, var_cls=var_cls, additional_data=additional_data)
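这一组方法把 pipeline 的插件体系(组件、可执行结束事件、计算型变量)适配为 bamboo_engine 的 ERI 接口。以 README 中的示例组件为例(假设该组件已注册,版本号 `legacy` 为假设值):

```python
from pipeline.eri.runtime import BambooDjangoRuntime

runtime = BambooDjangoRuntime()

# 按 code + version 找到组件类,并把其绑定的 service 包装为 ERI Service
service = runtime.get_service(code="example_component", version="legacy")
print(service.need_schedule(), service.schedule_type())
```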
+""" + +import json +from typing import List, Optional, Dict + +from django.utils import timezone +from django.db.models import F + +from bamboo_engine import metrics +from bamboo_engine.eri import ProcessInfo, SuspendedProcessInfo, DispatchProcess + +from pipeline.eri.models import Process + + +class ProcessMixin: + def beat(self, process_id: int): + """ + 进程心跳 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(last_heartbeat=timezone.now()) + + def wake_up(self, process_id: int): + """ + 将当前进程标记为唤醒状态 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(asleep=False) + + def sleep(self, process_id: int): + """ + 将当前进程标记为睡眠状态 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(asleep=True) + + def suspend(self, process_id: int, by: str): + """ + 将当前进程标记为阻塞状态 + + :param process_id: 进程 ID + :type process_id: int + :param by: 造成阻塞的节点信息 + :type by: str + """ + Process.objects.filter(id=process_id).update(suspended=True, suspended_by=by) + + def resume(self, process_id: int): + """ + 将进程标记为非阻塞状态 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(suspended=False, suspended_by="") + + def batch_resume(self, process_id_list: List[int]): + """ + 批量将进程标记为非阻塞状态 + + :param process_id_list: 进程 ID 列表 + :type process_id_list: List[int] + """ + Process.objects.filter(id__in=process_id_list).update(suspended=False, suspended_by="") + + def die(self, process_id: int): + """ + 将当前进程标记为非存活状态 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(dead=True) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_PROCESS_READ_TIME) + def get_process_info(self, process_id: int) -> ProcessInfo: + """ + 获取某个进程的基本信息 + + :param process_id: 进程 ID + :type process_id: int + :return: 进程基本信息 + :rtype: ProcessInfo + """ + qs = Process.objects.filter(id=process_id).only( + "id", "destination_id", "root_pipeline_id", "pipeline_stack", "parent_id" + ) + + if len(qs) != 1: + raise Process.DoesNotExist("Process with id({}) does not exist".format(process_id)) + + process = qs[0] + return ProcessInfo( + process_id=process.id, + destination_id=process.destination_id, + root_pipeline_id=process.root_pipeline_id, + pipeline_stack=json.loads(process.pipeline_stack), + parent_id=process.parent_id, + ) + + def kill(self, process_id: int): + """ + 强制结束某个进程正在进行的活动,并将其标志为睡眠状态 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(asleep=True) + + def get_suspended_process_info(self, suspended_by: str) -> List[SuspendedProcessInfo]: + """ + 获取由于 pipeline 暂停而被暂停执行的进程信息 + + : param suspended_by: 进程 ID + : type suspended_by: str + : return: 暂停的进程信息 + : rtype: SuspendedProcessInfo + """ + qs = Process.objects.filter(suspended_by=suspended_by).only("id", "current_node_id") + + return [SuspendedProcessInfo(process_id=p.id, current_node=p.current_node_id) for p in qs] + + def get_sleep_process_with_current_node_id(self, node_id: str) -> Optional[str]: + """ + 获取由于处于睡眠状态且当前节点 ID 为 node_id 的进程 ID + + : param node_id: 节点 ID + : type node_id: str + : return: 进程 ID + : rtype: str + """ + qs = Process.objects.filter(asleep=True, current_node_id=node_id).only("id") + + if len(qs) == 0: + return None + + if len(qs) != 1: + raise ValueError("found multiple sleep process({}) with current_node_id({})".format(qs, node_id)) + + return qs[0].id + + 
def get_process_id_with_current_node_id(self, node_id: str) -> Optional[str]: + """ + 获取当前节点 ID 为 node_id 且存活的进程 ID + + : param node_id: 节点 ID + : type node_id: str + : return: 进程 ID + : rtype: str + """ + qs = Process.objects.filter(dead=False, current_node_id=node_id).only("id") + + if len(qs) == 0: + return None + + if len(qs) != 1: + raise ValueError("found multiple process({}) with current_node_id({})".format(qs, node_id)) + + return qs[0].id + + def set_current_node(self, process_id: int, node_id: str): + """ + 将进程当前处理节点标记为 node + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + """ + Process.objects.filter(id=process_id).update(current_node_id=node_id) + + def child_process_finish(self, parent_id: int, process_id: int) -> bool: + """ + 标记某个进程的子进程执行完成,并返回是否能够唤醒父进程继续执行的标志位 + + :param parent_id: 父进程 ID + :type parent_id: int + :param process_id: 子进程 ID + :type process_id: int + :return: 是否能够唤醒父进程继续执行 + :rtype: bool + """ + Process.objects.filter(id=process_id).update(dead=True) + + Process.objects.filter(id=parent_id).update(ack_num=F("ack_num") + 1) + + # compare(where) and set(update) + row = Process.objects.filter(id=parent_id, ack_num=F("need_ack")).update(ack_num=0, need_ack=-1) + + return row != 0 + + def is_frozen(self, process_id: int) -> bool: + """ + 检测当前进程是否需要被冻结 + + :param process_id: 进程 ID + :type process_id: int + :return: 是否需要被冻结 + :rtype: bool + """ + return Process.objects.filter(id=process_id, frozen=True).exists() + + def freeze(self, process_id: int): + """ + 冻结当前进程 + + :param process_id: 进程 ID + :type process_id: int + """ + Process.objects.filter(id=process_id).update(frozen=True) + + def fork( + self, parent_id: str, root_pipeline_id: str, pipeline_stack: List[str], from_to: Dict[str, str], + ) -> List[DispatchProcess]: + """ + 根据当前进程 fork 出多个子进程 + + :param parent_id: 父进程 ID + :type parent_id: str + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + :param pipeline_stack: 子流程栈 + :type pipeline_stack: List[str] + :param from_to: 子进程的执行开始节点和目标节点 + :type from_to: Dict[str, str] + :return: 待调度进程信息列表 + :rtype: List[DispatchProcess] + """ + qs = Process.objects.filter(id=parent_id).only("priority", "queue") + stack_json = json.dumps(pipeline_stack) + + if not qs: + raise Process.DoesNotExist("Process with id({}) does not exist".format(parent_id)) + + children = [ + Process( + parent_id=parent_id, + asleep=True, + destination_id=destination, + current_node_id=current_node, + root_pipeline_id=root_pipeline_id, + pipeline_stack=stack_json, + priority=qs[0].priority, + queue=qs[0].queue, + ) + for current_node, destination in from_to.items() + ] + + Process.objects.bulk_create(children, batch_size=500) + + qs = Process.objects.filter(parent_id=parent_id, dead=False).only("id", "current_node_id") + + children_count = len(qs) + expect = len(from_to) + if children_count != expect: + raise ValueError( + "process({}) fork failed, children count({}) does not match expect({})".format( + parent_id, children_count, expect + ) + ) + + return [DispatchProcess(process_id=p.id, node_id=p.current_node_id) for p in qs] + + def join(self, process_id: int, children_id: List[str]): + """ + 让父进程等待子进程 + + :param process_id: 父进程 ID + :type process_id: int + :param children_id: 子进程 ID 列表 + :type children_id: List[str] + """ + Process.objects.filter(id=process_id).update(ack_num=0, need_ack=len(children_id)) + + def set_pipeline_stack(self, process_id: int, stack: List[str]): + """ + 设置进程的流程栈 + + :param process_id: 进程 ID + :type 
process_id: int + :param stack: 流程栈 + :type stack: List[str] + """ + Process.objects.filter(id=process_id).update(pipeline_stack=json.dumps(stack)) + + def get_process_info_with_root_pipeline(self, pipeline_id: str) -> List[ProcessInfo]: + """ + 根据根流程 ID 获取一批进程的信息 + + :param pipeline_id: 流程 ID + :type pipeline_id: str + :return: 进程基本信息 + :rtype: List[ProcessInfo] + """ + qs = Process.objects.filter(root_pipeline_id=pipeline_id).only( + "id", "destination_id", "root_pipeline_id", "pipeline_stack", "parent_id" + ) + + return [ + ProcessInfo( + process_id=process.id, + destination_id=process.destination_id, + root_pipeline_id=process.root_pipeline_id, + pipeline_stack=json.loads(process.pipeline_stack), + parent_id=process.parent_id, + ) + for process in qs + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/schedule.py b/runtime/bamboo-pipeline/pipeline/eri/imp/schedule.py new file mode 100644 index 00000000..af88230c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/schedule.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db.models import F + +from bamboo_engine import metrics +from bamboo_engine.eri import Schedule, ScheduleType + +from pipeline.eri.models import Schedule as DBSchedule + + +class ScheduleMixin: + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_SCHEDULE_WRITE_TIME) + def set_schedule(self, process_id: int, node_id: str, version: str, schedule_type: ScheduleType) -> Schedule: + """ + 设置 schedule 对象 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + :param schedule_type: 调度类型 + :type schedule_type: ScheduleType + :return: 调度对象实例 + :rtype: Schedule + """ + schedule_model = DBSchedule.objects.create( + process_id=process_id, node_id=node_id, type=schedule_type.value, version=version + ) + return Schedule( + id=schedule_model.id, + type=schedule_type, + process_id=schedule_model.process_id, + node_id=schedule_model.node_id, + finished=schedule_model.finished, + expired=schedule_model.expired, + version=schedule_model.version, + times=schedule_model.schedule_times, + ) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_SCHEDULE_READ_TIME) + def get_schedule(self, schedule_id: str) -> Schedule: + """ + 获取 Schedule 对象 + + :param schedule_id: 调度实例 ID + :type schedule_id: str + :return: Schedule 对象实例 + :rtype: Schedule + """ + schedule_model = DBSchedule.objects.get(id=schedule_id) + + return Schedule( + id=schedule_model.id, + type=ScheduleType(schedule_model.type), + process_id=schedule_model.process_id, + node_id=schedule_model.node_id, + finished=schedule_model.finished, + expired=schedule_model.expired, + version=schedule_model.version, + times=schedule_model.schedule_times, + ) + + def get_schedule_with_node_and_version(self, node_id: str, version: 
str) -> Schedule:
+        """
+        通过节点 ID 和执行版本来获取 Schedule 对象
+
+        :param node_id: 节点 ID
+        :type node_id: str
+        :param version: 执行版本
+        :type version: str
+        :return: Schedule 对象
+        :rtype: Schedule
+        """
+        schedule_model = DBSchedule.objects.get(node_id=node_id, version=version)
+
+        return Schedule(
+            id=schedule_model.id,
+            type=ScheduleType(schedule_model.type),
+            process_id=schedule_model.process_id,
+            node_id=schedule_model.node_id,
+            finished=schedule_model.finished,
+            expired=schedule_model.expired,
+            version=schedule_model.version,
+            times=schedule_model.schedule_times,
+        )
+
+    def apply_schedule_lock(self, schedule_id: str) -> bool:
+        """
+        获取 Schedule 对象的调度锁,返回是否成功获取锁
+
+        :param schedule_id: 调度实例 ID
+        :type schedule_id: str
+        :return: 是否成功获取锁
+        :rtype: bool
+        """
+        return DBSchedule.objects.filter(id=schedule_id, scheduling=False).update(scheduling=True) == 1
+
+    def release_schedule_lock(self, schedule_id: int):
+        """
+        释放指定 Schedule 的调度锁
+
+        :param schedule_id: Schedule ID
+        :type schedule_id: int
+        """
+        DBSchedule.objects.filter(id=schedule_id, scheduling=True).update(scheduling=False)
+
+    def expire_schedule(self, schedule_id: int):
+        """
+        将某个 Schedule 对象标记为已过期
+
+        :param schedule_id: 调度实例 ID
+        :type schedule_id: int
+        """
+        DBSchedule.objects.filter(id=schedule_id).update(expired=True)
+
+    def finish_schedule(self, schedule_id: int):
+        """
+        将某个 Schedule 对象标记为已完成
+
+        :param schedule_id: 调度实例 ID
+        :type schedule_id: int
+        """
+        DBSchedule.objects.filter(id=schedule_id).update(finished=True)
+
+    def add_schedule_times(self, schedule_id: int):
+        """
+        将某个 Schedule 对象的调度次数 +1
+
+        :param schedule_id: 调度实例 ID
+        :type schedule_id: int
+        """
+        DBSchedule.objects.filter(id=schedule_id).update(schedule_times=F("schedule_times") + 1)
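调度锁基于 `scheduling` 字段的 compare-and-set:同一时刻只有一个 worker 能拿到某个 Schedule 的锁。最小示意如下(各 ID 均为占位值):

```python
from bamboo_engine.eri import ScheduleType

from pipeline.eri.runtime import BambooDjangoRuntime

runtime = BambooDjangoRuntime()

schedule = runtime.set_schedule(
    process_id=1, node_id="node_demo", version="v_demo", schedule_type=ScheduleType.POLL,
)

assert runtime.apply_schedule_lock(schedule.id) is True   # 第一次抢锁成功
assert runtime.apply_schedule_lock(schedule.id) is False  # 未释放前再次抢锁失败

runtime.add_schedule_times(schedule.id)
runtime.release_schedule_lock(schedule.id)
```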
+""" + +import json +import pickle +import codecs +from typing import Any + + +class SerializerMixin: + JSON_SERIALIZER = "json" + PICKLE_SERIALIZER = "pickle" + + def _deserialize(self, data: str, serializer: str) -> Any: + if serializer == self.JSON_SERIALIZER: + return json.loads(data) + elif serializer == self.PICKLE_SERIALIZER: + return pickle.loads(codecs.decode(data.encode(), "base64")) + else: + raise ValueError("unsupport serializer type: {}".format(serializer)) + + def _serialize(self, data: Any) -> (str, str): + try: + return json.dumps(data), self.JSON_SERIALIZER + except TypeError: + return codecs.encode(pickle.dumps(data), "base64").decode(), self.PICKLE_SERIALIZER diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/service.py b/runtime/bamboo-pipeline/pipeline/eri/imp/service.py new file mode 100644 index 00000000..a474cffe --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/service.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from typing import Optional + +from bamboo_engine.eri import Service as ServiceInterface +from bamboo_engine.eri import Schedule, ExecutionData, CallbackData, ScheduleType + +from pipeline.core.flow.activity import Service +from pipeline.core.data.base import DataObject +from pipeline.eri.log import get_logger + + +class ServiceWrapper(ServiceInterface): + def __init__(self, service: Service): + self.service = service + + def pre_execute(self, data: ExecutionData, root_pipeline_data: ExecutionData): + """ + execute 执行前执行的逻辑 + + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + """ + pre_execute = getattr(self.service, "pre_execute", None) + if callable(pre_execute): + return pre_execute(DataObject(inputs=data.inputs, outputs=data.outputs)) + + def execute(self, data: ExecutionData, root_pipeline_data: ExecutionData) -> bool: + """ + execute 逻辑 + + :param data: 节点执行数据 + :type data: ExecutionData + :param root_pipeline_data: 根流程执行数据 + :type root_pipeline_data: ExecutionData + :return: 是否执行成功 + :rtype: bool + """ + data_obj = DataObject(inputs=data.inputs, outputs=data.outputs) + parent_data_obj = DataObject(inputs=root_pipeline_data.inputs, outputs=root_pipeline_data.outputs) + + try: + execute_res = self.service.execute(data_obj, parent_data_obj) + finally: + # sync data object modification to execution data + data.inputs = data_obj.inputs + data.outputs = data_obj.outputs + + if execute_res is None: + execute_res = True + + return execute_res + + def schedule( + self, + schedule: Schedule, + data: ExecutionData, + root_pipeline_data: ExecutionData, + callback_data: Optional[CallbackData] = None, + ) -> bool: + """ + schedule 逻辑 + + :param schedule: Schedule 对象 + :type schedule: Schedule + :param data: 节点执行数据 + :type data: ExecutionData + :param 
diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/service.py b/runtime/bamboo-pipeline/pipeline/eri/imp/service.py
new file mode 100644
index 00000000..a474cffe
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/eri/imp/service.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from typing import Optional
+
+from bamboo_engine.eri import Service as ServiceInterface
+from bamboo_engine.eri import Schedule, ExecutionData, CallbackData, ScheduleType
+
+from pipeline.core.flow.activity import Service
+from pipeline.core.data.base import DataObject
+from pipeline.eri.log import get_logger
+
+
+class ServiceWrapper(ServiceInterface):
+    def __init__(self, service: Service):
+        self.service = service
+
+    def pre_execute(self, data: ExecutionData, root_pipeline_data: ExecutionData):
+        """
+        execute 执行前执行的逻辑
+
+        :param data: 节点执行数据
+        :type data: ExecutionData
+        :param root_pipeline_data: 根流程执行数据
+        :type root_pipeline_data: ExecutionData
+        """
+        pre_execute = getattr(self.service, "pre_execute", None)
+        if callable(pre_execute):
+            return pre_execute(DataObject(inputs=data.inputs, outputs=data.outputs))
+
+    def execute(self, data: ExecutionData, root_pipeline_data: ExecutionData) -> bool:
+        """
+        execute 逻辑
+
+        :param data: 节点执行数据
+        :type data: ExecutionData
+        :param root_pipeline_data: 根流程执行数据
+        :type root_pipeline_data: ExecutionData
+        :return: 是否执行成功
+        :rtype: bool
+        """
+        data_obj = DataObject(inputs=data.inputs, outputs=data.outputs)
+        parent_data_obj = DataObject(inputs=root_pipeline_data.inputs, outputs=root_pipeline_data.outputs)
+
+        try:
+            execute_res = self.service.execute(data_obj, parent_data_obj)
+        finally:
+            # sync data object modification to execution data
+            data.inputs = data_obj.inputs
+            data.outputs = data_obj.outputs
+
+        if execute_res is None:
+            execute_res = True
+
+        return execute_res
+
+    def schedule(
+        self,
+        schedule: Schedule,
+        data: ExecutionData,
+        root_pipeline_data: ExecutionData,
+        callback_data: Optional[CallbackData] = None,
+    ) -> bool:
+        """
+        schedule 逻辑
+
+        :param schedule: Schedule 对象
+        :type schedule: Schedule
+        :param data: 节点执行数据
+        :type data: ExecutionData
+        :param root_pipeline_data: 根流程执行数据
+        :type root_pipeline_data: ExecutionData
+        :param callback_data: 回调数据, defaults to None
+        :type callback_data: Optional[CallbackData], optional
+        :return: 是否调度成功
+        :rtype: bool
+        """
+        data_obj = DataObject(inputs=data.inputs, outputs=data.outputs)
+        parent_data_obj = DataObject(inputs=root_pipeline_data.inputs, outputs=root_pipeline_data.outputs)
+
+        try:
+            schedule_res = self.service.schedule(
+                data_obj, parent_data_obj, callback_data.data if callback_data else None
+            )
+        finally:
+            # sync data object modification to execution data
+            data.inputs = data_obj.inputs
+            data.outputs = data_obj.outputs
+
+        if schedule_res is None:
+            schedule_res = True
+
+        return schedule_res
+
+    def need_schedule(self) -> bool:
+        """
+        服务是否需要调度
+
+        :return: 是否需要调度
+        :rtype: bool
+        """
+        return self.service.need_schedule()
+
+    def schedule_type(self) -> Optional[ScheduleType]:
+        """
+        服务调度类型
+
+        :return: 调度类型
+        :rtype: Optional[ScheduleType]
+        """
+        if not self.service.need_schedule():
+            return None
+
+        if self.service.interval:
+            return ScheduleType.POLL
+
+        if not self.service.multi_callback_enabled():
+            return ScheduleType.CALLBACK
+
+        return ScheduleType.MULTIPLE_CALLBACK
+
+    def is_schedule_done(self) -> bool:
+        """
+        调度是否完成
+
+        :return: 调度是否完成
+        :rtype: bool
+        """
+        return self.service.is_schedule_finished()
+
+    def schedule_after(
+        self, schedule: Optional[Schedule], data: ExecutionData, root_pipeline_data: ExecutionData
+    ) -> int:
+        """
+        计算下一次调度间隔
+
+        :param schedule: 调度对象,未进行调度时传入为空
+        :type schedule: Optional[Schedule]
+        :param data: 节点执行数据
+        :type data: ExecutionData
+        :param root_pipeline_data: 根流程执行数据
+        :type root_pipeline_data: ExecutionData
+        :return: 调度间隔,单位为秒
+        :rtype: int
+        """
+        if self.service.interval is None:
+            return -1
+
+        if schedule is None:
+            return self.service.interval.next()
+
+        # count will be increased in next(), so minus 1 here
+        self.service.interval.count = schedule.times - 1
+
+        return self.service.interval.next()
+
+    def setup_runtime_attributes(self, **attrs):
+        """
+        装载运行时属性
+
+        :param attrs: 运行时属性
+        :type attrs: Dict[str, Any]
+        """
+
+        attrs["logger"] = get_logger(node_id=attrs["id"], loop=attrs["loop"], version=attrs["version"])
+        self.service.setup_runtime_attrs(**attrs)
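ServiceWrapper 将 pipeline 组件的调度语义映射为 ERI 的 ScheduleType:配置了 interval 的为轮询(POLL),否则按是否允许多次回调区分 CALLBACK 与 MULTIPLE_CALLBACK。下面是一个假设性的草图,假设组件基类行为与 bamboo-pipeline 自定义组件文档一致:

```python
from pipeline.core.flow.activity import Service, StaticIntervalGenerator

from pipeline.eri.imp.service import ServiceWrapper


class PollingService(Service):
    __need_schedule__ = True
    interval = StaticIntervalGenerator(2)  # 每 2 秒轮询一次

    def execute(self, data, parent_data):
        return True

    def schedule(self, data, parent_data, callback_data=None):
        self.finish_schedule()
        return True


wrapped = ServiceWrapper(PollingService())
print(wrapped.need_schedule())  # True
print(wrapped.schedule_type())  # ScheduleType.POLL
```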
+""" + +from typing import Optional, Dict, List + +from django.utils import timezone +from bamboo_engine.eri import State +from bamboo_engine import states, metrics +from bamboo_engine.utils.string import unique_id +from bamboo_engine.exceptions import StateVersionNotMatchError + +from pipeline.eri.signals import post_set_state +from pipeline.eri.models import State as DBState + + +class StateMixin: + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_READ_TIME) + def get_state(self, node_id: str) -> State: + """ + 获取某个节点的状态对象 + + : param node_id: 节点 ID + : type node_id: str + : return: State 实例 + : rtype: State + """ + state = DBState.objects.get(node_id=node_id) + + return State( + node_id=state.node_id, + root_id=state.root_id, + parent_id=state.parent_id, + name=state.name, + version=state.version, + loop=state.loop, + inner_loop=state.inner_loop, + retry=state.retry, + skip=state.skip, + error_ignored=state.error_ignored, + created_time=state.created_time, + started_time=state.started_time, + archived_time=state.archived_time, + ) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_READ_TIME) + def get_state_or_none(self, node_id: str) -> Optional[State]: + """ + 获取某个节点的状态对象,如果不存在则返回 None + + : param node_id: 节点 ID + : type node_id: str + : return: State 实例 + : rtype: State + """ + try: + return self.get_state(node_id) + except DBState.DoesNotExist: + return None + + def get_state_by_root(self, root_id: str) -> List[State]: + """ + 根据根节点 ID 获取一批节点状态 + + :param root_id: 根节点 ID + :type root_id: str + :return: 节点状态列表 + :rtype: List[State] + """ + qs = DBState.objects.filter(root_id=root_id) + + return [ + State( + node_id=state.node_id, + root_id=state.root_id, + parent_id=state.parent_id, + name=state.name, + version=state.version, + loop=state.loop, + inner_loop=state.inner_loop, + retry=state.retry, + skip=state.skip, + error_ignored=state.error_ignored, + created_time=state.created_time, + started_time=state.started_time, + archived_time=state.archived_time, + ) + for state in qs + ] + + def get_state_by_parent(self, parent_id: str) -> List[State]: + """ + 根据父节点 ID 获取一批节点状态 + + :param parent_id: 父节点 ID + :type parent_id: str + :return: 节点状态列表 + :rtype: List[State] + """ + qs = DBState.objects.filter(parent_id=parent_id) + + return [ + State( + node_id=state.node_id, + root_id=state.root_id, + parent_id=state.parent_id, + name=state.name, + version=state.version, + loop=state.loop, + inner_loop=state.inner_loop, + retry=state.retry, + skip=state.skip, + error_ignored=state.error_ignored, + created_time=state.created_time, + started_time=state.started_time, + archived_time=state.archived_time, + ) + for state in qs + ] + + def batch_get_state_name(self, node_id_list: List[str]) -> Dict[str, str]: + """ + 批量获取一批节点的状态 + + :param node_id_list: 节点 ID 列表 + :type node_id_list: List[str] + :return: 节点ID -> 状态名称 + :rtype: Dict[str, str] + """ + qs = DBState.objects.filter(node_id__in=node_id_list).only("node_id", "name") + return {state.node_id: state.name for state in qs} + + def has_state(self, node_id: str) -> bool: + """ + 是否存在某个节点的的状态 + + :param node_id: 节点 ID + :type node_id: str + :return: 该节点状态是否存在 + :rtype: bool + """ + return DBState.objects.filter(node_id=node_id).exists() + + def reset_state_inner_loop(self, node_id: str) -> int: + """ + 设置节点的当前流程重入次数 + + :param node_id: 节点 ID + :type node_id: str + :return: 更新状态行数 + :rtype: int + """ + return DBState.objects.filter(node_id=node_id).update(inner_loop=0) + + def reset_children_state_inner_loop(self, node_id: str) -> int: + 
""" + 批量设置子流程节点的所有子节点inner_loop次数 + + :param node_id: 子流程节点 ID + :type node_id: str + :return: 更新状态行数 + :rtype: int + """ + return DBState.objects.filter(parent_id=node_id).update(inner_loop=0) + + def set_state_root_and_parent(self, node_id: str, root_id: str, parent_id: str): + """ + 设置节点的根流程和父流程 ID + + :param node_id: 节点 ID + :type node_id: str + :param root_id: 根流程 ID + :type root_id: str + :param parent_id: 父流程 ID + :type parent_id: str + """ + DBState.objects.filter(node_id=node_id).update(root_id=root_id, parent_id=parent_id) + + @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_WRITE_TIME) + def set_state( + self, + node_id: str, + to_state: str, + version: str = None, + loop: int = -1, + inner_loop: int = -1, + root_id: Optional[str] = None, + parent_id: Optional[str] = None, + is_retry: bool = False, + is_skip: bool = False, + reset_retry: bool = False, + reset_skip: bool = False, + error_ignored: bool = False, + reset_error_ignored: bool = False, + refresh_version: bool = False, + clear_started_time: bool = False, + set_started_time: bool = False, + clear_archived_time: bool = False, + set_archive_time: bool = False, + ) -> str: + """ + 设置节点的状态,如果节点存在,进行状态转换时需要满足状态转换状态机 + + :param node_id: 节点 ID + :type node_id: str + :param to_state: 目标状态 + :type to_state: str + :param loop: 循环次数, 为 -1 时表示不设置 + :type loop: int, optional + :param inner_loop: 当前流程循环次数, 为 -1 时表示不设置 + :type inner_loop: int, optional + :param version: 目标状态版本,为空时表示不做版本校验 + :type version: Optional[str], optional + :param root_id: 根节点 ID,为空时表示不设置 + :type root_id: Optional[str], optional + :param parent_id: 父节点 ID,为空时表示不设置 + :type parent_id: Optional[str], optional + :param is_retry: 是否增加重试次数 + :type is_retry: bool, optional + :param is_skip: 是否将跳过设置为 True + :type is_skip: bool, optional + :param reset_retry: 是否重置重试次数 + :type reset_retry: bool, optional + :param reset_skip: 是否重置跳过标志 + :type reset_skip: bool, optional + :param error_ignored: 是否为忽略错误跳过 + :type error_ignored: bool, optional + :param reset_error_ignored: 是否重置忽略错误标志 + :type reset_error_ignored: bool, optional + :param refresh_version: 是否刷新版本号 + :type refresh_version: bool, optional + :param clear_started_time: 是否清空开始时间 + :type clear_started_time: bool, optional + :param set_started_time: 是否设置开始时间 + :type set_started_time: bool, optional + :param clear_archived_time: 是否清空归档时间 + :type clear_archived_time: bool, optional + :param set_archive_time: 是否设置归档时间 + :type set_archive_time: bool, optional + :return: 该节点最新版本 + :rtype: str + """ + state = self.get_state_or_none(node_id) + ret_version = "" + + if state and version and state.version != version: + raise StateVersionNotMatchError("state version({}) not match {}".format(state.version, version)) + + fields = {} + + if loop != -1: + fields["loop"] = loop + + if inner_loop != -1: + fields["inner_loop"] = inner_loop + + if root_id: + fields["root_id"] = root_id + + if parent_id: + fields["parent_id"] = parent_id + + if is_retry and state: + fields["retry"] = state.retry + 1 + + if is_skip and state: + fields["skip"] = True + + if reset_retry and state: + fields["retry"] = 0 + + if reset_skip and state: + fields["skip"] = False + + if reset_error_ignored and state: + fields["error_ignored"] = False + + if error_ignored and state: + fields["error_ignored"] = True + + if refresh_version or state is None: + fields["version"] = unique_id("v") + + if clear_started_time and state: + fields["started_time"] = None + + if set_started_time: + fields["started_time"] = timezone.now() + + if clear_archived_time and state: + 
fields["archived_time"] = timezone.now() + + if set_archive_time: + fields["archived_time"] = timezone.now() + + if state: + if not states.can_transit(from_state=state.name, to_state=to_state): + raise RuntimeError( + "can't not transit node({}) state from {} to {}".format(node_id, state.name, to_state) + ) + + filters = {"node_id": node_id} + if version: + filters["version"] = version + + rows = DBState.objects.filter(**filters).update(name=to_state, **fields) + + if rows != 1: + raise StateVersionNotMatchError("state with version({}) not exist".format(version)) + ret_version = fields.get("version", state.version) + else: + state = DBState.objects.create(node_id=node_id, name=to_state, **fields) + ret_version = fields["version"] + + post_set_state.send( + sender=DBState, + node_id=node_id, + to_state=to_state, + version=ret_version, + root_id=state.root_id, + parent_id=state.parent_id, + loop=loop, + ) + return ret_version diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/task.py b/runtime/bamboo-pipeline/pipeline/eri/imp/task.py new file mode 100644 index 00000000..eab905ed --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/imp/task.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +from typing import Optional + +from celery import current_app + +from pipeline.eri.celery.queues import QueueResolver + +from pipeline.eri.models import Process + + +class TaskMixin: + def _get_task_route_params(self, task_name: str, process_id: int): + process = Process.objects.filter(id=process_id).only("priority", "queue").first() + resolver = QueueResolver(process.queue) + queue, routing_key = resolver.resolve_task_queue_and_routing_key(task_name) + return { + "queue": queue, + "priority": process.priority, + "routing_key": routing_key, + } + + def execute(self, process_id: int, node_id: str): + """ + 派发执行任务,执行任务被拉起执行时应该调用 Engine 实例的 execute 方法 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + """ + task_name = "pipeline.eri.celery.tasks.execute" + route_params = self._get_task_route_params(task_name, process_id) + + current_app.tasks[task_name].apply_async(kwargs={"process_id": process_id, "node_id": node_id}, **route_params) + + def schedule( + self, process_id: int, node_id: str, schedule_id: str, callback_data_id: Optional[int] = None, + ): + """ + 派发调度任务,调度任务被拉起执行时应该调用 Engine 实例的 schedule 方法 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param schedule_id: 调度 ID + :type schedule_id: str + """ + task_name = "pipeline.eri.celery.tasks.schedule" + route_params = self._get_task_route_params(task_name, process_id) + + current_app.tasks[task_name].apply_async( + kwargs={ + "process_id": process_id, + "node_id": node_id, + "schedule_id": schedule_id, + "callback_data_id": callback_data_id, + }, + **route_params, + ) + + def set_next_schedule( + self, + process_id: int, + node_id: str, + schedule_id: str, + schedule_after: int, + callback_data_id: Optional[int] = None, + ): + """ + 设置下次调度时间,调度倒数归零后应该执行 Engine 实例的 schedule 方法 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param schedule_id: 调度 ID + :type schedule_id: str + :param schedule_after: 调度倒数 + :type schedule_after: int + """ + task_name = "pipeline.eri.celery.tasks.schedule" + route_params = self._get_task_route_params(task_name, process_id) + + current_app.tasks[task_name].apply_async( + kwargs={ + "process_id": process_id, + "node_id": node_id, + "schedule_id": schedule_id, + "callback_data_id": callback_data_id, + }, + countdown=schedule_after, + **route_params, + ) + + def start_timeout_monitor(self, process_id: int, node_id: str, version: str, timeout: int): + """ + 开始对某个节点执行的超时监控,若超时时间归零后节点未进入归档状态,则强制失败该节点 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + :param timeout: 超时时间,单位为秒 + :type timeout: int + """ + task_name = "pipeline.eri.celery.tasks.timeout_check" + route_params = self._get_task_route_params(task_name, process_id) + + current_app.tasks[task_name].apply_async( + kwargs={"process_id": process_id, "node_id": node_id, "version": version}, countdown=timeout, **route_params + ) + + def stop_timeout_monitor( + self, process_id: int, node_id: str, version: str, + ): + """ + 停止对某个节点的超时监控 + + :param process_id: 进程 ID + :type process_id: int + :param node_id: 节点 ID + :type node_id: str + :param version: 执行版本 + :type version: str + """ + return diff --git a/runtime/bamboo-pipeline/pipeline/eri/imp/variable.py b/runtime/bamboo-pipeline/pipeline/eri/imp/variable.py new file mode 100644 index 00000000..5022a079 --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/eri/imp/variable.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import inspect +from typing import Any, Type + +from bamboo_engine.eri import Variable as VariableInterface +from pipeline.core.data.var import Variable + + +class VariableProxy: + def __init__(self, original_value: Variable, var_cls: Type, pipeline_data: dict): + self.get_value = getattr(var_cls, "get_value") + self.original_value = original_value + self.pipeline_data = pipeline_data + for name, value in inspect.getmembers(var_cls): + if not name.startswith("__") and not hasattr(self, name) and inspect.isfunction(value): + setattr(self, name, value) + + def get(self) -> Any: + self.value = self.original_value.get() + return self.get_value(self) + + +class VariableWrapper(VariableInterface): + def __init__(self, original_value: Variable, var_cls: Type, additional_data: dict): + self.var = VariableProxy(original_value=original_value, var_cls=var_cls, pipeline_data=additional_data) + + def get(self) -> Any: + return self.var.get() diff --git a/runtime/bamboo-pipeline/pipeline/eri/log.py b/runtime/bamboo-pipeline/pipeline/eri/log.py new file mode 100644 index 00000000..8de8fe94 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/log.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging +from logging import LogRecord, LoggerAdapter + +from django.core.exceptions import AppRegistryNotReady +from bamboo_engine import local + +logger = logging.getLogger("pipeline.eri.log") + + +def get_logger(node_id: str, loop: int, version: str): + return LoggerAdapter(logger=logger, extra={"node_id": node_id, "loop": loop, "version": version}) + + +class ERINodeLogHandler(logging.Handler): + def emit(self, record: LogRecord): + from pipeline.eri.models import LogEntry + + LogEntry.objects.create( + node_id=record.node_id, + loop=record.loop, + version=record.version, + logger_name=record.name, + level_name=record.levelname, + message=self.format(record), + ) + + +class EngineContextLogHandler(logging.Handler): + def emit(self, record): + try: + from pipeline.eri.models import LogEntry + except AppRegistryNotReady: + return + + node_info = local.get_node_info() + if not node_info: + return + + LogEntry.objects.create( + node_id=node_info.node_id, + version=node_info.version, + loop=node_info.loop, + logger_name=record.name, + level_name=record.levelname, + message=self.format(record), + ) diff --git a/runtime/bamboo-pipeline/pipeline/eri/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/eri/migrations/0001_initial.py new file mode 100644 index 00000000..9b55e15e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/migrations/0001_initial.py @@ -0,0 +1,160 @@ +# Generated by Django 2.2.19 on 2021-03-09 03:41 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="CallbackData", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), + ("version", models.CharField(max_length=33, verbose_name="状态版本")), + ("data", models.TextField(verbose_name="回调数据")), + ], + ), + migrations.CreateModel( + name="ContextOutputs", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("pipeline_id", models.CharField(max_length=33, unique=True, verbose_name="流程 ID")), + ("outputs", models.TextField(verbose_name="输出配置")), + ], + ), + migrations.CreateModel( + name="Data", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(db_index=True, max_length=33, unique=True, verbose_name="节点 ID")), + ("inputs", models.TextField(verbose_name="原始输入数据")), + ("outputs", models.TextField(verbose_name="原始输出数据")), + ], + ), + migrations.CreateModel( + name="ExecutionData", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(db_index=True, max_length=33, unique=True, verbose_name="节点 ID")), + ("inputs_serializer", models.CharField(max_length=32, verbose_name="输入序列化器")), + ("outputs_serializer", models.CharField(max_length=32, verbose_name="输出序列化器")), + ("inputs", models.TextField(verbose_name="节点执行输入数据")), + ("outputs", models.TextField(verbose_name="节点执行输出数据")), + ], + ), + migrations.CreateModel( + name="Node", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(db_index=True, max_length=33, verbose_name="节点 ID")), + ("detail", models.TextField(verbose_name="节点详情")), + ], + ), + migrations.CreateModel( + name="Process", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, 
verbose_name="ID")), + ("parent_id", models.BigIntegerField(db_index=True, default=-1, verbose_name="父进程 ID")), + ("ack_num", models.IntegerField(default=0, verbose_name="收到子进程 ACK 数量")), + ("need_ack", models.IntegerField(default=-1, verbose_name="需要收到的子进程 ACK 数量")), + ("asleep", models.BooleanField(default=True, verbose_name="是否处于休眠状态")), + ("suspended", models.BooleanField(default=False, verbose_name="是否处于暂停状态")), + ("frozen", models.BooleanField(default=False, verbose_name="是否处于冻结状态")), + ("dead", models.BooleanField(default=False, verbose_name="是否已经死亡")), + ("last_heartbeat", models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="上次心跳时间")), + ("destination_id", models.CharField(default="", max_length=33, verbose_name="执行终点 ID")), + ("current_node_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="当前节点 ID")), + ("root_pipeline_id", models.CharField(max_length=33, verbose_name="根流程 ID")), + ( + "suspended_by", + models.CharField(db_index=True, default="", max_length=33, verbose_name="导致进程暂停的节点 ID"), + ), + ("priority", models.IntegerField(verbose_name="优先级")), + ("queue", models.CharField(default="", max_length=128, verbose_name="所属队列")), + ("pipeline_stack", models.TextField(default="[]", verbose_name="流程栈")), + ], + ), + migrations.CreateModel( + name="State", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(max_length=33, unique=True, verbose_name="节点 ID")), + ("root_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="根节点 ID")), + ("parent_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="父节点 ID")), + ("name", models.CharField(max_length=64, verbose_name="状态名")), + ("version", models.CharField(max_length=33, verbose_name="状态版本")), + ("loop", models.IntegerField(default=1, verbose_name="循环次数")), + ("retry", models.IntegerField(default=0, verbose_name="重试次数")), + ("skip", models.BooleanField(default=False, verbose_name="是否跳过")), + ("error_ignored", models.BooleanField(default=False, verbose_name="是否出错后自动忽略")), + ("created_time", models.DateTimeField(auto_now_add=True, verbose_name="创建时间")), + ("started_time", models.DateTimeField(null=True, verbose_name="开始时间")), + ("archived_time", models.DateTimeField(null=True, verbose_name="归档时间")), + ], + ), + migrations.CreateModel( + name="Schedule", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("type", models.IntegerField(verbose_name="调度类型")), + ("process_id", models.BigIntegerField(default=-1, verbose_name="进程 ID")), + ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), + ("finished", models.BooleanField(default=False, verbose_name="是否已完成")), + ("expired", models.BooleanField(default=False, verbose_name="是否已过期")), + ("scheduling", models.BooleanField(default=False, verbose_name="是否正在调度")), + ("version", models.CharField(max_length=33, verbose_name="状态版本")), + ("schedule_times", models.IntegerField(default=0, verbose_name="被调度次数")), + ], + options={"unique_together": {("node_id", "version")},}, + ), + migrations.CreateModel( + name="LogEntry", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), + ("loop", models.IntegerField(default=1, verbose_name="循环次数")), + ("logger_name", models.CharField(max_length=128, verbose_name="logger 名称")), + ("level_name", models.CharField(max_length=32, 
verbose_name="日志等级")), + ("message", models.TextField(null=True, verbose_name="日志内容")), + ("logged_at", models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="输出时间")), + ], + options={"index_together": {("node_id", "loop")},}, + ), + migrations.CreateModel( + name="ExecutionHistory", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), + ("loop", models.IntegerField(default=1, verbose_name="循环次数")), + ("retry", models.IntegerField(default=0, verbose_name="重试次数")), + ("skip", models.BooleanField(default=False, verbose_name="是否跳过")), + ("version", models.CharField(max_length=33, verbose_name="状态版本")), + ("started_time", models.DateTimeField(verbose_name="开始时间")), + ("archived_time", models.DateTimeField(verbose_name="归档时间")), + ("inputs_serializer", models.CharField(max_length=32, verbose_name="输入序列化器")), + ("outputs_serializer", models.CharField(max_length=32, verbose_name="输出序列化器")), + ("inputs", models.TextField(verbose_name="节点执行输入数据")), + ("outputs", models.TextField(verbose_name="节点执行输出数据")), + ], + options={"index_together": {("node_id", "loop")},}, + ), + migrations.CreateModel( + name="ContextValue", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), + ("pipeline_id", models.CharField(max_length=33, verbose_name="流程 ID")), + ("key", models.CharField(max_length=128, verbose_name="变量 key")), + ("type", models.IntegerField(verbose_name="变量类型")), + ("serializer", models.CharField(max_length=32, verbose_name="序列化器")), + ("code", models.CharField(default="", max_length=128, verbose_name="计算型变量类型唯一标志")), + ("value", models.TextField(verbose_name="变量值")), + ("references", models.TextField(verbose_name="所有对其他变量直接或间接的引用")), + ], + options={"unique_together": {("pipeline_id", "key")},}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/migrations/0002_auto_20210322_0233.py b/runtime/bamboo-pipeline/pipeline/eri/migrations/0002_auto_20210322_0233.py new file mode 100644 index 00000000..73e47cb7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/migrations/0002_auto_20210322_0233.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.19 on 2021-03-22 02:33 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("eri", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="process", + name="root_pipeline_id", + field=models.CharField(db_index=True, max_length=33, verbose_name="根流程 ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/migrations/0003_logentry_version.py b/runtime/bamboo-pipeline/pipeline/eri/migrations/0003_logentry_version.py new file mode 100644 index 00000000..aaa18f61 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/migrations/0003_logentry_version.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.19 on 2021-03-29 12:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("eri", "0002_auto_20210322_0233"), + ] + + operations = [ + migrations.AddField( + model_name="logentry", + name="version", + field=models.CharField(default="", max_length=33, verbose_name="状态版本"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/migrations/0004_state_inner_loop_.py b/runtime/bamboo-pipeline/pipeline/eri/migrations/0004_state_inner_loop_.py new file mode 100644 index 00000000..846ca99a --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/eri/migrations/0004_state_inner_loop_.py @@ -0,0 +1,16 @@ +# Generated by Django 2.2.16 on 2021-07-12 12:27 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("eri", "0003_logentry_version"), + ] + + operations = [ + migrations.AddField( + model_name="state", name="inner_loop", field=models.IntegerField(default=1, verbose_name="子流程内部循环次数"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/eri/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/eri/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/runtime/bamboo-pipeline/pipeline/eri/models.py b/runtime/bamboo-pipeline/pipeline/eri/models.py new file mode 100644 index 00000000..5f53b7fa --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/models.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + + +class Process(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + parent_id = models.BigIntegerField(_("父进程 ID"), default=-1, db_index=True) + ack_num = models.IntegerField(_("收到子进程 ACK 数量"), default=0) + need_ack = models.IntegerField(_("需要收到的子进程 ACK 数量"), default=-1) + asleep = models.BooleanField(_("是否处于休眠状态"), default=True) + suspended = models.BooleanField(_("是否处于暂停状态"), default=False) + frozen = models.BooleanField(_("是否处于冻结状态"), default=False) + dead = models.BooleanField(_("是否已经死亡"), default=False) + last_heartbeat = models.DateTimeField(_("上次心跳时间"), auto_now_add=True, db_index=True) + destination_id = models.CharField(_("执行终点 ID"), default="", max_length=33) + current_node_id = models.CharField(_("当前节点 ID"), default="", max_length=33, db_index=True) + root_pipeline_id = models.CharField(_("根流程 ID"), null=False, max_length=33, db_index=True) + suspended_by = models.CharField(_("导致进程暂停的节点 ID"), default="", max_length=33, db_index=True) + priority = models.IntegerField(_("优先级")) + queue = models.CharField(_("所属队列"), default="", max_length=128) + pipeline_stack = models.TextField(_("流程栈"), default="[]", null=False) + + +class Node(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33, db_index=True) + detail = models.TextField(_("节点详情"), null=False) + + +class State(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33, unique=True) + root_id = models.CharField(_("根节点 ID"), null=False, default="", max_length=33, db_index=True) + parent_id = models.CharField(_("父节点 ID"), null=False, default="", max_length=33, db_index=True) + name = models.CharField(_("状态名"), null=False, max_length=64) + version = models.CharField(_("状态版本"), 
null=False, max_length=33) + loop = models.IntegerField(_("循环次数"), default=1) + inner_loop = models.IntegerField(_("子流程内部循环次数"), default=1) + retry = models.IntegerField(_("重试次数"), default=0) + skip = models.BooleanField(_("是否跳过"), default=False) + error_ignored = models.BooleanField(_("是否出错后自动忽略"), default=False) + created_time = models.DateTimeField(_("创建时间"), auto_now_add=True) + started_time = models.DateTimeField(_("开始时间"), null=True) + archived_time = models.DateTimeField(_("归档时间"), null=True) + + +class Schedule(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + type = models.IntegerField(_("调度类型")) + process_id = models.BigIntegerField(_("进程 ID"), default=-1) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33) + finished = models.BooleanField(_("是否已完成"), default=False) + expired = models.BooleanField(_("是否已过期"), default=False) + scheduling = models.BooleanField(_("是否正在调度"), default=False) + version = models.CharField(_("状态版本"), null=False, max_length=33) + schedule_times = models.IntegerField(_("被调度次数"), default=0) + + class Meta: + unique_together = ["node_id", "version"] + + +class Data(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33, db_index=True, unique=True) + inputs = models.TextField(_("原始输入数据")) + outputs = models.TextField(_("原始输出数据")) + + +class ExecutionData(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33, db_index=True, unique=True) + inputs_serializer = models.CharField(_("输入序列化器"), null=False, max_length=32) + outputs_serializer = models.CharField(_("输出序列化器"), null=False, max_length=32) + inputs = models.TextField(_("节点执行输入数据")) + outputs = models.TextField(_("节点执行输出数据")) + + +class CallbackData(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33) + version = models.CharField(_("状态版本"), null=False, max_length=33) + data = models.TextField(_("回调数据")) + + +class ContextValue(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + pipeline_id = models.CharField(_("流程 ID"), null=False, max_length=33) + key = models.CharField(_("变量 key"), null=False, max_length=128) + type = models.IntegerField(_("变量类型")) + serializer = models.CharField(_("序列化器"), null=False, max_length=32) + code = models.CharField(_("计算型变量类型唯一标志"), default="", max_length=128) + value = models.TextField(_("变量值")) + references = models.TextField(_("所有对其他变量直接或间接的引用")) + + class Meta: + unique_together = ["pipeline_id", "key"] + + +class ContextOutputs(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + pipeline_id = models.CharField(_("流程 ID"), null=False, max_length=33, unique=True) + outputs = models.TextField(_("输出配置")) + + +class ExecutionHistory(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), null=False, max_length=33) + loop = models.IntegerField(_("循环次数"), default=1) + retry = models.IntegerField(_("重试次数"), default=0) + skip = models.BooleanField(_("是否跳过"), default=False) + version = models.CharField(_("状态版本"), null=False, max_length=33) + started_time = models.DateTimeField(_("开始时间"), null=False) + archived_time = models.DateTimeField(_("归档时间"), null=False) + inputs_serializer = models.CharField(_("输入序列化器"), null=False, max_length=32) + outputs_serializer = models.CharField(_("输出序列化器"), null=False, max_length=32) 
+ inputs = models.TextField(_("节点执行输入数据")) + outputs = models.TextField(_("节点执行输出数据")) + + class Meta: + index_together = ["node_id", "loop"] + + +class LogEntry(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + node_id = models.CharField(_("节点 ID"), max_length=33) + version = models.CharField(_("状态版本"), default="", max_length=33) + loop = models.IntegerField(_("循环次数"), default=1) + logger_name = models.CharField(_("logger 名称"), max_length=128) + level_name = models.CharField(_("日志等级"), max_length=32) + message = models.TextField(_("日志内容"), null=True) + logged_at = models.DateTimeField(_("输出时间"), auto_now_add=True, db_index=True) + + class Meta: + index_together = ["node_id", "loop"] diff --git a/runtime/bamboo-pipeline/pipeline/eri/runtime.py b/runtime/bamboo-pipeline/pipeline/eri/runtime.py new file mode 100644 index 00000000..0fb0a54b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/runtime.py @@ -0,0 +1,529 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import json +from typing import Optional, List + +from django.conf import settings +from django.db import transaction + +from kombu import Exchange, Queue, Connection + +from bamboo_engine import states +from bamboo_engine.template import Template +from bamboo_engine.eri import interfaces +from bamboo_engine.eri import EngineRuntimeInterface, NodeType, ContextValueType + +from pipeline.eri import codec +from pipeline.eri.imp.plugin_manager import PipelinePluginManagerMixin +from pipeline.eri.imp.hooks import HooksMixin +from pipeline.eri.imp.process import ProcessMixin +from pipeline.eri.imp.node import NodeMixin +from pipeline.eri.imp.state import StateMixin +from pipeline.eri.imp.schedule import ScheduleMixin +from pipeline.eri.imp.data import DataMixin +from pipeline.eri.imp.context import ContextMixin +from pipeline.eri.imp.execution_history import ExecutionHistoryMixin +from pipeline.eri.imp.task import TaskMixin +from pipeline.eri.celery.queues import QueueResolver + +from pipeline.eri.models import Node, Data, ContextValue, Process, ContextOutputs, LogEntry, ExecutionHistory, State + + +class BambooDjangoRuntime( + TaskMixin, + ExecutionHistoryMixin, + ContextMixin, + DataMixin, + ScheduleMixin, + StateMixin, + NodeMixin, + ProcessMixin, + PipelinePluginManagerMixin, + HooksMixin, + EngineRuntimeInterface, +): + CONTEXT_VALUE_TYPE_MAP = { + "plain": ContextValueType.PLAIN.value, + "splice": ContextValueType.SPLICE.value, + "lazy": ContextValueType.COMPUTE.value, + } + + ERI_SUPPORT_VERSION = 3 + + def __init__(self): + try: + eri_version = interfaces.version() + except AttributeError: + raise RuntimeError( + "bamboo_engine eri do not support version fetch, please make sure bamboo_engine version >= 1.1.6" + ) + + major_version = int(eri_version.split(".")[0]) + if major_version > self.ERI_SUPPORT_VERSION: + raise 
RuntimeError( + "unsupported bamboo_engine eri version: %s, expect version: <= %s.x.x" + % (eri_version, self.ERI_SUPPORT_VERSION) + ) + + def _data_inputs_assemble(self, pipeline_id: str, node_id: str, node_inputs: dict) -> (dict, List[ContextValue]): + inputs = {} + context_values = [] + for k, v in node_inputs.items(): + if v["type"] == "lazy": + if k.startswith("${") and k.endswith("}"): + cv_key = "${%s_%s}" % (k[2:-1], node_id) + else: + cv_key = "${%s_%s}" % (k, node_id) + if len(cv_key) > 128: + raise ValueError("var key %s length exceeds 128" % cv_key) + context_values.append( + ContextValue( + pipeline_id=pipeline_id, + key=cv_key, + type=ContextValueType.COMPUTE.value, + serializer=self.JSON_SERIALIZER, + value=json.dumps(v["value"]), + code=v.get("custom_type", ""), + ) + ) + inputs[k] = {"need_render": True, "value": cv_key} + else: + inputs[k] = {"need_render": v["type"] == "splice", "value": v["value"]} + + return inputs, context_values + + def _gen_executable_end_event_node(self, event: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: + return Node( + node_id=event["id"], + detail=json.dumps( + { + "id": event["id"], + "type": NodeType.ExecutableEndEvent.value, + "targets": {}, + "root_pipeline_id": root_id, + "parent_pipeline_id": parent_id, + "can_skip": False, + "can_retry": True, + "code": event["type"], + } + ), + ) + + def _gen_event_node(self, event: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: + return Node( + node_id=event["id"], + detail=json.dumps( + { + "id": event["id"], + "type": event["type"], + "targets": {event["outgoing"]: pipeline["flows"][event["outgoing"]]["target"]} + if event["type"] == NodeType.EmptyStartEvent.value + else {}, + "root_pipeline_id": root_id, + "parent_pipeline_id": parent_id, + "can_skip": event["type"] == NodeType.EmptyStartEvent.value, + "can_retry": True, + } + ), + ) + + def _gen_gateway_node(self, gateway: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: + if gateway["type"] != NodeType.ConvergeGateway.value: + targets = {flow_id: pipeline["flows"][flow_id]["target"] for flow_id in gateway["outgoing"]} + else: + targets = {gateway["outgoing"]: pipeline["flows"][gateway["outgoing"]]["target"]} + + detail = { + "id": gateway["id"], + "type": gateway["type"], + "targets": targets, + "root_pipeline_id": root_id, + "parent_pipeline_id": parent_id, + "can_retry": True, + "can_skip": False, + } + + if gateway["type"] == NodeType.ExclusiveGateway.value: + detail["can_skip"] = True + detail["conditions"] = [ + { + "name": flow_id, + "evaluation": cond["evaluate"], + "target_id": pipeline["flows"][flow_id]["target"], + "flow_id": flow_id, + } + for flow_id, cond in gateway["conditions"].items() + ] + elif gateway["type"] == NodeType.ParallelGateway.value: + detail["converge_gateway_id"] = gateway["converge_gateway_id"] + + elif gateway["type"] == NodeType.ConditionalParallelGateway.value: + detail["conditions"] = [ + { + "name": flow_id, + "evaluation": cond["evaluate"], + "target_id": pipeline["flows"][flow_id]["target"], + "flow_id": flow_id, + } + for flow_id, cond in gateway["conditions"].items() + ] + detail["converge_gateway_id"] = gateway["converge_gateway_id"] + elif gateway["type"] == NodeType.ConvergeGateway.value: + pass + else: + raise ValueError("unsupport gateway type {}: {}".format(gateway["type"], gateway)) + + return Node(node_id=gateway["id"], detail=json.dumps(detail)) + + def _gen_activity_node(self, act: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: + return Node( 
+ node_id=act["id"], + detail=json.dumps( + { + "id": act["id"], + "type": NodeType.ServiceActivity.value, + "targets": {act["outgoing"]: pipeline["flows"][act["outgoing"]]["target"]}, + "root_pipeline_id": root_id, + "parent_pipeline_id": parent_id, + "can_skip": act["skippable"], + "code": act["component"]["code"], + "version": act["component"].get("version", "legacy"), + "timeout": act.get("timeout"), + "error_ignorable": act["error_ignorable"], + "can_retry": act["retryable"], + } + ), + ) + + def _gen_subproc_node(self, subproc: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: + return Node( + node_id=subproc["id"], + detail=json.dumps( + { + "id": subproc["id"], + "type": NodeType.SubProcess.value, + "targets": {subproc["outgoing"]: pipeline["flows"][subproc["outgoing"]]["target"]}, + "root_pipeline_id": root_id, + "parent_pipeline_id": parent_id, + "can_skip": False, + "can_retry": True, + "start_event_id": subproc["pipeline"]["start_event"]["id"], + } + ), + ) + + def _prepare( + self, pipeline: dict, root_id: str, subprocess_context: dict, parent_id: Optional[str] = None + ) -> (List[Node], List[Data], List[ContextValue], List[ContextOutputs]): + + parent_id = parent_id or root_id + + nodes = [] + datas = [] + context_values = [] + context_outputs = [] + + node_outputs = {} + context_var_references = {} + final_references = {} + + # collect all node outputs and initial reference + for key, input_data in pipeline["data"]["inputs"].items(): + source_act = input_data.get("source_act") + source_key = input_data.get("source_key") + if not source_act: + context_var_references[key] = Template(input_data["value"]).get_reference() + final_references[key] = set() + context_values.append( + ContextValue( + pipeline_id=pipeline["id"], + key=key, + type=self.CONTEXT_VALUE_TYPE_MAP[input_data["type"]], + serializer=self.JSON_SERIALIZER, + value=json.dumps(input_data["value"]), + code=input_data.get("custom_type", ""), + ) + ) + else: + if isinstance(source_act, list): + for sa in source_act: + node_outputs.setdefault(sa["source_act"], {})[sa["source_key"]] = key + else: + node_outputs.setdefault(source_act, {})[source_key] = key + + # pre_render_keys in start_event + if "pre_render_keys" in pipeline["data"] and pipeline["data"]["pre_render_keys"]: + datas.append( + Data( + node_id=pipeline["start_event"]["id"], + inputs=codec.data_json_dumps( + {"pre_render_keys": {"need_render": False, "value": pipeline["data"]["pre_render_keys"]}} + ), + outputs={}, + ) + ) + + # process activities + for act in pipeline["activities"].values(): + if act["type"] == NodeType.ServiceActivity.value: + # node + nodes.append(self._gen_activity_node(act=act, pipeline=pipeline, root_id=root_id, parent_id=parent_id)) + # data + data_inputs, compute_cvs = self._data_inputs_assemble(parent_id, act["id"], act["component"]["inputs"]) + datas.append( + Data( + node_id=act["id"], + inputs=codec.data_json_dumps(data_inputs), + outputs=json.dumps(node_outputs.get(act["id"], {})), + ) + ) + # compute context values + for cv in compute_cvs: + context_values.append(cv) + final_references[cv.key] = set() + context_var_references[cv.key] = Template(cv.value).get_reference() + + elif act["type"] == NodeType.SubProcess.value: + # node + nodes.append( + self._gen_subproc_node(subproc=act, pipeline=pipeline, root_id=root_id, parent_id=parent_id) + ) + # data + data_inputs, compute_cvs = self._data_inputs_assemble(parent_id, act["id"], act["params"]) + datas.append( + Data( + node_id=act["id"], + 
inputs=codec.data_json_dumps(data_inputs),
+                        outputs=json.dumps(node_outputs.get(act["id"], {})),
+                    )
+                )
+                # compute context values
+                for cv in compute_cvs:
+                    context_values.append(cv)
+                    final_references[cv.key] = set()
+                    context_var_references[cv.key] = Template(cv.value).get_reference()
+
+                # subprocess output
+                context_outputs.append(
+                    ContextOutputs(pipeline_id=act["id"], outputs=json.dumps(act["pipeline"]["data"]["outputs"]))
+                )
+
+                # subprocess preset context
+                for key, value in subprocess_context.items():
+                    serialized, serializer = self._serialize(value)
+                    context_values.append(
+                        ContextValue(
+                            pipeline_id=act["id"],
+                            key=key,
+                            type=self.CONTEXT_VALUE_TYPE_MAP["plain"],
+                            serializer=serializer,
+                            value=serialized,
+                            references="[]",
+                        )
+                    )
+
+                sub_nodes, sub_datas, sub_ctx_values, sub_ctx_outputs = self._prepare(
+                    pipeline=act["pipeline"],
+                    root_id=root_id,
+                    subprocess_context=subprocess_context,
+                    parent_id=act["id"],
+                )
+
+                nodes.extend(sub_nodes)
+                datas.extend(sub_datas)
+                context_values.extend(sub_ctx_values)
+                context_outputs.extend(sub_ctx_outputs)
+            else:
+                raise ValueError("unsupported act type {}: {}".format(act["type"], act["id"]))
+
+        # process events
+        nodes.append(
+            self._gen_event_node(event=pipeline["start_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id)
+        )
+        if pipeline["end_event"]["type"] == NodeType.EmptyEndEvent.value:
+            nodes.append(
+                self._gen_event_node(
+                    event=pipeline["end_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id
+                )
+            )
+        else:
+            nodes.append(
+                self._gen_executable_end_event_node(
+                    event=pipeline["end_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id
+                )
+            )
+
+        # process gateways
+        for gateway in pipeline["gateways"].values():
+            nodes.append(
+                self._gen_gateway_node(gateway=gateway, pipeline=pipeline, root_id=root_id, parent_id=parent_id)
+            )
+
+        # resolve final references (BFS)
+        # convert a:b, b:c,d -> a:b,c,d b:c,d
+        for key, references in context_var_references.items():
+            queue = []
+            queue.extend(references)
+
+            while queue:
+                r = queue.pop()
+
+                # processed
+                if r in final_references[key]:
+                    continue
+
+                final_references[key].add(r)
+                if r in context_var_references:
+                    queue.extend(context_var_references[r])
+
+        for cv in context_values:
+            if cv.pipeline_id != parent_id:
+                continue
+            fr = final_references.get(cv.key)
+            cv.references = json.dumps(list(fr)) if fr else "[]"
+
+        if parent_id == root_id:
+            context_outputs.append(ContextOutputs(pipeline_id=root_id, outputs=json.dumps(pipeline["data"]["outputs"])))
+
+        return nodes, datas, context_values, context_outputs
+
+    def prepare_run_pipeline(
+        self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options
+    ) -> int:
+        """
+        进行 pipeline 执行前的准备工作,并返回进程 ID,该函数执行完成后即代表
+        pipeline 是随时可以通过 execute(process_id, start_event_id) 启动执行的
+        一般来说,应该完成以下工作:
+        - 准备好进程模型
+        - 准备好流程中每个节点的信息
+        - 准备好流程中每个节点数据对象的信息
+
+        :param pipeline: pipeline 描述对象
+        :type pipeline: dict
+        :param root_pipeline_data: 根流程数据
+        :type root_pipeline_data: dict
+        :param root_pipeline_context: 根流程上下文
+        :type root_pipeline_context: dict
+        :param subprocess_context: 子流程预置流程上下文
+        :type subprocess_context: dict
+        :return: 进程 ID
+        :rtype: int
+        """
+
+        queue = options.get("queue", "")
+        priority = options.get("priority", 100)
+        pipeline_id = pipeline["id"]
+
+        nodes, datas, context_values, context_outputs = self._prepare(
+            pipeline=pipeline, root_id=pipeline["id"], subprocess_context=subprocess_context
+        )
+        datas.append(
Data( + node_id=pipeline_id, + inputs=codec.data_json_dumps( + {k: {"need_render": False, "value": v} for k, v in root_pipeline_data.items()} + ), + outputs="{}", + ) + ) + for key, value in root_pipeline_context.items(): + serialized, serializer = self._serialize(value) + context_values.append( + ContextValue( + pipeline_id=pipeline_id, + key=key, + type=self.CONTEXT_VALUE_TYPE_MAP["plain"], + serializer=serializer, + value=serialized, + references="[]", + ) + ) + batch_size = getattr(settings, "BAMBOO_DJANGO_ERI_PREPARE_BATCH_SIZE", 500) + + with transaction.atomic(): + pid = Process.objects.create( + root_pipeline_id=pipeline_id, + queue=queue, + priority=priority, + pipeline_stack='["{}"]'.format(pipeline_id), + ).id + self.set_state( + node_id=pipeline_id, to_state=states.RUNNING, root_id=pipeline_id, parent_id="", set_started_time=True, + ) + + Node.objects.bulk_create(nodes, batch_size=batch_size) + Data.objects.bulk_create(datas, batch_size=batch_size) + ContextValue.objects.bulk_create(context_values, batch_size=batch_size) + ContextOutputs.objects.bulk_create(context_outputs, batch_size=batch_size) + + return pid + + def node_rerun_limit(self, root_pipeline_id: str, node_id: str) -> int: + """ + 返回节点最大重入次数 + + :param root_pipeline_id: 根流程 ID + :type root_pipeline_id: str + :param node_id: 节点 ID + :type node_id: str + :return: 节点最大重入次数 + :rtype: int + """ + return int(getattr(settings, "BAMBOO_DJANGO_ERI_NODE_RERUN_LIMIT", 100)) + + def add_queue(self, name: str, routing_key: Optional[str] = ""): + """ + 在 Broker 中新增用户自定义队列,注意配合 CELERY_CREATE_MISSING_QUEUES 选项使用 + + :param name: 队列名 + :type name: str + :param routing_key: routing key + :type routing_key: str + """ + queue_resolver = QueueResolver(name) + + exchange = Exchange("default", type="direct") + with Connection(settings.BROKER_URL) as conn: + with conn.channel() as channel: + for queue_config in queue_resolver.routes_config().values(): + queue = Queue( + queue_config["queue"], exchange, routing_key=queue_config["routing_key"], max_priority=255 + ) + queue.declare(channel=channel) + + def get_plain_log_for_node(self, node_id: str, history_id: int = -1) -> str: + """ + 读取某个节点某一次执行的日志 + + :param node_id: 节点 ID + :type node_id: str + :param history_id: 执行历史 ID, -1 表示获取最新日志 + :type history_id: int, optional + :return: 节点日志 + :rtype: str + """ + if history_id != -1: + qs = ExecutionHistory.objects.filter(id=history_id).only("version") + else: + qs = State.objects.filter(node_id=node_id).only("version") + + if not qs: + return "" + version = qs.first().version + return "\n".join( + [ + e.message + for e in LogEntry.objects.order_by("id").filter(node_id=node_id, version=version).only("message") + ] + ) diff --git a/runtime/bamboo-pipeline/pipeline/eri/signals.py b/runtime/bamboo-pipeline/pipeline/eri/signals.py new file mode 100644 index 00000000..4b4d2cbb --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/eri/signals.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.dispatch import Signal + +post_set_state = Signal(providing_args=["node_id", "to_state", "version", "root_id", "parent_id", "loop"]) diff --git a/runtime/bamboo-pipeline/pipeline/exceptions.py b/runtime/bamboo-pipeline/pipeline/exceptions.py new file mode 100644 index 00000000..2b09162d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/exceptions.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class PipelineException(Exception): + pass + + +class FlowTypeError(PipelineException): + pass + + +class InvalidOperationException(PipelineException): + pass + + +class ConditionExhaustedException(PipelineException): + pass + + +class EvaluationException(PipelineException): + pass + + +class NodeNotExistException(PipelineException): + pass + + +class SourceKeyException(NodeNotExistException): + pass + + +class VariableHydrateException(PipelineException): + pass + + +class ParserException(PipelineException): + pass + + +class SubprocessRefError(PipelineException): + pass + + +class TemplateImportError(PipelineException): + pass + + +class SubprocessExpiredError(PipelineException): + pass + + +# +# data exception +# + + +class DataException(PipelineException): + pass + + +class DataInitException(DataException): + pass + + +class DataAttrException(DataException): + pass + + +class DataTypeErrorException(DataException): + pass + + +class CycleErrorException(DataException): + pass + + +class ConnectionValidateError(DataException): + def __init__(self, failed_nodes, detail, *args): + self.failed_nodes = failed_nodes + self.detail = detail + super(ConnectionValidateError, self).__init__(*args) + + +class ConvergeMatchError(DataException): + def __init__(self, gateway_id, *args): + self.gateway_id = gateway_id + super(ConvergeMatchError, self).__init__(*args) + + +class StreamValidateError(DataException): + def __init__(self, node_id, *args): + self.node_id = node_id + super(StreamValidateError, self).__init__(*args) + + +class IsolateNodeError(DataException): + pass + + +# +# component exception +# + + +class ComponentException(PipelineException): + pass + + +class ComponentDataFormatException(ComponentException): + pass + + +class ComponentNotExistException(ComponentException): + pass + + +class ComponentDataLackException(ComponentDataFormatException): + pass + + +# +# tag exception +# + + +class PipelineError(Exception): + pass + + +class TagError(PipelineError): + pass + + +class 
AttributeMissingError(TagError): + pass + + +class AttributeValidationError(TagError): + pass + + +# +# constant exception +# +class ConstantException(PipelineException): + pass + + +class ConstantNotExistException(ConstantException): + pass + + +class ConstantReferenceException(ConstantException): + pass + + +class ConstantTypeException(ConstantException): + pass + + +class ConstantSyntaxException(ConstantException): + pass + + +# +# context exception +# +class ContextError(PipelineError): + pass + + +class ReferenceNotExistError(ContextError): + pass + + +class InsufficientVariableError(ContextError): + pass + + +# +# periodic task exception +# +class InvalidCrontabException(PipelineException): + pass diff --git a/runtime/bamboo-pipeline/pipeline/log/__init__.py b/runtime/bamboo-pipeline/pipeline/log/__init__.py new file mode 100644 index 00000000..8eca6ab4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/__init__.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + + +def setup(level=None): + from pipeline.logging import pipeline_logger as logger + from pipeline.log.handlers import EngineLogHandler + + if level in set(logging._levelToName.values()): + logger.setLevel(level) + + logging._acquireLock() + try: + for hdl in logger.handlers: + if isinstance(hdl, EngineLogHandler): + break + else: + hdl = EngineLogHandler() + hdl.setLevel(logger.level) + logger.addHandler(hdl) + finally: + logging._releaseLock() + + +default_app_config = "pipeline.log.apps.LogConfig" diff --git a/runtime/bamboo-pipeline/pipeline/log/admin.py b/runtime/bamboo-pipeline/pipeline/log/admin.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/admin.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/log/apps.py b/runtime/bamboo-pipeline/pipeline/log/apps.py new file mode 100644 index 00000000..c2e476d9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/apps.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.apps import AppConfig + +from pipeline.conf import default_settings + + +class LogConfig(AppConfig): + name = "pipeline.log" + verbose_name = "Database Logging" + + def ready(self): + from pipeline.log import setup + + setup(level=default_settings.PIPELINE_LOG_LEVEL) diff --git a/runtime/bamboo-pipeline/pipeline/log/handlers.py b/runtime/bamboo-pipeline/pipeline/log/handlers.py new file mode 100644 index 00000000..64662b56 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/handlers.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + +from django.core.exceptions import AppRegistryNotReady + +from pipeline.engine.core import context + + +class EngineLogHandler(logging.Handler): + def emit(self, record): + try: + from . import models + except AppRegistryNotReady: + return + + models.LogEntry.objects.create( + logger_name=record.name, + level_name=record.levelname, + message=self.format(record), + exception=record.exc_text, + node_id=record._id, + ) + + +class EngineContextLogHandler(logging.Handler): + def emit(self, record): + try: + from . import models + except AppRegistryNotReady: + return + + node_id = context.get_node_id() + if not node_id: + return + + models.LogEntry.objects.create( + logger_name=record.name, + level_name=record.levelname, + message=self.format(record), + exception=record.exc_text, + node_id=node_id, + ) diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0001_initial.py new file mode 100644 index 00000000..619d8a28 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0001_initial.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="LogEntry", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("logger_name", models.SlugField(max_length=128)), + ("level_name", models.SlugField(max_length=32)), + ("message", models.TextField()), + ("exception", models.TextField()), + ("logged_at", models.DateTimeField(auto_now_add=True)), + ("node_id", models.CharField(max_length=32, db_index=True)), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0002_auto_20180810_1054.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0002_auto_20180810_1054.py new file mode 100644 index 00000000..a77441e9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0002_auto_20180810_1054.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("log", "0001_initial"), + ] + + operations = [ + migrations.AlterField(model_name="logentry", name="exception", field=models.TextField(null=True),), + migrations.AlterField(model_name="logentry", name="message", field=models.TextField(null=True),), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0003_logentry_history_id.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0003_logentry_history_id.py new file mode 100644 index 00000000..f67290b0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0003_logentry_history_id.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("log", "0002_auto_20180810_1054"), + ] + + operations = [ + migrations.AddField(model_name="logentry", name="history_id", field=models.IntegerField(default=-1),), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0004_auto_20180814_1555.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0004_auto_20180814_1555.py new file mode 100644 index 00000000..aad49e63 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0004_auto_20180814_1555.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("log", "0003_logentry_history_id"), + ] + + operations = [ + migrations.AlterField( + model_name="logentry", + name="exception", + field=models.TextField(null=True, verbose_name="\u5f02\u5e38\u4fe1\u606f"), + ), + migrations.AlterField( + model_name="logentry", + name="history_id", + field=models.IntegerField(default=-1, verbose_name="\u8282\u70b9\u6267\u884c\u5386\u53f2 ID"), + ), + migrations.AlterField( + model_name="logentry", + name="level_name", + field=models.SlugField(max_length=32, verbose_name="\u65e5\u5fd7\u7b49\u7ea7"), + ), + migrations.AlterField( + model_name="logentry", + name="logged_at", + field=models.DateTimeField(auto_now_add=True, verbose_name="\u8f93\u51fa\u65f6\u95f4"), + ), + migrations.AlterField( + model_name="logentry", + name="logger_name", + field=models.SlugField(max_length=128, verbose_name="logger \u540d\u79f0"), + ), + migrations.AlterField( + model_name="logentry", + name="message", + field=models.TextField(null=True, verbose_name="\u65e5\u5fd7\u5185\u5bb9"), + ), + migrations.AlterField( + model_name="logentry", + name="node_id", + field=models.CharField(max_length=32, verbose_name="\u8282\u70b9 ID", db_index=True), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0005_auto_20190729_1041.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0005_auto_20190729_1041.py new file mode 100644 index 00000000..7becf5b2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0005_auto_20190729_1041.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("log", "0004_auto_20180814_1555"), + ] + + operations = [ + migrations.AlterField( + model_name="logentry", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/0006_auto_20201201_1638.py b/runtime/bamboo-pipeline/pipeline/log/migrations/0006_auto_20201201_1638.py new file mode 100644 index 00000000..5b8ecd64 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/0006_auto_20201201_1638.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("log", "0005_auto_20190729_1041"), + ] + + operations = [ + migrations.AlterField( + model_name="logentry", + name="logged_at", + field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="输出时间"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/log/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/log/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/log/models.py b/runtime/bamboo-pipeline/pipeline/log/models.py new file mode 100644 index 00000000..5fa33fb0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/models.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.db import models +from django.utils import timezone +from django.utils.translation import ugettext_lazy as _ + + +class LogEntryManager(models.Manager): + def link_history(self, node_id, history_id): + self.filter(node_id=node_id, history_id=-1).update(history_id=history_id) + + def plain_log_for_node(self, node_id, history_id): + entries = self.order_by("id").filter(node_id=node_id, history_id=history_id) + plain_entries = [] + for entry in entries: + plain_entries.append( + "[%s %s] %s, exception: %s" + % (entry.logged_at.strftime("%Y-%m-%d %H:%M:%S"), entry.level_name, entry.message, entry.exception) + ) + return "\n".join(plain_entries) + + def delete_expired_log(self, interval): + expired_date = timezone.now() + timezone.timedelta(days=(-interval)) + to_be_deleted = self.filter(logged_at__lt=expired_date) + count = to_be_deleted.count() + to_be_deleted.delete() + return count + + +class LogEntry(models.Model): + id = models.BigAutoField(_("ID"), primary_key=True) + logger_name = models.SlugField(_("logger 名称"), max_length=128) + level_name = models.SlugField(_("日志等级"), max_length=32) + message = models.TextField(_("日志内容"), null=True) + exception = models.TextField(_("异常信息"), null=True) + logged_at = models.DateTimeField(_("输出时间"), auto_now_add=True, db_index=True) + + node_id = models.CharField(_("节点 ID"), max_length=32, db_index=True) + history_id = models.IntegerField(_("节点执行历史 ID"), default=-1) + + objects = LogEntryManager() diff --git a/runtime/bamboo-pipeline/pipeline/log/tasks.py b/runtime/bamboo-pipeline/pipeline/log/tasks.py new file mode 100644 index 00000000..be5cec16 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/tasks.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging + +from celery.decorators import periodic_task +from celery.schedules import crontab +from django.conf import settings + +from pipeline.log.models import LogEntry + +logger = logging.getLogger(__name__) + + +@periodic_task(run_every=(crontab(minute=0, hour=0)), ignore_result=True) +def clean_expired_log(): + expired_interval = getattr(settings, "LOG_PERSISTENT_DAYS", None) + + if expired_interval is None: + expired_interval = 30 + logger.warning("LOG_PERSISTENT_DAYS are not found in settings, use default value: 30") + + del_num = LogEntry.objects.delete_expired_log(expired_interval) + logger.info("%s log entry are deleted" % del_num) diff --git a/runtime/bamboo-pipeline/pipeline/log/views.py b/runtime/bamboo-pipeline/pipeline/log/views.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/log/views.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/logging.py b/runtime/bamboo-pipeline/pipeline/logging.py new file mode 100644 index 00000000..4b04e89b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/logging.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import logging + + +def get_pipeline_logger(): + return logging.getLogger(__name__) + + +pipeline_logger = get_pipeline_logger() diff --git a/runtime/bamboo-pipeline/pipeline/management/__init__.py b/runtime/bamboo-pipeline/pipeline/management/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/__init__.py b/runtime/bamboo-pipeline/pipeline/management/commands/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/app.py b/runtime/bamboo-pipeline/pipeline/management/commands/app.py new file mode 100644 index 00000000..5e21f97a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/app.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +from __future__ import absolute_import, unicode_literals + +from celery import current_app + + +#: The Django-Celery app instance. +app = current_app._get_current_object() diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/base.py b/runtime/bamboo-pipeline/pipeline/management/commands/base.py new file mode 100644 index 00000000..45dc748b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/base.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the
+specific language governing permissions and limitations under the License.
+"""
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+
+import celery
+
+try:
+    import django_celery_beat
+except ImportError:
+    import djcelery
+
+from kombu.utils.encoding import str_to_bytes
+from django.core.management.base import BaseCommand
+
+DB_SHARED_THREAD = """\
+DatabaseWrapper objects created in a thread can only \
+be used in that same thread. The object with alias '{0}' \
+was created in thread id {1} and this is thread id {2}.\
+"""
+
+
+def setenv(k, v):  # noqa
+    os.environ[str_to_bytes(k)] = str_to_bytes(v)
+
+
+def patch_thread_ident():
+    # monkey patch django.
+    # This patch makes sure that we use real threads to get the ident, which
+    # is going to matter if we are using gevent or eventlet.
+    # -- patch taken from gunicorn
+    if getattr(patch_thread_ident, "called", False):
+        return
+    try:
+        from django.db.backends.base.base import BaseDatabaseWrapper, DatabaseError
+
+        if "validate_thread_sharing" in BaseDatabaseWrapper.__dict__:
+            import threading
+
+            _get_ident = threading.get_ident
+
+            __old__init__ = BaseDatabaseWrapper.__init__
+
+            def _init(self, *args, **kwargs):
+                __old__init__(self, *args, **kwargs)
+                self._thread_ident = _get_ident()
+
+            def _validate_thread_sharing(self):
+                if not self.allow_thread_sharing and self._thread_ident != _get_ident():
+                    # DB_SHARED_THREAD uses {0}/{1}/{2} placeholders, so it must be
+                    # rendered with str.format, not the % operator.
+                    raise DatabaseError(DB_SHARED_THREAD.format(self.alias, self._thread_ident, _get_ident()))
+
+            BaseDatabaseWrapper.__init__ = _init
+            BaseDatabaseWrapper.validate_thread_sharing = _validate_thread_sharing
+
+        patch_thread_ident.called = True
+    except ImportError:
+        pass
+
+
+patch_thread_ident()
+
+
+class CeleryCommand(BaseCommand):
+    options = ()
+    if hasattr(BaseCommand, "option_list"):
+        options = BaseCommand.option_list
+    else:
+
+        def add_arguments(self, parser):
+            option_typemap = {"string": str, "int": int, "float": float}
+            for opt in self.option_list:
+                option = {k: v for k, v in opt.__dict__.items() if v is not None}
+                flags = option.get("_long_opts", []) + option.get("_short_opts", [])
+                if option.get("default") == ("NO", "DEFAULT"):
+                    option["default"] = None
+                if option.get("nargs") == 1:
+                    del option["nargs"]
+                del option["_long_opts"]
+                del option["_short_opts"]
+                if "type" in option:
+                    opttype = option["type"]
+                    option["type"] = option_typemap.get(opttype, opttype)
+                parser.add_argument(*flags, **option)
+
+    skip_opts = ["--app", "--loader", "--config", "--no-color"]
+    requires_system_checks = False
+    keep_base_opts = False
+    stdout, stderr = sys.stdout, sys.stderr
+
+    def get_version(self):
+        # django_celery_beat is only bound when the module-level import
+        # succeeded, so a missing package surfaces here as a NameError.
+        try:
+            version = "celery {c.__version__}\ndjango-celery-beat {d.__version__}".format(
+                c=celery, d=django_celery_beat,
+            )
+        except NameError:
+            version = "celery {c.__version__}\ndjango-celery {d.__version__}".format(c=celery, d=djcelery,)
+        return version
+
+    def execute(self, *args, **options):
+        broker = options.get("broker")
+        if broker:
+            self.set_broker(broker)
+        super(CeleryCommand, self).execute(*args, **options)
+
+    def set_broker(self, broker):
+        setenv("CELERY_BROKER_URL", broker)
+
+    def run_from_argv(self, argv):
+        self.handle_default_options(argv[2:])
+        return super(CeleryCommand, self).run_from_argv(argv)
+
+    def handle_default_options(self, argv):
+        acc = []
+        broker = None
+        for i, arg in enumerate(argv):
+            # --settings and --pythonpath are also handled
+            # by BaseCommand.handle_default_options, but that is
+            # called with the resulting
options parsed by optparse. + if "--settings=" in arg: + _, settings_module = arg.split("=") + setenv("DJANGO_SETTINGS_MODULE", settings_module) + elif "--pythonpath=" in arg: + _, pythonpath = arg.split("=") + sys.path.insert(0, pythonpath) + elif "--broker=" in arg: + _, broker = arg.split("=") + elif arg == "-b": + broker = argv[i + 1] + else: + acc.append(arg) + if broker: + self.set_broker(broker) + return argv if self.keep_base_opts else acc + + def die(self, msg): + sys.stderr.write(msg) + sys.stderr.write("\n") + sys.exit() + + def _is_unwanted_option(self, option): + return option._long_opts and option._long_opts[0] in self.skip_opts + + @property + def option_list(self): + return [x for x in self.options if not self._is_unwanted_option(x)] diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/celery.py b/runtime/bamboo-pipeline/pipeline/management/commands/celery.py new file mode 100644 index 00000000..5f007720 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/celery.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +from __future__ import absolute_import, unicode_literals + +from optparse import make_option as Option + +from celery.bin import celery + +from pipeline.management.commands.app import app +from pipeline.management.commands.base import CeleryCommand + +base = celery.CeleryCommand(app=app) + + +class Command(CeleryCommand): + """The celery command.""" + + help = "celery commands, see celery help" + options = ( + Option("-A", "--app", default=None), + Option("--broker", default=None), + Option("--loader", default=None), + Option("--config", default=None), + Option("--workdir", default=None, dest="working_directory"), + Option("--result-backend", default=None), + Option("--no-color", "-C", action="store_true", default=None), + Option("--quiet", "-q", action="store_true"), + ) + if base.get_options() is not None: + options = options + CeleryCommand.options + base.get_options() + + def run_from_argv(self, argv): + argv = self.handle_default_options(argv) + base.execute_from_commandline(["{0[0]} {0[1]}".format(argv)] + argv[2:],) diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/celerybeat.py b/runtime/bamboo-pipeline/pipeline/management/commands/celerybeat.py new file mode 100644 index 00000000..73e0cfe7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/celerybeat.py @@ -0,0 +1,36 @@ +""" + +Start the celery clock service from the Django management command. 
+ +""" +from __future__ import absolute_import, unicode_literals + +from optparse import make_option as Option + +from celery.bin import beat + +from pipeline.management.commands.app import app +from pipeline.management.commands.base import CeleryCommand + +beat = beat.beat(app=app) + + +class Command(CeleryCommand): + """Run the celery periodic task scheduler.""" + + help = 'Old alias to the "celery beat" command.' + options = ( + Option("-A", "--app", default=None), + Option("--broker", default=None), + Option("--loader", default=None), + Option("--config", default=None), + Option("--workdir", default=None, dest="working_directory"), + Option("--result-backend", default=None), + Option("--no-color", "-C", action="store_true", default=None), + Option("--quiet", "-q", action="store_true"), + ) + if beat.get_options() is not None: + options = options + CeleryCommand.options + beat.get_options() + + def handle(self, *args, **options): + beat.run(*args, **options) diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/create_atoms_app.py b/runtime/bamboo-pipeline/pipeline/management/commands/create_atoms_app.py new file mode 100644 index 00000000..d48d158c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/create_atoms_app.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import os +import sys + +from django.core.management import base, call_command +from django.template import Template, Context + +from pipeline.templates.create_plugins_app import js_file, plugins, py_file + +PY_COPYRIGHT = '''# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +''' + + +class Command(base.BaseCommand): + help = "Create an application for atoms development" + + def add_arguments(self, parser): + parser.add_argument("app_name", nargs=1, type=str) + + def handle(self, *args, **options): + + app_name = options["app_name"][0] + if os.path.isdir(app_name): + sys.stdout.write("the directory [%s] already exists, please try another name.\n") + return + + call_command("startapp", app_name) + + collection_path = "%s/components/collections" % app_name + tests_path = "%s/tests/components/collections/plugins_test" % app_name + static_collection_path = "{}/static/{}".format(app_name, app_name) + init_file_info = { + "%s/components/collections/__init__.py" % app_name: py_file.TEMPLATE, + "%s/components/__init__.py" % app_name: py_file.TEMPLATE, + "%s/components/collections/plugins.py" % app_name: plugins.TEMPLATE, + "%s/tests/__init__.py" % app_name: py_file.TEMPLATE, + "%s/tests/components/__init__.py" % app_name: py_file.TEMPLATE, + "%s/tests/components/collections/__init__.py" % app_name: py_file.TEMPLATE, + "%s/tests/components/collections/plugins_test/__init__.py" % app_name: py_file.TEMPLATE, + "{}/static/{}/plugins.js".format(app_name, app_name): js_file.TEMPLATE, + } + exist_file_path = [ + "%s/migrations/__init__.py" % app_name, + "%s/__init__.py" % app_name, + "%s/apps.py" % app_name, + ] + useless_file_path = [ + "%s/admin.py" % app_name, + "%s/models.py" % app_name, + "%s/tests.py" % app_name, + "%s/views.py" % app_name, + ] + os.makedirs(collection_path) + os.makedirs(tests_path) + os.makedirs(static_collection_path) + + empty_context = Context() + for p, tmpl in list(init_file_info.items()): + with open(p, "w+") as f: + f.write(Template(tmpl).render(empty_context)) + + for p in exist_file_path: + with open(p, "r") as f: + content = f.readlines() + + if content and content[0].startswith("# -*- coding: utf-8 -*-"): + content = content[1:] + + content.insert(0, PY_COPYRIGHT) + + with open(p, "w") as f: + f.writelines(content) + + for p in useless_file_path: + os.remove(p) diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/create_plugins_app.py b/runtime/bamboo-pipeline/pipeline/management/commands/create_plugins_app.py new file mode 100644 index 00000000..bcecf148 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/create_plugins_app.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from .create_atoms_app import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/management/commands/generate_config.py b/runtime/bamboo-pipeline/pipeline/management/commands/generate_config.py new file mode 100644 index 00000000..777d340c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/management/commands/generate_config.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import os + +from django.core.management.base import BaseCommand +from django.template.loader import render_to_string + +from pipeline.conf import settings + + +class Command(BaseCommand): + help = "Generate Redis & Supervisor configuration file for pipeline" + + configs = { + os.path.join(settings.BASE_DIR, "etc/redis.conf"): "redis/redis.tmpl", + os.path.join(settings.BASE_DIR, "etc/supervisord.conf"): "supervisor/supervisor.tmpl", + } + + var_paths = [os.path.join(settings.BASE_DIR, "var/log/"), os.path.join(settings.BASE_DIR, "var/run/")] + + def add_arguments(self, parser): + parser.add_argument("-pc", dest="p_worker_num", default=2, help="Set number of worker bind with pipeline") + parser.add_argument("-sc", dest="s_worker_num", default=2, help="Set the number of worker bind with schedule") + parser.add_argument( + "--worker", + action="store_true", + dest="is_worker", + default=False, + help="is worker process group (default False)", + ) + parser.add_argument( + "--master", + action="store_true", + dest="is_master", + default=False, + help="is master process group (default False)", + ) + + def handle(self, *args, **options): + context = { + "settings": settings, + "is_master": options["is_master"], + "is_worker": options["is_worker"], + "p_worker_num": options["p_worker_num"], + "s_worker_num": options["s_worker_num"], + "uid": os.getuid(), + } + + for path in self.var_paths: + if not os.path.exists(path): + os.makedirs(path) + + for target_path, template_name in list(self.configs.items()): + dirname = os.path.dirname(target_path) + if not os.path.exists(dirname): + try: + os.makedirs(dirname) + except Exception: + pass + + with open(target_path, "wb+") as f: + f.write(render_to_string(template_name, context)) diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/migrations/0001_initial.py new file mode 100644 index 00000000..e4df9f43 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0001_initial.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +# Generated by Django 1.11.2 on 2017-11-24 10:43 + + +from django.db import migrations, models +import django.db.models.deletion +import pipeline.models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="PipelineInstance", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("instance_id", models.CharField(max_length=32, unique=True, verbose_name="\u5b9e\u4f8bID")), + ( + "name", + models.CharField( + default="\u9ed8\u8ba4\u5b9e\u4f8b", max_length=64, verbose_name="\u5b9e\u4f8b\u540d\u79f0" + ), + ), + ("creator", models.CharField(max_length=32, verbose_name="\u521b\u5efa\u8005")), + ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), + ("executor", models.CharField(max_length=32, verbose_name="\u6267\u884c\u8005")), + ("start_time", models.DateTimeField(blank=True, null=True, verbose_name="\u542f\u52a8\u65f6\u95f4")), + ("finish_time", models.DateTimeField(blank=True, null=True, verbose_name="\u7ed3\u675f\u65f6\u95f4")), + ("description", models.TextField(blank=True, null=True, verbose_name="\u63cf\u8ff0")), + ("is_started", models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u7ecf\u542f\u52a8")), + ( + "is_finished", + models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u7ecf\u5b8c\u6210"), + ), + ( + "is_deleted", + models.BooleanField( + default=False, + help_text="\u8868\u793a\u5f53\u524d\u5b9e\u4f8b\u662f\u5426\u5220\u9664", + verbose_name="\u662f\u5426\u5df2\u7ecf\u5220\u9664", + ), + ), + ], + options={ + "ordering": ["-create_time"], + "verbose_name": "Pipeline\u5b9e\u4f8b", + "verbose_name_plural": "Pipeline\u5b9e\u4f8b", + }, + ), + migrations.CreateModel( + name="PipelineTemplate", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("template_id", models.CharField(max_length=32, unique=True, verbose_name="\u6a21\u677fID")), + ( + "name", + models.CharField( + default="\u9ed8\u8ba4\u6a21\u677f", max_length=64, verbose_name="\u6a21\u677f\u540d\u79f0" + ), + ), + ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), + ("creator", models.CharField(max_length=32, verbose_name="\u521b\u5efa\u8005")), + ("description", models.TextField(blank=True, null=True, verbose_name="\u63cf\u8ff0")), + ("editor", models.CharField(blank=True, max_length=32, null=True, verbose_name="\u4fee\u6539\u8005")), + ("edit_time", models.DateTimeField(auto_now=True, verbose_name="\u4fee\u6539\u65f6\u95f4")), + ( + "is_deleted", + models.BooleanField( + default=False, + help_text="\u8868\u793a\u5f53\u524d\u6a21\u677f\u662f\u5426\u5220\u9664", + verbose_name="\u662f\u5426\u5220\u9664", + ), + ), + ], + options={ + "ordering": ["-edit_time"], + "verbose_name": "Pipeline\u6a21\u677f", + "verbose_name_plural": "Pipeline\u6a21\u677f", + }, + ), + migrations.CreateModel( + name="Snapshot", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + 
( + "md5sum", + models.CharField( + max_length=32, unique=True, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5sum" + ), + ), + ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), + ("data", pipeline.models.CompressJSONField(blank=True, null=True)), + ], + options={ + "ordering": ["-id"], + "verbose_name": "\u6a21\u677f\u5feb\u7167", + "verbose_name_plural": "\u6a21\u677f\u5feb\u7167", + }, + ), + migrations.CreateModel( + name="VariableModel", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("code", models.CharField(max_length=255, unique=True, verbose_name="\u53d8\u91cf\u7f16\u7801")), + ("status", models.BooleanField(default=True, verbose_name="\u53d8\u91cf\u662f\u5426\u53ef\u7528")), + ], + options={ + "ordering": ["-id"], + "verbose_name": "lazy \u53d8\u91cf", + "verbose_name_plural": "lazy \u53d8\u91cf", + }, + ), + migrations.AddField( + model_name="pipelinetemplate", + name="snapshot", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="pipeline.Snapshot", + verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", + ), + ), + migrations.AddField( + model_name="pipelineinstance", + name="execution_snapshot", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="execution_snapshot", + to="pipeline.Snapshot", + verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", + ), + ), + migrations.AddField( + model_name="pipelineinstance", + name="snapshot", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="snapshot", + to="pipeline.Snapshot", + verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e", + ), + ), + migrations.AddField( + model_name="pipelineinstance", + name="template", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="pipeline.PipelineTemplate", + verbose_name="Pipeline\u6a21\u677f", + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0002_auto_20180109_1825.py b/runtime/bamboo-pipeline/pipeline/migrations/0002_auto_20180109_1825.py new file mode 100644 index 00000000..c25e5169 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0002_auto_20180109_1825.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +# Generated by Django 1.11.2 on 2018-01-09 18:25 + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="creator", + field=models.CharField(blank=True, max_length=32, verbose_name="\u521b\u5efa\u8005"), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="description", + field=models.TextField(blank=True, default="", verbose_name="\u63cf\u8ff0"), + preserve_default=False, + ), + migrations.AlterField( + model_name="pipelineinstance", + name="executor", + field=models.CharField(blank=True, max_length=32, verbose_name="\u6267\u884c\u8005"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0003_auto_20180206_1955.py b/runtime/bamboo-pipeline/pipeline/migrations/0003_auto_20180206_1955.py new file mode 100644 index 00000000..52d6c720 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0003_auto_20180206_1955.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +# Generated by Django 1.11.2 on 2018-02-06 19:55 + + +from django.db import migrations, models +import django.db.models.deletion +import pipeline.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0002_auto_20180109_1825"), + ] + + operations = [ + migrations.CreateModel( + name="TemplateScheme", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "unique_id", + models.CharField( + blank=True, max_length=97, unique=True, verbose_name="\u552f\u4e00\u65b9\u6848\u540d\u79f0" + ), + ), + ("name", models.CharField(max_length=64, verbose_name="\u65b9\u6848\u540d\u79f0")), + ("edit_time", models.DateTimeField(auto_now=True, verbose_name="\u4fee\u6539\u65f6\u95f4")), + ("data", pipeline.models.CompressJSONField(verbose_name="\u65b9\u6848\u6570\u636e")), + ], + ), + migrations.AlterModelOptions( + name="variablemodel", + options={ + "ordering": ["-id"], + "verbose_name": "Variable\u53d8\u91cf", + "verbose_name_plural": "Variable\u53d8\u91cf", + }, + ), + migrations.AlterField( + model_name="pipelineinstance", + name="name", + field=models.CharField(default=b"default_instance", max_length=64, verbose_name="\u5b9e\u4f8b\u540d\u79f0"), + ), + migrations.AlterField( + model_name="pipelinetemplate", + name="name", + field=models.CharField(default=b"default_template", max_length=64, verbose_name="\u6a21\u677f\u540d\u79f0"), + ), + migrations.AddField( + model_name="templatescheme", + name="template", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="pipeline.PipelineTemplate", + verbose_name="\u5bf9\u5e94\u6a21\u677f ID", + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0004_auto_20180516_1708.py b/runtime/bamboo-pipeline/pipeline/migrations/0004_auto_20180516_1708.py new file mode 100644 index 00000000..d0f69c03 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0004_auto_20180516_1708.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0003_auto_20180206_1955"), + ] + + operations = [ + migrations.AlterField( + model_name="templatescheme", + name="unique_id", + field=models.CharField(unique=True, max_length=97, verbose_name="\u65b9\u6848\u552f\u4e00ID", blank=True), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0005_pipelineinstance_tree_info.py b/runtime/bamboo-pipeline/pipeline/migrations/0005_pipelineinstance_tree_info.py new file mode 100644 index 00000000..64999d49 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0005_pipelineinstance_tree_info.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0004_auto_20180516_1708"), + ] + + operations = [ + migrations.AddField( + model_name="pipelineinstance", + name="tree_info", + field=models.ForeignKey( + related_name="tree_info", + verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + null=True, + on_delete=models.SET_NULL, + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0006_auto_20180814_1622.py b/runtime/bamboo-pipeline/pipeline/migrations/0006_auto_20180814_1622.py new file mode 100644 index 00000000..8548b8ed --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0006_auto_20180814_1622.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models +import pipeline.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0005_pipelineinstance_tree_info"), + ] + + operations = [ + migrations.CreateModel( + name="TreeInfo", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("data", pipeline.models.CompressJSONField(null=True, blank=True)), + ], + ), + migrations.AlterField( + model_name="pipelineinstance", + name="tree_info", + field=models.ForeignKey( + related_name="tree_info", + verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", + to="pipeline.TreeInfo", + null=True, + on_delete=models.SET_NULL, + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0007_templaterelationship.py b/runtime/bamboo-pipeline/pipeline/migrations/0007_templaterelationship.py new file mode 100644 index 00000000..c5113be6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0007_templaterelationship.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0006_auto_20180814_1622"), + ] + + operations = [ + migrations.CreateModel( + name="TemplateRelationship", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("ancestor_template_id", models.CharField(max_length=32, verbose_name="\u6839\u6a21\u677fID")), + ( + "descendant_template_id", + models.CharField(max_length=32, verbose_name="\u5b50\u6d41\u7a0b\u6a21\u677fID"), + ), + ("refer_sum", models.IntegerField(verbose_name="\u5f15\u7528\u6b21\u6570")), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0007_templateversion.py b/runtime/bamboo-pipeline/pipeline/migrations/0007_templateversion.py new file mode 100644 index 00000000..a299b799 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0007_templateversion.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0006_auto_20180814_1622"), + ] + + operations = [ + migrations.CreateModel( + name="TemplateVersion", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ( + "md5", + models.CharField( + max_length=32, db_index=True, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5" + ), + ), + ("date", models.DateTimeField(auto_now_add=True, verbose_name="\u6dfb\u52a0\u65e5\u671f")), + ( + "snapshot_id", + models.ForeignKey( + verbose_name="\u6a21\u677f\u6570\u636e ID", to="pipeline.Snapshot", on_delete=models.CASCADE + ), + ), + ( + "template_id", + models.ForeignKey( + to="pipeline.PipelineTemplate", + to_field="template_id", + verbose_name="\u6a21\u677f ID", + on_delete=models.CASCADE, + ), + ), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0008_auto_20180824_1115.py b/runtime/bamboo-pipeline/pipeline/migrations/0008_auto_20180824_1115.py new file mode 100644 index 00000000..492669ae --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0008_auto_20180824_1115.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0007_templaterelationship"), + ("pipeline", "0007_templateversion"), + ] + + operations = [ + migrations.RenameField(model_name="templateversion", old_name="snapshot_id", new_name="snapshot",), + migrations.RemoveField(model_name="templateversion", name="template_id",), + migrations.AddField( + model_name="templateversion", + name="template", + field=models.ForeignKey( + default="", verbose_name="\u6a21\u677f ID", to="pipeline.PipelineTemplate", on_delete=models.CASCADE + ), + preserve_default=False, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0011_auto_20180906_1045.py b/runtime/bamboo-pipeline/pipeline/migrations/0011_auto_20180906_1045.py new file mode 100644 index 00000000..5bce064e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0011_auto_20180906_1045.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0008_auto_20180824_1115"), + ] + + operations = [ + migrations.RemoveField(model_name="templaterelationship", name="refer_sum",), + migrations.AddField( + model_name="templaterelationship", + name="subprocess_node_id", + field=models.CharField(default="", max_length=32, verbose_name="\u5b50\u6d41\u7a0b\u8282\u70b9 ID"), + preserve_default=False, + ), + migrations.AddField( + model_name="templaterelationship", + name="version", + field=models.CharField(default="", max_length=32, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5"), + preserve_default=False, + ), + migrations.AlterField( + model_name="templaterelationship", + name="ancestor_template_id", + field=models.CharField(max_length=32, verbose_name="\u6839\u6a21\u677fID", db_index=True), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0012_templatecurrentversion.py b/runtime/bamboo-pipeline/pipeline/migrations/0012_templatecurrentversion.py new file mode 100644 index 00000000..cd775113 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0012_templatecurrentversion.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0011_auto_20180906_1045"), + ] + + operations = [ + migrations.CreateModel( + name="TemplateCurrentVersion", + fields=[ + ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), + ("template_id", models.CharField(max_length=32, verbose_name="\u6a21\u677fID", db_index=True)), + ( + "current_version", + models.CharField(max_length=32, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5"), + ), + ], + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0013_old_template_process.py b/runtime/bamboo-pipeline/pipeline/migrations/0013_old_template_process.py new file mode 100644 index 00000000..732f303f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0013_old_template_process.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations +from django.db.models.signals import post_save + + +def reverse_func(apps, schema_editor): + pass + + +def forward_func(apps, schema_editor): + PipelineTemplate = apps.get_model("pipeline", "PipelineTemplate") + TemplateRelationship = apps.get_model("pipeline", "TemplateRelationship") + TemplateVersion = apps.get_model("pipeline", "TemplateVersion") + TemplateCurrentVersion = apps.get_model("pipeline", "TemplateCurrentVersion") + db_alias = schema_editor.connection.alias + template_list = PipelineTemplate.objects.using(db_alias).filter(is_deleted=False) + + for template in template_list: + TemplateRelationship.objects.using(db_alias).filter(ancestor_template_id=template.template_id).delete() + acts = list(template.snapshot.data["activities"].values()) + subprocess_nodes = [act for act in acts if act["type"] == "SubProcess"] + rs = [] + for sp in subprocess_nodes: + version = ( + sp.get("version") + or PipelineTemplate.objects.using(db_alias).get(template_id=sp["template_id"]).snapshot.md5sum + ) + rs.append( + TemplateRelationship( + ancestor_template_id=template.template_id, + descendant_template_id=sp["template_id"], + subprocess_node_id=sp["id"][:32], + version=version, + ) + ) + TemplateRelationship.objects.bulk_create(rs) + + versions = TemplateVersion.objects.using(db_alias).filter(template_id=template.id).order_by("-id") + if not (versions and versions[0].md5 == template.snapshot.md5sum): + TemplateVersion.objects.create(template=template, snapshot=template.snapshot, md5=template.snapshot.md5sum) + TemplateCurrentVersion.objects.update_or_create( + template_id=template.template_id, defaults={"current_version": template.snapshot.md5sum} + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("pipeline", "0012_templatecurrentversion"), + ] + + operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0014_auto_20181127_1053.py b/runtime/bamboo-pipeline/pipeline/migrations/0014_auto_20181127_1053.py new file mode 100644 index 00000000..c878dcfe --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0014_auto_20181127_1053.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0013_old_template_process"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="name", + field=models.CharField( + default=b"default_instance", max_length=128, verbose_name="\u5b9e\u4f8b\u540d\u79f0" + ), + ), + migrations.AlterField( + model_name="pipelinetemplate", + name="name", + field=models.CharField( + default=b"default_template", max_length=128, verbose_name="\u6a21\u677f\u540d\u79f0" + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0015_auto_20181214_1453.py b/runtime/bamboo-pipeline/pipeline/migrations/0015_auto_20181214_1453.py new file mode 100644 index 00000000..35da8fda --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0015_auto_20181214_1453.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0014_auto_20181127_1053"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="execution_snapshot", + field=models.ForeignKey( + related_name="execution_snapshot_instances", + verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + null=True, + on_delete=models.SET_NULL, + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="snapshot", + field=models.ForeignKey( + related_name="snapshot_instances", + verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e\uff0c\u6307\u5411\u5b9e\u4f8b\u5bf9\u5e94\u7684\u6a21\u677f\u7684\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + on_delete=models.SET_NULL, + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="tree_info", + field=models.ForeignKey( + related_name="tree_info_instances", + verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", + to="pipeline.TreeInfo", + null=True, + on_delete=models.SET_NULL, + ), + ), + migrations.AlterField( + model_name="pipelinetemplate", + name="snapshot", + field=models.ForeignKey( + related_name="templates", + verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + on_delete=models.DO_NOTHING, + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0016_auto_20181220_0958.py b/runtime/bamboo-pipeline/pipeline/migrations/0016_auto_20181220_0958.py new file mode 100644 index 00000000..12c518c3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0016_auto_20181220_0958.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0015_auto_20181214_1453"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelinetemplate", + name="snapshot", + field=models.ForeignKey( + related_name="snapshot_templates", + verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", + to="pipeline.Snapshot", + on_delete=models.DO_NOTHING, + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py b/runtime/bamboo-pipeline/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py new file mode 100644 index 00000000..1a73b386 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0016_auto_20181220_0958"), + ] + + operations = [ + migrations.AddField( + model_name="pipelinetemplate", + name="has_subprocess", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u542b\u6709\u5b50\u6d41\u7a0b"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0018_set_has_subprocess.py b/runtime/bamboo-pipeline/pipeline/migrations/0018_set_has_subprocess.py new file mode 100644 index 00000000..42dd1aee --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0018_set_has_subprocess.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations + +from pipeline.core.constants import PE + + +def reverse_func(apps, schema_editor): + pass + + +def forward_func(apps, schema_editor): + PipelineTemplate = apps.get_model("pipeline", "PipelineTemplate") + + for template in PipelineTemplate.objects.all(): + if not template.is_deleted: + acts = list(template.snapshot.data[PE.activities].values()) + template.has_subprocess = any([act for act in acts if act["type"] == PE.SubProcess]) + template.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("pipeline", "0017_pipelinetemplate_has_subprocess"), + ] + + operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0019_delete_variablemodel.py b/runtime/bamboo-pipeline/pipeline/migrations/0019_delete_variablemodel.py new file mode 100644 index 00000000..d7e677b9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0019_delete_variablemodel.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0018_set_has_subprocess"), + ] + + operations = [ + migrations.DeleteModel(name="VariableModel",), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0020_auto_20190906_1119.py b/runtime/bamboo-pipeline/pipeline/migrations/0020_auto_20190906_1119.py new file mode 100644 index 00000000..9e4be343 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0020_auto_20190906_1119.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0019_delete_variablemodel"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="template", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="pipeline.PipelineTemplate", + verbose_name="Pipeline\u6a21\u677f", + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0021_auto_20190906_1143.py b/runtime/bamboo-pipeline/pipeline/migrations/0021_auto_20190906_1143.py new file mode 100644 index 00000000..6ecd8ed2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0021_auto_20190906_1143.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0020_auto_20190906_1119"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="execution_snapshot", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="execution_snapshot_instances", + to="pipeline.Snapshot", + verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="snapshot", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="snapshot_instances", + to="pipeline.Snapshot", + verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e\uff0c\u6307\u5411\u5b9e\u4f8b\u5bf9\u5e94\u7684\u6a21\u677f\u7684\u7ed3\u6784\u6570\u636e", + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="tree_info", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="tree_info_instances", + to="pipeline.TreeInfo", + verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", + ), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0022_pipelineinstance_is_revoked.py b/runtime/bamboo-pipeline/pipeline/migrations/0022_pipelineinstance_is_revoked.py new file mode 100644 index 00000000..c261433f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0022_pipelineinstance_is_revoked.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0021_auto_20190906_1143"), + ] + + operations = [ + migrations.AddField( + model_name="pipelineinstance", + name="is_revoked", + field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u7ecf\u64a4\u9500"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0023_set_is_revoked.py b/runtime/bamboo-pipeline/pipeline/migrations/0023_set_is_revoked.py new file mode 100644 index 00000000..21d85a88 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0023_set_is_revoked.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
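The data migration that follows marks already-revoked instances by walking the engine's `Status` rows and saving each matching `PipelineInstance` individually. On modern Django (2.2+) the same backfill could batch its writes with `bulk_update`; a hedged alternative sketch, with the `states.REVOKED` constant replaced by its literal value:

```python
def forward_func(apps, schema_editor):
    PipelineInstance = apps.get_model("pipeline", "PipelineInstance")
    Status = apps.get_model("engine", "Status")

    # "REVOKED" stands in for pipeline.engine.states.REVOKED
    id_to_time = dict(Status.objects.filter(state="REVOKED").values_list("id", "archived_time"))
    instances = list(PipelineInstance.objects.filter(instance_id__in=id_to_time))
    for inst in instances:
        inst.finish_time = id_to_time[inst.instance_id]
        inst.is_revoked = True
    # one UPDATE per batch instead of one per instance
    PipelineInstance.objects.bulk_update(instances, ["finish_time", "is_revoked"], batch_size=500)
```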
+""" + +from __future__ import unicode_literals + +from django.db import migrations + +from pipeline.engine import states + + +def reverse_func(apps, schema_editor): + pass + + +def forward_func(apps, schema_editor): + PipelineInstance = apps.get_model("pipeline", "PipelineInstance") + Status = apps.get_model("engine", "Status") + + revoked_status = Status.objects.filter(state=states.REVOKED).values("id", "archived_time") + id_to_time = {status["id"]: status["archived_time"] for status in revoked_status} + instances = PipelineInstance.objects.filter(instance_id__in=list(id_to_time.keys())) + for inst in instances: + inst.finish_time = id_to_time[inst.instance_id] + inst.is_revoked = True + inst.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("pipeline", "0022_pipelineinstance_is_revoked"), + ] + + operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0024_auto_20200213_0738.py b/runtime/bamboo-pipeline/pipeline/migrations/0024_auto_20200213_0738.py new file mode 100644 index 00000000..814aac12 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0024_auto_20200213_0738.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.23 on 2020-02-13 07:38 +from __future__ import unicode_literals + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0023_set_is_revoked"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="execution_snapshot", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="execution_snapshot_instances", + to="pipeline.Snapshot", + verbose_name="用于实例执行的结构数据", + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="name", + field=models.CharField(default="default_instance", max_length=128, verbose_name="实例名称"), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="snapshot", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="snapshot_instances", + to="pipeline.Snapshot", + verbose_name="实例结构数据,指向实例对应的模板的结构数据", + ), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="tree_info", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="tree_info_instances", + to="pipeline.TreeInfo", + verbose_name="提前计算好的一些流程结构数据", + ), + ), + migrations.AlterField( + model_name="pipelinetemplate", + name="name", + field=models.CharField(default="default_template", max_length=128, verbose_name="模板名称"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0025_auto_20200813_1216.py b/runtime/bamboo-pipeline/pipeline/migrations/0025_auto_20200813_1216.py new file mode 100644 index 00000000..0ad908ab --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0025_auto_20200813_1216.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-08-13 04:16 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0024_auto_20200213_0738"), + ] + + operations = [ + migrations.AlterField( + model_name="snapshot", + name="md5sum", + field=models.CharField(db_index=True, max_length=32, verbose_name="快照字符串的md5sum"), + ), + ] diff --git 
a/runtime/bamboo-pipeline/pipeline/migrations/0026_auto_20201028_1049.py b/runtime/bamboo-pipeline/pipeline/migrations/0026_auto_20201028_1049.py new file mode 100644 index 00000000..284ed9b5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0026_auto_20201028_1049.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-10-28 02:49 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0025_auto_20200813_1216"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelinetemplate", + name="name", + field=models.CharField(db_index=True, default="default_template", max_length=128, verbose_name="模板名称"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0027_auto_20201123_1552.py b/runtime/bamboo-pipeline/pipeline/migrations/0027_auto_20201123_1552.py new file mode 100644 index 00000000..a9d39f62 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0027_auto_20201123_1552.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-11-23 07:52 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0026_auto_20201028_1049"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelineinstance", + name="instance_id", + field=models.CharField(db_index=True, max_length=32, unique=True, verbose_name="实例ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0028_auto_20201227_1952.py b/runtime/bamboo-pipeline/pipeline/migrations/0028_auto_20201227_1952.py new file mode 100644 index 00000000..02941686 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0028_auto_20201227_1952.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.29 on 2020-12-27 11:52 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0027_auto_20201123_1552"), + ] + + operations = [ + migrations.AlterField( + model_name="pipelinetemplate", + name="create_time", + field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="创建时间"), + ), + migrations.AlterField( + model_name="pipelinetemplate", + name="edit_time", + field=models.DateTimeField(auto_now=True, db_index=True, verbose_name="修改时间"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0029_templaterelationship_always_use_latest.py b/runtime/bamboo-pipeline/pipeline/migrations/0029_templaterelationship_always_use_latest.py new file mode 100644 index 00000000..d4c665ec --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0029_templaterelationship_always_use_latest.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.19 on 2021-06-07 09:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0028_auto_20201227_1952"), + ] + + operations = [ + migrations.AddField( + model_name="templaterelationship", + name="always_use_latest", + field=models.BooleanField(default=False, verbose_name="是否永远使用最新版本"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0030_auto_20210607_1210.py b/runtime/bamboo-pipeline/pipeline/migrations/0030_auto_20210607_1210.py new file mode 100644 index 00000000..475af6cd --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/migrations/0030_auto_20210607_1210.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.19 on 2021-06-07 12:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0029_templaterelationship_always_use_latest"), + ] + + operations = [ + migrations.AlterField( + model_name="templaterelationship", + name="descendant_template_id", + field=models.CharField(db_index=True, max_length=32, verbose_name="子流程模板ID"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/0031_auto_20210624_2317.py b/runtime/bamboo-pipeline/pipeline/migrations/0031_auto_20210624_2317.py new file mode 100644 index 00000000..012a6057 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/0031_auto_20210624_2317.py @@ -0,0 +1,23 @@ +# Generated by Django 2.2.16 on 2021-06-24 15:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("pipeline", "0030_auto_20210607_1210"), + ] + + operations = [ + migrations.AddField( + model_name="pipelineinstance", + name="is_expired", + field=models.BooleanField(default=False, help_text="运行时被定期清理即为过期", verbose_name="是否已经过期"), + ), + migrations.AlterField( + model_name="pipelineinstance", + name="create_time", + field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="创建时间"), + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/models.py b/runtime/bamboo-pipeline/pipeline/models.py new file mode 100644 index 00000000..9e92952b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/models.py @@ -0,0 +1,800 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
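`models.py` opens with `CompressJSONField`, a `BinaryField` subclass that stores JSON documents zlib-compressed. Stripped of the Django plumbing, the storage scheme is just this round trip (stdlib `json` here; the field itself uses `ujson`):

```python
import json
import zlib


def to_db(value, compress_level=6):
    # what get_prep_value does: serialize, then compress
    return zlib.compress(json.dumps(value).encode("utf-8"), compress_level)


def from_db(blob):
    # what to_python / from_db_value do: decompress, then deserialize
    return json.loads(zlib.decompress(blob).decode("utf-8"))


tree = {"activities": {}, "flows": {}, "gateways": {}}
assert from_db(to_db(tree)) == tree
```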
+""" + +import copy +import hashlib +import logging +import queue +import zlib + +import ujson as json +from django.db import models, transaction +from django.utils import timezone +from django.utils.module_loading import import_string +from django.utils.translation import ugettext_lazy as _ + +from pipeline.conf import settings +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY +from pipeline.core.constants import PE +from pipeline.signals import post_pipeline_finish, post_pipeline_revoke +from pipeline.engine.utils import ActionResult, calculate_elapsed_time +from pipeline.exceptions import SubprocessRefError +from pipeline.parser.context import get_pipeline_context +from pipeline.parser.utils import replace_all_id +from pipeline.service import task_service +from pipeline.utils.graph import Graph +from pipeline.utils.uniqid import node_uniqid, uniqid + +MAX_LEN_OF_NAME = 128 +logger = logging.getLogger("root") + + +class CompressJSONField(models.BinaryField): + def __init__(self, compress_level=6, *args, **kwargs): + super(CompressJSONField, self).__init__(*args, **kwargs) + self.compress_level = compress_level + + def get_prep_value(self, value): + value = super(CompressJSONField, self).get_prep_value(value) + return zlib.compress(json.dumps(value).encode("utf-8"), self.compress_level) + + def to_python(self, value): + value = super(CompressJSONField, self).to_python(value) + return json.loads(zlib.decompress(value).decode("utf-8")) + + def from_db_value(self, value, expression, connection, context): + return self.to_python(value) + + +class SnapshotManager(models.Manager): + def create_snapshot(self, data): + h = hashlib.md5() + h.update(json.dumps(data).encode("utf-8")) + snapshot = self.create(md5sum=h.hexdigest(), data=data) + return snapshot + + def data_for_snapshot(self, snapshot_id): + return self.get(id=snapshot_id).data + + +class Snapshot(models.Model): + """ + 数据快照 + """ + + md5sum = models.CharField(_("快照字符串的md5sum"), max_length=32, db_index=True) + create_time = models.DateTimeField(_("创建时间"), auto_now_add=True) + data = CompressJSONField(null=True, blank=True) + + objects = SnapshotManager() + + class Meta: + verbose_name = _("模板快照") + verbose_name_plural = _("模板快照") + ordering = ["-id"] + app_label = "pipeline" + + def __unicode__(self): + return str(self.md5sum) + + def has_change(self, data): + """ + 检测 data 的 md5 是否和当前存储的不一致 + @param data: + @return: 新的 md5,md5 是否有变化 + """ + h = hashlib.md5() + h.update(json.dumps(data).encode("utf-8")) + md5 = h.hexdigest() + return md5, self.md5sum != md5 + + +class TreeInfo(models.Model): + """ + pipeline 数据信息 + """ + + data = CompressJSONField(null=True, blank=True) + + +def get_subprocess_act_list(pipeline_data): + """ + 获取 pipeline 结构中所有的子流程节点 + @param pipeline_data: 流程结构数据 + @return: 子流程节点 + """ + activities = pipeline_data[PE.activities] + act_ids = [act_id for act_id in activities if activities[act_id][PE.type] == PE.SubProcess] + return [activities[act_id] for act_id in act_ids] + + +def _act_id_in_graph(act): + """ + 获取子流程节点引用的模板 ID + @param act: 子流程节点 + @return: 模板 ID:版本 或 模板ID + """ + return "{}:{}".format(act["template_id"], act["version"]) if act.get("version") else act["template_id"] + + +class TemplateManager(models.Manager): + def subprocess_ref_validate(self, data, root_id=None, root_name=None): + """ + 验证子流程引用是否合法 + @param data: + @param root_id: + @param root_name: + @return: 引用是否合法,相关信息 + """ + try: + sub_refs, name_map = self.construct_subprocess_ref_graph(data, root_id=root_id, root_name=root_name) + 
except PipelineTemplate.DoesNotExist as e: + return False, str(e) + + nodes = list(sub_refs.keys()) + flows = [] + for node in nodes: + for ref in sub_refs[node]: + if ref in nodes: + flows.append([node, ref]) + graph = Graph(nodes, flows) + # circle reference check + trace = graph.get_cycle() + if trace: + name_trace = " → ".join([name_map[proc_id] for proc_id in trace]) + return False, _("子流程引用链中存在循环引用:%s") % name_trace + + return True, "" + + def create_model(self, structure_data, **kwargs): + """ + 创建流程模板对象 + @param structure_data: pipeline 结构数据 + @param kwargs: 其他参数 + @return: 流程模板 + """ + result, msg = self.subprocess_ref_validate(structure_data) + + if not result: + raise SubprocessRefError(msg) + + snapshot = Snapshot.objects.create_snapshot(structure_data) + kwargs["snapshot"] = snapshot + kwargs["template_id"] = node_uniqid() + obj = self.create(**kwargs) + # version track + # TemplateVersion.objects.track(obj) + + return obj + + def delete_model(self, template_ids): + """ + 删除模板对象 + @param template_ids: 模板对象 ID 列表或 ID + @return: + """ + if not isinstance(template_ids, list): + template_ids = [template_ids] + qs = self.filter(template_id__in=template_ids) + for template in qs: + template.is_deleted = True + template.name = uniqid() + template.save() + + def construct_subprocess_ref_graph(self, pipeline_data, root_id=None, root_name=None): + """ + 构造子流程引用图 + @param pipeline_data: pipeline 结构数据 + @param root_id: 所有引用开始的根流程 ID + @param root_name: 根流程名 + @return: 子流程引用图,模板 ID -> 模板姓名映射字典 + """ + subprocess_act = get_subprocess_act_list(pipeline_data) + tid_queue = queue.Queue() + graph = {} + version = {} + name_map = {} + + if root_id: + graph[root_id] = [_act_id_in_graph(act) for act in subprocess_act] + name_map[root_id] = root_name + + for act in subprocess_act: + tid_queue.put(_act_id_in_graph(act)) + version[_act_id_in_graph(act)] = act.get("version") + + while not tid_queue.empty(): + tid = tid_queue.get() + template = self.get(template_id=tid.split(":")[0]) + name_map[tid] = template.name + subprocess_act = get_subprocess_act_list(template.data_for_version(version[tid])) + + for act in subprocess_act: + ref_tid = _act_id_in_graph(act) + graph.setdefault(tid, []).append(ref_tid) + version[_act_id_in_graph(act)] = act.get("version") + if ref_tid not in graph: + tid_queue.put(ref_tid) + if not subprocess_act: + graph[tid] = [] + + return graph, name_map + + def unfold_subprocess(self, pipeline_data): + """ + 展开 pipeline 数据中所有的子流程 + @param pipeline_data: pipeline 数据 + @return: + """ + id_maps = replace_all_id(pipeline_data) + activities = pipeline_data[PE.activities] + for act_id, act in list(activities.items()): + if act[PE.type] == PE.SubProcess: + subproc_data = self.get(template_id=act[PE.template_id]).data_for_version(act.get(PE.version)) + + sub_id_maps = self.unfold_subprocess(subproc_data) + # act_id is new id + id_maps[PE.subprocess_detail].update({act_id: sub_id_maps}) + + subproc_data[PE.id] = act_id + act[PE.pipeline] = subproc_data + return id_maps + + def replace_id(self, pipeline_data): + """ + 替换 pipeline 中所有 ID + @param pipeline_data: pipeline 数据 + @return: + """ + id_maps = replace_all_id(pipeline_data) + activities = pipeline_data[PE.activities] + for act_id, act in list(activities.items()): + if act[PE.type] == PE.SubProcess: + subproc_data = act[PE.pipeline] + sub_id_maps = self.replace_id(subproc_data) + # act_id is new id + id_maps[PE.subprocess_detail].update({act_id: sub_id_maps}) + + subproc_data[PE.id] = act_id + act[PE.pipeline] = subproc_data + return 
id_maps + + +class PipelineTemplate(models.Model): + """ + 流程模板 + """ + + template_id = models.CharField(_("模板ID"), max_length=32, unique=True) + name = models.CharField(_("模板名称"), max_length=MAX_LEN_OF_NAME, default="default_template", db_index=True) + create_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) + creator = models.CharField(_("创建者"), max_length=32) + description = models.TextField(_("描述"), null=True, blank=True) + editor = models.CharField(_("修改者"), max_length=32, null=True, blank=True) + edit_time = models.DateTimeField(_("修改时间"), auto_now=True, db_index=True) + snapshot = models.ForeignKey( + Snapshot, verbose_name=_("模板结构数据"), related_name="snapshot_templates", on_delete=models.DO_NOTHING + ) + has_subprocess = models.BooleanField(_("是否含有子流程"), default=False) + is_deleted = models.BooleanField(_("是否删除"), default=False, help_text=_("表示当前模板是否删除")) + + objects = TemplateManager() + + class Meta: + verbose_name = _("Pipeline模板") + verbose_name_plural = _("Pipeline模板") + ordering = ["-edit_time"] + app_label = "pipeline" + + def __unicode__(self): + return "{}-{}".format(self.template_id, self.name) + + @property + def data(self): + return self.snapshot.data + + @property + def version(self): + return self.snapshot.md5sum + + @property + def subprocess_version_info(self): + # 1. get all subprocess + subprocess_info = TemplateRelationship.objects.get_subprocess_info(self.template_id).values( + "descendant_template_id", "subprocess_node_id", "version", "always_use_latest" + ) + info = {"subproc_has_update": False, "details": []} + if not subprocess_info: + return info + + # 2. check whether subprocess is expired + temp_current_versions = { + item.template_id: item + for item in TemplateCurrentVersion.objects.filter( + template_id__in=[item["descendant_template_id"] for item in subprocess_info] + ) + } + + expireds = [] + for item in subprocess_info: + item["expired"] = ( + False + if item["version"] is None + or item["descendant_template_id"] not in temp_current_versions + or item["always_use_latest"] + else (item["version"] != temp_current_versions[item["descendant_template_id"]].current_version) + ) + info["details"].append(item) + expireds.append(item["expired"]) + + info["subproc_has_update"] = any(expireds) + + # 3. 
return + return info + + @property + def subprocess_has_update(self): + return self.subprocess_version_info["subproc_has_update"] + + def data_for_version(self, version): + """ + 获取某个版本的模板数据 + @param version: 版本号 + @return: 模板数据 + """ + if not version: + return self.data + return Snapshot.objects.filter(md5sum=version).order_by("-id").first().data + + def referencer(self): + """ + 获取引用了该模板的其他模板 + @return: 引用了该模板的其他模板 ID 列表 + """ + referencer = TemplateRelationship.objects.referencer(self.template_id) + template_id = self.__class__.objects.filter(template_id__in=referencer, is_deleted=False).values_list( + "template_id", flat=True + ) + return list(template_id) + + def clone_data(self): + """ + 获取该模板数据的克隆 + @return: ID 替换过后的模板数据 + """ + data = self.data + replace_all_id(self.data) + return data + + def update_template(self, structure_data, **kwargs): + """ + 更新当前模板的模板数据 + @param structure_data: pipeline 结构数据 + @param kwargs: 其他参数 + @return: + """ + result, msg = PipelineTemplate.objects.subprocess_ref_validate(structure_data, self.template_id, self.name) + if not result: + raise SubprocessRefError(msg) + + snapshot = Snapshot.objects.create_snapshot(structure_data) + kwargs["snapshot"] = snapshot + kwargs["edit_time"] = timezone.now() + exclude_keys = ["template_id", "creator", "create_time", "is_deleted"] + for key in exclude_keys: + kwargs.pop(key, None) + for key, value in list(kwargs.items()): + setattr(self, key, value) + self.save() + + def gen_instance(self, inputs=None, **kwargs): + """ + 使用该模板创建实例 + @param inputs: 自定义输入 + @param kwargs: 其他参数 + @return: 实例对象 + """ + instance, _ = PipelineInstance.objects.create_instance( + template=self, exec_data=copy.deepcopy(self.data), inputs=inputs, **kwargs + ) + return instance + + def set_has_subprocess_bit(self): + acts = list(self.data[PE.activities].values()) + self.has_subprocess = any([act["type"] == PE.SubProcess for act in acts]) + + +class TemplateRelationShipManager(models.Manager): + def get_subprocess_info(self, template_id): + """ + 获取某个模板中所有的子流程信息 + @param template_id: 模板 ID + @return: 该模板所引用的子流程相关信息 + """ + return self.filter(ancestor_template_id=template_id) + + def referencer(self, template_id): + """ + 获取引用了某个模板的其他模板 + @param template_id: 被引用的模板 + @return: 引用了该模板的其他模板 ID 列表 + """ + return list(set(self.filter(descendant_template_id=template_id).values_list("ancestor_template_id", flat=True))) + + +class TemplateRelationship(models.Model): + """ + 流程模板引用关系:直接引用 + """ + + ancestor_template_id = models.CharField(_("根模板ID"), max_length=32, db_index=True) + descendant_template_id = models.CharField(_("子流程模板ID"), max_length=32, null=False, db_index=True) + subprocess_node_id = models.CharField(_("子流程节点 ID"), max_length=32, null=False) + version = models.CharField(_("快照字符串的md5"), max_length=32, null=False) + always_use_latest = models.BooleanField(_("是否永远使用最新版本"), default=False) + + objects = TemplateRelationShipManager() + + +class TemplateCurrentVersionManager(models.Manager): + def update_current_version(self, template): + """ + 更新某个模板的当前版本 + @param template: 模板对象 + @return: 记录模板当前版本的对象 + """ + obj, __ = self.update_or_create( + template_id=template.template_id, defaults={"current_version": template.version} + ) + return obj + + +class TemplateCurrentVersion(models.Model): + """ + 记录流程模板当前版本的表 + """ + + template_id = models.CharField(_("模板ID"), max_length=32, db_index=True) + current_version = models.CharField(_("快照字符串的md5"), max_length=32, null=False) + + objects = TemplateCurrentVersionManager() + + +class 
TemplateVersionManager(models.Manager): + def track(self, template): + """ + 记录模板的版本号 + @param template: 被记录模板 + @return: 版本跟踪对象 + """ + if not template.snapshot: + return None + + # don't track if latest version is same as current version + versions = self.filter(template_id=template.id).order_by("-id") + if versions and versions[0].md5 == template.snapshot.md5sum: + return versions[0] + + return self.create(template=template, snapshot=template.snapshot, md5=template.snapshot.md5sum) + + +class TemplateVersion(models.Model): + """ + 模板版本号记录节点 + """ + + template = models.ForeignKey(PipelineTemplate, verbose_name=_("模板 ID"), null=False, on_delete=models.CASCADE) + snapshot = models.ForeignKey(Snapshot, verbose_name=_("模板数据 ID"), null=False, on_delete=models.CASCADE) + md5 = models.CharField(_("快照字符串的md5"), max_length=32, db_index=True) + date = models.DateTimeField(_("添加日期"), auto_now_add=True) + + objects = TemplateVersionManager() + + +class TemplateScheme(models.Model): + """ + 模板执行方案 + """ + + template = models.ForeignKey( + PipelineTemplate, verbose_name=_("对应模板 ID"), null=False, blank=False, on_delete=models.CASCADE + ) + unique_id = models.CharField(_("方案唯一ID"), max_length=97, unique=True, null=False, blank=True) + name = models.CharField(_("方案名称"), max_length=64, null=False, blank=False) + edit_time = models.DateTimeField(_("修改时间"), auto_now=True) + data = CompressJSONField(verbose_name=_("方案数据")) + + +class InstanceManager(models.Manager): + def create_instance(self, template, exec_data, spread=False, inputs=None, **kwargs): + """ + 创建流程实例对象 + @param template: 流程模板 + @param exec_data: 执行用流程数据 + @param spread: exec_data 是否已经展开 + @param kwargs: 其他参数 + @param inputs: 自定义输入 + @return: 实例对象 + """ + if not spread: + id_maps = PipelineTemplate.objects.unfold_subprocess(exec_data) + else: + id_maps = PipelineTemplate.objects.replace_id(exec_data) + + inputs = inputs or {} + + for key, val in list(inputs.items()): + if key in exec_data["data"]["inputs"]: + exec_data["data"]["inputs"][key]["value"] = val + + instance_id = node_uniqid() + exec_data["id"] = instance_id + exec_snapshot = Snapshot.objects.create_snapshot(exec_data) + TreeInfo.objects.create() + if template is not None: + kwargs["template"] = template + kwargs["snapshot_id"] = template.snapshot.id + kwargs["instance_id"] = instance_id + kwargs["execution_snapshot_id"] = exec_snapshot.id + return self.create(**kwargs), id_maps + + def delete_model(self, instance_ids): + """ + 删除流程实例对象 + @param instance_ids: 实例 ID 或 ID 列表 + @return: + """ + if not isinstance(instance_ids, list): + instance_ids = [instance_ids] + qs = self.filter(instance_id__in=instance_ids) + for instance in qs: + instance.is_deleted = True + instance.name = uniqid() + instance.save() + + def set_started(self, instance_id, executor): + """ + 将实例的状态设置为已开始 + @param instance_id: 实例 ID + @param executor: 执行者 + @return: + """ + self.filter(instance_id=instance_id).update(start_time=timezone.now(), is_started=True, executor=executor) + + def set_finished(self, instance_id): + """ + 将实例的状态设置为已完成 + @param instance_id: 实例 ID + @return: + """ + self.filter(instance_id=instance_id).update(finish_time=timezone.now(), is_finished=True) + post_pipeline_finish.send(sender=PipelineInstance, instance_id=instance_id) + + def set_revoked(self, instance_id): + """ + 将实例的状态设置为已撤销 + @param instance_id: 实例 ID + @return: + """ + self.filter(instance_id=instance_id).update(finish_time=timezone.now(), is_revoked=True) + post_pipeline_revoke.send(sender=PipelineInstance, 
instance_id=instance_id) + + +class PipelineInstance(models.Model): + """ + 流程实例对象 + """ + + instance_id = models.CharField(_("实例ID"), max_length=32, unique=True, db_index=True) + template = models.ForeignKey( + PipelineTemplate, verbose_name=_("Pipeline模板"), null=True, blank=True, on_delete=models.SET_NULL + ) + name = models.CharField(_("实例名称"), max_length=MAX_LEN_OF_NAME, default="default_instance") + creator = models.CharField(_("创建者"), max_length=32, blank=True) + create_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) + executor = models.CharField(_("执行者"), max_length=32, blank=True) + start_time = models.DateTimeField(_("启动时间"), null=True, blank=True) + finish_time = models.DateTimeField(_("结束时间"), null=True, blank=True) + description = models.TextField(_("描述"), blank=True) + is_started = models.BooleanField(_("是否已经启动"), default=False) + is_finished = models.BooleanField(_("是否已经完成"), default=False) + is_revoked = models.BooleanField(_("是否已经撤销"), default=False) + is_deleted = models.BooleanField(_("是否已经删除"), default=False, help_text=_("表示当前实例是否删除")) + is_expired = models.BooleanField(_("是否已经过期"), default=False, help_text=_("运行时被定期清理即为过期")) + snapshot = models.ForeignKey( + Snapshot, + blank=True, + null=True, + related_name="snapshot_instances", + verbose_name=_("实例结构数据,指向实例对应的模板的结构数据"), + on_delete=models.SET_NULL, + ) + execution_snapshot = models.ForeignKey( + Snapshot, + blank=True, + null=True, + related_name="execution_snapshot_instances", + verbose_name=_("用于实例执行的结构数据"), + on_delete=models.SET_NULL, + ) + tree_info = models.ForeignKey( + TreeInfo, + blank=True, + null=True, + related_name="tree_info_instances", + verbose_name=_("提前计算好的一些流程结构数据"), + on_delete=models.SET_NULL, + ) + + objects = InstanceManager() + + class Meta: + verbose_name = _("Pipeline实例") + verbose_name_plural = _("Pipeline实例") + ordering = ["-create_time"] + app_label = "pipeline" + + def __unicode__(self): + return "{}-{}".format(self.instance_id, self.name) + + @property + def data(self): + return self.snapshot.data + + @property + def execution_data(self): + return self.execution_snapshot.data + + @property + def node_id_set(self): + if not self.tree_info: + self.calculate_tree_info(save=True) + return set(self.tree_info.data["node_id_set"]) + + @property + def elapsed_time(self): + return calculate_elapsed_time(self.start_time, self.finish_time) + + def set_execution_data(self, data): + """ + 设置实例的执行用流程数据 + @param data: 执行用流程数据 + @return: + """ + self.execution_snapshot.data = data + self.execution_snapshot.save() + + def _replace_id(self, exec_data): + """ + 替换执行用流程数据中的所有 ID + @param exec_data: 执行用流程数据 + @return: + """ + replace_all_id(exec_data) + activities = exec_data[PE.activities] + for act_id, act in list(activities.items()): + if act[PE.type] == PE.SubProcess: + self._replace_id(act["pipeline"]) + act["pipeline"]["id"] = act_id + + def clone(self, creator, **kwargs): + """ + 返回当前实例对象的克隆 + @param creator: 创建者 + @param kwargs: 其他参数 + @return: 当前实例对象的克隆 + """ + name = kwargs.get("name") or timezone.localtime(timezone.now()).strftime("clone%Y%m%d%H%M%S") + instance_id = node_uniqid() + + exec_data = self.execution_data + self._replace_id(exec_data) + # replace root id + exec_data["id"] = instance_id + new_snapshot = Snapshot.objects.create_snapshot(exec_data) + + return self.__class__.objects.create( + template=self.template, + instance_id=instance_id, + name=name, + creator=creator, + description=self.description, + snapshot=self.snapshot, + execution_snapshot=new_snapshot,
+ ) + + def start(self, executor, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): + """ + 启动当前流程 + @param executor: 执行者 + @param check_workers: 是否检测 worker 的状态 + @return: 执行结果 + """ + + with transaction.atomic(): + instance = self.__class__.objects.select_for_update().get(id=self.id) + if instance.is_started: + return ActionResult(result=False, message="pipeline instance already started.") + + pipeline_data = instance.execution_data + + try: + parser_cls = import_string(settings.PIPELINE_PARSER_CLASS) + except ImportError: + return ActionResult(result=False, message="invalid parser class: %s" % settings.PIPELINE_PARSER_CLASS) + + instance.start_time = timezone.now() + instance.is_started = True + instance.executor = executor + + parser = parser_cls(pipeline_data) + pipeline = parser.parse( + root_pipeline_data=get_pipeline_context( + instance, obj_type="instance", data_type="data", username=executor + ), + root_pipeline_context=get_pipeline_context( + instance, obj_type="instance", data_type="context", username=executor + ), + ) + + # calculate tree info + instance.calculate_tree_info() + + instance.save() + + act_result = task_service.run_pipeline(pipeline, check_workers=check_workers, priority=priority, queue=queue) + + if not act_result.result: + with transaction.atomic(): + instance = self.__class__.objects.select_for_update().get(id=self.id) + instance.start_time = None + instance.is_started = False + instance.executor = "" + instance.save() + + return act_result + + def _get_node_id_set(self, node_id_set, data): + """ + 递归获取当前实例中所有节点的 ID(包括子流程中的节点) + @param node_id_set: 节点 ID 集合 + @param data: 流程数据 + @return: + """ + node_id_set.add(data[PE.start_event]["id"]) + node_id_set.add(data[PE.end_event]["id"]) + for gid in data[PE.gateways]: + node_id_set.add(gid) + for aid, act_data in list(data[PE.activities].items()): + node_id_set.add(aid) + if act_data[PE.type] == PE.SubProcess: + self._get_node_id_set(node_id_set, act_data["pipeline"]) + + def calculate_tree_info(self, save=False): + """ + 计算当前流程实例执行用流程数据中的一些基本信息 + @param save: 是否在计算完后保存实例对象 + @return: + """ + self.tree_info = TreeInfo.objects.create() + node_id_set = set({}) + + # get node id set + self._get_node_id_set(node_id_set, self.execution_data) + + tree_info = {"node_id_set": list(node_id_set)} + self.tree_info.data = tree_info + self.tree_info.save() + + if save: + self.save() diff --git a/runtime/bamboo-pipeline/pipeline/parser/__init__.py b/runtime/bamboo-pipeline/pipeline/parser/__init__.py new file mode 100644 index 00000000..33eaef78 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/parser/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
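`parser/__init__.py` simply re-exports `PipelineParser`, the public entry point of this package. A hedged end-to-end sketch of how it is typically driven (assumes the flow builder helpers in `pipeline.builder` and a registered `example_component`, so treat every name here as illustrative):

```python
from pipeline.builder import EmptyEndEvent, EmptyStartEvent, ServiceActivity, build_tree
from pipeline.parser import PipelineParser

start = EmptyStartEvent()
act = ServiceActivity(component_code="example_component")
end = EmptyEndEvent()
start.extend(act).extend(end)

# validate_pipeline_tree runs inside __init__; parse() wires the object graph
parser = PipelineParser(pipeline_tree=build_tree(start))
pipeline = parser.parse(root_pipeline_data={"operator": "admin"})
```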
+""" + +from .pipeline_parser import PipelineParser # noqa diff --git a/runtime/bamboo-pipeline/pipeline/parser/context.py b/runtime/bamboo-pipeline/pipeline/parser/context.py new file mode 100644 index 00000000..2f6e6cb0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/parser/context.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from importlib import import_module + +from pipeline.conf import settings + + +def get_pipeline_context(obj, obj_type, data_type="data", username=""): + """ + @summary: pipeline context hook + @param obj: PipelineTemplete or PipelineInstance object + @param obj_type: template or instance + @param data_type: data(for component parent_data.inputs) or context(for pipeline root context) + @param username: + @return: + """ + context = {} + if obj_type == "template": + context_path = settings.PIPELINE_TEMPLATE_CONTEXT + elif obj_type == "instance": + context_path = settings.PIPELINE_INSTANCE_CONTEXT + else: + return context + if context_path: + mod, func = context_path.rsplit(".", 1) + mod = import_module(mod) + func = getattr(mod, func) + context = func(obj, data_type, username) + if not isinstance(context, dict): + context = {"data": context} + return context diff --git a/runtime/bamboo-pipeline/pipeline/parser/pipeline_parser.py b/runtime/bamboo-pipeline/pipeline/parser/pipeline_parser.py new file mode 100644 index 00000000..3a69598c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/parser/pipeline_parser.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from copy import deepcopy + +from django.utils.module_loading import import_string + +from pipeline import exceptions +from pipeline.component_framework.library import ComponentLibrary +from pipeline.core.constants import PE +from pipeline.core.data.base import DataObject +from pipeline.core.data.context import Context +from pipeline.core.data.converter import get_variable +from pipeline.core.data.hydration import hydrate_node_data, hydrate_subprocess_context +from pipeline.core.flow import ( + Condition, + ConditionalParallelGateway, + ConvergeGateway, + ExclusiveGateway, + FlowNodeClsFactory, + ParallelGateway, + SequenceFlow, +) +from pipeline.core.pipeline import Pipeline, PipelineSpec +from pipeline.validators.base import validate_pipeline_tree + + +def classify_inputs(pipeline_inputs, params, is_subprocess, root_pipeline_params=None): + """ + @summary: classify pipeline inputs into different parts + @param pipeline_inputs: pipeline or subprocess inputs + @param params: pipeline or subprocess params, which can cover item whose is_param is True in inputs + @param is_subprocess: whether pipeline is root or subprocess + @param root_pipeline_params: root pipeline params which should deliver to all subprocess + @return: + """ + # data from activity outputs + act_outputs = {} + # params should deliver to son subprocess + subprocess_params = {} + # context scope to resolving inputs + scope_info = deepcopy(root_pipeline_params) + for key, info in list(pipeline_inputs.items()): + source_act = info.get(PE.source_act) + if isinstance(source_act, str): + act_outputs.setdefault(info[PE.source_act], {}).update({info[PE.source_key]: key}) + continue + elif isinstance(source_act, list): + for source_info in source_act: + act_outputs.setdefault(source_info[PE.source_act], {}).update({source_info[PE.source_key]: key}) + + is_param = info.get(PE.is_param, False) + info = params.get(key, info) if is_param else info + if is_subprocess and is_param: + subprocess_params.update({key: info}) + continue + + scope_info.update({key: info}) + result = {"act_outputs": act_outputs, "scope_info": scope_info, "subprocess_params": subprocess_params} + return result + + +class PipelineParser(object): + def __init__(self, pipeline_tree, cycle_tolerate=False): + validate_pipeline_tree(pipeline_tree, cycle_tolerate=cycle_tolerate) + self.pipeline_tree = deepcopy(pipeline_tree) + self.cycle_tolerate = cycle_tolerate + + def parse(self, root_pipeline_data=None, root_pipeline_context=None): + """ + @summary: parse pipeline json tree to object with root data + @param root_pipeline_data: like business info or operator, which can be accessed by parent_data in + Component.execute + @param root_pipeline_context: params for pipeline to resolving inputs data + @return: + """ + return self._parse(root_pipeline_data, root_pipeline_context) + + def _parse( + self, root_pipeline_data=None, root_pipeline_params=None, params=None, is_subprocess=False, parent_context=None + ): + """ + @summary: parse pipeline and subprocess recursively + @param root_pipeline_data: root data from root pipeline parsing, witch will be passed to subprocess recursively + @param root_pipeline_params: params from root pipeline for all subprocess + @param params: params from parent for son subprocess + @param is_subprocess: whither is subprocess + @param parent_context: parent context for activity of subprocess to resolving inputs + @return: Pipeline object + """ + if root_pipeline_data is None: + root_pipeline_data = {} + if root_pipeline_params is None: 
+ root_pipeline_params = {} + if params is None: + params = {} + + pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs] + classification = classify_inputs(pipeline_inputs, params, is_subprocess, root_pipeline_params) + + output_keys = self.pipeline_tree[PE.data][PE.outputs] + context = Context(classification["act_outputs"], output_keys) + for key, info in list(classification["scope_info"].items()): + var = get_variable(key, info, context, root_pipeline_data) + context.set_global_var(key, var) + + pipeline_data = deepcopy(root_pipeline_data) + if is_subprocess: + if parent_context is None: + raise exceptions.DataTypeErrorException("parent context of subprocess cannot be none") + for key, info in list(classification["subprocess_params"].items()): + var = get_variable(key, info, parent_context, pipeline_data) + pipeline_data.update({key: var}) + + start = self.pipeline_tree[PE.start_event] + start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type]) + if "pre_render_keys" in self.pipeline_tree[PE.data]: + start_event = start_cls( + id=start[PE.id], + name=start[PE.name], + data=DataObject({"pre_render_keys": self.pipeline_tree[PE.data][PE.pre_render_keys]}), + ) + else: + start_event = start_cls(id=start[PE.id], name=start[PE.name]) + + end = self.pipeline_tree[PE.end_event] + end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type]) + end_event = end_cls(id=end[PE.id], name=end[PE.name], data=DataObject({})) + + acts = self.pipeline_tree[PE.activities] + act_objs = [] + for act in list(acts.values()): + act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type]) + if act[PE.type] == PE.ServiceActivity: + component = ComponentLibrary.get_component( + component_code=act[PE.component][PE.code], + data_dict=act[PE.component][PE.inputs], + version=act[PE.component].get(PE.version), + ) + service = component.service() + data = component.data_for_execution(context, pipeline_data) + handler_path = act.get("failure_handler") + failure_handler = import_string(handler_path) if handler_path else None + act_objs.append( + act_cls( + id=act[PE.id], + service=service, + name=act[PE.name], + data=data, + error_ignorable=act.get(PE.error_ignorable, False), + skippable=act[PE.skippable] if PE.skippable in act else act.get(PE.skippable_old, True), + retryable=act[PE.retryable] if PE.retryable in act else act.get(PE.retryable_old, True), + timeout=act.get(PE.timeout), + failure_handler=failure_handler, + ) + ) + elif act[PE.type] == PE.SubProcess: + sub_tree = act[PE.pipeline] + params = act[PE.params] + sub_parser = PipelineParser(pipeline_tree=sub_tree, cycle_tolerate=self.cycle_tolerate) + act_objs.append( + act_cls( + id=act[PE.id], + pipeline=sub_parser._parse( + root_pipeline_data=root_pipeline_data, + root_pipeline_params=root_pipeline_params, + params=params, + is_subprocess=True, + parent_context=context, + ), + name=act[PE.name], + ) + ) + else: + raise exceptions.FlowTypeError("Unknown Activity type: %s" % act[PE.type]) + + gateways = self.pipeline_tree[PE.gateways] + flows = self.pipeline_tree[PE.flows] + gateway_objs = [] + for gw in list(gateways.values()): + gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type]) + if gw[PE.type] in {PE.ParallelGateway, PE.ConditionalParallelGateway}: + gateway_objs.append( + gw_cls(id=gw[PE.id], converge_gateway_id=gw[PE.converge_gateway_id], name=gw[PE.name]) + ) + elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}: + gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name])) + else: + raise exceptions.FlowTypeError("Unknown Gateway type: %s" % 
gw[PE.type]) + + flow_objs_dict = {} + for fl in list(flows.values()): + flow_nodes = act_objs + gateway_objs + if fl[PE.source] == start[PE.id]: + source = start_event + else: + source = [x for x in flow_nodes if x.id == fl[PE.source]][0] + if fl[PE.target] == end[PE.id]: + target = end_event + else: + target = [x for x in flow_nodes if x.id == fl[PE.target]][0] + flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target) + flow_objs = list(flow_objs_dict.values()) + + # add incoming and outgoing flow to acts + if not isinstance(start[PE.outgoing], list): + start[PE.outgoing] = [start[PE.outgoing]] + for outgoing_id in start[PE.outgoing]: + start_event.outgoing.add_flow(flow_objs_dict[outgoing_id]) + + if not isinstance(end[PE.incoming], list): + end[PE.incoming] = [end[PE.incoming]] + for incoming_id in end[PE.incoming]: + end_event.incoming.add_flow(flow_objs_dict[incoming_id]) + + for act in act_objs: + incoming = acts[act.id][PE.incoming] + if isinstance(incoming, list): + for s in incoming: + act.incoming.add_flow(flow_objs_dict[s]) + else: + act.incoming.add_flow(flow_objs_dict[incoming]) + + act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]]) + + for gw in gateway_objs: + if isinstance(gw, ExclusiveGateway) or isinstance(gw, ConditionalParallelGateway): + for flow_id, con in list(gateways[gw.id][PE.conditions].items()): + con_obj = Condition(con[PE.evaluate], flow_objs_dict[flow_id]) + gw.add_condition(con_obj) + + if isinstance(gateways[gw.id][PE.incoming], list): + for incoming_id in gateways[gw.id][PE.incoming]: + gw.incoming.add_flow(flow_objs_dict[incoming_id]) + else: + gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]]) + + for outgoing_id in gateways[gw.id][PE.outgoing]: + gw.outgoing.add_flow(flow_objs_dict[outgoing_id]) + + elif isinstance(gw, ParallelGateway): + if isinstance(gateways[gw.id][PE.incoming], list): + for incoming_id in gateways[gw.id][PE.incoming]: + gw.incoming.add_flow(flow_objs_dict[incoming_id]) + else: + gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]]) + + for outgoing_id in gateways[gw.id][PE.outgoing]: + gw.outgoing.add_flow(flow_objs_dict[outgoing_id]) + + elif isinstance(gw, ConvergeGateway): + for incoming_id in gateways[gw.id][PE.incoming]: + gw.incoming.add_flow(flow_objs_dict[incoming_id]) + gw.outgoing.add_flow(flow_objs_dict[gateways[gw.id][PE.outgoing]]) + + else: + raise exceptions.FlowTypeError("Unknown Gateway type: %s" % type(gw)) + + context.duplicate_variables() + pipeline_data = DataObject(pipeline_data) + pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs, gateway_objs, pipeline_data, context) + return Pipeline(self.pipeline_tree[PE.id], pipeline_spec) + + def get_act(self, act_id, subprocess_stack=None, root_pipeline_data=None, root_pipeline_context=None): + if subprocess_stack is None: + subprocess_stack = [] + pipeline = self.parse(root_pipeline_data, root_pipeline_context) + for sub_id in subprocess_stack: + subprocess_act = [x for x in pipeline.spec.activities if x.id == sub_id][0] + hydrate_subprocess_context(subprocess_act) + pipeline = subprocess_act.pipeline + act = [x for x in pipeline.spec.activities if x.id == act_id][0] + return act + + def get_act_inputs(self, act_id, subprocess_stack=None, root_pipeline_data=None, root_pipeline_context=None): + act = self.get_act(act_id, subprocess_stack, root_pipeline_data, root_pipeline_context) + hydrate_node_data(act) + inputs = act.data.inputs + return inputs diff --git 
a/runtime/bamboo-pipeline/pipeline/parser/schemas.py b/runtime/bamboo-pipeline/pipeline/parser/schemas.py new file mode 100644 index 00000000..c806bbac --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/parser/schemas.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +PIPELINE_TREE_PARSER = { + "type": "object", + "properties": { + "data": {"type": "object", "properties": {"inputs": {"type": "object"}, "outputs": {"type": "object"}}}, + "activities": {"type": "object"}, + "end_event": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "incoming": {"type": "string"}, + "name": {"type": "string"}, + "outgoing": {"type": "string"}, + "type": {"type": "string"}, + }, + }, + "flows": {"type": "object"}, + "gateways": {"type": "object"}, + "id": {"type": "string"}, + "line": {"type": "array"}, + "location": {"type": "array"}, + "start_event": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "incoming": {"type": "string"}, + "name": {"type": "string"}, + "outgoing": {"type": "string"}, + "type": {"type": "string"}, + }, + }, + }, +} diff --git a/runtime/bamboo-pipeline/pipeline/parser/utils.py b/runtime/bamboo-pipeline/pipeline/parser/utils.py new file mode 100644 index 00000000..ca25e68a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/parser/utils.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
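`parser/utils.py` centers on `replace_all_id`, which re-keys every node and flow in a pipeline tree with fresh uniqids and reports the old-to-new mapping per element type, so callers can relocate any data that referenced the old ids. The returned structure has this shape (ids shortened for readability):

```python
id_maps = {
    "start_event": {"old_start": "new_start"},
    "end_event": {"old_end": "new_end"},
    "activities": {"old_act": "new_act"},
    "gateways": {"old_gw": "new_gw"},
    "flows": {"old_flow": "new_flow"},
    # filled in by unfold_subprocess / replace_id for nested pipelines
    "subprocess_detail": {},
}
```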
+""" + +import logging + +from pipeline.utils.uniqid import node_uniqid, line_uniqid +from pipeline.core.constants import PE +from pipeline.exceptions import NodeNotExistException + +logger = logging.getLogger("root") + +BRANCH_SELECT_GATEWAYS = {PE.ExclusiveGateway, PE.ConditionalParallelGateway} + + +def recursive_replace_id(pipeline_data): + pipeline_data[PE.id] = node_uniqid() + replace_all_id(pipeline_data) + activities = pipeline_data[PE.activities] + for act_id, act in list(activities.items()): + if act[PE.type] == PE.SubProcess: + recursive_replace_id(act[PE.pipeline]) + act[PE.pipeline][PE.id] = act_id + + +def replace_all_id(pipeline_data): + flows = pipeline_data[PE.flows] + node_map = {} + flow_map = {} + + # step.1 replace nodes id + + # replace events id + start_event_id = node_uniqid() + end_event_id = node_uniqid() + node_map[pipeline_data[PE.start_event][PE.id]] = start_event_id + node_map[pipeline_data[PE.end_event][PE.id]] = end_event_id + + start_event_id_maps = _replace_event_id(flows, pipeline_data[PE.start_event], start_event_id) + end_event_id_maps = _replace_event_id(flows, pipeline_data[PE.end_event], end_event_id) + + # replace activities id + activity_id_maps = {} + activities = pipeline_data[PE.activities] + keys = list(activities.keys()) + for old_id in keys: + substituted_id = node_uniqid() + node_map[old_id] = substituted_id + _replace_activity_id(flows, activities, old_id, substituted_id) + activity_id_maps[old_id] = substituted_id + + # replace gateways id + gateway_id_maps = {} + gateways = pipeline_data[PE.gateways] + keys = list(gateways.keys()) + for old_id in keys: + substituted_id = node_uniqid() + node_map[old_id] = substituted_id + _replace_gateway_id(flows, gateways, old_id, substituted_id) + gateway_id_maps[old_id] = substituted_id + + # step.2 replace flows id + flow_id_maps = {} + keys = list(flows.keys()) + for old_id in keys: + substituted_id = line_uniqid() + flow_map[old_id] = substituted_id + _replace_flow_id(flows, old_id, substituted_id, pipeline_data) + flow_id_maps[old_id] = substituted_id + + # step.3 replace id in data + _replace_id_in_data(pipeline_data, node_map) + + # step.4 try to replace front end data + _replace_front_end_data_id(pipeline_data, node_map, flow_map) + + return { + PE.start_event: start_event_id_maps, + PE.end_event: end_event_id_maps, + PE.activities: activity_id_maps, + PE.gateways: gateway_id_maps, + PE.flows: flow_id_maps, + PE.subprocess_detail: {}, + } + + +def _replace_id_in_data(pipeline_data, node_map): + for _, var_info in list(pipeline_data.get(PE.data, {}).get(PE.inputs, {}).items()): + if PE.source_act in var_info: + if isinstance(var_info[PE.source_act], str): + var_info[PE.source_act] = node_map[var_info[PE.source_act]] + else: + for source_info in var_info[PE.source_act]: + source_info[PE.source_act] = node_map[var_info[PE.source_act]] + + +def _replace_front_end_data_id(pipeline_data, node_map, flow_map): + if "line" in pipeline_data: + for line in pipeline_data["line"]: + line[PE.id] = flow_map[line[PE.id]] + line[PE.source][PE.id] = node_map[line[PE.source][PE.id]] + line[PE.target][PE.id] = node_map[line[PE.target][PE.id]] + if "location" in pipeline_data: + for location in pipeline_data["location"]: + location[PE.id] = node_map[location[PE.id]] + if "constants" in pipeline_data: + for key, constant in list(pipeline_data[PE.constants].items()): + source_info = constant.get("source_info", None) + if source_info: + replaced_constant = {} + for source_step, source_keys in 
list(source_info.items()): + try: + replaced_constant[node_map[source_step]] = source_keys + except KeyError as e: + message = "replace pipeline template id error: %s" % e + logger.exception(message) + raise NodeNotExistException(message) + constant["source_info"] = replaced_constant + + +def _replace_flow_id(flows, flow_id, substituted_id, pipeline_data): + flow = flows[flow_id] + flow[PE.id] = substituted_id + + _replace_flow_in_node(flow[PE.source], pipeline_data, substituted_id, flow_id, PE.outgoing) + _replace_flow_in_node(flow[PE.target], pipeline_data, substituted_id, flow_id, PE.incoming) + + flows.pop(flow_id) + flows[substituted_id] = flow + + +def _replace_flow_in_node(node_id, pipeline_data, substituted_id, flow_id, field): + if node_id in pipeline_data[PE.activities]: + node = pipeline_data[PE.activities][node_id] + elif node_id in pipeline_data[PE.gateways]: + node = pipeline_data[PE.gateways][node_id] + if node[PE.type] in BRANCH_SELECT_GATEWAYS and field == PE.outgoing: + _replace_flow_in_exclusive_gateway_conditions(node, substituted_id, flow_id) + elif node_id == pipeline_data[PE.start_event][PE.id]: + node = pipeline_data[PE.start_event] + elif node_id == pipeline_data[PE.end_event][PE.id]: + node = pipeline_data[PE.end_event] + sequence = node[field] + if isinstance(sequence, list): + i = sequence.index(flow_id) + sequence.pop(i) + sequence.insert(i, substituted_id) + else: + node[field] = substituted_id + + +def _replace_flow_in_exclusive_gateway_conditions(gateway, substituted_id, flow_id): + conditions = gateway[PE.conditions] + conditions[substituted_id] = conditions[flow_id] + conditions.pop(flow_id) + + +def _replace_gateway_id(flows, gateways, gateway_id, substituted_id): + try: + gateway = gateways[gateway_id] + gateway[PE.id] = substituted_id + + if gateway[PE.type] == PE.ConvergeGateway: + flows[gateway[PE.outgoing]][PE.source] = substituted_id + for flow_id in gateway[PE.incoming]: + flows[flow_id][PE.target] = substituted_id + # replace converge_gateway_id + for g_id, gw in list(gateways.items()): + if PE.converge_gateway_id in gw and gw[PE.converge_gateway_id] == gateway_id: + gw[PE.converge_gateway_id] = substituted_id + else: + incoming = gateway[PE.incoming] + + if isinstance(incoming, list): + for flow_id in incoming: + flows[flow_id][PE.target] = substituted_id + else: + flows[gateway[PE.incoming]][PE.target] = substituted_id + + for flow_id in gateway[PE.outgoing]: + flows[flow_id][PE.source] = substituted_id + + gateways.pop(gateway_id) + gateways[substituted_id] = gateway + except KeyError as e: + message = "replace gateway id error: %s" % e + logger.exception(message) + raise NodeNotExistException(message) + + +def _replace_activity_id(flows, activities, act_id, substituted_id): + try: + activity = activities[act_id] + activity[PE.id] = substituted_id + + incoming = activity[PE.incoming] + + if isinstance(incoming, list): + for s in incoming: + flows[s][PE.target] = substituted_id + else: + flows[activity[PE.incoming]][PE.target] = substituted_id + + flows[activity[PE.outgoing]][PE.source] = substituted_id + + activities.pop(act_id) + activities[substituted_id] = activity + except KeyError as e: + message = "replace activity id error: %s" % e + logger.exception(message) + raise NodeNotExistException(message) + + +def _replace_event_id(flows, event, substituted_id): + replace_maps = {} + try: + replace_maps[event[PE.id]] = substituted_id + event[PE.id] = substituted_id + if event[PE.incoming]: + if isinstance(event[PE.incoming], list): + for 
incoming in event[PE.incoming]: + flows[incoming][PE.target] = substituted_id + else: + flows[event[PE.incoming]][PE.target] = substituted_id + else: + flows[event[PE.outgoing]][PE.source] = substituted_id + except KeyError as e: + message = "replace event id error: %s" % e + logger.exception(message) + raise NodeNotExistException(message) + + return replace_maps diff --git a/runtime/bamboo-pipeline/pipeline/service/__init__.py b/runtime/bamboo-pipeline/pipeline/service/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/service/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/__init__.py b/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/adapter_api.py b/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/adapter_api.py new file mode 100644 index 00000000..96c726f4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/service/pipeline_engine_adapter/adapter_api.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
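Reviewer note on the utils module that ends above: `recursive_replace_id` rewrites every node, flow, and subprocess id in place, while `replace_all_id` additionally returns the old-to-new id maps. A usage sketch, assuming a tree produced by the flow builder:

```python
# Usage sketch for pipeline.parser.utils; component code is illustrative.
from pipeline.builder import build_tree
from pipeline.builder.flow import EmptyStartEvent, ServiceActivity, EmptyEndEvent
from pipeline.parser.utils import replace_all_id

start = EmptyStartEvent()
act = ServiceActivity(component_code="example_component")
end = EmptyEndEvent()
start.extend(act).extend(end)
tree = build_tree(start)

id_maps = replace_all_id(tree)
# id_maps[PE.activities] maps each old activity id to its fresh 32-char id;
# events, gateways and flows are remapped too, and references inside data
# inputs and the front-end "line"/"location"/"constants" entries stay consistent.
```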
+""" + +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY +from pipeline.engine import api +from pipeline.log.models import LogEntry + +STATE_MAP = { + "CREATED": "RUNNING", + "READY": "RUNNING", + "RUNNING": "RUNNING", + "BLOCKED": "BLOCKED", + "SUSPENDED": "SUSPENDED", + "FINISHED": "FINISHED", + "FAILED": "FAILED", + "REVOKED": "REVOKED", +} + + +def run_pipeline(pipeline_instance, instance_id=None, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): + return api.start_pipeline(pipeline_instance, check_workers=check_workers, priority=priority, queue=queue) + + +def pause_pipeline(pipeline_id): + return api.pause_pipeline(pipeline_id) + + +def revoke_pipeline(pipeline_id): + return api.revoke_pipeline(pipeline_id) + + +def resume_pipeline(pipeline_id): + return api.resume_pipeline(pipeline_id) + + +def pause_activity(act_id): + return api.pause_node_appointment(act_id) + + +def resume_activity(act_id): + return api.resume_node_appointment(act_id) + + +def retry_activity(act_id, inputs=None): + return api.retry_node(act_id, inputs=inputs) + + +def skip_activity(act_id): + return api.skip_node(act_id) + + +def pause_subprocess(subprocess_id): + return api.pause_subprocess(subprocess_id) + + +def skip_exclusive_gateway(gateway_id, flow_id): + return api.skip_exclusive_gateway(gateway_id, flow_id) + + +def forced_fail(node_id, ex_data=""): + return api.forced_fail(node_id, ex_data=ex_data) + + +def get_inputs(act_id): + return api.get_inputs(act_id) + + +def get_outputs(act_id): + return api.get_outputs(act_id) + + +def get_activity_histories(act_id): + histories = api.get_activity_histories(act_id) + for item in histories: + item["started_time"] = _better_time_or_none(item["started_time"]) + item["finished_time"] = _better_time_or_none(item.pop("archived_time")) + return histories + + +def callback(act_id, data=None): + return api.activity_callback(act_id, data) + + +def get_state(node_id): + tree = api.get_status_tree(node_id, max_depth=100) + + res = _map(tree) + + # collect all atom + descendants = {} + _collect_descendants(tree, descendants) + res["children"] = descendants + + # return + return res + + +def _get_node_state(tree): + status = [] + + # return state when meet leaf + if not tree.get("children", []): + return STATE_MAP[tree["state"]] + + # iterate children and get child state recursively + for identifier_code, child_tree in list(tree["children"].items()): + status.append(_get_node_state(child_tree)) + + # summary parent state + return STATE_MAP[_get_parent_state_from_children_state(tree["state"], status)] + + +def _get_parent_state_from_children_state(parent_state, children_state_list): + """ + @summary: 根据子任务状态计算父任务状态 + @param parent_state: + @param children_state_list: + @return: + """ + children_state_set = set(children_state_list) + if parent_state == "BLOCKED": + if "RUNNING" in children_state_set: + parent_state = "RUNNING" + if "FAILED" in children_state_set: + parent_state = "FAILED" + return parent_state + + +def _collect_descendants(tree, descendants): + # iterate children for tree + for identifier_code, child_tree in list(tree["children"].items()): + child_status = _map(child_tree) + descendants[identifier_code] = child_status + + # collect children + if child_tree["children"]: + _collect_descendants(child_tree, descendants) + + +def _better_time_or_none(time): + return time.strftime("%Y-%m-%d %H:%M:%S") if time else time + + +def _map(tree): + tree.setdefault("children", {}) + return { + "id": tree["id"], + "state": _get_node_state(tree), + 
"start_time": _better_time_or_none(tree["started_time"]), + "finish_time": _better_time_or_none(tree["archived_time"]), + "loop": tree["loop"], + "retry": tree["retry"], + "skip": tree["skip"], + } + + +def get_plain_log_for_node(node_id, history_id): + return LogEntry.objects.plain_log_for_node(node_id=node_id, history_id=history_id) diff --git a/runtime/bamboo-pipeline/pipeline/service/task_service.py b/runtime/bamboo-pipeline/pipeline/service/task_service.py new file mode 100644 index 00000000..c469335c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/service/task_service.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import importlib + +from pipeline.conf import settings +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY + +adapter_api = importlib.import_module(settings.PIPELINE_ENGINE_ADAPTER_API) + + +def run_pipeline(pipeline, instance_id=None, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): + return adapter_api.run_pipeline(pipeline, instance_id, check_workers=check_workers, priority=priority, queue=queue) + + +def pause_pipeline(pipeline_id): + return adapter_api.pause_pipeline(pipeline_id) + + +def revoke_pipeline(pipeline_id): + return adapter_api.revoke_pipeline(pipeline_id) + + +def resume_pipeline(pipeline_id): + return adapter_api.resume_pipeline(pipeline_id) + + +def pause_activity(act_id): + return adapter_api.pause_activity(act_id) + + +def resume_activity(act_id): + return adapter_api.resume_activity(act_id) + + +def retry_activity(act_id, inputs=None): + return adapter_api.retry_activity(act_id, inputs=inputs) + + +def skip_activity(act_id): + return adapter_api.skip_activity(act_id) + + +def skip_exclusive_gateway(gateway_id, flow_id): + return adapter_api.skip_exclusive_gateway(gateway_id, flow_id) + + +def forced_fail(act_id, ex_data=""): + return adapter_api.forced_fail(act_id, ex_data) + + +def get_state(node_id): + return adapter_api.get_state(node_id) + + +def get_topo_tree(pipeline_id): + return adapter_api.get_topo_tree(pipeline_id) + + +def get_inputs(act_id): + return adapter_api.get_inputs(act_id) + + +def get_outputs(act_id): + return adapter_api.get_outputs(act_id) + + +def get_activity_histories(act_id): + return adapter_api.get_activity_histories(act_id) + + +def callback(act_id, data=None): + return adapter_api.callback(act_id, data) + + +def get_plain_log_for_node(node_id, history_id=-1): + return adapter_api.get_plain_log_for_node(node_id, history_id) diff --git a/runtime/bamboo-pipeline/pipeline/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/signals/__init__.py new file mode 100644 index 00000000..503402ce --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/signals/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 
(BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.dispatch import Signal + +post_pipeline_finish = Signal(providing_args=["instance_id"]) +post_pipeline_revoke = Signal(providing_args=["instance_id"]) diff --git a/runtime/bamboo-pipeline/pipeline/signals/handlers.py b/runtime/bamboo-pipeline/pipeline/signals/handlers.py new file mode 100644 index 00000000..46e54a15 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/signals/handlers.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db import transaction +from django.db.models.signals import post_save, pre_save +from django.dispatch import receiver + +from pipeline.core.constants import PE +from pipeline.core.pipeline import Pipeline +from pipeline.engine.signals import pipeline_end, pipeline_revoke +from pipeline.models import ( + PipelineInstance, + PipelineTemplate, + TemplateCurrentVersion, + TemplateRelationship, + TemplateVersion, +) + + +@receiver(pre_save, sender=PipelineTemplate) +def pipeline_template_pre_save_handler(sender, instance, **kwargs): + template = instance + + if template.is_deleted: + return + + template.set_has_subprocess_bit() + + +@receiver(post_save, sender=PipelineTemplate) +def pipeline_template_post_save_handler(sender, instance, created, **kwargs): + template = instance + + if template.is_deleted: + TemplateRelationship.objects.filter(ancestor_template_id=template.template_id).delete() + return + + with transaction.atomic(): + TemplateRelationship.objects.filter(ancestor_template_id=template.template_id).delete() + acts = list(template.data[PE.activities].values()) + subprocess_nodes = [act for act in acts if act["type"] == PE.SubProcess] + rs = [] + for sp in subprocess_nodes: + version = sp.get("version") or PipelineTemplate.objects.get(template_id=sp["template_id"]).version + always_use_latest = sp.get("always_use_latest", False) + rs.append( + TemplateRelationship( + ancestor_template_id=template.template_id, + descendant_template_id=sp["template_id"], + subprocess_node_id=sp["id"], + version=version, + always_use_latest=always_use_latest, + ) + ) + if rs: + TemplateRelationship.objects.bulk_create(rs) + TemplateVersion.objects.track(template) + TemplateCurrentVersion.objects.update_current_version(template) + + +@receiver(pipeline_end, sender=Pipeline) +def pipeline_end_handler(sender, root_pipeline_id, **kwargs): + try: + PipelineInstance.objects.set_finished(root_pipeline_id) + except PipelineInstance.DoesNotExist: # task which do not belong to any instance + pass + + +@receiver(pipeline_revoke, sender=Pipeline) +def pipeline_revoke_handler(sender, root_pipeline_id, **kwargs): + try: + PipelineInstance.objects.set_revoked(root_pipeline_id) + except PipelineInstance.DoesNotExist: # task which do not belong to any instance + pass diff --git a/runtime/bamboo-pipeline/pipeline/templates/__init__.py b/runtime/bamboo-pipeline/pipeline/templates/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/templates/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/__init__.py b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/js_file.py b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/js_file.py new file mode 100644 index 00000000..b3490b88 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/js_file.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +TEMPLATE = """ +/** +* Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +* Edition) available. +* Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* http://opensource.org/licenses/MIT +* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +* specific language governing permissions and limitations under the License. +*/ +""" diff --git a/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/plugins.py b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/plugins.py new file mode 100644 index 00000000..2b9e84c9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/plugins.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +TEMPLATE = """ +# -*- coding: utf-8 -*- +\"\"\" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +\"\"\" + +from pipeline.core.flow.activity import Service +from pipeline.component_framework.component import Component +""" diff --git a/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/py_file.py b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/py_file.py new file mode 100644 index 00000000..bb7f2758 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/templates/create_plugins_app/py_file.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +TEMPLATE = """ +# -*- coding: utf-8 -*- +\"\"\" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+\"\"\" + +import logging + +from pipeline.conf import settings +from pipeline.core.flow.activity import Service +from pipeline.component_framework.component import Component + +logger = logging.getLogger('celery') +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/builder/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_base.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_base.py new file mode 100644 index 00000000..66924fe1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_base.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow.base import Element + + +class TestElement(TestCase): + def test_init(self): + e1 = Element() + self.assertEqual(len(e1.id), 32) + self.assertIsNone(e1.name) + self.assertListEqual(e1.outgoing, []) + + e2 = Element(id="id", name="name", outgoing=[1]) + self.assertEqual(e2.id, "id") + self.assertEqual(e2.name, "name") + self.assertEqual(e2.outgoing, [1]) + + def test_extend(self): + e1 = Element() + e2 = Element() + e3 = Element() + + ret = e1.extend(e2).extend(e3) + self.assertEqual(ret, e3) + self.assertEqual(e1.outgoing, [e2]) + self.assertEqual(e2.outgoing, [e3]) + + def test_connect(self): + e1 = Element() + e2 = Element() + + ret = e1.connect(e2) + self.assertEqual(ret, e1) + self.assertEqual(ret.outgoing, [e2]) + + e3 = Element() + e4 = Element() + e5 = Element() + e6 = Element() + + ret = e3.connect(e4, e5, e6) + self.assertEqual(ret, e3) + self.assertEqual(e3.outgoing, [e4, e5, e6]) + + def test_to(self): + e1 = Element() + e2 = Element() + + self.assertEqual(e1.to(e1), e1) + self.assertEqual(e1.to(e2), e2) + self.assertEqual(e2.to(e1), e1) + + def test_converge(self): + e1 = Element() + e2 = Element() + e3 = Element() + e4 = Element() + e5 = Element() + + e1.connect(e2, e3, e4) + ret = e1.converge(e5) + self.assertEqual(ret, e5) + self.assertEqual(e2.outgoing, [e5]) + self.assertEqual(e3.outgoing, [e5]) + self.assertEqual(e4.outgoing, [e5]) + + e6 = Element() + e7 = Element() + e8 = Element() + e9 = Element() + e10 = Element() + + ret = e6.extend(e7).extend(e8).to(e6).extend(e9).to(e6).converge(e10) + + self.assertEqual(ret, e10) + self.assertEqual(e6.outgoing, [e7, e9]) + self.assertEqual(e7.outgoing, [e8]) + self.assertEqual(e8.outgoing, [e10]) + self.assertEqual(e9.outgoing, [e10]) + + e11 = Element() + self.assertEqual(e11.tail(), e11) + + def test_type(self): + e1 = Element() + self.assertRaises(NotImplementedError, e1.type) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_conditional_parallel.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_conditional_parallel.py new file mode 100644 index 00000000..53eec913 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_conditional_parallel.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. 
All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow.gateway import ConditionalParallelGateway +from pipeline.core.constants import PE + + +class ConditionalParallelGatewayTestCase(TestCase): + def test_type(self): + e = ConditionalParallelGateway() + self.assertEqual(e.type(), PE.ConditionalParallelGateway) + + def test_init(self): + e = ConditionalParallelGateway() + self.assertEqual(len(e.id), 32) + self.assertIsNone(e.name) + self.assertEqual(e.outgoing, []) + self.assertEqual(e.conditions, {}) + + e = ConditionalParallelGateway(id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123"}) + self.assertEqual(e.id, "123") + self.assertEqual(e.name, "test_eg") + self.assertEqual(e.outgoing, [1, 2, 3]) + self.assertEqual(e.conditions, {0: "123"}) + + def test_add_condition(self): + e = ConditionalParallelGateway(id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123"}) + e.add_condition(1, "456") + self.assertEqual(e.conditions, {0: "123", 1: "456"}) + + def test_link_conditions_with(self): + e = ConditionalParallelGateway( + id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123", 1: "456", 2: "789"} + ) + + outgoing = ["abc", "def", "ghi"] + conditions = e.link_conditions_with(outgoing) + self.assertEqual( + conditions, {"abc": {"evaluate": "123"}, "def": {"evaluate": "456"}, "ghi": {"evaluate": "789"}} + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_converge_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_converge_gateway.py new file mode 100644 index 00000000..ddfe757f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_converge_gateway.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.builder.flow import ConvergeGateway +from pipeline.core.constants import PE + + +class ConvergeGatewayTestCase(TestCase): + def test_type(self): + e = ConvergeGateway() + self.assertEqual(e.type(), PE.ConvergeGateway) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data.py new file mode 100644 index 00000000..a3822def --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import Data, Var + + +class DataTestCase(TestCase): + def test_init(self): + data = Data() + self.assertEqual(data.inputs, {}) + self.assertEqual(data.outputs, []) + + def test_to_dict(self): + data = Data() + + data.inputs["${constant_1}"] = Var(type=Var.PLAIN, value="value_1") + data.inputs["${constant_2}"] = {"type": "plain", "value": "value_2"} + + data.outputs.append("${constant_1}") + + d = data.to_dict() + + self.assertEqual( + d, + { + "inputs": { + "${constant_1}": {"type": "plain", "value": "value_1"}, + "${constant_2}": {"type": "plain", "value": "value_2"}, + }, + "outputs": ["${constant_1}"], + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data_input.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data_input.py new file mode 100644 index 00000000..2ce97889 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_data_input.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.builder.flow import DataInput + + +class DataInputTestCase(TestCase): + def test_init(self): + input = DataInput(type=DataInput.PLAIN, value="val") + self.assertEqual(input.type, DataInput.PLAIN) + self.assertEqual(input.value, "val") + self.assertEqual(input.custom_type, None) + + def test_to_dict(self): + input = DataInput(type=DataInput.PLAIN, value="val", custom_type="source_tag") + d = input.to_dict() + self.assertEqual(d, {"type": "plain", "value": "val", "is_param": True}) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_end_event.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_end_event.py new file mode 100644 index 00000000..02ad58d9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_end_event.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import EmptyEndEvent +from pipeline.core.constants import PE + + +class EmptyEndEventTestCase(TestCase): + def test_type(self): + e = EmptyEndEvent() + self.assertEqual(e.type(), PE.EmptyEndEvent) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_start_event.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_start_event.py new file mode 100644 index 00000000..ab870c70 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_empty_start_event.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.builder.flow import EmptyStartEvent +from pipeline.core.constants import PE + + +class EmptyStartEventTestCase(TestCase): + def test_type(self): + e = EmptyStartEvent() + self.assertEqual(e.type(), PE.EmptyStartEvent) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_exclusive_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_exclusive_gateway.py new file mode 100644 index 00000000..ec19d0ae --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_exclusive_gateway.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import ExclusiveGateway +from pipeline.core.constants import PE + + +class ExclusiveGatewayTestCase(TestCase): + def test_type(self): + e = ExclusiveGateway() + self.assertEqual(e.type(), PE.ExclusiveGateway) + + def test_init(self): + e = ExclusiveGateway() + self.assertEqual(len(e.id), 32) + self.assertIsNone(e.name) + self.assertEqual(e.outgoing, []) + self.assertEqual(e.conditions, {}) + + e = ExclusiveGateway(id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123"}) + self.assertEqual(e.id, "123") + self.assertEqual(e.name, "test_eg") + self.assertEqual(e.outgoing, [1, 2, 3]) + self.assertEqual(e.conditions, {0: "123"}) + + def test_add_condition(self): + e = ExclusiveGateway(id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123"}) + e.add_condition(1, "456") + self.assertEqual(e.conditions, {0: "123", 1: "456"}) + + def test_link_conditions_with(self): + e = ExclusiveGateway(id="123", name="test_eg", outgoing=[1, 2, 3], conditions={0: "123", 1: "456", 2: "789"}) + + outgoing = ["abc", "def", "ghi"] + conditions = e.link_conditions_with(outgoing) + self.assertEqual( + conditions, {"abc": {"evaluate": "123"}, "def": {"evaluate": "456"}, "ghi": {"evaluate": "789"}} + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_node_output.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_node_output.py new file mode 100644 index 00000000..1311eccf --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_node_output.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import NodeOutput + + +class NodeOutputTestCase(TestCase): + def test_init(self): + output = NodeOutput(type=NodeOutput.PLAIN, value="val", source_act="1", source_key="2") + self.assertEqual(output.type, NodeOutput.PLAIN) + self.assertEqual(output.value, None) + self.assertEqual(output.source_act, "1") + self.assertEqual(output.source_key, "2") + + def test_to_dict(self): + output = NodeOutput(type=NodeOutput.PLAIN, value="val", source_act="1", source_key="2") + d = output.to_dict() + self.assertTrue(d, {"type": "plain", "value": "val", "source_act": "1", "source_key": "2"}) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_parallel_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_parallel_gateway.py new file mode 100644 index 00000000..48eb03e0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_parallel_gateway.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import ParallelGateway +from pipeline.core.constants import PE + + +class ParallelGatewayTestCase(TestCase): + def test_type(self): + e = ParallelGateway() + self.assertEqual(e.type(), PE.ParallelGateway) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_params.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_params.py new file mode 100644 index 00000000..fd154d4d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_params.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.builder.flow import Params, Var + + +class DataTestCase(TestCase): + def test_init(self): + params = Params() + self.assertEqual(params.params, {}) + + def test_to_dict(self): + params = Params() + + params.params["${constant_1}"] = Var(type=Var.PLAIN, value="value_1") + params.params["${constant_2}"] = {"type": "plain", "value": "value_2"} + + d = params.to_dict() + + self.assertEqual( + d, + { + "${constant_1}": {"type": "plain", "value": "value_1"}, + "${constant_2}": {"type": "plain", "value": "value_2"}, + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_rewritable_output.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_rewritable_output.py new file mode 100644 index 00000000..4b261fd7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_rewritable_output.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import RewritableNodeOutput + + +class RewritableNodeOutputTestCase(TestCase): + def test_init(self): + output = RewritableNodeOutput( + type=RewritableNodeOutput.PLAIN, value="val", source_act=[{"source_act": "act", "source_key": "key"}] + ) + self.assertEqual(output.type, RewritableNodeOutput.PLAIN) + self.assertEqual(output.value, None) + self.assertEqual(output.source_act, [{"source_act": "act", "source_key": "key"}]) + + def test_to_dict(self): + output = RewritableNodeOutput( + type=RewritableNodeOutput.PLAIN, + value="val", + source_act=[{"source_act": "act", "source_key": "key"}, {"source_act": "act2", "source_key": "key2"}], + ) + d = output.to_dict() + self.assertTrue( + d, + { + "type": "plain", + "value": "val", + "source_act": [ + {"source_act": "act", "source_key": "key"}, + {"source_act": "act2", "source_key": "key2"}, + ], + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_service_activity.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_service_activity.py new file mode 100644 index 00000000..a2dd7509 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_service_activity.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import ServiceActivity +from pipeline.builder.flow.data import Var +from pipeline.core.constants import PE + + +class ServiceActivityTestCase(TestCase): + def test_init(self): + act = ServiceActivity() + self.assertIsNotNone(act.component) + self.assertIsNone(act.component.code) + self.assertEqual(act.component.inputs, {}) + + act = ServiceActivity(component_code="test") + self.assertEqual(act.component.code, "test") + + def test_type(self): + act = ServiceActivity() + self.assertEqual(act.type(), PE.ServiceActivity) + + def test_component_dict(self): + act = ServiceActivity() + act.component.code = "http" + act.component.inputs.parent_data = Var(type=Var.SPLICE, value="${parent_data}") + act.component.inputs.val = Var(type=Var.PLAIN, value="${val}") + act.component.inputs.lazy_val = Var(type=Var.LAZY, value="${val}", custom_type="test_tag") + + cd = act.component_dict() + self.assertEqual( + cd, + { + "code": "http", + "inputs": { + "parent_data": {"type": "splice", "value": "${parent_data}"}, + "val": {"type": "plain", "value": "${val}"}, + "lazy_val": {"type": "lazy", "value": "${val}", "custom_type": "test_tag"}, + }, + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_subprocess.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_subprocess.py new file mode 100644 index 00000000..41764320 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_subprocess.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import SubProcess +from pipeline.core.constants import PE + + +class SubProcessTestCase(TestCase): + def test_init(self): + subproc = SubProcess("template_id") + self.assertEqual(subproc.start, "template_id") + self.assertEqual(subproc.params, {}) + self.assertIsNone(subproc.data) + + subproc = SubProcess(start="template_id", data={"data_key": "data_val"}, params={"1": "2"}) + self.assertEqual(subproc.start, "template_id") + self.assertEqual(subproc.data, {"data_key": "data_val"}) + self.assertEqual(subproc.params, {"1": "2"}) + + def test_type(self): + subproc = SubProcess("template_id") + self.assertEqual(subproc.type(), PE.SubProcess) diff --git a/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_var.py b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_var.py new file mode 100644 index 00000000..680e4b1f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/builder/flow/test_var.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
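Reviewer note: `test_component_dict` above exercises the three `Var` flavors a component input can take. A compact sketch, lifted directly from the assertions (the `http` code and `test_tag` source tag are the test's own illustrative values):

```python
# The three Var flavors asserted in test_component_dict above.
from pipeline.builder.flow import ServiceActivity
from pipeline.builder.flow.data import Var

act = ServiceActivity(component_code="http")
act.component.inputs.parent_data = Var(type=Var.SPLICE, value="${parent_data}")  # rendered from context
act.component.inputs.val = Var(type=Var.PLAIN, value="${val}")                   # passed through literally
act.component.inputs.lazy_val = Var(type=Var.LAZY, value="${val}", custom_type="test_tag")  # resolved by a LazyVariable
```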
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.builder.flow import Var + + +class VarTestCase(TestCase): + def test_init(self): + var = Var(type="test_type", value="test_value", custom_type="source_tag") + self.assertEqual(var.type, "test_type") + self.assertEqual(var.value, "test_value") + self.assertEqual(var.custom_type, "source_tag") + + def test_to_dict(self): + splice_var = Var(type=Var.SPLICE, value="val", custom_type="source_tag") + plain_var = Var(type=Var.PLAIN, value="val", custom_type="source_tag") + lazy_var = Var(type=Var.LAZY, value="val", custom_type="source_tag") + + self.assertEqual(splice_var.to_dict(), {"type": Var.SPLICE, "value": "val"}) + self.assertEqual(plain_var.to_dict(), {"type": Var.PLAIN, "value": "val"}) + self.assertEqual(lazy_var.to_dict(), {"type": Var.LAZY, "value": "val", "custom_type": "source_tag"}) diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base.py new file mode 100644 index 00000000..c3fb3c7c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.component_framework.component import Component +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.core.flow.activity import Service + + +class TestBase(TestCase): + def tearDown(self): + ComponentModel.objects.all().delete() + ComponentLibrary.components = {} + + def test_no_name_component(self): + class NoNameComponentService(Service): + def execute(self, data, parent_data): + pass + + try: + + class NoNameComponent(Component): + bound_service = NoNameComponentService + code = "no_name_component" + form = "form_path" + + def outputs_format(cls): + return {} + + def clean_execute_data(self, context): + return {} + + except ValueError as e: + self.assertNotEqual(str(e).find("name"), -1) + + def test_no_code_component(self): + class NoCodeComponentService(Service): + def execute(self, data, parent_data): + pass + + try: + + class NoCodeComponent(Component): + bound_service = NoCodeComponentService + name = "no code component" + form = "form_path" + + def outputs_format(cls): + return {} + + def clean_execute_data(self, context): + return {} + + except ValueError as e: + self.assertNotEqual(str(e).find("code"), -1) + + def test_no_form_component(self): + class NoFormComponentService(Service): + def execute(self, data, parent_data): + pass + + try: + + class NoCodeComponent(Component): + bound_service = NoFormComponentService + name = "no form component" + code = "no_form_component" + + def outputs_format(cls): + return {} + + def clean_execute_data(self, context): + return {} + + except ValueError as e: + self.assertNotEqual(str(e).find("form"), -1) + + def test_no_service_component(self): + try: + + class NoServiceComponent(Component): + name = "no service component" + code = "no_service_component" + form = "form_path" + + def outputs_format(cls): + return {} + + def clean_execute_data(self, context): + return {} + + except ValueError as e: + self.assertNotEqual(str(e).find("service"), -1) + + def test_wrong_class_service_component(self): + try: + + class WrongClassComponent(Component): + name = "wrong class component" + code = "wrong_class_component" + form = "form_path" + bound_service = int + + def outputs_format(cls): + return {} + + def clean_execute_data(self, context): + return {} + + except ValueError as e: + self.assertNotEqual(str(e).find("service"), -1) diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base_ignore_component.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base_ignore_component.py new file mode 100644 index 00000000..c0adbbd1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_base_ignore_component.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.component_framework.component import Component +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.core.flow.activity import Service +from pipeline.exceptions import ComponentNotExistException + +__register_ignore__ = True + + +class TestBaseIgnoreComponent(TestCase): + def tearDown(self): + ComponentModel.objects.all().delete() + ComponentLibrary.components = {} + + def test_ignore_component(self): + class IgnoreService(Service): + def execute(self, data, parent_data): + pass + + class IgnoreComponent(Component): + name = "ignore_service" + bound_service = IgnoreService + code = "ignore_component" + form = "form path" + + def outputs_format(self): + return {"result": bool, "message": str} + + def clean_execute_data(self, context): + return {} + + self.assertRaises(ComponentNotExistException, ComponentLibrary.get_component_class, "ignore_component") diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_component.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_component.py new file mode 100644 index 00000000..fb2c9872 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_component.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.component_framework.component import Component +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.core.data.base import DataObject +from pipeline.core.data.var import PlainVariable +from pipeline.core.flow.activity import Service +from pipeline.exceptions import ComponentDataLackException + + +class TestComponent(TestCase): + def setUp(self): + class CCUpdateHostModuleService(Service): + def execute(self, data, parent_data): + pass + + def outputs(self): + return [ + self.OutputItem(name="key_1", key="key_1", type="int"), + self.OutputItem(name="key_2", key="key_2", type="str"), + ] + + def inputs(self): + return [ + self.InputItem(name="key_3", key="key_3", type="int", required=True), + self.InputItem(name="key_4", key="key_4", type="int", required=False), + ] + + class CCUpdateHostModuleComponent(Component): + name = "修改主机所属模块" + bound_service = CCUpdateHostModuleService + code = "cc_update_module" + form = "form path" + + class CCUpdateHostModuleComponentEmbeddedForm(Component): + name = "修改主机所属模块" + bound_service = CCUpdateHostModuleService + code = "cc_update_module_embedded_form" + embedded_form = True + form = "form path" + + self.service = CCUpdateHostModuleService + self.component = CCUpdateHostModuleComponent + self.component_embedded_form = CCUpdateHostModuleComponentEmbeddedForm + + def tearDown(self): + ComponentModel.objects.all().delete() + ComponentLibrary.components = {} + + def test_init(self): + self.component({}) + + def test_outputs_format(self): + outputs_format = self.component({}).outputs_format() + self.assertEqual( + outputs_format, + [ + {"name": "key_1", "key": "key_1", "type": "int", "schema": {}}, + {"name": "key_2", "key": "key_2", "type": "str", "schema": {}}, + ], + ) + + def test_inputs_format(self): + inputs_format = self.component({}).inputs_format() + self.assertEqual( + inputs_format, + [ + {"name": "key_3", "key": "key_3", "type": "int", "required": True, "schema": {}}, + {"name": "key_4", "key": "key_4", "type": "int", "required": False, "schema": {}}, + ], + ) + + def test_clean_execution_data(self): + data = {"test": "test"} + data_after_clean = self.component(data).clean_execute_data(None) + self.assertEqual(data, data_after_clean) + + def test_service(self): + service = self.component({}).service() + self.assertIsInstance(service, self.service) + + def test_data_for_execution(self): + v1 = PlainVariable(name="key_1", value="value_1") + v2 = PlainVariable(name="key_2", value="value_2") + data = {"key_1": {"value": v1}, "key_2": {"value": v2}} + component = self.component(data) + execution_data = component.data_for_execution({}, {}) + self.assertIsInstance(execution_data, DataObject) + self.assertEqual(execution_data.get_inputs(), {"key_1": v1, "key_2": v2}) + + def test_data_for_execution_lack_of_inputs(self): + PlainVariable(name="key_1", value="value_1") + data = {"key_1": None, "key_2": None} + component = self.component(data) + self.assertRaises(ComponentDataLackException, execution_data=component.data_for_execution, args=[None, None]) + + def test_form_is_embedded(self): + self.assertFalse(self.component.form_is_embedded()) + self.assertTrue(self.component_embedded_form.form_is_embedded()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_constant_pool.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_constant_pool.py new file mode 100644 index 
00000000..05bdd4c2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_constant_pool.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline import exceptions +from pipeline.component_framework.constant import ConstantPool +from pipeline.utils.utils import has_circle + + +class TestConstantPool(TestCase): + def setUp(self): + data = { + "${key_a}": {"value": "haha"}, + "${key_b}": {"value": "str_${key_a}"}, + "${key_c}": {"value": "str_${key_b}"}, + } + resolved_data = { + "${key_a}": {"value": "haha"}, + "${key_b}": {"value": "str_haha"}, + "${key_c}": {"value": "str_str_haha"}, + } + pool = ConstantPool(data) + self.pool = pool + self.resolved_data = resolved_data + + def test_resolve(self): + self.pool.resolve() + self.assertEqual(self.pool.pool, self.resolved_data) + + def test_has_circle(self): + g1 = { + "a": ["b", "d"], + "b": ["f", "c"], + "c": ["a", "d", "e"], + "d": ["e"], + "e": [], + "f": ["c"], + "g": ["f", "h"], + "h": ["f", "j"], + "i": ["h"], + "j": ["i"], + } + + g2 = {"a": [], "b": ["a"], "c": ["b", "a"], "d": []} + + g3 = { + "a": ["b", "d"], + "b": ["f", "c"], + "c": ["d", "e"], + "d": ["e"], + "e": [], + "f": ["c"], + "g": ["f", "h"], + "h": ["f", "j"], + "i": ["h"], + "j": ["i"], + } + + g4 = { + "a": ["b", "d"], + "b": ["f", "c"], + "c": ["d", "e"], + "d": ["e"], + "e": [], + "f": ["c"], + "g": ["f", "h"], + "h": ["f", "j"], + "i": ["h"], + "j": [], + } + + g5 = {"a": ["a"]} + + g6 = {"a": ["b"], "b": ["c"], "c": ["a"]} + + self.assertTrue(has_circle(g1)[0]) + self.assertFalse(has_circle(g2)[0]) + self.assertTrue(has_circle(g3)[0]) + self.assertFalse(has_circle(g4)[0]) + self.assertTrue(has_circle(g5)[0]) + self.assertTrue(has_circle(g6)[0]) + + def test_resolve_value(self): + self.assertEqual(self.pool.resolve_value("value_${key_c}"), "value_str_str_haha") + self.assertEqual(self.pool.resolve_value("value_${key_a}_${key_d}"), "value_haha_${key_d}") + + def test_resolve_constant(self): + self.assertEqual(self.pool.resolve_constant("${key_a}"), "haha") + self.assertRaises(exceptions.ConstantNotExistException, self.pool.resolve_constant, "${key_d}") diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_library.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_library.py new file mode 100644 index 00000000..da613ed9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_library.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.component_framework.component import Component +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.core.flow.activity import Service +from pipeline.exceptions import ComponentNotExistException + + +class TestRegistry(TestCase): + def setUp(self): + class NewTestService(Service): + pass + + class NewTestComponent(Component): + name = "name" + code = "new_test_component" + bound_service = NewTestService + form = "form path" + + self.component = NewTestComponent + + def tearDown(self): + ComponentModel.objects.all().delete() + ComponentLibrary.components = {} + + def test_component(self): + class TestService(Service): + pass + + class TestComponent(Component): + name = "name" + code = "code" + bound_service = TestService + form = "form path" + + self.assertEqual(ComponentLibrary.components["code"][LEGACY_PLUGINS_VERSION], TestComponent) + + def test_get_component_class(self): + class TestService(Service): + pass + + class TestComponent(Component): + name = "name" + code = "code" + bound_service = TestService + form = "form path" + + def clean_execute_data(self, context): + pass + + def outputs_format(self): + pass + + class TestComponent2(Component): + name = "name" + code = "code_2" + bound_service = TestService + form = "form path" + version = "1.0" + + def clean_execute_data(self, context): + pass + + def outputs_format(self): + pass + + self.assertEqual(ComponentLibrary.get_component_class("code"), TestComponent) + self.assertRaises(ComponentNotExistException, ComponentLibrary.get_component_class, "code", "1.0") + self.assertRaises(ComponentNotExistException, ComponentLibrary.get_component_class, "code2") + self.assertEqual(ComponentLibrary.get_component_class("code_2", "1.0"), TestComponent2) + + def test_get_component__raise(self): + self.assertRaises(ComponentNotExistException, ComponentLibrary.get_component, "c_not_exist", {}) + + def test_args_new(self): + component = ComponentLibrary(self.component.code) + self.assertEqual(component, self.component) + + def test_get_component(self): + class TestService(Service): + pass + + class TestComponent(Component): + name = "name" + code = "code" + bound_service = TestService + form = "form path" + + def clean_execute_data(self, context): + pass + + def outputs_format(self): + pass + + self.assertEqual(ComponentLibrary.get_component("code", {}).__class__, TestComponent) + + def test_register_component(self): + component_cls = "component_token" + ComponentLibrary.register_component(component_code="code_1", version="1", component_cls=component_cls) + ComponentLibrary.register_component(component_code="code_1", version="2", component_cls=component_cls) + self.assertEqual( + ComponentLibrary.components, + { + "new_test_component": { + LEGACY_PLUGINS_VERSION: ComponentLibrary.get_component_class("new_test_component") + }, + "code_1": 
{"1": component_cls, "2": component_cls}, + }, + ) + + def test_component_list(self): + class TestService(Service): + pass + + class TestComponent(Component): + name = "name" + code = "code" + bound_service = TestService + form = "form path" + + def clean_execute_data(self, context): + pass + + def outputs_format(self): + pass + + class TestComponent2(Component): + name = "name" + code = "code_2" + bound_service = TestService + form = "form path" + version = "1.0" + + def clean_execute_data(self, context): + pass + + def outputs_format(self): + pass + + expect_list = [] + for _, component_map in ComponentLibrary.components.items(): + expect_list.extend(component_map.values()) + + component_list = ComponentLibrary.component_list() + + self.assertEqual(component_list, expect_list) + self.assertEqual(len(component_list), 3) diff --git a/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_models.py b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_models.py new file mode 100644 index 00000000..992aa70f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/component_framework/test_models.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.component_framework.component import Component +from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION +from pipeline.component_framework.library import ComponentLibrary +from pipeline.component_framework.models import ComponentModel +from pipeline.core.flow.activity import Service + +__group_name__ = "gn" +__group_icon__ = "gi" + + +class TestModels(TestCase): + @classmethod + def setUpClass(cls): + ComponentModel.objects.all().delete() # env clean + + @classmethod + def tearDownClass(cls): + pass + + def setUp(self): + class CCUpdateHostModuleService(Service): + def execute(self, data, parent_data): + pass + + def outputs(self): + return [ + self.OutputItem(name="key_1", key="key_1", type="int"), + self.OutputItem(name="key_2", key="key_2", type="str"), + ] + + def outputs_format(self): + pass + + class CCUpdateHostModuleComponent(Component): + name = "1234" + bound_service = CCUpdateHostModuleService + code = "cc_update_module" + form = "form path" + + class CCUpdateHostModuleComponentV2(Component): + name = "1234" + bound_service = CCUpdateHostModuleService + code = "cc_update_module" + form = "form path" + version = "2.0" + + self.service = CCUpdateHostModuleService + self.component = CCUpdateHostModuleComponent + self.component_v2 = CCUpdateHostModuleComponentV2 + + def tearDown(self): + ComponentModel.objects.all().delete() + ComponentLibrary.components = {} + + def test_unicode(self): + component = ComponentModel.objects.get(code=self.component.code, version=LEGACY_PLUGINS_VERSION) + self.assertEqual(component.name, component.__unicode__()) + + def test_group_name(self): + component = ComponentModel.objects.get(code=self.component.code, version=LEGACY_PLUGINS_VERSION) + self.assertEqual(component.group_name, self.component.group_name) + + def test_version(self): + component = ComponentModel.objects.get(code=self.component.code, version=LEGACY_PLUGINS_VERSION) + component_v2 = ComponentModel.objects.get(code=self.component.code, version="2.0") + self.assertEqual(component.version, LEGACY_PLUGINS_VERSION) + self.assertEqual(component_v2.version, "2.0") + + def test_group_icon(self): + component = ComponentModel.objects.get(code=self.component.code, version=LEGACY_PLUGINS_VERSION) + self.assertEqual(component.group_icon, self.component.group_icon) + + def test_get_component_dict(self): + d = ComponentModel.objects.get_component_dict() + self.assertEqual(d, {"cc_update_module": "gn-1234"}) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/core/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_base.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_base.py new file mode 100644 index 00000000..1a18f484 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_base.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import jsonschema +import ujson as json +from django.test import TestCase + +from pipeline import exceptions +from pipeline.core.data.base import DataObject +from pipeline.core.data.schemas import BASE_PARAM +from pipeline.utils.collections import FancyDict + + +class TestData(TestCase): + def test_data_object(self): + inputs = {"args": "1", "kwargs": {"1": 1, "2": 2}} + + self.assertRaises(exceptions.DataTypeErrorException, DataObject, None) + + data_object = DataObject(inputs) + self.assertIsInstance(data_object, DataObject) + self.assertIsInstance(data_object.inputs, FancyDict) + self.assertIsInstance(data_object.outputs, FancyDict) + + self.assertEqual(data_object.get_inputs(), inputs) + self.assertEqual(data_object.get_outputs(), {}) + + self.assertEqual(data_object.get_one_of_inputs("args"), "1") + self.assertEqual(data_object.inputs.args, "1") + self.assertIsNone(data_object.get_one_of_outputs("args")) + + self.assertRaises(exceptions.DataTypeErrorException, data_object.reset_outputs, None) + self.assertTrue(data_object.reset_outputs({"a": "str"})) + self.assertEqual(data_object.outputs.a, "str") + + data_object.update_outputs({"args": "1", "kwargs": {"1": 1, "2": 2}}) + self.assertEqual(data_object.get_outputs(), {"a": "str", "args": "1", "kwargs": {"1": 1, "2": 2}}) + print(data_object.inputs) + print(data_object.outputs) + self.assertEqual(jsonschema.validate(json.loads(data_object.serializer()), BASE_PARAM), None) + + def test_inputs_copy(self): + inputs = {"args": "1", "kwargs": {"1": 1, "2": 2}} + data_object = DataObject(inputs=inputs) + inputs_copy = data_object.inputs_copy() + self.assertIsInstance(inputs_copy, FancyDict) + self.assertEqual(inputs_copy, inputs) + self.assertFalse(inputs is inputs_copy) + + def test_outputs_copy(self): + outputs = {"args": "1", "kwargs": {"1": 1, "2": 2}} + data_object = DataObject(inputs={}, outputs=outputs) + outputs_copy = data_object.outputs_copy() + self.assertIsInstance(outputs_copy, FancyDict) + self.assertEqual(outputs_copy, outputs) + self.assertFalse(outputs_copy is outputs) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_context.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_context.py new file mode 100644 index 00000000..619d32f1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_context.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from copy import deepcopy +from mock import MagicMock, call + +from django.test import TestCase + +from pipeline import exceptions +from pipeline.core.data import base, context + + +class TestContext(TestCase): + def setUp(self): + act_outputs = { + "act_id_1": {"output_1": "gk_1_1", "output_2": "gk_1_2"}, + "act_id_2": {"output_1": "gk_2_1"}, + "act_id_4": {"output_1": "gk_4_1", "output_2": "gk_4_2"}, + } + self.context = context.Context(act_outputs) + + class Activity(object): + pass + + act_1 = Activity() + act_1.id = "act_id_1" + data_1 = base.DataObject({}) + data_1.set_outputs("output_1", "value_1_1") + data_1.set_outputs("output_2", "value_1_2") + act_1.data = data_1 + self.act_1 = act_1 + + act_2 = Activity() + act_2.id = "act_id_2" + data_2 = base.DataObject({}) + data_2.set_outputs("output_1", "value_2_1") + data_2.set_outputs("output_2", "value_2_2") + data_2.set_outputs("output_3", "value_2_3") + act_2.data = data_2 + self.act_2 = act_2 + + act_3 = Activity() + act_3.id = "act_id_3" + data_3 = base.DataObject({}) + data_3.set_outputs("output_1", "value_3_1") + data_3.set_outputs("output_2", "value_3_2") + data_3.set_outputs("output_3", "value_3_3") + act_3.data = data_3 + self.act_3 = act_3 + + act_4 = Activity() + act_4.id = "act_id_4" + data_4 = base.DataObject({}) + data_4.set_outputs("output_1", "value_4_1") + act_4.data = data_4 + self.act_4 = act_4 + + def test_extract_output(self): + self.assertEqual(self.context.change_keys, set()) + self.context.extract_output(self.act_1) + self.assertEqual(self.context.variables, {"gk_1_1": "value_1_1", "gk_1_2": "value_1_2"}) + self.assertEqual(self.context.change_keys, {"gk_1_1", "gk_1_2"}) + self.context.extract_output(self.act_2) + self.assertEqual(self.context.variables, {"gk_1_1": "value_1_1", "gk_1_2": "value_1_2", "gk_2_1": "value_2_1"}) + self.assertEqual(self.context.change_keys, {"gk_1_1", "gk_1_2", "gk_2_1"}) + self.context.extract_output(self.act_3) + self.assertEqual(self.context.variables, {"gk_1_1": "value_1_1", "gk_1_2": "value_1_2", "gk_2_1": "value_2_1"}) + self.assertEqual(self.context.change_keys, {"gk_1_1", "gk_1_2", "gk_2_1"}) + self.context.extract_output(self.act_4, set_miss=False) + self.assertEqual( + self.context.variables, + {"gk_1_1": "value_1_1", "gk_1_2": "value_1_2", "gk_2_1": "value_2_1", "gk_4_1": "value_4_1"}, + ) + self.assertEqual(self.context.change_keys, {"gk_1_1", "gk_1_2", "gk_2_1", "gk_4_1"}) + + def test_get(self): + self.context.extract_output(self.act_1) + self.assertEqual(self.context.get("gk_1_1"), "value_1_1") + self.assertRaises(exceptions.ReferenceNotExistError, self.context.get, "key_not_exist") + + def test_set_global_var(self): + self.assertEqual(self.context.change_keys, set()) + self.context.set_global_var("key", "test_val") + self.assertEqual(self.context.get("key"), "test_val") + self.assertEqual(self.context.change_keys, {"key"}) + + def test_update_global_var(self): + self.assertEqual(self.context.change_keys, set()) + self.context.set_global_var("key_1", "test_val") + self.context.update_global_var({"key_1": "test_val1", "key_2": "test_val2"}) + self.assertEqual(self.context.get("key_1"), "test_val1") + self.assertEqual(self.context.get("key_2"), "test_val2") + self.assertEqual(self.context.change_keys, {"key_1", "key_2"}) + + def test_mark_as_output(self): + self.context.mark_as_output("key") + self.assertEqual(self.context._output_key, {"key"}) + + def test_output(self): + class MockPipeline(object): + def __init__(self, data): + self.data = data + + pipeline = 
MockPipeline(base.DataObject({})) + self.context.mark_as_output("gk_1_1") + self.context.mark_as_output("gk_1_2") + self.context.extract_output(self.act_1) + self.context.write_output(pipeline) + self.assertEqual(pipeline.data.get_outputs(), {"gk_1_1": "value_1_1", "gk_1_2": "value_1_2"}) + + def test_clear(self): + self.context.set_global_var("key", "test_val") + self.context.clear() + self.assertRaises(exceptions.ReferenceNotExistError, self.context.get, "key") + + def test_duplicate_variables(self): + self.context.set_global_var("k1", None) + self.context.set_global_var("k2", None) + + self.assertRaises(exceptions.InvalidOperationException, self.context.recover_variable) + self.context.duplicate_variables() + self.context.set_global_var("k1", "v1") + self.context.set_global_var("k2", "v2") + self.context.set_global_var("gk_1_1", "v3") + self.context.set_global_var("gk_1_2", "v4") + self.context.set_global_var("gk_2_1", "v5") + + self.context.recover_variable() + self.assertIsNone(self.context.get("k1")) + self.assertIsNone(self.context.get("k2")) + self.assertEqual(self.context.get("gk_1_1"), "v3") + self.assertEqual(self.context.get("gk_1_2"), "v4") + self.assertEqual(self.context.get("gk_2_1"), "v5") + + def test_change_keys(self): + del self.context._change_keys + self.assertFalse(hasattr(self.context, "_change_keys")) + self.context.change_keys + self.assertTrue(hasattr(self.context, "_change_keys")) + self.assertEqual(self.context._change_keys, set()) + self.assertEqual(self.context.change_keys, set()) + + def test_raw_variables(self): + del self.context._raw_variables + self.assertFalse(hasattr(self.context, "_raw_variables")) + self.context.raw_variables + self.assertTrue(hasattr(self.context, "_raw_variables")) + self.assertIsNone(self.context._raw_variables) + self.assertIsNone(self.context.raw_variables) + + def test_clear_change_keys(self): + self.assertEqual(self.context.change_keys, set()) + self.context.update_global_var({"key_1": "test_val1", "key_2": "test_val2"}) + self.assertEqual(self.context.change_keys, {"key_1", "key_2"}) + self.context.clear_change_keys() + self.assertEqual(self.context.change_keys, set()) + + def test_sync_change(self): + child_context = context.Context({}) + self.context.update_global_var({"key_1": "test_val1", "key_2": "test_val2"}) + self.context.clear_change_keys() + child_context.variables = deepcopy(self.context.variables) + child_context.set_global_var("key_1", "new_val_1") + child_context.set_global_var("key_3", "new_val_3") + self.context.sync_change(child_context) + self.assertEqual(self.context.variables, {"key_1": "new_val_1", "key_2": "test_val2", "key_3": "new_val_3"}) + + def test_sync_change_with_splice_vars(self): + from pipeline.core.data.var import SpliceVariable + + child_context = context.Context({}) + self.context.update_global_var( + { + "key_1": "test_val1", + "key_2": "test_val2", + "key_3": "value3", # not splice + "key_4": SpliceVariable("key_4", "val3", self.context), # splice sync success + "key_5": SpliceVariable("key_5", "val5", self.context), # splice parent not none + "key_6": SpliceVariable("key_6", "val6", self.context), # splice child none + } + ) + self.context.variables["key_5"]._value = "old_val5" + self.context.clear_change_keys() + + child_context.variables = deepcopy(self.context.variables) + child_context.set_global_var("key_1", "new_val_1") + child_context.set_global_var("key_0", "new_val_0") + child_context.variables["key_3"] = SpliceVariable("key_3", "val3", child_context) +
child_context.variables["key_4"] = SpliceVariable("key_4", "val4", child_context) + child_context.variables["key_5"] = SpliceVariable("key_5", "val5", child_context) + child_context.variables["key_6"] = SpliceVariable("key_6", "val6", child_context) + child_context.variables["key_4"]._value = "val4" + child_context.variables["key_5"]._value = "val5" + + self.assertIsNone(self.context.variables["key_4"]._value) + self.context.sync_change(child_context) + self.assertEqual(self.context.variables["key_0"], "new_val_0") + self.assertEqual(self.context.variables["key_1"], "new_val_1") + self.assertEqual(self.context.variables["key_2"], "test_val2") + self.assertEqual(self.context.variables["key_3"], "value3") + self.assertEqual(self.context.variables["key_4"]._value, "val4") + self.assertEqual(self.context.variables["key_5"]._value, "old_val5") + self.assertEqual(self.context.variables["key_6"]._value, None) + + def test_write_output__missing_some_keys(self): + test_context = context.Context({}) + test_context._output_key = ["key1", "key2", "key3"] + test_context.variables = {"key1": "val1", "key2": "val2"} + + mock_pipeline = MagicMock() + mock_pipeline.data = MagicMock() + test_context.write_output(mock_pipeline) + mock_pipeline.data.set_outputs.assert_has_calls( + [call("key1", "val1"), call("key2", "val2"), call("key3", "key3")] + ) + + +class TestOutputRef(TestCase): + def setUp(self): + act_outputs = {"act_id_1": {"output_1": "gk_1_1", "output_2": "gk_1_2"}, "act_id_2": {"output_1": "gk_2_1"}} + self.context = context.Context(act_outputs) + + class Activity(object): + pass + + act_1 = Activity() + act_1.id = "act_id_1" + data_1 = base.DataObject({}) + data_1.set_outputs("output_1", "value_1_1") + data_1.set_outputs("output_2", "value_1_2") + act_1.data = data_1 + self.act_1 = act_1 + + def test_value(self): + ref = context.OutputRef("gk_1_1", self.context) + self.context.extract_output(self.act_1) + self.assertEqual(ref.value, "value_1_1") + + def test_deep_copy(self): + ref = context.OutputRef("gk_1_1", self.context) + ref_copy = deepcopy(ref) + self.assertTrue(ref_copy is ref) + self.assertTrue(ref_copy.context is ref.context) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_converter.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_converter.py new file mode 100644 index 00000000..3efa2ac9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_converter.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline import exceptions +from pipeline.core.data.context import Context +from pipeline.core.data.converter import get_variable + + +class TestConverter(TestCase): + def setUp(self): + self.key = "bk_timing" + self.info = {"type": "plain", "value": "1"} + self.context = Context({}) + self.pipeline_data = { + "language": "zh-cn", + "task_id": 63, + "biz_cc_name": "UTC", + "task_name": "20180918165807", + "executor": "username", + "operator": "username", + "biz_cc_id": 81, + } + + def test_get_variable(self): + variable = get_variable(self.key, self.info, self.context, self.pipeline_data) + self.assertEqual(variable.name, "bk_timing") + self.assertEqual(variable.value, "1") + + self.info["type"] = "splice" + variable1 = get_variable(self.key, self.info, self.context, self.pipeline_data) + self.assertEqual(variable1.name, "bk_timing") + self.assertEqual(variable1.value, "1") + self.assertEqual(variable1._refs, {}) + + self.key = "${ip}" + self.info = { + "custom_type": "ip", + "source_tag": "var_ip_picker.ip_picker", + "type": "lazy", + "value": {"var_ip_custom_value": "1.1.1.11.1", "var_ip_method": "custom", "var_ip_tree": ""}, + } + self.context = Context(self.key) + self.pipeline_data = { + "language": "zh-cn", + "task_id": 78, + "biz_cc_name": "UTC", + "task_name": "20180918175615", + "executor": "username", + "operator": "username", + "biz_cc_id": 0, + } + + variable2 = get_variable(self.key, self.info, self.context, self.pipeline_data) + self.assertEqual(variable2.name, "${ip}") + self.assertEqual(variable2.value, self.info["value"]) + self.assertEqual(variable2._refs, {}) + self.assertEqual(variable2.code, "ip") + + self.info["type"] = "exception" + self.assertRaises( + exceptions.DataTypeErrorException, get_variable, self.key, self.info, self.context, self.pipeline_data + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_expression.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_expression.py new file mode 100644 index 00000000..1fb62322 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_expression.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import copy +import datetime + +from django.test import TestCase + +from pipeline.core.data import expression, sandbox +from pipeline.core.data.expression import format_constant_key, deformat_constant_key + + +class TestConstantTemplate(TestCase): + def setUp(self): + pass + + def test_format_constant_key(self): + self.assertEqual(format_constant_key("a"), "${a}") + + def test_deformat_constant_key(self): + self.assertEqual(deformat_constant_key("${a}"), "a") + + def test_get_reference(self): + all_in_cons_template = expression.ConstantTemplate(["${a}", ["${a}", "${a+int(b)}"]]) + self.assertEqual(set(all_in_cons_template.get_reference()), {"a", "b", "int"}) + + comma_exclude_template = expression.ConstantTemplate(['${a["c"]}', ['${"%s" % a}', "${a+int(b)}"]]) + self.assertEqual(set(comma_exclude_template.get_reference()), {"a", "b", "int"}) + + def test_get_templates(self): + cons_tmpl = expression.ConstantTemplate(["${a}", ["${a}", "${a+int(b)}"]]) + self.assertEqual(set(cons_tmpl.get_templates()), {"${a+int(b)}", "${a}"}) + + def test_resolve_data(self): + list_template = expression.ConstantTemplate(["${a}", ["${a}", "${a+int(b)}"]]) + self.assertEqual(list_template.resolve_data({"a": 2, "b": "3"}), [2, [2, "5"]]) + + tuple_template = expression.ConstantTemplate(("${a}", ("${a}", "${a+int(b)}"))) + self.assertEqual(tuple_template.resolve_data({"a": 2, "b": "3"}), (2, (2, "5"))) + + dict_template = expression.ConstantTemplate({"aaaa": {"a": "${a}", "b": "${a+int(b)}"}}) + self.assertEqual(dict_template.resolve_data({"a": 2, "b": "3"}), {"aaaa": {"a": 2, "b": "5"}}) + + def test_get_string_templates(self): + cons_tmpl = expression.ConstantTemplate("") + self.assertEqual(cons_tmpl.get_string_templates("${a}"), ["${a}"]) + + def test_resolve_template_with_curly_braces(self): + cons_tmpl = expression.ConstantTemplate("") + one_template = '${"test_{}".format(a)}' + self.assertEqual(cons_tmpl.resolve_string(one_template, {"a": "1"}), "test_1") + ano_template = '${f"test_{a}"}' + self.assertEqual(cons_tmpl.resolve_template(ano_template, {"a": "2"}), "test_2") + + def test_resolve_string(self): + cons_tmpl = expression.ConstantTemplate("") + one_template = "${a}" + self.assertEqual(cons_tmpl.resolve_string(one_template, {"a": "1"}), "1") + + def test_get_template_reference(self): + cons_tmpl = expression.ConstantTemplate("") + self.assertEqual(cons_tmpl.get_template_reference("${a}"), ["a"]) + + def test_resolve_template(self): + cons_tmpl = expression.ConstantTemplate("") + simple = "${a}" + self.assertEqual(cons_tmpl.resolve_template(simple, {"a": "1"}), "1") + + calculate = "${a+int(b)}" + self.assertEqual(cons_tmpl.resolve_template(calculate, {"a": 2, "b": "3"}), "5") + + split = "${a[0]}" + self.assertEqual(cons_tmpl.resolve_template(split, {"a": [1, 2]}), "1") + + dict_item = '${a["b"]}' + self.assertEqual(cons_tmpl.resolve_template(dict_item, {"a": {"b": 1}}), "1") + + not_exists = "{a}" + self.assertEqual(cons_tmpl.resolve_template(not_exists, {}), not_exists) + + resolve_syntax_error = "${a.b}" + self.assertEqual(cons_tmpl.resolve_template(resolve_syntax_error, {}), resolve_syntax_error) + + template_syntax_error = "${a:b}" + self.assertEqual(cons_tmpl.resolve_template(template_syntax_error, {}), template_syntax_error) + + def test_resolve_template__with_sandbox(self): + + r1 = expression.ConstantTemplate.resolve_template("""${exec(print(''))}""", {}) + self.assertEqual(r1, """${exec(print(''))}""") + + if "datetime" in expression.SANDBOX: + expression.SANDBOX.pop("datetime") + 
r2 = expression.ConstantTemplate.resolve_template("""${datetime.datetime.now().strftime("%Y")}""", {}) + self.assertEqual(r2, """${datetime.datetime.now().strftime("%Y")}""") + + sandbox._shield_words(expression.SANDBOX, ["exec", "compile"]) + sandbox._import_modules(expression.SANDBOX, {"datetime": "datetime"}) + + r1 = expression.ConstantTemplate.resolve_template("""${exec(print(''))}""", {}) + self.assertEqual(r1, """${exec(print(''))}""") + + r2 = expression.ConstantTemplate.resolve_template("""${datetime.datetime.now().strftime("%Y")}""", {}) + year = datetime.datetime.now().strftime("%Y") + self.assertEqual(r2, year) + + # clean + expression.SANDBOX.pop("exec") + expression.SANDBOX.pop("compile") + + def test_resolve(self): + list_template = expression.ConstantTemplate(["${a}", ["${a}", "${a+int(b)}"]]) + self.assertEqual(list_template.resolve_data({"a": 2, "b": "3"}), [2, [2, "5"]]) + + tuple_template = expression.ConstantTemplate(("${a}", ("${a}", "${a+int(b)}"))) + self.assertEqual(tuple_template.resolve_data({"a": 2, "b": "3"}), (2, (2, "5"))) + + dict_template = expression.ConstantTemplate({"aaaa": {"a": "${a}", "b": "${a+int(b)}"}}) + self.assertEqual(dict_template.resolve_data({"a": 2, "b": "3"}), {"aaaa": {"a": 2, "b": "5"}}) + + def test_get_reference_complex(self): + all_in_cons_template = expression.ConstantTemplate(["${a}", ["${a}", "${a+int(b)}"]]) + self.assertEqual(set(all_in_cons_template.get_reference()), set(["a", "b", "int"])) + + comma_exclude_template = expression.ConstantTemplate(['${a["c"]}', ['${"%s" % a}', "${a+int(b)}"]]) + self.assertEqual(set(comma_exclude_template.get_reference()), set(["a", "b", "int"])) + + def test_built_in_functions__without_args(self): + int_template = expression.ConstantTemplate("${int}") + self.assertEqual(int_template.resolve_data({}), "int") + + int_template = expression.ConstantTemplate("${str}") + self.assertEqual(int_template.resolve_data({}), "str") + + def test_built_in_functions__with_args(self): + int_template = expression.ConstantTemplate("${int(111)}") + self.assertEqual(int_template.resolve_data({}), "111") + + int_template = expression.ConstantTemplate("${str('aaa')}") + self.assertEqual(int_template.resolve_data({}), "aaa") + + def test_built_in_functions__cover(self): + int_template = expression.ConstantTemplate("${int}") + self.assertEqual(int_template.resolve_data({"int": "cover"}), "cover") + + def test_template_join(self): + template = expression.ConstantTemplate("a-${1 if t else 2}-${a}") + self.assertEqual(template.resolve_data({"t": False, "a": "c"}), "a-2-c") + template = expression.ConstantTemplate("${'a-%s-c' % 1 if t else 2}") + self.assertEqual(template.resolve_data({"t": True}), "a-1-c") + + def test_mako_attack(self): + sandbox_copy = copy.deepcopy(sandbox.SANDBOX) + shield_words = [ + "ascii", + "bytearray", + "bytes", + "callable", + "chr", + "classmethod", + "compile", + "delattr", + "dir", + "divmod", + "exec", + "eval", + "filter", + "frozenset", + "getattr", + "globals", + "hasattr", + "hash", + "help", + "id", + "input", + "isinstance", + "issubclass", + "iter", + "locals", + "map", + "memoryview", + "next", + "object", + "open", + "print", + "property", + "repr", + "setattr", + "staticmethod", + "super", + "type", + "vars", + "__import__", + ] + sandbox._shield_words(sandbox.SANDBOX, shield_words) + attack_templates = [ + '${"".__class__.__mro__[-1].__subclasses__()[127].__init__.__globals__["system"]("whoami")}', # noqa + '${getattr("", dir(0)[0][0] + dir(0)[0][0] + "class" + dir(0)[0][0]+ 
dir(0)[0][0])}', # noqa + 'a-${__import__("os").system("whoami")}', + "${while True: pass}", + """<% import json %> ${json.codecs.builtins.exec('import os; os.system("whoami")')}""", # noqa + ] + for at in attack_templates: + self.assertEqual(expression.ConstantTemplate(at).resolve_data({}), at) + + sandbox.SANDBOX = sandbox_copy diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_library.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_library.py new file mode 100644 index 00000000..a438000b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_library.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.data import library +from pipeline.core.data.context import Context +from pipeline.core.data.var import LazyVariable + + +class TestLibrary(TestCase): + class VarIpPickerVariable(LazyVariable): + code = "ip" + form = "var.js" + + def get_value(self): + return self.value + + def setUp(self): + self.name = "${ip}" + self.info = { + "source_tag": "var_ip_picker.ip_picker", + "custom_type": "ip", + "type": "lazy", + "value": {"var_ip_custom_value": "1.1.1.11.1", "var_ip_method": "custom", "var_ip_tree": ""}, + } + self.context = Context(self.name) + self.pipeline_data = { + "language": "zh-cn", + "task_id": 78, + "biz_cc_name": "UTC", + "task_name": "20180918175615", + "executor": "username", + "operator": "username", + "biz_cc_id": 0, + } + self.code = "ip" + + def test_get_var_class(self): + cls = library.VariableLibrary + variable_class = cls.get_var_class(self.info["custom_type"]) + self.assertEqual(variable_class.code, self.code) + + def test_get_var(self): + cls = library.VariableLibrary + variable = cls.get_var(self.code, self.name, self.info["value"], self.context, self.pipeline_data) + self.assertEqual(variable.name, self.name) + self.assertEqual(variable.value, self.info["value"]) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_sandbox.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_sandbox.py new file mode 100644 index 00000000..33d1d4cd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_sandbox.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.data import sandbox + + +def _test(*arg, **kwargs): + return "called" + + +class TestSandbox(TestCase): + def test_mock_str_meta(self): + class MockTest(metaclass=sandbox.MockStrMeta): + call = _test + str_return = "_test" + + print(sandbox.SANDBOX["_test"]) + self.assertEqual(sandbox.SANDBOX["_test"], MockTest) + + self.assertEqual(MockTest(), "called") + + def test_shield_words(self): + _sandbox = {} + sandbox._shield_words(_sandbox, ["compile", "exec"]) + self.assertDictEqual(_sandbox, {"compile": None, "exec": None}) + + def test_import_modules(self): + _sandbox = {} + sandbox._import_modules(_sandbox, {"datetime": "datetime", "pipeline.core": "core"}) + self.assertEqual(type(_sandbox["datetime"]).__name__, "module") + self.assertEqual(type(_sandbox["core"]).__name__, "module") diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/data/test_var.py b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_var.py new file mode 100644 index 00000000..677af0f0 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/data/test_var.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.data import base, context, var + + +class TestPlainVariable(TestCase): + def test_get(self): + pv = var.PlainVariable("name", "value") + self.assertEqual(pv.get(), "value") + + +class TestSpliceVariable(TestCase): + def setUp(self): + act_outputs = { + "act_id_1": {"output_1": "${gk_1_1}", "output_2": "${gk_1_2}"}, + "act_id_2": {"output_1": "${gk_2_1}"}, + } + self.context = context.Context(act_outputs) + + class Activity(object): + pass + + act_1 = Activity() + act_1.id = "act_id_1" + data_1 = base.DataObject({}) + data_1.set_outputs("output_1", "value_1_1") + data_1.set_outputs("output_2", "value_1_2") + act_1.data = data_1 + self.act_1 = act_1 + + self.context_1 = context.Context({}) + self.context_1.variables["${grandparent_key}"] = "grandparent_value" + + def test_get(self): + sv = var.SpliceVariable(name="name", value="${gk_1_1}_${gk_1_2}_${key_not_exist}", context=self.context) + self.context.extract_output(self.act_1) + self.assertEqual(sv.get(), "value_1_1_value_1_2_${key_not_exist}") + + def test_object_get(self): + value = { + "key1": ["${gk_1_1}_test1", "${gk_1_2}_test2"], + "key2": {"key2_1": "${gk_1_1}_${gk_1_2}_${key_not_exist}"}, + } + sv = var.SpliceVariable(name="name", value=value, context=self.context) + self.context.extract_output(self.act_1) + test_value = { + "key1": ["value_1_1_test1", "value_1_2_test2"], + "key2": {"key2_1": "value_1_1_value_1_2_${key_not_exist}"}, + } + self.assertEqual(sv.get(), test_value) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_array_item_schema.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_array_item_schema.py
new file mode 100644
index 00000000..474b7cc6
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_array_item_schema.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+from django.test import TestCase
+
+from pipeline.core.flow.io import ArrayItemSchema, IntItemSchema
+
+
+class ArrayItemSchemaTestCase(TestCase):
+    def setUp(self):
+        self.description = "a simple item"
+        self.enum = ["1", "2", "3"]
+
+        self.item_description = "an integer"
+        self.item_schema = IntItemSchema(description=self.item_description)
+
+    def test_as_dict(self):
+        schema = ArrayItemSchema(description=self.description, enum=self.enum, item_schema=self.item_schema)
+
+        schema_dict = schema.as_dict()
+        self.assertEqual(
+            schema_dict,
+            {
+                "type": "array",
+                "description": self.description,
+                "enum": self.enum,
+                "items": {"type": "int", "description": self.item_description, "enum": []},
+            },
+        )
diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_input_item.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_input_item.py
new file mode 100644
index 00000000..2c679fd5
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_input_item.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +from django.test import TestCase +from mock import MagicMock + +from pipeline.core.flow.io import InputItem + + +class InputItemTestCase(TestCase): + def setUp(self): + self.name = "input item" + self.key = "input_key" + self.type = "string" + self.required = False + schema = MagicMock() + schema.as_dict = MagicMock(return_value="schema dict") + self.schema = schema + + def test_as_dict(self): + input_item = InputItem(name=self.name, key=self.key, type=self.type, required=self.required, schema=self.schema) + item_dict = input_item.as_dict() + + self.assertEqual( + item_dict, + { + "name": self.name, + "key": self.key, + "type": self.type, + "required": self.required, + "schema": self.schema.as_dict(), + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_object_item_schema.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_object_item_schema.py new file mode 100644 index 00000000..98d41bd2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_object_item_schema.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +from django.test import TestCase + +from pipeline.core.flow.io import ArrayItemSchema, IntItemSchema, ObjectItemSchema, StringItemSchema + + +class ObjectItemSchemaTestCase(TestCase): + def setUp(self): + self.maxDiff = None + self.description = "a simple item" + + self.int_description = "a integer" + self.int_schema = IntItemSchema(description=self.int_description) + self.string_description = "a string" + self.string_schema = StringItemSchema(description=self.string_description) + self.array_description = "a array" + self.array_item_description = "item in array" + self.array_schema = ArrayItemSchema( + description=self.array_description, item_schema=StringItemSchema(description=self.array_item_description) + ) + self.inner_object_description = "inner object" + self.inner_object_schema = ObjectItemSchema( + description=self.inner_object_description, + property_schemas={"int_key": self.int_schema, "str_key": self.string_schema}, + ) + + def test_as_dict(self): + object_schema = ObjectItemSchema( + description=self.description, + property_schemas={"array_key": self.array_schema, "object_key": self.inner_object_schema}, + ) + + schema_dict = object_schema.as_dict() + self.assertEqual( + schema_dict, + { + "type": "object", + "description": self.description, + "enum": [], + "properties": { + "array_key": { + "type": "array", + "description": self.array_description, + "enum": [], + "items": {"type": "string", "description": self.array_item_description, "enum": []}, + }, + "object_key": { + "type": "object", + "description": self.inner_object_description, + "enum": [], + "properties": { + "int_key": {"type": "int", "description": self.int_description, "enum": []}, + "str_key": {"type": "string", "description": self.string_description, "enum": []}, + }, + }, + }, + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_output_item.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_output_item.py new file mode 100644 index 00000000..fcfd23be --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_output_item.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase +from mock import MagicMock + +from pipeline.core.flow.io import OutputItem + + +class OutputItemTestCase(TestCase): + def setUp(self): + self.name = "input item" + self.key = "input_key" + self.type = "string" + self.required = False + schema = MagicMock() + schema.as_dict = MagicMock(return_value="schema dict") + self.schema = schema + + def test_as_dict(self): + input_item = OutputItem(name=self.name, key=self.key, type=self.type, schema=self.schema) + item_dict = input_item.as_dict() + + self.assertEqual( + item_dict, {"name": self.name, "key": self.key, "type": self.type, "schema": self.schema.as_dict()} + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_simple_item_schema.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_simple_item_schema.py new file mode 100644 index 00000000..bb54284d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/io/test_simple_item_schema.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.io import BooleanItemSchema, FloatItemSchema, IntItemSchema, StringItemSchema + + +class SimpleItemSchemaTestCase(TestCase): + def setUp(self): + self.description = "a simple item" + self.enum = ["1", "2", "3"] + + def test_as_dict(self): + schema = StringItemSchema(description=self.description, enum=self.enum) + + schema_dict = schema.as_dict() + self.assertEqual(schema_dict, {"type": "string", "description": self.description, "enum": self.enum}) + + def test_type(self): + self.assertEqual("int", IntItemSchema._type()) + self.assertEqual("string", StringItemSchema._type()) + self.assertEqual("float", FloatItemSchema._type()) + self.assertEqual("boolean", BooleanItemSchema._type()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_activity.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_activity.py new file mode 100644 index 00000000..1d74ab91 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_activity.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.data.base import DataObject +from pipeline.core.flow.activity import * # noqa + + +class TestActivity(TestCase): + def test_base_activity(self): + act_id = "1" + base_act = Activity(act_id) + self.assertTrue(isinstance(base_act, FlowNode)) + self.assertEqual(act_id, base_act.id) + + def test_service_activity(self): + class TestService(Service): + def execute(self, data, parent_data): + return True + + act_id = "1" + service = TestService() + inputs = {"args": [1, 2, 3], "kwargs": {"1": 1, "2": 2}} + service_act = ServiceActivity(id=act_id, service=service, data=DataObject(inputs)) + self.assertTrue(isinstance(service_act, Activity)) + self.assertEqual(service, service_act.service) + + service_act.setup_runtime_attrs(id="123", root_pipeline_id="456") + self.assertEqual(service_act.service._runtime_attrs, {"id": "123", "root_pipeline_id": "456"}) + self.assertEqual(service_act.service.id, "123") + self.assertEqual(service_act.service.root_pipeline_id, "456") + + def test_subprocess(self): + act_id = "1" + + class MockData(object): + def __init__(self, val): + self.val = val + + def inputs_copy(self): + pass + + def outputs_copy(self): + pass + + class MockPipeline(object): + def __init__(self, data): + self.data = MockData(data) + + pipeline = MockPipeline("data") + sub_process = SubProcess(act_id, pipeline) + self.assertTrue(isinstance(sub_process, Activity)) + self.assertEqual(sub_process.data, pipeline.data) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_conditional_parallel_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_conditional_parallel_gateway.py new file mode 100644 index 00000000..f323d8e7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_conditional_parallel_gateway.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.base import SequenceFlow +from pipeline.core.flow.gateway import Condition, ConditionalParallelGateway, ParallelGateway +from pipeline.exceptions import ConditionExhaustedException, InvalidOperationException +from pipeline.tests.mock import * # noqa + + +class ConditionalParallelGatewayTestCase(TestCase): + def setUp(self): + self.id = "id" + self.name = "name" + self.data = "data" + self.converge_gateway_id = "converge_gateway_id" + self.conditions = [1, 2, 3] + + def test_init(self): + cpg_1 = ConditionalParallelGateway(id=self.id, converge_gateway_id=self.converge_gateway_id) + self.assertEqual(cpg_1.id, self.id) + self.assertEqual(cpg_1.converge_gateway_id, self.converge_gateway_id) + self.assertEqual(cpg_1.conditions, []) + self.assertIsNone(cpg_1.name) + self.assertIsNone(cpg_1.data) + + cpg_2 = ConditionalParallelGateway( + id=self.id, + converge_gateway_id=self.converge_gateway_id, + conditions=self.conditions, + name=self.name, + data=self.data, + ) + self.assertEqual(cpg_2.id, self.id) + self.assertEqual(cpg_2.converge_gateway_id, self.converge_gateway_id) + self.assertEqual(cpg_2.conditions, self.conditions) + self.assertEqual(cpg_2.name, self.name) + self.assertEqual(cpg_2.data, self.data) + + def test_add_condition(self): + cpg = ConditionalParallelGateway(id=self.id, converge_gateway_id=self.converge_gateway_id) + cpg.add_condition("condition_1") + cpg.add_condition("condition_2") + self.assertEqual(cpg.conditions, ["condition_1", "condition_2"]) + + def test_targets_meet_condition__normal(self): + node_1 = ParallelGateway(id="1", converge_gateway_id="cvg") + node_2 = ParallelGateway(id="2", converge_gateway_id="cvg") + node_3 = ParallelGateway(id="3", converge_gateway_id="cvg") + node_4 = ParallelGateway(id="4", converge_gateway_id="cvg") + condition_1 = Condition(evaluate="1 == 1", sequence_flow=SequenceFlow(id=self.id, source=node_1, target=node_1)) + condition_2 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=node_2, target=node_2)) + condition_3 = Condition(evaluate="1 == 1", sequence_flow=SequenceFlow(id=self.id, source=node_3, target=node_3)) + condition_4 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=node_4, target=node_4)) + cpg = ConditionalParallelGateway( + id=self.id, + converge_gateway_id=self.converge_gateway_id, + conditions=[condition_1, condition_2, condition_3, condition_4], + ) + + targets = cpg.targets_meet_condition({}) + self.assertEqual(targets, [node_1, node_3]) + + def test_targets_meet_condition__raise_exhausted(self): + condition_1 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=None, target=None)) + condition_2 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=None, target=None)) + condition_3 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=None, target=None)) + condition_4 = Condition(evaluate="1 == 0", sequence_flow=SequenceFlow(id=self.id, source=None, target=None)) + cpg = ConditionalParallelGateway( + id=self.id, + converge_gateway_id=self.converge_gateway_id, + conditions=[condition_1, condition_2, condition_3, condition_4], + ) + + self.assertRaises(ConditionExhaustedException, cpg.targets_meet_condition, {}) + + def test_next(self): + cpg = ConditionalParallelGateway(id=self.id, converge_gateway_id=self.converge_gateway_id) + self.assertRaises(InvalidOperationException, cpg.next) + + def test_skip(self): + cpg = 
ConditionalParallelGateway(id=self.id, converge_gateway_id=self.converge_gateway_id) + self.assertRaises(InvalidOperationException, cpg.skip) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_converge_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_converge_gateway.py new file mode 100644 index 00000000..80a997f9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_converge_gateway.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.base import FlowNode, SequenceFlow +from pipeline.core.flow.gateway import ConvergeGateway, Gateway, ParallelGateway + + +class TestConvergeGateway(TestCase): + def test_converge_gateway(self): + gw_id = "1" + cvg_gateway = ConvergeGateway(gw_id) + self.assertTrue(isinstance(cvg_gateway, FlowNode)) + self.assertTrue(isinstance(cvg_gateway, Gateway)) + + def test_next(self): + cvg_gateway = ConvergeGateway("1") + parallel_gateway = ParallelGateway("2", "cvg") + out_flow = SequenceFlow("flow", cvg_gateway, parallel_gateway) + cvg_gateway.outgoing.add_flow(out_flow) + parallel_gateway.incoming.add_flow(out_flow) + self.assertEqual(parallel_gateway, cvg_gateway.next()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_event.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_event.py new file mode 100644 index 00000000..220254d6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_event.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.event import * # noqa + + +class TestEvent(TestCase): + def test_event(self): + event_id = "1" + event = Event(event_id) + self.assertTrue(isinstance(event, FlowNode)) + self.assertEqual(event_id, event.id) + + def test_throw_event(self): + event_id = "1" + event = ThrowEvent(event_id) + self.assertTrue(isinstance(event, Event)) + + def test_catch_event(self): + event_id = "1" + event = CatchEvent(event_id) + self.assertTrue(isinstance(event, Event)) + + def test_start_event(self): + event_id = "1" + event = StartEvent(event_id) + self.assertTrue(isinstance(event, CatchEvent)) + + def test_end_event(self): + event_id = "1" + event = EndEvent(event_id) + self.assertTrue(isinstance(event, ThrowEvent)) + + def test_empty_start_event(self): + event_id = "1" + event = EmptyStartEvent(event_id) + self.assertTrue(isinstance(event, StartEvent)) + + def test_empty_end_event(self): + event_id = "1" + event = EmptyEndEvent(event_id) + self.assertTrue(isinstance(event, EndEvent)) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_exclusive_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_exclusive_gateway.py new file mode 100644 index 00000000..e3d946c3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_exclusive_gateway.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.base import FlowNode, SequenceFlow +from pipeline.core.flow.gateway import Condition, ExclusiveGateway, Gateway, ParallelGateway +from pipeline.exceptions import ConditionExhaustedException, EvaluationException + + +class TestExclusiveGateway(TestCase): + def setUp(self): + ex_gateway1 = ExclusiveGateway(id="1") + next_node1 = ParallelGateway(id="1", converge_gateway_id="cvg") + next_node2 = ParallelGateway(id="2", converge_gateway_id="cvg") + flow1 = SequenceFlow("flow1", ex_gateway1, next_node1) + flow2 = SequenceFlow("flow2", ex_gateway1, next_node2) + condition1 = Condition("a == 1", flow1) + condition2 = Condition("a != 1", flow2) + ex_gateway1.add_condition(condition1) + ex_gateway1.add_condition(condition2) + ex_gateway1.outgoing.add_flow(flow1) + ex_gateway1.outgoing.add_flow(flow2) + next_node1.incoming.add_flow(flow1) + next_node2.incoming.add_flow(flow2) + + self.gateway_for_test_determine = ex_gateway1 + + ex_gateway2 = ExclusiveGateway(id="2") + next_node3 = ParallelGateway(id="3", converge_gateway_id="cvg") + next_node4 = ParallelGateway(id="4", converge_gateway_id="cvg") + next_node5 = ParallelGateway(id="5", converge_gateway_id="cvg") + flow3 = SequenceFlow("flow3", ex_gateway2, next_node3) + flow4 = SequenceFlow("flow4", ex_gateway2, next_node4) + flow5 = SequenceFlow("flow5", ex_gateway2, next_node5, is_default=True) + condition3 = Condition("a == 1", flow3) + condition4 = Condition("a != 1", flow4) + ex_gateway2.add_condition(condition3) + ex_gateway2.add_condition(condition4) + ex_gateway2.outgoing.add_flow(flow3) + ex_gateway2.outgoing.add_flow(flow4) + ex_gateway2.outgoing.add_flow(flow5) + next_node3.incoming.add_flow(flow3) + next_node4.incoming.add_flow(flow4) + next_node5.incoming.add_flow(flow5) + + self.gateway_for_test_next = ex_gateway2 + + self.nodes = [next_node1, next_node2, next_node3, next_node4, next_node5] + + def test_exclusive_gateway(self): + gw_id = "1" + conditions = [Condition(None, None), Condition(None, None)] + ex_gateway = ExclusiveGateway(id=gw_id, conditions=conditions) + self.assertTrue(isinstance(ex_gateway, FlowNode)) + self.assertTrue(isinstance(ex_gateway, Gateway)) + self.assertEqual(conditions, ex_gateway.conditions) + + def test_add_condition(self): + ex_gateway = ExclusiveGateway(id="1") + flow1 = SequenceFlow("flow1", ex_gateway, None) + self.assertEqual([], ex_gateway.conditions) + ex_gateway.add_condition(flow1) + self.assertEqual([flow1], ex_gateway.conditions) + + def test_determine_next_flow_success(self): + flow1 = self.gateway_for_test_determine.outgoing.flows[0] + flow2 = self.gateway_for_test_determine.outgoing.flows[1] + data1 = {"a": 1} + data2 = {"a": 2} + self.assertEqual(flow1, self.gateway_for_test_determine._determine_next_flow_with_boolrule(data1)) + self.assertEqual(flow2, self.gateway_for_test_determine._determine_next_flow_with_boolrule(data2)) + + def test_determine_next_flow_exhausted(self): + self.gateway_for_test_determine.conditions[1].evaluate = "a > 1" + data = {"a": -1} + self.assertIsNone(self.gateway_for_test_determine._determine_next_flow_with_boolrule(data)) + + def test_determine_next_evaluation_exception(self): + self.gateway_for_test_determine.conditions[0].evaluate = "c == 1" + data = {"a": 1} + self.assertRaises(EvaluationException, self.gateway_for_test_determine._determine_next_flow_with_boolrule, data) + + def test_next_success(self): + node = self.gateway_for_test_next.outgoing.flows[1].target + data = {"a": 2} + 
self.assertEqual(node, self.gateway_for_test_next.next(data)) + + def test_next_exhausted(self): + node = self.gateway_for_test_next.outgoing.flows[2].target + self.gateway_for_test_next.conditions[1].evaluate = "a > 1" + data = {"a": 0} + self.assertEqual(node, self.gateway_for_test_next.next(data)) + self.gateway_for_test_next.outgoing.flows[2].is_default = False + self.assertRaises(ConditionExhaustedException, self.gateway_for_test_next.next, data) + + def test_next_raise(self): + origin = self.gateway_for_test_next.conditions[1].evaluate + self.gateway_for_test_next.conditions[1].evaluate = "b > 1" + data = {"a": 0} + self.assertRaises(EvaluationException, self.gateway_for_test_next.next, data) + self.gateway_for_test_next.conditions[1].evaluate = origin diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_collection.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_collection.py new file mode 100644 index 00000000..1dc6d69b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_collection.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.base import SequenceFlow, SequenceFlowCollection +from pipeline.exceptions import InvalidOperationException + + +class Obj(object): + pass + + +class TestFlowCollection(TestCase): + def test_sequence_flow_collection(self): + flow1 = SequenceFlow("1", Obj(), Obj()) + flow2 = SequenceFlow("4", Obj(), Obj()) + flow3 = SequenceFlow("7", Obj(), Obj()) + flows = [flow1, flow2, flow3] + flow_dict = {flow1.id: flow1, flow2.id: flow2, flow3.id: flow3} + collection = SequenceFlowCollection(*flows) + self.assertEqual(flows, collection.flows) + self.assertEqual(flow_dict, collection.flow_dict) + + def test_get_flow(self): + flow1 = SequenceFlow("1", Obj(), Obj()) + flow2 = SequenceFlow("4", Obj(), Obj()) + flow3 = SequenceFlow("7", Obj(), Obj()) + flows = [flow1, flow2, flow3] + collection = SequenceFlowCollection(*flows) + self.assertEqual(flow1, collection.get_flow(flow1.id)) + self.assertEqual(flow2, collection.get_flow(flow2.id)) + self.assertEqual(flow3, collection.get_flow(flow3.id)) + + def test_unique_one(self): + flow1 = SequenceFlow("1", Obj(), Obj()) + flow2 = SequenceFlow("4", Obj(), Obj()) + flow3 = SequenceFlow("7", Obj(), Obj()) + flows = [flow1, flow2, flow3] + not_unique_collection = SequenceFlowCollection(*flows) + unique_collection = SequenceFlowCollection(flow1) + self.assertEqual(flow1, unique_collection.unique_one()) + self.assertRaises(InvalidOperationException, not_unique_collection.unique_one) + + def test_is_empty(self): + flow1 = SequenceFlow("1", Obj(), Obj()) + flow2 = SequenceFlow("4", Obj(), Obj()) + flow3 = SequenceFlow("7", Obj(), Obj()) + flows = [flow1, flow2, flow3] + not_empty_collection = SequenceFlowCollection(*flows) + empty_collection = SequenceFlowCollection() + self.assertTrue(empty_collection.is_empty()) + self.assertFalse(not_empty_collection.is_empty()) + + def test_add_flow(self): + flow1 = SequenceFlow("1", Obj(), Obj()) + flow2 = SequenceFlow("4", Obj(), Obj()) + flow3 = SequenceFlow("7", Obj(), Obj()) + flow4 = SequenceFlow("10", Obj(), Obj()) + flows = [flow1, flow2, flow3] + flows_after_added = [flow1, flow2, flow3, flow4] + flow_dict_after_added = {flow1.id: flow1, flow2.id: flow2, flow3.id: flow3, flow4.id: flow4} + collection = SequenceFlowCollection(*flows) + collection.add_flow(flow4) + self.assertEqual(flows_after_added, collection.flows) + self.assertEqual(flow_dict_after_added, collection.flow_dict) + + def test_all_target(self): + targets = [Obj(), Obj(), Obj()] + flow1 = SequenceFlow("1", Obj(), targets[0]) + flow2 = SequenceFlow("4", Obj(), targets[1]) + flow3 = SequenceFlow("7", Obj(), targets[2]) + flows = [flow1, flow2, flow3] + collection = SequenceFlowCollection(*flows) + self.assertEqual(targets, collection.all_target_node()) + + def test_all_source(self): + sources = [Obj(), Obj(), Obj()] + flow1 = SequenceFlow("1", sources[0], Obj()) + flow2 = SequenceFlow("4", sources[1], Obj()) + flow3 = SequenceFlow("7", sources[2], Obj()) + flows = [flow1, flow2, flow3] + collection = SequenceFlowCollection(*flows) + self.assertEqual(sources, collection.all_source_node()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_element.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_element.py new file mode 100644 index 00000000..95398068 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_element.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 
蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.base import FlowElement + + +class TestBase(TestCase): + def test_flow_element(self): + element_id = "1" + name = "name" + flow_element = FlowElement(element_id, name) + self.assertEqual(element_id, flow_element.id) + self.assertEqual(name, flow_element.name) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node.py new file mode 100644 index 00000000..bd0fd5ad --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.base import FlowElement, FlowNode, SequenceFlowCollection + + +class MockNode(FlowNode): + def next(self): + raise Exception() + + +class TestFlowNode(TestCase): + def test_flow_node(self): + node_id = "1" + flow_node = MockNode(node_id) + self.assertTrue(isinstance(flow_node, FlowElement)) + self.assertEqual(node_id, flow_node.id) + default_collection_node = MockNode(node_id) + self.assertTrue(isinstance(default_collection_node.incoming, SequenceFlowCollection)) + self.assertTrue(isinstance(default_collection_node.outgoing, SequenceFlowCollection)) + + def test_next_exec_is_retry(self): + node_id = "1" + flow_node = MockNode(node_id) + flow_node.next_exec_is_retry() + self.assertTrue(hasattr(flow_node, FlowNode.ON_RETRY)) + + def test_on_retry(self): + node_id = "1" + flow_node = MockNode(node_id) + flow_node.next_exec_is_retry() + self.assertTrue(flow_node.on_retry()) + + def test_retry_at_current_exec(self): + node_id = "1" + flow_node = MockNode(node_id) + flow_node.next_exec_is_retry() + self.assertTrue(flow_node.on_retry()) + flow_node.retry_at_current_exec() + self.assertFalse(flow_node.on_retry()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node_cls_factory.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node_cls_factory.py new file mode 100644 index 00000000..0ad31c40 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_flow_node_cls_factory.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase +from mock import MagicMock, patch + +from pipeline.core import flow +from pipeline.core.flow import EndEvent, FlowNodeClsFactory + + +class FlowNodeClsFactoryTestCase(TestCase): + @patch("pipeline.core.flow.post_new_end_event_register", MagicMock()) + def test_register_node__is_not_end_event(self): + node_cls = MagicMock() + + FlowNodeClsFactory.register_node("key", node_cls) + + self.assertEqual(FlowNodeClsFactory.get_node_cls("key"), node_cls) + flow.post_new_end_event_register.send.assert_not_called() + + FlowNodeClsFactory.nodes_cls.pop("key") + + @patch("pipeline.core.flow.post_new_end_event_register", MagicMock()) + def test_register_node__with_end_event(self): + class TestEnd(EndEvent): + pass + + FlowNodeClsFactory.register_node("key", TestEnd) + self.assertEqual(FlowNodeClsFactory.get_node_cls("key"), TestEnd) + flow.post_new_end_event_register.send.assert_called_once_with( + sender=EndEvent, node_type="key", node_cls=TestEnd + ) + + FlowNodeClsFactory.nodes_cls.pop("key") + + def test_register_node__with_exist_type(self): + self.assertRaises(KeyError, FlowNodeClsFactory.register_node, "ServiceActivity", MagicMock()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_parallel_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_parallel_gateway.py new file mode 100644 index 00000000..a3544531 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_parallel_gateway.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.base import FlowNode +from pipeline.core.flow.gateway import Gateway, ParallelGateway +from pipeline.exceptions import InvalidOperationException + + +class TestParallelGateway(TestCase): + def test_parallel_gateway(self): + gw_id = "1" + pl_gateway = ParallelGateway(gw_id, "cvg") + self.assertTrue(isinstance(pl_gateway, FlowNode)) + self.assertTrue(isinstance(pl_gateway, Gateway)) + + def test_next(self): + gw_id = "1" + pl_gateway = ParallelGateway(gw_id, None, None) + self.assertRaises(InvalidOperationException, pl_gateway.next) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_sequence_flow.py b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_sequence_flow.py new file mode 100644 index 00000000..bfd7bb0e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/flow/test_sequence_flow.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.data.base import DataObject +from pipeline.core.flow.activity import ServiceActivity +from pipeline.core.flow.base import FlowElement, SequenceFlow + + +class TestSequenceFlow(TestCase): + def test_sequence_flow(self): + flow_id = "1" + source = ServiceActivity(id="1", service=None, data=DataObject({})) + target = ServiceActivity(id="2", service=None, data=DataObject({})) + flow = SequenceFlow(flow_id, source, target) + self.assertTrue(isinstance(flow, FlowElement)) + self.assertEqual(flow_id, flow.id) + self.assertEqual(source, flow.source) + self.assertEqual(target, flow.target) + self.assertEqual(False, flow.is_default) diff --git a/runtime/bamboo-pipeline/pipeline/tests/core/test_pipeline.py b/runtime/bamboo-pipeline/pipeline/tests/core/test_pipeline.py new file mode 100644 index 00000000..01b80717 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/core/test_pipeline.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.data.base import DataObject +from pipeline.core.flow.activity import ServiceActivity +from pipeline.core.flow.base import SequenceFlow +from pipeline.core.flow.event import EmptyEndEvent, EmptyStartEvent +from pipeline.core.pipeline import * # noqa + + +class TestPipeline(TestCase): + def test_node(self): + start_event = EmptyStartEvent(id="a") + act = ServiceActivity(id="b", service=None, data=DataObject({})) + end_event = EmptyEndEvent(id="c") + + flow_ab = SequenceFlow("ab", start_event, act) + flow_bc = SequenceFlow("bc", act, end_event) + + start_event.outgoing.add_flow(flow_ab) + act.incoming.add_flow(flow_ab) + act.outgoing.add_flow(flow_bc) + end_event.incoming.add_flow(flow_bc) + + spec = PipelineSpec(start_event, end_event, [flow_ab, flow_bc], [act], [], None, None) + pipeline = Pipeline("pipeline", spec) + self.assertEqual(act, pipeline.node("b")) + + def test_start_event(self): + start_event = EmptyStartEvent(id="a") + act = ServiceActivity(id="b", service=None, data=DataObject({})) + end_event = EmptyEndEvent(id="c") + + flow_ab = SequenceFlow("ab", start_event, act) + flow_bc = SequenceFlow("bc", act, end_event) + + start_event.outgoing.add_flow(flow_ab) + act.incoming.add_flow(flow_ab) + act.outgoing.add_flow(flow_bc) + end_event.incoming.add_flow(flow_bc) + + spec = PipelineSpec(start_event, end_event, [flow_ab, flow_bc], [act], [], None, None) + pipeline = Pipeline("pipeline", spec) + self.assertEqual(start_event, pipeline.start_event) + + def test_end_event(self): + start_event = EmptyStartEvent(id="a") + act = ServiceActivity(id="b", service=None, data=DataObject({})) + end_event = EmptyEndEvent(id="c") + + flow_ab = SequenceFlow("ab", start_event, act) + flow_bc = SequenceFlow("bc", act, end_event) + + start_event.outgoing.add_flow(flow_ab) + act.incoming.add_flow(flow_ab) + act.outgoing.add_flow(flow_bc) + end_event.incoming.add_flow(flow_bc) + + spec = PipelineSpec(start_event, end_event, [flow_ab, flow_bc], [act], [], None, None) + pipeline = Pipeline("pipeline", spec) + self.assertEqual(end_event, pipeline.end_event) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_api.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_api.py new file mode 100644 index 00000000..af2d656d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_api.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import sys + +from django.test import TestCase +from django.utils.module_loading import import_string + +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +class EngineDataAPITestCase(TestCase): + @classmethod + def setUpClass(cls): + cls.mock_settings = MagicMock() + cls.settings_patch = patch(ENGINE_DATA_API_SETTINGS, cls.mock_settings) + cls.import_backend_patch = patch(ENGINE_DATA_API_IMPORT_BACKEND, MagicMock()) + cls.settings_patch.start() + cls.import_backend_patch.start() + + cls.api = import_string("pipeline.engine.core.data.api") + cls.write_methods = ["set_object", "del_object", "expire_cache"] + cls.read_methods = ["get_object", "cache_for"] + cls.method_params = { + "set_object": ["key", "obj"], + "del_object": ["key"], + "expire_cache": ["key", "obj", "expires"], + "cache_for": ["key"], + "get_object": ["key"], + } + + @classmethod + def tearDownClass(cls): + cls.settings_patch.stop() + cls.import_backend_patch.stop() + + def setUp(self): + self.backend = MagicMock() + self.candidate_backend = MagicMock() + self.mock_settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE = False + + def test_write__without_candidate(self): + for method in self.write_methods: + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, None): + getattr(self.api, method)(*self.method_params[method]) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_not_called() + + sys.stdout.write("{} pass test_write__without_candidate test\n".format(method)) + + def test_write__without_candiate_raise_err(self): + for method in self.write_methods: + + setattr(self.backend, method, MagicMock(side_effect=Exception)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, None): + self.assertRaises(Exception, getattr(self.api, method), *self.method_params[method]) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_not_called() + + sys.stdout.write("{} pass test_write__without_candiate_raise_err test\n".format(method)) + + def test_write__with_candidate(self): + for method in self.write_methods: + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + getattr(self.api, method)(*self.method_params[method]) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_write__with_candidate test\n".format(method)) + + def test_write__with_candidate_main_raise_err(self): + for method in self.write_methods: + + setattr(self.backend, method, MagicMock(side_effect=Exception)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + getattr(self.api, method)(*self.method_params[method]) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_write__with_candidate_main_raise_err test\n".format(method)) + + def test_write__with_candidate_raise_err(self): + for method in self.write_methods: + + setattr(self.candidate_backend, method, MagicMock(side_effect=Exception)) + + with 
patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend):
+                    getattr(self.api, method)(*self.method_params[method])
+                    getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method])
+
+            sys.stdout.write("{} pass test_write__with_candidate_raise_err test\n".format(method))
+
+    def test_write__with_candidate_both_raise_err(self):
+        for method in self.write_methods:
+
+            setattr(self.backend, method, MagicMock(side_effect=Exception))
+            setattr(self.candidate_backend, method, MagicMock(side_effect=Exception))
+
+            with patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend):
+                    self.assertRaises(Exception, getattr(self.api, method), *self.method_params[method])
+                    getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method])
+
+            sys.stdout.write("{} pass test_write__with_candidate_both_raise_err test\n".format(method))
+
+    def test_write__with_auto_expire(self):
+        self.mock_settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE = True
+        self.mock_settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS = 30
+
+        for method in self.write_methods:
+
+            with patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend):
+                    getattr(self.api, method)(*self.method_params[method])
+                    if method == "set_object":
+                        getattr(self.backend, "expire_cache").assert_called_once_with(
+                            *self.method_params[method], expires=30
+                        )
+                        self.backend.expire_cache.reset_mock()
+                    else:
+                        getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method])
+
+            sys.stdout.write("{} pass test_write__with_auto_expire test\n".format(method))
+
+    def test_read__without_candidate(self):
+        for method in self.read_methods:
+
+            with patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, None):
+                    data = getattr(self.api, method)(*self.method_params[method])
+                    self.assertIsNotNone(data)
+                    getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    getattr(self.candidate_backend, method).assert_not_called()
+
+            sys.stdout.write("{} pass test_read__without_candidate test\n".format(method))
+
+    def test_read__without_candidate_raise_err(self):
+        for method in self.read_methods:
+
+            setattr(self.backend, method, MagicMock(side_effect=Exception))
+
+            with patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, None):
+                    self.assertRaises(Exception, getattr(self.api, method), *self.method_params[method])
+                    getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    getattr(self.candidate_backend, method).assert_not_called()
+
+            sys.stdout.write("{} pass test_read__without_candidate_raise_err test\n".format(method))
+
+    def test_read__with_candidate_not_use(self):
+        for method in self.read_methods:
+
+            with patch(ENGINE_DATA_API_BACKEND, self.backend):
+                with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend):
+                    data = getattr(self.api, method)(*self.method_params[method])
+                    self.assertIsNotNone(data)
+                    getattr(self.backend, method).assert_called_once_with(*self.method_params[method])
+                    
getattr(self.candidate_backend, method).assert_not_called() + + sys.stdout.write("{} pass test_read__with_candidate_not_use test\n".format(method)) + + def test_read__with_candidate_use(self): + for method in self.read_methods: + + setattr(self.backend, method, MagicMock(return_value=None)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + data = getattr(self.api, method)(*self.method_params[method]) + self.assertIsNotNone(data) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_read__with_candidate_use test\n".format(method)) + + def test_read__with_candidate_err(self): + for method in self.read_methods: + + setattr(self.backend, method, MagicMock(return_value=None)) + setattr(self.candidate_backend, method, MagicMock(side_effect=Exception)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + data = getattr(self.api, method)(*self.method_params[method]) + self.assertIsNone(data) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_read__with_candidate_err test\n".format(method)) + + def test_read__with_candidate_main_raise_err(self): + for method in self.read_methods: + + setattr(self.backend, method, MagicMock(side_effect=Exception)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + data = getattr(self.api, method)(*self.method_params[method]) + self.assertIsNotNone(data) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_read__with_candidate_main_raise_err test\n".format(method)) + + def test_read__with_candidate_both_raise_err(self): + for method in self.read_methods: + + setattr(self.backend, method, MagicMock(side_effect=Exception)) + setattr(self.candidate_backend, method, MagicMock(side_effect=Exception)) + + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + self.assertRaises(Exception, getattr(self.api, method), *self.method_params[method]) + getattr(self.backend, method).assert_called_once_with(*self.method_params[method]) + getattr(self.candidate_backend, method).assert_called_once_with(*self.method_params[method]) + + sys.stdout.write("{} pass test_read__with_candidate_both_raise_err test\n".format(method)) + + def test_set_schedule_data(self): + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + self.api.set_schedule_data("key", "data") + self.backend.set_object.assert_called_once_with("key_schedule_parent_data", "data") + self.candidate_backend.set_object.assert_called_once_with("key_schedule_parent_data", "data") + + def test_delete_parent_data(self): + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + self.api.delete_parent_data("key") + self.backend.del_object.assert_called_once_with("key_schedule_parent_data") + 
self.candidate_backend.del_object.assert_called_once_with("key_schedule_parent_data") + + def test_get_schedule_parent_data(self): + with patch(ENGINE_DATA_API_BACKEND, self.backend): + with patch(ENGINE_DATA_API_CANDIDATE_BACKEND, self.candidate_backend): + data = self.api.get_schedule_parent_data("key") + self.assertIsNotNone(data) + self.backend.get_object.assert_called_once_with("key_schedule_parent_data") + self.candidate_backend.get_object.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_mysql_backend.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_mysql_backend.py new file mode 100644 index 00000000..47c70685 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/data/test_mysql_backend.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import time + +from django.test import TestCase + +from pipeline.engine.core.data.mysql_backend import MySQLDataBackend + + +class MySQLBackendTestCase(TestCase): + def setUp(self): + self.backend = MySQLDataBackend() + self.key = "test_key" + self.obj = {"a": "a", 1: "1", 2: "2", "list": [4, 5, 6]} + self.expire = 5 + + def test_set_object(self): + result = self.backend.set_object(self.key, self.obj) + self.assertTrue(result) + + def test_get_object(self): + self.backend.set_object(self.key, self.obj) + obj = self.backend.get_object(self.key) + self.assertEqual(self.obj, obj) + + def test_del_object(self): + self.backend.set_object(self.key, self.obj) + result = self.backend.del_object(self.key) + self.assertTrue(result) + none = self.backend.get_object(self.key) + self.assertIsNone(none) + + def test_expire_cache(self): + self.backend.expire_cache(self.key, self.obj, self.expire) + time.sleep(self.expire + 1) + none = self.backend.cache_for(self.key) + self.assertIsNone(none) + + def test_cache_for(self): + self.backend.expire_cache(self.key, self.obj, self.expire) + obj = self.backend.cache_for(self.key) + self.assertEqual(self.obj, obj) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_conditional_parallel_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_conditional_parallel_handler.py new file mode 100644 index 00000000..9fa0e3a1 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_conditional_parallel_handler.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.gateway import ConditionalParallelGateway +from pipeline.engine.core import handlers +from pipeline.engine.core.handlers import conditional_parallel +from pipeline.engine.models import PipelineProcess, Status +from pipeline.exceptions import ConditionExhaustedException, PipelineException +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.conditional_parallel_handler = handlers.ConditionalParallelGatewayHandler() + +hydrate_context = "hydrate_context" +targets = [IdentifyObject(), IdentifyObject(), IdentifyObject()] + + +class ConditionalParallelGatewayHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ConditionalParallelGatewayHandler.element_cls(), ConditionalParallelGateway) + + @patch(CPG_HYDRATE_DATA, MagicMock(return_value=hydrate_context)) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def test_handle__get_targets_raise(self): + e_message = "e_message" + context_variables = "variables" + + cpg = MagicMock() + cpg.targets_meet_condition = MagicMock(side_effect=ConditionExhaustedException(e_message)) + status = MockStatus(loop=0) + process = MockPipelineProcess(top_pipeline_context=MockContext(variables=context_variables)) + + result = handlers.conditional_parallel_handler(process, cpg, status) + self.assertIsNone(result.next_node) + self.assertTrue(result.should_return) + self.assertTrue(result.should_sleep) + + conditional_parallel.hydrate_data.assert_called_once_with(context_variables) + + cpg.targets_meet_condition.assert_called_once_with(hydrate_context) + + Status.objects.fail.assert_called_once_with(cpg, ex_data=e_message) + + process.join.assert_not_called() + + @patch(CPG_HYDRATE_DATA, MagicMock(return_value=hydrate_context)) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def test_handle__fork_child_raise(self): + e_message = "e_message" + context_variables = "variables" + converge_gateway_id = "converge_gateway_id" + + cpg = 
MagicMock() + cpg.targets_meet_condition = MagicMock(return_value=targets) + cpg.converge_gateway_id = converge_gateway_id + status = MockStatus(loop=0) + process = MockPipelineProcess(top_pipeline_context=MockContext(variables=context_variables)) + + with patch(PIPELINE_PROCESS_FORK_CHILD, MagicMock(side_effect=PipelineException(e_message))): + result = handlers.conditional_parallel_handler(process, cpg, status) + self.assertIsNone(result.next_node) + self.assertTrue(result.should_return) + self.assertTrue(result.should_sleep) + + conditional_parallel.hydrate_data.assert_called_once_with(context_variables) + + cpg.targets_meet_condition.assert_called_once_with(hydrate_context) + + Status.objects.fail.assert_called_once_with(cpg, ex_data=e_message) + + process.join.assert_not_called() + + @patch(CPG_HYDRATE_DATA, MagicMock(return_value=hydrate_context)) + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__normal(self): + context_variables = "variables" + converge_gateway_id = "converge_gateway_id" + children = [1, 2, 3] + + for loop in [0, 1, 2, 3]: + cpg = MagicMock() + cpg.targets_meet_condition = MagicMock(return_value=targets) + cpg.converge_gateway_id = converge_gateway_id + status = MockStatus(loop=loop) + process = MockPipelineProcess(top_pipeline_context=MockContext(variables=context_variables)) + + with patch(PIPELINE_PROCESS_FORK_CHILD, MagicMock(side_effect=children)): + result = handlers.conditional_parallel_handler(process, cpg, status) + self.assertIsNone(result.next_node) + self.assertTrue(result.should_return) + self.assertTrue(result.should_sleep) + + if loop > 1: + process.top_pipeline.context.recover_variable.assert_called_once() + + conditional_parallel.hydrate_data.assert_called_once_with(context_variables) + + cpg.targets_meet_condition.assert_called_once_with(hydrate_context) + + PipelineProcess.objects.fork_child.assert_has_calls( + [ + mock.call( + parent=process, current_node_id=targets[0].id, destination_id=cpg.converge_gateway_id + ), + mock.call( + parent=process, current_node_id=targets[1].id, destination_id=cpg.converge_gateway_id + ), + mock.call( + parent=process, current_node_id=targets[2].id, destination_id=cpg.converge_gateway_id + ), + ] + ) + + process.join.assert_called_once_with(children) + + Status.objects.finish.assert_called_once_with(cpg) + + conditional_parallel.hydrate_data.reset_mock() + + Status.objects.finish.reset_mock() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_converge_gateway_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_converge_gateway_handler.py new file mode 100644 index 00000000..5b549e88 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_converge_gateway_handler.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.gateway import ConvergeGateway +from pipeline.engine.core import handlers +from pipeline.engine.exceptions import ChildDataSyncError +from pipeline.engine.models import Status +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.converge_gateway_handler = handlers.ConvergeGatewayHandler() + + +class ConvergeGatewayHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ConvergeGatewayHandler.element_cls(), ConvergeGateway) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__normal_has_children(self): + converge_gateway = MockConvergeGateway() + process = MockPipelineProcess(children=[1, 2, 3]) + + hdl_result = handlers.converge_gateway_handler(process, converge_gateway, MockStatus()) + + process.sync_with_children.assert_called_once() + + Status.objects.finish.assert_called_once_with(converge_gateway) + + self.assertEqual(hdl_result.next_node, converge_gateway.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__normal_without_children(self): + converge_gateway = MockConvergeGateway() + process = MockPipelineProcess(children=[]) + + hdl_result = handlers.converge_gateway_handler(process, converge_gateway, MockStatus()) + + process.sync_with_children.assert_not_called() + + Status.objects.finish.assert_called_once_with(converge_gateway) + + self.assertEqual(hdl_result.next_node, converge_gateway.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def test_handle__sync_raise_exception(self): + converge_gateway = MockConvergeGateway() + e = ChildDataSyncError() + process = MockPipelineProcess(children=[1, 2, 3], sync_exception=e) + + hdl_result = handlers.converge_gateway_handler(process, converge_gateway, MockStatus()) + + process.sync_with_children.assert_called_once() + + Status.objects.fail.assert_called_once_with( + converge_gateway, ex_data="Sync branch context error, check data backend status please." + ) + + Status.objects.finish.assert_not_called() + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_end_event_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_end_event_handler.py new file mode 100644 index 00000000..42037a7e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_end_event_handler.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.data.base import DataObject as RealDataObject +from pipeline.core.flow.activity import SubProcess +from pipeline.core.flow.event import EmptyEndEvent +from pipeline.engine import states +from pipeline.engine.core import handlers +from pipeline.engine.models import Data, Status +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.empty_end_event_handler = handlers.EmptyEndEventHandler() + + +class EmptyEndEventHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.EmptyEndEventHandler.element_cls(), EmptyEndEvent) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__is_subprocess(self): + end_event = EndEventObject() + context = MockContext() + sub_pipeline = PipelineObject(context=MockContext()) + process = MockPipelineProcess( + top_pipeline=PipelineObject(context=context, node=MockSubprocessActivity()), + pipeline_stack=["root_pipeline", sub_pipeline], + ) + + hdl_result = handlers.empty_end_event_handler(process, end_event, MockStatus()) + + sub_pipeline.context.write_output.assert_called_once_with(sub_pipeline) + + sub_process_node = process.top_pipeline.node(sub_pipeline.id) + Status.objects.finish.assert_has_calls([mock.call(end_event), mock.call(sub_process_node)]) + + process.top_pipeline.context.extract_output.assert_called_once_with(sub_process_node) + + self.assertEqual(hdl_result.next_node, sub_process_node.next()) + self.assertFalse(hdl_result.should_sleep) + self.assertFalse(hdl_result.should_return) + + @patch(PIPELINE_STATUS_SELECT_FOR_UPDATE, MagicMock(return_value=MockQuerySet())) + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock()) + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + def test_handle__is_not_subprocess(self): + context = MockContext() + subproc_1 = SubProcess(id=uniqid(), pipeline=PipelineObject(context=MockContext(), data=RealDataObject({}))) + subproc_2 = SubProcess(id=uniqid(), pipeline=PipelineObject(context=MockContext(), data=RealDataObject({}))) + spec = PipelineSpecObject(activities=[ServiceActObject(), subproc_1, subproc_2]) + process = MockPipelineProcess(top_pipeline_context=context, top_pipeline_spec=spec) + top_pipeline = process.top_pipeline + end_event = EndEventObject() + + hdl_result = handlers.empty_end_event_handler(process, end_event, MockStatus()) + + top_pipeline.context.write_output.assert_called_once_with(top_pipeline) + + Data.objects.write_node_data.assert_called_once_with(top_pipeline) + + Status.objects.finish.assert_called_once_with(end_event) + + Status.objects.transit.assert_called_once_with(top_pipeline.id, to_state=states.FINISHED, is_pipeline=True) + + end_event.pipeline_finish.assert_called_once_with(process.root_pipeline.id) + + subproc_1.pipeline.context.clear.assert_called_once() + + subproc_2.pipeline.context.clear.assert_called_once() + + top_pipeline.context.clear.assert_called_once() + + process.destroy.assert_called_once() + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_start_event_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_start_event_handler.py new file mode 100644 index 00000000..6e26138c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_empty_start_event_handler.py @@ -0,0 
+1,43 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.data import var +from pipeline.core.flow.event import EmptyStartEvent +from pipeline.engine.core import handlers +from pipeline.engine.models import Status +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.empty_start_event_handler = handlers.EmptyStartEventHandler() + + +class EmptyStartEventHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.EmptyStartEventHandler.element_cls(), EmptyStartEvent) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle(self): + context_variables = {"${pre_render_key}": var.PlainVariable(name="pre_render_key", value="test pre_render_key")} + process = MockPipelineProcess(top_pipeline_context=MockContext(variables=context_variables)) + start_event = StartEventObject(data=MockData(get_inputs_return={"pre_render_keys": ["${pre_render_key}"]})) + + hdl_result = handlers.empty_start_event_handler(process, start_event, MockStatus()) + + Status.objects.finish.assert_called_once_with(start_event) + + self.assertEqual(hdl_result.next_node, start_event.next()) + self.assertFalse(hdl_result.should_sleep) + self.assertFalse(hdl_result.should_return) + self.assertEqual(process.top_pipeline.context.variables["${pre_render_key}"], "test pre_render_key") diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_exclusive_gateway_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_exclusive_gateway_handler.py new file mode 100644 index 00000000..045564a9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_exclusive_gateway_handler.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.gateway import ExclusiveGateway +from pipeline.engine.core import handlers +from pipeline.engine.core.handlers import exclusive_gateway as exg_h +from pipeline.engine.models import Status +from pipeline.exceptions import PipelineException +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.exclusive_gateway_handler = handlers.ExclusiveGatewayHandler() + + +class ExclusiveGatewayHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ExclusiveGatewayHandler.element_cls(), ExclusiveGateway) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__normal(self): + for loop in (1, 2, 3): + hydrate_data_return = "hydrate_data_return" + exclusive_gateway = MockExclusiveGateway() + context = MockContext() + process = MockPipelineProcess(top_pipeline_context=context) + + with patch(EXG_HYDRATE_DATA, MagicMock(return_value=hydrate_data_return)): + hdl_result = handlers.exclusive_gateway_handler(process, exclusive_gateway, MockStatus(loop=loop)) + + if loop > 1: + context.recover_variable.assert_called_once() + else: + context.recover_variable.assert_not_called() + + exg_h.hydrate_data.assert_called_once_with(context.variables) + + exclusive_gateway.next.assert_called_once_with(hydrate_data_return) + + Status.objects.finish.assert_called_once_with(exclusive_gateway) + + self.assertEqual(hdl_result.next_node, exclusive_gateway.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + + Status.objects.finish.reset_mock() + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def test_handle__next_raise_exception(self): + hydrate_data_return = "hydrate_data_return" + e = PipelineException("ex_data") + exclusive_gateway = MockExclusiveGateway(next_exception=e) + context = MockContext() + process = MockPipelineProcess(top_pipeline_context=context) + + with patch(EXG_HYDRATE_DATA, MagicMock(return_value=hydrate_data_return)): + hdl_result = handlers.exclusive_gateway_handler(process, exclusive_gateway, MockStatus()) + + exg_h.hydrate_data.assert_called_once_with(context.variables) + + exclusive_gateway.next.assert_called_once_with(hydrate_data_return) + + Status.objects.fail.assert_called_once_with(exclusive_gateway, ex_data=str(e)) + + Status.objects.finish.assert_not_called() + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_executable_end_event_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_executable_end_event_handler.py new file mode 100644 index 00000000..c87ecd5a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_executable_end_event_handler.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.core.flow.event import ExecutableEndEvent +from pipeline.engine.core import handlers +from pipeline.engine.models import Status +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.executable_end_event_handler = handlers.ExecutableEndEventHandler() + + +class ExecutableEndEventHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ExecutableEndEventHandler.element_cls(), ExecutableEndEvent) + + @patch(ENGINE_HANDLERS_END_EVENT_HANDLE, MagicMock(return_value="token_1")) + def test_handle__success(self): + end_event = ExecutableEndEventObject() + process = MockPipelineProcess( + in_subprocess_return=True, root_pipeline=IdentifyObject(), top_pipeline=IdentifyObject() + ) + status = MockStatus() + + hdl_result = handlers.executable_end_event_handler(process, end_event, status) + + self.assertEqual(hdl_result, "token_1") + + end_event.execute.assert_called_once_with( + in_subprocess=process.in_subprocess, + root_pipeline_id=process.root_pipeline.id, + current_pipeline_id=process.top_pipeline.id, + ) + + super_handle = super(handlers.ExecutableEndEventHandler, handlers.executable_end_event_handler).handle + super_handle.assert_called_once_with(process, end_event, status) + + @patch(ENGINE_HANDLERS_END_EVENT_HANDLE, MagicMock()) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + @patch( + "pipeline.engine.core.handlers.endevent.executable_end_event.traceback.format_exc", + MagicMock(return_value="token_2"), + ) + def test_handle__raise_exception(self): + end_event = ExecutableEndEventObject() + end_event.execute = MagicMock(side_effect=Exception) + process = MockPipelineProcess( + in_subprocess_return=True, root_pipeline=IdentifyObject(), top_pipeline=IdentifyObject() + ) + status = MockStatus() + + hdl_result = handlers.executable_end_event_handler(process, end_event, status) + + self.assertIsNone(hdl_result.next_node) + self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + end_event.execute.assert_called_once_with( + in_subprocess=process.in_subprocess, + root_pipeline_id=process.root_pipeline.id, + current_pipeline_id=process.top_pipeline.id, + ) + + self.assertEqual(end_event.data.outputs.ex_data, "token_2") + + Status.objects.fail.assert_called_once_with(end_event, "token_2") + + super_handle = super(handlers.ExecutableEndEventHandler, handlers.executable_end_event_handler).handle + super_handle.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_handlers_factory.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_handlers_factory.py new file mode 100644 index 00000000..760b2c3d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_handlers_factory.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.test import TestCase
+from mock import MagicMock
+
+from pipeline.core.flow.activity import ServiceActivity, SubProcess
+from pipeline.core.flow.event import EmptyEndEvent, EmptyStartEvent, ExecutableEndEvent
+from pipeline.core.flow.gateway import ConditionalParallelGateway, ConvergeGateway, ExclusiveGateway, ParallelGateway
+from pipeline.engine.core.handlers import HandlersFactory
+from pipeline.engine.core.handlers.conditional_parallel import ConditionalParallelGatewayHandler
+from pipeline.engine.core.handlers.converge_gateway import ConvergeGatewayHandler
+from pipeline.engine.core.handlers.empty_start_event import EmptyStartEventHandler
+from pipeline.engine.core.handlers.endevent import EmptyEndEventHandler, ExecutableEndEventHandler
+from pipeline.engine.core.handlers.exclusive_gateway import ExclusiveGatewayHandler
+from pipeline.engine.core.handlers.parallel_gateway import ParallelGatewayHandler
+from pipeline.engine.core.handlers.service_activity import ServiceActivityHandler
+from pipeline.engine.core.handlers.subprocess import SubprocessHandler
+
+
+class CustomEndEventOne(ExecutableEndEvent):
+    def execute(self, in_subprocess, root_pipeline_id, current_pipeline_id):
+        pass
+
+
+class CustomEndEventTwo(ExecutableEndEvent):
+    def execute(self, in_subprocess, root_pipeline_id, current_pipeline_id):
+        pass
+
+
+class EngineHandlerTestCase(TestCase):
+    def setUp(self):
+        self.empty_start_event = EmptyStartEvent(id="1")
+        self.empty_end_event = EmptyEndEvent(id="2")
+        self.service_activity = ServiceActivity(id="3", service=None)
+        self.subprocess = SubProcess(id="4", pipeline=MagicMock())
+        self.exclusive_gateway = ExclusiveGateway(id="5")
+        self.parallel_gateway = ParallelGateway(id="6", converge_gateway_id=None)
+        self.conditional_parallel_gateway = ConditionalParallelGateway(id="7", converge_gateway_id=None)
+        self.converge_gateway = ConvergeGateway(id="8")
+        self.executable_end_event_1 = CustomEndEventOne(id="9")
+        self.executable_end_event_2 = CustomEndEventTwo(id="10")
+
+    def test_handlers_for(self):
+        self.assertIsInstance(HandlersFactory.handlers_for(self.empty_start_event), EmptyStartEventHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.empty_end_event), EmptyEndEventHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.service_activity), ServiceActivityHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.subprocess), SubprocessHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.exclusive_gateway), ExclusiveGatewayHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.parallel_gateway), ParallelGatewayHandler)
+        self.assertIsInstance(
+            HandlersFactory.handlers_for(self.conditional_parallel_gateway), ConditionalParallelGatewayHandler
+        )
+        self.assertIsInstance(HandlersFactory.handlers_for(self.converge_gateway), ConvergeGatewayHandler)
+        self.assertIsInstance(HandlersFactory.handlers_for(self.executable_end_event_1), ExecutableEndEventHandler)
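+        # any subclass of ExecutableEndEvent should resolve to the shared ExecutableEndEventHandler
+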
self.assertIsInstance(HandlersFactory.handlers_for(self.executable_end_event_2), ExecutableEndEventHandler) + + def test_find_cluster_root_cls(self): + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.empty_start_event), EmptyStartEvent) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.empty_end_event), EmptyEndEvent) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.service_activity), ServiceActivity) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.subprocess), SubProcess) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.exclusive_gateway), ExclusiveGateway) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.parallel_gateway), ParallelGateway) + self.assertEqual( + HandlersFactory.find_cluster_root_cls(self.conditional_parallel_gateway), ConditionalParallelGateway + ) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.converge_gateway), ConvergeGateway) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.executable_end_event_1), ExecutableEndEvent) + self.assertEqual(HandlersFactory.find_cluster_root_cls(self.executable_end_event_2), ExecutableEndEvent) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_parallel_gateway_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_parallel_gateway_handler.py new file mode 100644 index 00000000..33e6baf8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_parallel_gateway_handler.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.flow.gateway import ParallelGateway +from pipeline.engine.core import handlers +from pipeline.engine.models import PipelineProcess, Status +from pipeline.exceptions import PipelineException +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.parallel_gateway_handler = handlers.ParallelGatewayHandler() + + +class ParallelGatewayHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ParallelGatewayHandler.element_cls(), ParallelGateway) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + def test_handle__normal(self): + process = MockPipelineProcess() + parallel_gateway = MockParallelGateway() + children = [MockPipelineProcess() for _ in range(len(parallel_gateway.outgoing.all_target_node()))] + + with patch(PIPELINE_PROCESS_FORK_CHILD, MagicMock(side_effect=children)): + hdl_result = handlers.parallel_gateway_handler(process, parallel_gateway, MockStatus()) + + fork_child_calls = [ + mock.call( + parent=process, current_node_id=target.id, destination_id=parallel_gateway.converge_gateway_id + ) + for target in parallel_gateway.outgoing.all_target_node() + ] + PipelineProcess.objects.fork_child.assert_has_calls(fork_child_calls) + + process.join.assert_called_once_with(children) + + Status.objects.finish.assert_called_once_with(parallel_gateway) + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def test_handle__fork_raise_exception(self): + process = MockPipelineProcess() + parallel_gateway = MockParallelGateway() + e_msg = "e_msg" + + with patch(PIPELINE_PROCESS_FORK_CHILD, MagicMock(side_effect=PipelineException(e_msg))): + hdl_result = handlers.parallel_gateway_handler(process, parallel_gateway, MockStatus()) + + PipelineProcess.objects.fork_child.assert_called() + + Status.objects.fail.assert_called_once_with(parallel_gateway, e_msg) + + process.join.assert_not_called() + + Status.objects.finish.assert_not_called() + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_service_act_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_service_act_handler.py new file mode 100644 index 00000000..1503bb37 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_service_act_handler.py @@ -0,0 +1,612 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import itertools + +from django.test import TestCase + +from pipeline.conf import default_settings +from pipeline.core.flow.activity import ServiceActivity +from pipeline.django_signal_valve import valve +from pipeline.engine import signals +from pipeline.engine.core import handlers +from pipeline.engine.core.handlers import service_activity as service_act_h +from pipeline.engine.models import Data, ScheduleService, Status +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.service_activity_handler = handlers.ServiceActivityHandler() + + +class ServiceActivityHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.ServiceActivityHandler.element_cls(), ServiceActivity) + + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_handle__execute_raise_exception_and_not_ignore_error(self): + + for loop, timeout in itertools.product((1, 2), (5, None)): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_exception=Exception(), + error_ignorable=False, + timeout=timeout, + data=MockData(get_one_of_outputs_return=ex_data), + ) + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + service_act.prepare_rerun_data.assert_not_called() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + top_context.extract_output.assert_called_once_with(service_act, set_miss=False) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + service_act.execute.assert_called_once_with(root_pipeline_data) + + self.assertIsNotNone(service_act.data.outputs.ex_data) + + service_act.data.get_one_of_outputs.assert_called_once_with("ex_data") + + Status.objects.fail.assert_called_once_with(service_act, ex_data) + + service_act.failure_handler.assert_called_once_with(process.root_pipeline.data) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=service_act.__class__, node_id=service_act.id, version=status.version + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + valve.send.assert_called_once_with( + signals, + "activity_failed", + sender=process.root_pipeline, + pipeline_id=process.root_pipeline.id, + pipeline_activity_id=service_act.id, + 
subprocess_id_stack=process.subprocess_stack, + ) + + self.assertIsNone(hdl_result.next_node) + self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + # reset mock + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + Status.objects.fail.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + valve.send.reset_mock() + + @patch(PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_handle__execute_raise_exception_and_ignore_error(self): + for loop, timeout, need_schedule, finish_call_success in itertools.product( + (1, 2), (5, None), (True, False), (True, False) + ): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_exception=Exception(), + error_ignorable=True, + timeout=timeout, + need_schedule=need_schedule, + result_bit=False, + data=MockData(get_one_of_outputs_return={"ex_data": ex_data, ServiceActivity.result_bit: False}), + ) + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_STATUS_FINISH, MagicMock(return_value=finish_call_success)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + service_act.prepare_rerun_data.assert_not_called() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + + service_act.execute.assert_called_once_with(root_pipeline_data) + + service_act.ignore_error.assert_called_once() + + self.assertIsNotNone(service_act.data.outputs.ex_data) + + ScheduleService.objects.set_schedule.assert_not_called() + + top_context.extract_output.assert_has_calls( + [mock.call(service_act, set_miss=False), mock.call(service_act)] + ) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=service_act.__class__, node_id=service_act.id, version=status.version + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Status.objects.finish.assert_called_once_with(service_act, True) + + if finish_call_success: + self.assertEqual(hdl_result.next_node, service_act.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + else: + self.assertIsNone(hdl_result.next_node) + 
self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + # reset mock + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + Status.objects.finish.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + valve.send.reset_mock() + + @patch(PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE, MagicMock()) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_handle__execute_return_false_and_not_ignore_error(self): + for loop, timeout in itertools.product((1, 2), (5, None)): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_return=False, + error_ignorable=False, + timeout=timeout, + data=MockData(get_one_of_outputs_return=ex_data), + ) + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + service_act.prepare_rerun_data.assert_not_called() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + top_context.extract_output.assert_called_once_with(service_act, set_miss=False) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + + service_act.execute.assert_called_once_with(root_pipeline_data) + + service_act.data.get_one_of_outputs.assert_called_once_with("ex_data") + + Status.objects.fail.assert_called_once_with(service_act, ex_data) + + service_act.failure_handler.assert_called_once_with(process.root_pipeline.data) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=service_act.__class__, node_id=service_act.id, version=status.version + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + valve.send.assert_called_once_with( + signals, + "activity_failed", + sender=process.root_pipeline, + pipeline_id=process.root_pipeline.id, + pipeline_activity_id=service_act.id, + subprocess_id_stack=process.subprocess_stack, + ) + + self.assertIsNone(hdl_result.next_node) + self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + # reset mock + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + Status.objects.fail.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + valve.send.reset_mock() 
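+
+    # The remaining success-path cases sweep the combination matrix (loop count,
+    # timeout, schedule/retry flags, finish result) with itertools.product, so a
+    # single test method exercises every branch of the handler's result handling.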
+ + @patch(PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_handle__execute_return_true_and_is_ignore_error(self): + for loop, timeout, need_schedule, finish_call_success in itertools.product( + (1, 2), (5, None), (True, False), (True, False) + ): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_return=True, + error_ignorable=True, + timeout=timeout, + need_schedule=need_schedule, + result_bit=False, + data=MockData(get_one_of_outputs_return={"ex_data": ex_data, ServiceActivity.result_bit: False}), + ) + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_STATUS_FINISH, MagicMock(return_value=finish_call_success)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + service_act.prepare_rerun_data.assert_not_called() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + + service_act.execute.assert_called_once_with(root_pipeline_data) + + service_act.ignore_error.assert_not_called() + + service_act.data.set_outputs.assert_not_called() + + ScheduleService.objects.set_schedule.assert_not_called() + + top_context.extract_output.assert_has_calls( + [mock.call(service_act, set_miss=False), mock.call(service_act)] + ) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=service_act.__class__, node_id=service_act.id, version=status.version + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Status.objects.finish.assert_called_once_with(service_act, True) + + if finish_call_success: + self.assertEqual(hdl_result.next_node, service_act.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + else: + self.assertIsNone(hdl_result.next_node) + self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + # reset mock + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + Status.objects.finish.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + valve.send.reset_mock() + + @patch(PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE, MagicMock()) + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + 
@patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + def test_handle__execute_return_true_and_need_schedule(self): + for loop, timeout, error_ignore in itertools.product((1, 2), (5, None), (True, False)): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_return=True, + error_ignorable=error_ignore, + timeout=timeout, + need_schedule=True, + result_bit=True, + data=MockData(get_one_of_outputs_return={"ex_data": ex_data, ServiceActivity.result_bit: False}), + ) + + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + service_act.prepare_rerun_data.assert_not_called() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + + service_act.execute.assert_called_once_with(root_pipeline_data) + + service_act.ignore_error.assert_not_called() + + service_act.data.set_outputs.assert_not_called() + + Data.objects.write_node_data.assert_called_once_with(service_act) + + top_context.extract_output.assert_called_once_with(service_act, set_miss=False) + + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Status.objects.finish.assert_not_called() + + self.assertIsNone(hdl_result.next_node) + self.assertTrue(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + self.assertEqual(hdl_result.after_sleep_call, ScheduleService.objects.set_schedule) + self.assertEqual(hdl_result.args, []) + self.assertEqual( + hdl_result.kwargs, + dict( + activity_id=service_act.id, + service_act=service_act.shell(), + process_id=process.id, + version=status.version, + parent_data=process.top_pipeline.data, + ), + ) + + # reset mock + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + Status.objects.finish.reset_mock() + Data.objects.write_node_data.reset_mock() + ScheduleService.objects.set_schedule.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + + @patch(PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE, MagicMock()) + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + @patch(PIPELINE_STATUS_FINISH, MagicMock()) + @patch(SERVICE_ACT_HYDRATE_NODE_DATA, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_START_SEND, MagicMock()) + @patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, MagicMock()) + def 
test_handle__execute_return_true_and_do_not_need_schedule(self): + for loop, timeout, error_ignore, finish_call_success, on_retry in itertools.product( + (1, 2), (5, None), (True, False), (True, False), (True, False) + ): + root_pipeline_data = "root_pipeline_data" + ex_data = "ex_data" + top_context = MockContext() + + process = MockPipelineProcess(root_pipeline_data=root_pipeline_data, top_pipeline_context=top_context) + service_act = ServiceActObject( + interval=None, + execute_return=True, + error_ignorable=error_ignore, + timeout=timeout, + need_schedule=False, + result_bit=True, + data=MockData(get_one_of_outputs_return={"ex_data": ex_data, ServiceActivity.result_bit: False}), + on_retry=on_retry, + ) + status = MockStatus(loop=loop) + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_STATUS_FINISH, MagicMock(return_value=finish_call_success)): + + hdl_result = handlers.service_activity_handler(process, service_act, status) + + if loop > 1 and not on_retry: + service_act.prepare_rerun_data.assert_called_once() + top_context.recover_variable.assert_called_once() + service_act.retry_at_current_exec.assert_not_called() + else: + service_act.prepare_rerun_data.assert_not_called() + + if on_retry: + service_act.retry_at_current_exec.assert_called_once() + + self.assertEqual( + service_act.data.inputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + self.assertEqual( + service_act.data.outputs._loop, status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET + ) + + service_act_h.hydrate_node_data.assert_called_once() + + if timeout: + signals.service_activity_timeout_monitor_start.send.assert_called_once_with( + sender=service_act.__class__, + node_id=service_act.id, + version=status.version, + root_pipeline_id=process.root_pipeline.id, + countdown=service_act.timeout, + ) + else: + signals.service_activity_timeout_monitor_start.send.assert_not_called() + + service_act.setup_runtime_attrs.assert_called_once_with( + id=service_act.id, root_pipeline_id=process.root_pipeline_id + ) + + service_act.execute.assert_called_once_with(root_pipeline_data) + + service_act.ignore_error.assert_not_called() + + service_act.data.set_outputs.assert_not_called() + + ScheduleService.objects.set_schedule.assert_not_called() + + top_context.extract_output.assert_has_calls( + [mock.call(service_act, set_miss=False), mock.call(service_act)] + ) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=service_act.__class__, node_id=service_act.id, version=status.version + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Status.objects.finish.assert_called_once_with(service_act, False) + + if finish_call_success: + self.assertEqual(hdl_result.next_node, service_act.next()) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + else: + self.assertIsNone(hdl_result.next_node) + self.assertFalse(hdl_result.should_return) + self.assertTrue(hdl_result.should_sleep) + + service_act_h.hydrate_node_data.reset_mock() + signals.service_activity_timeout_monitor_start.send.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + Status.objects.finish.reset_mock() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_subprocess_handler.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_subprocess_handler.py new file mode 100644 index 00000000..11971eeb --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/tests/engine/core/handlers/test_subprocess_handler.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import itertools + +from django.test import TestCase + +from pipeline.core.flow.activity import SubProcess +from pipeline.engine.core import handlers +from pipeline.engine.core.handlers import subprocess as subprocess_h +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +handlers.subprocess_handler = handlers.SubprocessHandler() + + +class SubprocessHandlerTestCase(TestCase): + def test_element_cls(self): + self.assertEqual(handlers.SubprocessHandler.element_cls(), SubProcess) + + @patch(SUBPROCESS_HYDRATE_NODE_DATA, MagicMock()) + def test_handle(self): + for data_inputs, loop in itertools.product(({}, {"k1": "v1", "k2": "v2"}), (1, 2, 3)): + top_context = MockContext() + process = MockPipelineProcess(top_pipeline_context=top_context) + data = MockData(get_inputs_return=data_inputs) + context = MockContext() + subprocess_act = MockSubprocessActivity(pipeline_data=data, pipeline_context=context) + + hdl_result = handlers.subprocess_handler(process, subprocess_act, MockStatus(loop=loop)) + + if loop > 1: + subprocess_act.prepare_rerun_data.assert_called_once() + context.recover_variable.assert_called_once() + top_context.recover_variable.assert_called_once() + else: + subprocess_act.prepare_rerun_data.assert_not_called() + context.recover_variable.assert_not_called() + top_context.recover_variable.assert_not_called() + + top_context.extract_output.assert_called_once_with(subprocess_act, set_miss=False) + + subprocess_h.hydrate_node_data.assert_called_once_with(subprocess_act) + + if data_inputs: + calls = [mock.call(k, v) for k, v in list(data_inputs.items())] + context.set_global_var.assert_has_calls(calls) + + process.push_pipeline.assert_called_once_with(subprocess_act.pipeline, is_subprocess=True) + + process.take_snapshot.assert_called_once() + + self.assertEqual(hdl_result.next_node, subprocess_act.pipeline.start_event) + self.assertFalse(hdl_result.should_return) + self.assertFalse(hdl_result.should_sleep) + + subprocess_h.hydrate_node_data.reset_mock() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_api.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_api.py new file mode 100644 index 00000000..2537f4fd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_api.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import socket + +import mock +from celery import current_app +from django.test import TestCase +from redis.exceptions import ConnectionError + +from pipeline.conf import settings +from pipeline.django_signal_valve import valve +from pipeline.engine.core import api, data +from pipeline.engine.exceptions import RabbitMQConnectionError +from pipeline.engine.models import FunctionSwitch + + +class EngineCoreApiTestCase(TestCase): + @mock.patch("pipeline.engine.models.FunctionSwitch.objects.freeze_engine", mock.MagicMock()) + def test_freeze(self): + api.freeze() + FunctionSwitch.objects.freeze_engine.assert_called_once() + + @mock.patch( + "pipeline.engine.models.FunctionSwitch.objects.unfreeze_engine", mock.MagicMock(), + ) + @mock.patch("pipeline.django_signal_valve.valve.open_valve", mock.MagicMock()) + def test_unfreeze(self): + class MockFrozenProcess(object): + pass + + res = [] + for i in range(10): + p = MockFrozenProcess() + p.unfreeze = mock.MagicMock() + res.append(p) + + def mock_process_filter(*args, **kwargs): + return res + + with mock.patch("pipeline.engine.models.PipelineProcess.objects.filter", mock_process_filter): + api.unfreeze() + FunctionSwitch.objects.unfreeze_engine.assert_called_once() + valve.open_valve.assert_called_once() + for mock_process in res: + mock_process.unfreeze.assert_called_once() + + @mock.patch("celery.current_app.control.ping", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.expire_cache", mock.MagicMock()) + def test_workers(self): + + # throw error + def throw_conn_error(*args, **kwargs): + raise ConnectionError() + + with mock.patch("pipeline.engine.core.data.cache_for", throw_conn_error): + self.assertRaises(ConnectionError, api.workers) + + # cache situation + def return_worker_list(*args, **kwargs): + return ["worker-1", "worker-2"] + + with mock.patch("pipeline.engine.core.data.cache_for", return_worker_list): + worker = api.workers() + self.assertEqual(worker, return_worker_list()) + current_app.control.ping.assert_not_called() + data.expire_cache.assert_not_called() + + # no cache + def return_none(*args, **kwargs): + return None + + with mock.patch("pipeline.engine.core.data.cache_for", return_none): + # no workers + + def no_workers(*args, **kwargs): + return [] + + current_app.control.ping.reset_mock() + data.expire_cache.reset_mock() + + with mock.patch("celery.current_app.control.ping", no_workers): + worker = api.workers() + self.assertEqual(worker, no_workers()) + data.expire_cache.assert_not_called() + + # has workers + + def two_workers(*args, **kwargs): + return ["w1", "w2"] + + current_app.control.ping.reset_mock() + data.expire_cache.reset_mock() + + with mock.patch("celery.current_app.control.ping", two_workers): + worker = api.workers() + self.assertEqual(worker, two_workers()) + data.expire_cache.assert_called_with( + "__pipeline__workers__", two_workers(), settings.PIPELINE_WORKER_STATUS_CACHE_EXPIRES, + ) + + # raise exception + + def raise_mq_conn_error(*args, **kwargs): + raise socket.error() + + current_app.control.ping.reset_mock() + data.expire_cache.reset_mock() + + with mock.patch("celery.current_app.control.ping", 
raise_mq_conn_error): + self.assertRaises(RabbitMQConnectionError, api.workers) + data.expire_cache.assert_not_called() + + # retry test + ping_mock = mock.MagicMock(side_effect=[[], two_workers()]) + + with mock.patch("celery.current_app.control.ping", ping_mock): + worker = api.workers() + self.assertEqual(worker, two_workers()) + ping_mock.assert_has_calls([mock.call(timeout=1), mock.call(timeout=2)]) + data.expire_cache.assert_called_with( + "__pipeline__workers__", two_workers(), settings.PIPELINE_WORKER_STATUS_CACHE_EXPIRES, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_runtime.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_runtime.py new file mode 100644 index 00000000..beb31dd7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_runtime.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine import states +from pipeline.engine.core import runtime +from pipeline.engine.models import FunctionSwitch, NodeRelationship, Status +from pipeline.tests.engine.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +PIPELINE_BUILD_RELATIONSHIP = "pipeline.engine.models.NodeRelationship.objects.build_relationship" +PIPELINE_STATUS_TRANSIT = "pipeline.engine.models.Status.objects.transit" +PIPELINE_ENGINE_IS_FROZEN = "pipeline.engine.models.FunctionSwitch.objects.is_frozen" +PIPELINE_SETTING_RERUN_MAX_LIMIT = "pipeline.engine.core.runtime.RERUN_MAX_LIMIT" + + +class RuntimeTestCase(TestCase): + def test_runtime_exception_handler(self): + process = MockPipelineProcess() + process.exit_gracefully = MagicMock() + e = Exception() + + # raise case + with runtime.runtime_exception_handler(process): + raise e + + process.exit_gracefully.assert_called_with(e) + + process.exit_gracefully.reset_mock() + + # normal case + with runtime.runtime_exception_handler(process): + pass + + process.exit_gracefully.assert_not_called() + + @patch(PIPELINE_BUILD_RELATIONSHIP, MagicMock()) + @patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_SETTING_RERUN_MAX_LIMIT, 0) + def test_run_loop(self): + # 1. 
test child meets destination + destination_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=destination_node), + destination_id=destination_node.id, + current_node_id=destination_node.id, + ) + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_called_with(destination_node.id) + + process.root_sleep_check.assert_not_called() + + process.sleep.assert_not_called() + + process.subproc_sleep_check.assert_not_called() + + FunctionSwitch.objects.is_frozen.assert_not_called() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_not_called() + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, destination_node.id) + + # 2. test root sleep check return true + + # 2.1. root pipeline is revoked + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(True, states.REVOKED)) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.sleep.assert_called_once() + process.sleep.assert_called_with(do_not_save=True) + + process.subproc_sleep_check.assert_not_called() + + FunctionSwitch.objects.is_frozen.assert_not_called() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_not_called() + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + # 2.2. root pipeline is not revoked + for state in states.SLEEP_STATES.difference({states.REVOKED}): + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(True, state)) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.sleep.assert_called_once_with(do_not_save=False) + + process.subproc_sleep_check.assert_not_called() + + FunctionSwitch.objects.is_frozen.assert_not_called() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_not_called() + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + # 3. 
test sub process sleep check return true + current_node = IdentifyObject() + subproc_above = uniqid() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(True, subproc_above)) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + process.sleep.assert_called_once_with(adjust_status=True, adjust_scope=subproc_above) + + FunctionSwitch.objects.is_frozen.assert_not_called() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_not_called() + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + # 4. test engine is frozen + with patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=True)): + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + process.sleep.assert_not_called() + + FunctionSwitch.objects.is_frozen.assert_called_once() + + process.freeze.assert_called_once() + + Status.objects.transit.assert_not_called() + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + FunctionSwitch.objects.is_frozen.reset_mock() + + # 5. test transit fail + with patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))): + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + FunctionSwitch.objects.is_frozen.assert_called_once() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_called_with( + id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__) + ) + + process.sleep.assert_called_once_with(adjust_status=True) + + process.refresh_current_node.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + FunctionSwitch.objects.is_frozen.reset_mock() + Status.objects.transit.reset_mock() + + # 6. test normal + hdl = MagicMock(return_value=MockHandlerResult(should_return=True, should_sleep=False)) + + with patch("pipeline.engine.core.runtime.HandlersFactory.handlers_for", MagicMock(return_value=hdl)): + # 6.1. 
test should return + current_node = IdentifyObject(name="name") + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + FunctionSwitch.objects.is_frozen.assert_called_once() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_called_with( + id=current_node.id, to_state=states.RUNNING, start=True, name=current_node.name + ) + + process.refresh_current_node.assert_called_once_with(current_node.id) + + NodeRelationship.objects.build_relationship.assert_called_once_with( + process.top_pipeline.id, current_node.id + ) + + hdl.assert_called_once_with(process, current_node, None) + + process.sleep.assert_not_called() + + self.assertEqual(process.current_node_id, current_node.id) + + FunctionSwitch.objects.is_frozen.reset_mock() + Status.objects.transit.reset_mock() + NodeRelationship.objects.build_relationship.reset_mock() + hdl.reset_mock() + + # 6.2. test should sleep + for should_return in (False, True): + hdl.return_value = MockHandlerResult( + should_return=should_return, + should_sleep=True, + after_sleep_call=MagicMock(), + args=["token1", "token2"], + kwargs={"kwargs": "token3"}, + ) + + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), + destination_id=uniqid(), + current_node_id=current_node.id, + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + FunctionSwitch.objects.is_frozen.assert_called_once() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_called_with( + id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__) + ) + + process.refresh_current_node.assert_called_once_with(current_node.id) + + NodeRelationship.objects.build_relationship.assert_called_once_with( + process.top_pipeline.id, current_node.id + ) + + hdl.assert_called_once_with(process, current_node, None) + + process.sleep.assert_called_once_with(adjust_status=True) + + hdl.return_value.after_sleep_call.assert_called_once_with("token1", "token2", kwargs="token3") + + self.assertEqual(process.current_node_id, current_node.id) + + FunctionSwitch.objects.is_frozen.reset_mock() + Status.objects.transit.reset_mock() + NodeRelationship.objects.build_relationship.reset_mock() + hdl.reset_mock() + + # 6.3. 
test execute 3 node and return + nodes = [IdentifyObject(), IdentifyObject(), IdentifyObject()] + hdl.return_value = None + hdl.side_effect = [ + MockHandlerResult(should_return=False, should_sleep=False, next_node=nodes[0]), + MockHandlerResult(should_return=False, should_sleep=False, next_node=nodes[1]), + MockHandlerResult(should_return=True, should_sleep=True, next_node=nodes[2]), + ] + + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject( + nodes={ + current_node.id: current_node, + nodes[0].id: nodes[0], + nodes[1].id: nodes[1], + nodes[2].id: nodes[2], + } + ), + destination_id=uniqid(), + current_node_id=current_node.id, + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_has_calls([mock.call(), mock.call(), mock.call()]) + + process.subproc_sleep_check.assert_has_calls([mock.call(), mock.call(), mock.call()]) + + FunctionSwitch.objects.is_frozen.assert_has_calls([mock.call(), mock.call(), mock.call()]) + + process.freeze.assert_not_called() + + Status.objects.transit.assert_has_calls( + [ + mock.call( + id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__) + ), + mock.call(id=nodes[0].id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)), + mock.call(id=nodes[1].id, to_state=states.RUNNING, start=True, name=str(current_node.__class__)), + ] + ) + + process.refresh_current_node.assert_has_calls( + [mock.call(current_node.id), mock.call(nodes[0].id), mock.call(nodes[1].id)] + ) + + NodeRelationship.objects.build_relationship.assert_has_calls( + [ + mock.call(process.top_pipeline.id, current_node.id), + mock.call(process.top_pipeline.id, nodes[0].id), + mock.call(process.top_pipeline.id, nodes[1].id), + ] + ) + + hdl.assert_has_calls( + [ + mock.call(process, current_node, None), + mock.call(process, nodes[0], None), + mock.call(process, nodes[1], None), + ] + ) + + process.sleep.assert_called_once_with(adjust_status=True) + + self.assertEqual(process.current_node_id, nodes[1].id) + + @patch(PIPELINE_BUILD_RELATIONSHIP, MagicMock()) + @patch(PIPELINE_ENGINE_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True, extra=MockStatus(loop=11)))) + @patch(PIPELINE_STATUS_FAIL, MagicMock()) + def __fail_with_node_reach_run_limit(self): + with patch(PIPELINE_SETTING_RERUN_MAX_LIMIT, 10): + current_node = IdentifyObject() + process = MockPipelineProcess( + top_pipeline=PipelineObject(node=current_node), destination_id=uniqid(), current_node_id=current_node.id + ) + process.root_sleep_check = MagicMock(return_value=(False, states.RUNNING)) + process.subproc_sleep_check = MagicMock(return_value=(False, [])) + + runtime.run_loop(process) + + process.destroy_and_wake_up_parent.assert_not_called() + + process.root_sleep_check.assert_called() + + process.subproc_sleep_check.assert_called() + + FunctionSwitch.objects.is_frozen.assert_called_once() + + process.freeze.assert_not_called() + + Status.objects.transit.assert_called_with( + id=current_node.id, to_state=states.RUNNING, start=True, name=str(current_node.__class__) + ) + + Status.objects.fail.assert_called_once_with(current_node, "rerun times exceed max limit: 10") + + process.sleep.assert_called_once_with(adjust_status=True) + + 
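# the transit mock yields loop=11 while RERUN_MAX_LIMIT is patched to 10, so the node is failed before any current node refresh +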
process.refresh_current_node.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_schedule.py b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_schedule.py new file mode 100644 index 00000000..3feac0e3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/core/test_schedule.py @@ -0,0 +1,794 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import itertools + +from django.test import TestCase +from mock import call + +from pipeline.django_signal_valve import valve +from pipeline.engine import signals +from pipeline.engine.core import schedule +from pipeline.engine.models import Data, PipelineProcess, ScheduleService, Status +from pipeline.tests.engine.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + +PARENT_DATA = "PARENT_DATA" + + +class ScheduleTestCase(TestCase): + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock()) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + def test_schedule_exception_handler__no_raise(self): + # no raise + process_id = uniqid() + schedule_id = "{}{}".format(uniqid(), uniqid()) + with schedule.schedule_exception_handler(process_id, schedule_id): + pass + + Status.objects.filter.assert_not_called() + PipelineProcess.objects.get.assert_not_called() + schedule.delete_parent_data.assert_not_called() + + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock()) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + def test_schedule_exception_handler__raise_and_find_corresponding_status(self): + # raise and find corresponding status + e = Exception() + process = MockPipelineProcess() + process_id = uniqid() + schedule_id = "{}{}".format(uniqid(), uniqid()) + with mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process)): + with schedule.schedule_exception_handler(process_id, schedule_id): + raise e + + Status.objects.filter.assert_called_once_with(id=schedule_id[:32], version=schedule_id[32:]) + + process.exit_gracefully.assert_called_once_with(e) + + schedule.delete_parent_data.assert_called_once_with(schedule_id) + + schedule.delete_parent_data.reset_mock() + + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=False))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + def test_schedule_exception_handler__raise_and_not_find_corresponding_status(self): + e = Exception() + process = MockPipelineProcess() + process_id = uniqid() + schedule_id = "{}{}".format(uniqid(), uniqid()) + with mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process)): + with schedule.schedule_exception_handler(process_id, 
schedule_id): + raise e + + Status.objects.filter.assert_called_once_with(id=schedule_id[:32], version=schedule_id[32:]) + + PipelineProcess.objects.get.assert_not_called() + + process.exit_gracefully.assert_not_called() + + schedule.delete_parent_data.assert_called_once_with(schedule_id) + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=False))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + def test_schedule_can_not_find_status(self): + mock_ss = MockScheduleService() + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.destroy.assert_called_once() + + schedule.delete_parent_data.assert_not_called() + + # reset mock + mock_ss.destroy.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + def test_schedule__can_not_get_schedule_parent_data(self): + mock_ss = MockScheduleService() + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + with mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=None)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.destroy.assert_not_called() + + mock_ss.service_act.schedule.assert_not_called() + + schedule.delete_parent_data.assert_called_with(mock_ss.id) + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=False))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + def test_schedule__schedule_return_fail_and_transit_fail(self): + process = MockPipelineProcess() + mock_ss = MockScheduleService(schedule_return=False) + + with mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process)): + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + schedule.set_schedule_data.assert_called_once_with(mock_ss.id, PARENT_DATA) + + mock_ss.destroy.assert_called_once() + + Data.objects.write_node_data.assert_not_called() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + 
@mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + def test_schedule__schedule_return_fail_and_transit_success(self): + for timeout in (True, False): + process = MockPipelineProcess() + mock_ss = MockScheduleService(schedule_return=False, service_timeout=timeout) + with mock.patch( + PIPELINE_PROCESS_SELECT_FOR_UPDATE, mock.MagicMock(return_value=MockQuerySet(get_return=process)) + ): + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + schedule.set_schedule_data.assert_called_once_with(mock_ss.id, PARENT_DATA) + + mock_ss.destroy.assert_not_called() + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called_once_with(mock_ss.service_act, ex_data=None) + + process.adjust_status.assert_called_once() + + mock_ss.service_act.schedule_fail.assert_called_once() + + signals.service_schedule_fail.send.assert_called_with( + sender=ScheduleService, + activity_shell=mock_ss.service_act, + schedule_service=mock_ss, + ex_data=None, + ) + + valve.send.assert_called_once_with( + signals, + "activity_failed", + sender=process.root_pipeline, + pipeline_id=process.root_pipeline_id, + pipeline_activity_id=mock_ss.service_act.id, + subprocess_id_stack=process.subprocess_stack, + ) + + # reset mock + schedule.set_schedule_data.reset_mock() + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + signals.service_schedule_fail.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + def test_schedule__schedule_raise_exception_and_transit_fail(self): + e = Exception() + mock_ss = MockScheduleService(schedule_exception=e) + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + # 3.1.1. 
transit fail + with mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=False))): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + mock_ss.destroy.assert_called_once() + + Data.objects.write_node_data.assert_not_called() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + def test_schedule__schedule_raise_exception_and_transit_success(self): + for timeout in (True, False): + e = Exception() + mock_ss = MockScheduleService(schedule_exception=e, service_timeout=timeout) + process = MockPipelineProcess() + + with mock.patch( + PIPELINE_PROCESS_SELECT_FOR_UPDATE, mock.MagicMock(return_value=MockQuerySet(get_return=process)) + ): + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + mock_ss.destroy.assert_not_called() + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called() + + process.adjust_status.assert_called_once() + + mock_ss.service_act.schedule_fail.assert_called_once() + + signals.service_schedule_fail.send.assert_called() + + valve.send.assert_called_once_with( + signals, + "activity_failed", + sender=process.root_pipeline, + pipeline_id=process.root_pipeline_id, + pipeline_activity_id=mock_ss.service_act.id, + subprocess_id_stack=process.subprocess_stack, + ) + + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + signals.service_schedule_fail.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) 
+ @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + def test_schedule__schedule_raise_exception_and_process_is_not_alive(self): + for timeout in (True, False): + e = Exception() + mock_ss = MockScheduleService(schedule_exception=e, service_timeout=timeout) + process = MockPipelineProcess(is_alive=False) + + with mock.patch( + PIPELINE_PROCESS_SELECT_FOR_UPDATE, mock.MagicMock(return_value=MockQuerySet(get_return=process)) + ): + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + mock_ss.destroy.assert_not_called() + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called() + + process.adjust_status.assert_not_called() + + mock_ss.service_act.schedule_fail.assert_not_called() + + signals.service_schedule_fail.send.assert_not_called() + + valve.send.assert_not_called() + + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + signals.service_schedule_fail.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_SUCCESS_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + def test_schedule__schedule_raise_exception_and_ignore_error_and_transit_success(self): + parent_data_return = "data" + + for timeout, process_alive in itertools.product((True, False), (True, False)): + mock_ss = MockScheduleService( + schedule_exception=Exception(), + service_timeout=timeout, + service_err_ignore=True, + schedule_done=True, + result_bit=False, + ) + mock_context = MockContext() + mock_status = MockEngineModelStatus(error_ignorable=False) + mock_top_pipeline_data = MockData() + process = MockPipelineProcess( + is_alive=process_alive, top_pipeline_data=mock_top_pipeline_data, top_pipeline_context=mock_context + ) + mock_parent_data = MockData(get_outputs_return=parent_data_return) + + with mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=mock_parent_data)): + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + + with mock.patch(PIPELINE_STATUS_GET, mock.MagicMock(return_value=mock_status)): + + with mock.patch( + PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process), + 
): + + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with( + mock_parent_data, mock_ss.callback_data + ) + + mock_ss.service_act.ignore_error.assert_called_once() + + mock_ss.service_act.finish_schedule.assert_called_once() + + self.assertEqual(mock_ss.schedule_times, 1) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called_once_with(mock_ss.service_act) + + self.assertTrue(mock_status.error_ignorable) + mock_status.save.assert_called_once() + + if not process_alive: + mock_ss.destroy.assert_called_once() + + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + + continue + else: + mock_ss.destroy.assert_not_called() + + process.top_pipeline.data.update_outputs.assert_called_once_with(parent_data_return) + + mock_context.extract_output.assert_called_once_with(mock_ss.service_act) + + process.save.assert_called_once() + + schedule.delete_parent_data.assert_called_once_with(mock_ss.id) + + mock_ss.finish.assert_called_once() + + signals.service_schedule_success.send.assert_called_once_with( + sender=ScheduleService, activity_shell=mock_ss.service_act, schedule_service=mock_ss + ) + + valve.send.assert_called_once_with( + signals, + "wake_from_schedule", + sender=ScheduleService, + process_id=mock_ss.process_id, + activity_id=mock_ss.activity_id, + ) + + # reset mock + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + schedule.delete_parent_data.reset_mock() + signals.service_schedule_success.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=False))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + def test_schedule__schedule_return_success_and_wait_callback_but_transit_fail(self): + for timeout in (True, False): + mock_ss = MockScheduleService( + schedule_return=True, service_timeout=timeout, wait_callback=True, result_bit=True + ) + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + mock_ss.destroy.assert_called_once() + + Data.objects.write_node_data.assert_not_called() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, 
mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_SUCCESS_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + def test_schedule__schedule_return_success_and_wait_callback_and_transit_success(self): + parent_data_return = "data" + + for timeout, result_bit, process_alive, schedule_return in itertools.product( + (True, False), (True, False), (True, False), (True, None) + ): + mock_ss = MockScheduleService( + schedule_return=schedule_return, + service_timeout=timeout, + wait_callback=True, + multi_callback_enabled=False, + result_bit=result_bit, + ) + mock_context = MockContext() + mock_status = MockEngineModelStatus(error_ignorable=False) + mock_top_pipeline_data = MockData() + process = MockPipelineProcess( + is_alive=process_alive, top_pipeline_data=mock_top_pipeline_data, top_pipeline_context=mock_context + ) + mock_parent_data = MockData(get_outputs_return=parent_data_return) + + with mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=mock_parent_data)): + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + + with mock.patch(PIPELINE_STATUS_GET, mock.MagicMock(return_value=mock_status)): + + with mock.patch( + PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process), + ): + + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with( + mock_parent_data, mock_ss.callback_data + ) + + self.assertEqual(mock_ss.schedule_times, 1) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called_once_with(mock_ss.service_act) + + if not result_bit: + self.assertTrue(mock_status.error_ignorable) + mock_status.save.assert_called_once() + else: + self.assertFalse(mock_status.error_ignorable) + mock_status.save.assert_not_called() + + if not process_alive: + mock_ss.destroy.assert_called_once() + + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + + continue + else: + mock_ss.destroy.assert_not_called() + + process.top_pipeline.data.update_outputs.assert_called_once_with(parent_data_return) + + mock_context.extract_output.assert_called_once_with(mock_ss.service_act) + + process.save.assert_called_once() + + schedule.delete_parent_data.assert_called_once_with(mock_ss.id) + + mock_ss.finish.assert_called_once() + + signals.service_schedule_success.send.assert_called_once_with( + sender=ScheduleService, activity_shell=mock_ss.service_act, schedule_service=mock_ss + ) + + valve.send.assert_called_once_with( + signals, + "wake_from_schedule", + sender=ScheduleService, + process_id=mock_ss.process_id, + activity_id=mock_ss.activity_id, + ) + + # reset mock + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + schedule.delete_parent_data.reset_mock() + 
signals.service_schedule_success.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=True))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_TIMEOUT_END_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND, mock.MagicMock()) + @mock.patch(ENGINE_SIGNAL_ACT_SCHEDULE_SUCCESS_SEND, mock.MagicMock()) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + def test_schedule__schedule_return_success_and_finished(self): + parent_data_return = "data" + + for timeout, result_bit, process_alive in itertools.product((True, False), (True, False), (True, False)): + mock_ss = MockScheduleService( + schedule_return=True, service_timeout=timeout, schedule_done=True, result_bit=result_bit + ) + mock_context = MockContext() + mock_status = MockEngineModelStatus(error_ignorable=False) + mock_top_pipeline_data = MockData() + process = MockPipelineProcess( + is_alive=process_alive, top_pipeline_data=mock_top_pipeline_data, top_pipeline_context=mock_context + ) + + mock_parent_data = MockData(get_outputs_return=parent_data_return) + with mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process)): + with mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=mock_parent_data)): + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + + with mock.patch(PIPELINE_STATUS_GET, mock.MagicMock(return_value=mock_status)): + + with mock.patch( + PIPELINE_PROCESS_GET, mock.MagicMock(return_value=process), + ): + + schedule.schedule(process.id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with( + mock_parent_data, mock_ss.callback_data + ) + + self.assertEqual(mock_ss.schedule_times, 1) + + if timeout: + signals.service_activity_timeout_monitor_end.send.assert_called_once_with( + sender=mock_ss.service_act.__class__, + node_id=mock_ss.service_act.id, + version=mock_ss.version, + ) + else: + signals.service_activity_timeout_monitor_end.send.assert_not_called() + + Data.objects.write_node_data.assert_called_once_with(mock_ss.service_act) + + if not result_bit: + self.assertTrue(mock_status.error_ignorable) + mock_status.save.assert_called_once() + else: + self.assertFalse(mock_status.error_ignorable) + mock_status.save.assert_not_called() + + if not process_alive: + mock_ss.destroy.assert_called_once() + + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + + continue + else: + mock_ss.destroy.assert_not_called() + + process.top_pipeline.data.update_outputs.assert_called_once_with(parent_data_return) + + mock_context.extract_output.assert_called_once_with(mock_ss.service_act) + + process.save.assert_called_once() + + schedule.delete_parent_data.assert_called_once_with(mock_ss.id) + + mock_ss.finish.assert_called_once() + + signals.service_schedule_success.send.assert_called_once_with( + sender=ScheduleService, activity_shell=mock_ss.service_act, schedule_service=mock_ss + ) + + valve.send.assert_called_once_with( + signals, + "wake_from_schedule", + sender=ScheduleService, + process_id=mock_ss.process_id, + 
activity_id=mock_ss.activity_id, + ) + + # reset mock + signals.service_activity_timeout_monitor_end.send.reset_mock() + Data.objects.write_node_data.reset_mock() + schedule.delete_parent_data.reset_mock() + signals.service_schedule_success.send.reset_mock() + valve.send.reset_mock() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock(return_value=MockActionResult(result=False))) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + @mock.patch(SCHEDULE_DELETE_PARENT_DATA, mock.MagicMock()) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + def test_schedule__schedule_return_success_and_need_next_schedule(self): + mock_ss = MockScheduleService(schedule_return=True, result_bit=True) + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.service_act.schedule.assert_called_once_with(PARENT_DATA, mock_ss.callback_data) + + self.assertEqual(mock_ss.schedule_times, 1) + + schedule.set_schedule_data.assert_called_once_with(mock_ss.id, PARENT_DATA) + + mock_ss.set_next_schedule.assert_called_once() + + Data.objects.write_node_data.assert_called_once() + + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_PROCESS_GET, mock.MagicMock(return_value=MockPipelineProcess())) + @mock.patch(SCHEDULE_GET_SCHEDULE_PARENT_DATA, mock.MagicMock(return_value=PARENT_DATA)) + @mock.patch(SCHEDULE_SET_SCHEDULE_DATA, mock.MagicMock()) + @mock.patch(PIPELINE_DATA_WRITE_NODE_DATA, mock.MagicMock()) + def test_schedule__schedule_return_success_and_wait_multi_callback(self): + mock_ss = MockScheduleService( + schedule_return=True, wait_callback=True, schedule_done=False, multi_callback_enabled=True, result_bit=True + ) + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule_calls, set_schedule_data_calls = [], [] + for schedule_times in range(1, 5): + + schedule.schedule(process_id, mock_ss.id) + + schedule_calls.append(call(PARENT_DATA, mock_ss.callback_data)) + mock_ss.service_act.schedule.assert_has_calls(schedule_calls) + + self.assertEqual(mock_ss.schedule_times, schedule_times) + + set_schedule_data_calls.append(call(mock_ss.id, PARENT_DATA)) + schedule.set_schedule_data.assert_has_calls(set_schedule_data_calls) + + mock_ss.save.assert_called() + mock_ss.set_next_schedule.assert_not_called() + + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=False))) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + def test_schedule__status_is_not_running(self): + mock_ss = MockScheduleService( + schedule_return=True, + wait_callback=False, + schedule_done=False, + multi_callback_enabled=False, + result_bit=True, + ) + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + 
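# no RUNNING status matches this schedule, so schedule() should destroy the schedule service without sending any signal +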
mock_ss.destroy.assert_called_once() + valve.send.assert_not_called() + + @mock.patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(PIPELINE_SCHEDULE_SERVICE_FILTER, mock.MagicMock(return_value=MockQuerySet(update_return=0))) + @mock.patch(SIGNAL_VALVE_SEND, mock.MagicMock()) + def test_schedule__get_lock_failed_and_multi_callback_not_enabled(self): + mock_ss = MockScheduleService( + schedule_return=True, + wait_callback=False, + schedule_done=False, + multi_callback_enabled=False, + result_bit=True, + ) + + with mock.patch(PIPELINE_SCHEDULE_SERVICE_GET, mock.MagicMock(return_value=mock_ss)): + process_id = uniqid() + + schedule.schedule(process_id, mock_ss.id) + + mock_ss.destroy.assert_not_called() + valve.send.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/health/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/health/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/health/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_doctors.py b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_doctors.py new file mode 100644 index 00000000..f4415107 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_doctors.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from datetime import timedelta + +from django.test import TestCase +from django.utils import timezone + +from pipeline.core.pipeline import Pipeline +from pipeline.engine import signals +from pipeline.engine.health.zombie.doctors import RunningNodeZombieDoctor +from pipeline.engine.models import Status +from pipeline.tests.engine.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +class RunningNodeZombieDoctorTestCase(TestCase): + def test_confirm__proc_current_node_id_is_none(self): + doctor = RunningNodeZombieDoctor(1) + proc = MagicMock() + proc.current_node_id = None + self.assertFalse(doctor.confirm(proc)) + + @patch(PIPELINE_STATUS_GET, MagicMock(side_effect=Status.DoesNotExist)) + def test_confirm__status_not_exist(self): + doctor = RunningNodeZombieDoctor(1) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + + def test_confirm__status_refresh_at_is_none(self): + doctor = RunningNodeZombieDoctor(1) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + status = MagicMock() + status.state_refresh_at = None + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + self.assertFalse(doctor.confirm(proc)) + + def test_confirm__status_is_not_running(self): + doctor = RunningNodeZombieDoctor(1) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + status = MagicMock() + status.state = "FINISHED" + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + self.assertFalse(doctor.confirm(proc)) + + def test_confirm__not_detect_schedule_wait_callback(self): + doctor = RunningNodeZombieDoctor(1) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + status = MagicMock() + status.state = "RUNNING" + schedule = MagicMock() + schedule.wait_callback = True + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=schedule)): + self.assertFalse(doctor.confirm(proc)) + + def test_confirm__detect_schedule_wait_callback(self): + doctor = RunningNodeZombieDoctor(1, True) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + status = MagicMock() + status.state = "RUNNING" + schedule = MagicMock() + schedule.wait_callback = True + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=schedule)): + self.assertFalse(doctor.confirm(proc)) + + def test_confirm__detect_schedule_wait_callback_overtime(self): + doctor = RunningNodeZombieDoctor(1, True) + proc = MagicMock() + self.assertFalse(doctor.confirm(proc)) + status = MagicMock() + status.state = "RUNNING" + status.state_refresh_at = timezone.now() - timedelta(seconds=2) + schedule = MagicMock() + schedule.wait_callback = True + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=schedule)): + self.assertTrue(doctor.confirm(proc)) + + def test_confirm__stuck_time_less_than_max_stuck_time(self): + doctor = 
RunningNodeZombieDoctor(100)
+ proc = MagicMock()
+ self.assertFalse(doctor.confirm(proc))
+ status = MagicMock()
+ # the node must be RUNNING so that the stuck-time branch is actually exercised
+ status.state = "RUNNING"
+ status.state_refresh_at = timezone.now() - timedelta(seconds=2)
+ schedule = MagicMock()
+ schedule.wait_callback = False
+ with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)):
+ with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=schedule)):
+ self.assertFalse(doctor.confirm(proc))
+
+ def test_confirm(self):
+ doctor = RunningNodeZombieDoctor(1)
+ proc = MagicMock()
+ self.assertFalse(doctor.confirm(proc))
+ status = MagicMock()
+ status.state = "RUNNING"
+ status.state_refresh_at = timezone.now() - timedelta(seconds=2)
+ schedule = MagicMock()
+ schedule.wait_callback = False
+ with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)):
+ with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=schedule)):
+ # a RUNNING node stuck for longer than max_stuck_time should be confirmed as a zombie
+ self.assertTrue(doctor.confirm(proc))
+
+ @patch(PIPELINE_STATUS_RAW_FAIL, MagicMock(side_effect=Exception))
+ def test_cure__raw_fail_raise(self):
+ doctor = RunningNodeZombieDoctor(1)
+ proc = MagicMock()
+ doctor.cure(proc)
+
+ def test_cure__raw_fail_failed(self):
+ doctor = RunningNodeZombieDoctor(1)
+ proc = MagicMock()
+ result = MagicMock()
+ result.result = False
+
+ with patch(PIPELINE_STATUS_RAW_FAIL, MagicMock(return_value=result)):
+ doctor.cure(proc)
+
+ @mock.patch(ENGINE_ACTIVITY_FAIL_SIGNAL, mock.MagicMock())
+ def test_cure(self):
+ doctor = RunningNodeZombieDoctor(1)
+ proc = MagicMock()
+ proc.id = "proc"
+ proc.is_sleep = False
+ proc.root_pipeline_id = uniqid()
+ proc.current_node_id = uniqid()
+ status = MagicMock()
+ status.version = None
+ revoke = MagicMock()
+ result = MagicMock()
+ result.result = True
+ result.extra = status
+
+ with patch(PIPELINE_STATUS_RAW_FAIL, MagicMock(return_value=result)):
+ with patch(PIPELINE_CELERYTASK_REVOKE, revoke):
+ doctor.cure(proc)
+
+ self.assertIsNotNone(status.version)
+ status.save.assert_called_once()
+ revoke.assert_called_once_with(proc.id, kill=True)
+ proc.adjust_status.assert_called_once()
+ self.assertTrue(proc.is_sleep)
+ proc.save.assert_called_once()
+ signals.activity_failed.send.assert_called_with(
+ sender=Pipeline, pipeline_id=proc.root_pipeline_id, pipeline_activity_id=proc.current_node_id
+ )
diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_heal.py b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_heal.py
new file mode 100644
index 00000000..3baca06b
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/engine/health/zombie/test_heal.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+""" + +from django.test import TestCase +from mock import MagicMock, patch + +from pipeline.engine.health.zombie.doctors import ZombieProcDoctor +from pipeline.engine.health.zombie.heal import DummyZombieProcHealer, ZombieProcHealer, get_healer +from pipeline.tests.mock_settings import * # noqa + + +class HealTestCase(TestCase): + def test_get_healer__empty_dr_settings(self): + settings = MagicMock() + settings.ENGINE_ZOMBIE_PROCESS_DOCTORS = None + with patch(ENGINE_HEALTH_ZOMBIE_HEAL_DEFAULT_SETTINGS, settings): + healer = get_healer() + + self.assertIsInstance(healer, DummyZombieProcHealer) + + def test_get_healer__doctors_init_all_failed(self): + settings = MagicMock() + settings.ENGINE_ZOMBIE_PROCESS_DOCTORS = [{}, {}] + with patch(ENGINE_HEALTH_ZOMBIE_HEAL_DEFAULT_SETTINGS, settings): + healer = get_healer() + + self.assertIsInstance(healer, DummyZombieProcHealer) + + def test_get_healer(self): + settings = MagicMock() + settings.ENGINE_ZOMBIE_PROCESS_DOCTORS = [ + { + "class": "pipeline.engine.health.zombie.doctors.RunningNodeZombieDoctor", + "config": {"max_stuck_time": 30}, + }, + { + "class": "pipeline.engine.health.zombie.doctors.RunningNodeZombieDoctor", + "config": {"max_stuck_time": 15}, + }, + {"class": "not_exist_class", "config": {"whatever": "whatever"}}, + ] + with patch(ENGINE_HEALTH_ZOMBIE_HEAL_DEFAULT_SETTINGS, settings): + healer = get_healer() + + self.assertIsInstance(healer, ZombieProcHealer) + self.assertEqual(len(healer.doctors), 2) + self.assertIsInstance(healer.doctors[0], ZombieProcDoctor) + self.assertIsInstance(healer.doctors[1], ZombieProcDoctor) + self.assertEqual(healer.doctors[0].max_stuck_time, 30) + self.assertEqual(healer.doctors[1].max_stuck_time, 15) + + +class ZombieProcHealerTestCase(TestCase): + def test_heal__emptry_doctors(self): + healer = ZombieProcHealer([]) + healer._get_process_ids = MagicMock(return_value=[1, 2, 3]) + healer.heal() + + def test_heal__process_state_not_fit(self): + doctor_1 = MagicMock() + healer = ZombieProcHealer([doctor_1]) + healer._get_process_ids = MagicMock(return_value=[1, 2, 3]) + + proc_1 = MagicMock() + proc_1.id = 1 + proc_1.is_alive = False + proc_1.is_frozen = False + + proc_2 = MagicMock() + proc_2.id = 2 + proc_2.is_alive = True + proc_2.is_frozen = True + + proc_3 = MagicMock() + proc_3.id = 3 + proc_3.is_alive = False + proc_3.is_frozen = True + + processes = {1: proc_1, 2: proc_2, 3: proc_3} + + def get(id): + return processes[id] + + with patch(PIPELINE_PROCESS_GET, get): + self.assertFalse(not healer.doctors) + healer.heal() + doctor_1.confirm.assert_not_called() + doctor_1.cure.assert_not_called() + + def test_heal(self): + + doctor_1 = MagicMock() + doctor_1_confirm_count = {"count": 0} + + def doctor_1_confirm(proc, count=doctor_1_confirm_count): + count["count"] += 1 + return proc.id == 1 + + doctor_1.confirm = doctor_1_confirm + + doctor_2 = MagicMock() + doctor_2_confirm_count = {"count": 0} + + def doctor_2_confirm(proc, count=doctor_2_confirm_count): + count["count"] += 1 + return proc.id == 2 + + doctor_2.confirm = doctor_2_confirm + + doctor_3 = MagicMock() + doctor_3_confirm_count = {"count": 0} + + def doctor_3_confirm(proc, count=doctor_3_confirm_count): + count["count"] += 1 + return proc.id == 3 + + doctor_3.confirm = doctor_3_confirm + + healer = ZombieProcHealer([doctor_1, doctor_2, doctor_3]) + healer._get_process_ids = MagicMock(return_value=[1, 2, 3]) + + proc_1 = MagicMock() + proc_1.id = 1 + proc_1.is_alive = True + proc_1.is_frozen = False + + proc_2 = MagicMock() + proc_2.id = 
2 + proc_2.is_alive = True + proc_2.is_frozen = False + + proc_3 = MagicMock() + proc_3.id = 3 + proc_3.is_alive = True + proc_3.is_frozen = False + + processes = {1: proc_1, 2: proc_2, 3: proc_3} + + def get(id): + return processes[id] + + with patch(PIPELINE_PROCESS_GET, get): + healer.heal() + + self.assertEqual(doctor_1_confirm_count["count"], 3) + self.assertEqual(doctor_2_confirm_count["count"], 2) + self.assertEqual(doctor_3_confirm_count["count"], 1) + + doctor_1.cure.assert_called_once_with(proc_1) + doctor_2.cure.assert_called_once_with(proc_2) + doctor_3.cure.assert_called_once_with(proc_3) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/mock.py b/runtime/bamboo-pipeline/pipeline/tests/engine/mock.py new file mode 100644 index 00000000..afad2dcf --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/mock.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from pipeline.tests.mock import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_data.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_data.py
new file mode 100644
index 00000000..42618851
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_data.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.test import TestCase
+
+from pipeline.core.data.base import DataObject
+from pipeline.engine.models import Data
+
+from ..mock import IdentifyObject
+
+
+class DataTestCase(TestCase):
+ def test_write_node_data(self):
+ node = IdentifyObject()
+ data_obj = DataObject({"input_key": "value"}, outputs={"output_key": "value"})
+ node.data = data_obj
+
+ Data.objects.write_node_data(node)
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.inputs, data_obj.inputs)
+ self.assertEqual(data.outputs, data_obj.outputs)
+ self.assertIsNone(data.ex_data)
+
+ data_obj.inputs = {"new_inputs": "new_value"}
+ Data.objects.write_node_data(node, ex_data="ex_data")
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.inputs, data_obj.inputs)
+ self.assertEqual(data.outputs, data_obj.outputs)
+ self.assertEqual(data.ex_data, "ex_data")
+
+ data_obj.outputs.ex_data = "new_ex_data"
+ Data.objects.write_node_data(node, ex_data="ex_data")
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.inputs, data_obj.inputs)
+ self.assertEqual(data.outputs, data_obj.outputs)
+ self.assertEqual(data.ex_data, "new_ex_data")
+
+ def test_forced_fail(self):
+ node = IdentifyObject()
+ Data.objects.forced_fail(node.id, ex_data="")
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.outputs, {"_forced_failed": True})
+ self.assertEqual(data.ex_data, "")
+
+ Data.objects.forced_fail(node.id, ex_data="ex_data")
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.outputs, {"_forced_failed": True})
+ self.assertEqual(data.ex_data, "ex_data")
+
+ def test_write_ex_data(self):
+ node = IdentifyObject()
+ output = {"k": "v"}
+ ex_data = "ex_data"
+ new_ex_data = "new_ex_data"
+
+ Data.objects.write_ex_data(node.id, ex_data=ex_data)
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.ex_data, "ex_data")
+
+ data.outputs = output
+ data.save()
+ Data.objects.write_ex_data(node.id, ex_data=new_ex_data)
+ data = Data.objects.get(id=node.id)
+ self.assertEqual(data.outputs, output)
+ self.assertEqual(data.ex_data, new_ex_data)
diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_history.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_history.py
new file mode 100644
index 00000000..8cf41f64
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_history.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import datetime
+
+import mock
+from django.test import TestCase
+from django.utils import timezone
+
+from pipeline.engine.models import History, HistoryData
+from pipeline.engine.utils import calculate_elapsed_time
+from pipeline.tests.mock_settings import * # noqa
+
+from ..mock import * # noqa
+
+
+class HistoryTestCase(TestCase):
+ def test_record(self):
+ def data_get(*args, **kwargs):
+ data = Object()
+ data.inputs = {"input": "value"}
+ data.outputs = {"outputs": "value"}
+ data.ex_data = "ex_data"
+ return data
+
+ status = MockStatus(skip=True)
+ status.name = "name"
+ status.started_time = timezone.now()
+ status.archived_time = timezone.now()
+ with mock.patch(PIPELINE_DATA_GET, data_get):
+ history = History.objects.record(status)
+ self.assertEqual(history.identifier, status.id)
+ self.assertEqual(history.started_time, status.started_time)
+ self.assertEqual(history.archived_time, status.archived_time)
+ self.assertEqual(history.loop, status.loop)
+ self.assertEqual(history.skip, status.skip)
+ self.assertIsInstance(history.data, HistoryData)
+ history_data = HistoryData.objects.get(id=history.data.id)
+ self.assertEqual(history_data.inputs, data_get().inputs)
+ self.assertEqual(history_data.outputs, data_get().outputs)
+ self.assertEqual(history_data.ex_data, data_get().ex_data)
+
+ def test_get_histories(self):
+ def data_get(*args, **kwargs):
+ data = Object()
+ data.inputs = {"input": "value"}
+ data.outputs = {"outputs": "value"}
+ data.ex_data = "ex_data"
+ return data
+
+ started = timezone.now()
+ archived = timezone.now()
+ status = MockStatus(skip=False)
+ status.name = "name"
+
+ # microseconds are not needed for the comparisons below
+ status.started_time = datetime.datetime(
+ year=started.year,
+ month=started.month,
+ day=started.day,
+ hour=started.hour,
+ minute=started.minute,
+ second=started.second,
+ tzinfo=started.tzinfo,
+ )
+ status.archived_time = datetime.datetime(
+ year=archived.year,
+ month=archived.month,
+ day=archived.day,
+ hour=archived.hour,
+ minute=archived.minute,
+ second=archived.second,
+ tzinfo=archived.tzinfo,
+ )
+ with mock.patch(PIPELINE_DATA_GET, data_get):
+ for i in range(3):
+ History.objects.record(status)
+
+ history_list = History.objects.get_histories(status.id)
+ self.assertEqual(len(history_list), 3)
+ for history in history_list:
+ self.assertEqual(history["started_time"], status.started_time)
+ self.assertEqual(history["archived_time"], status.archived_time)
+ self.assertEqual(
+ history["elapsed_time"],
calculate_elapsed_time(status.started_time, status.archived_time) + ) + self.assertEqual(history["inputs"], data_get().inputs) + self.assertEqual(history["outputs"], data_get().outputs) + self.assertEqual(history["ex_data"], data_get().ex_data) + self.assertEqual(history["loop"], status.loop) + self.assertEqual(history["skip"], status.skip) + self.assertTrue("history_id" in history) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_celery_task.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_celery_task.py new file mode 100644 index 00000000..05921794 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_celery_task.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import mock +from django.test import TestCase + +from pipeline.engine.models import NodeCeleryTask, SendFailedCeleryTask + +from ..mock import * # noqa + + +class TestNodeCeleryTask(TestCase): + def test_bind(self): + node_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + NodeCeleryTask.objects.bind(node_id=node_id, celery_task_id=celery_task_id) + task = NodeCeleryTask.objects.get(node_id=node_id, celery_task_id=celery_task_id) + self.assertEqual(task.node_id, node_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + NodeCeleryTask.objects.bind(node_id=node_id, celery_task_id=celery_task_id) + task.refresh_from_db() + self.assertEqual(task.node_id, node_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + def test_unbind(self): + node_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + NodeCeleryTask.objects.bind(node_id=node_id, celery_task_id=celery_task_id) + task = NodeCeleryTask.objects.get(node_id=node_id, celery_task_id=celery_task_id) + NodeCeleryTask.objects.unbind(node_id) + task.refresh_from_db() + self.assertEqual(task.celery_task_id, "") + + def test_destroy(self): + node_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + NodeCeleryTask.objects.bind(node_id=node_id, celery_task_id=celery_task_id) + NodeCeleryTask.objects.destroy(node_id) + self.assertRaises(NodeCeleryTask.DoesNotExist, NodeCeleryTask.objects.get, node_id=node_id) + + def test_start_task__record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + node_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + NodeCeleryTask.objects.start_task(node_id, task=task, kwargs=kwargs) + + mock_watch.assert_called_once_with( + name=task.name, kwargs=kwargs, 
type=SendFailedCeleryTask.TASK_TYPE_NODE, extra_kwargs={"node_id": node_id}, + ) + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + NodeCeleryTask.objects.filter(node_id=node_id, celery_task_id=task.apply_async.return_value).count(), 1, + ) + + def test_start_task__no_record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + node_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + NodeCeleryTask.objects.start_task(node_id, task=task, kwargs=kwargs, record_error=False) + + mock_watch.assert_not_called() + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + NodeCeleryTask.objects.filter(node_id=node_id, celery_task_id=task.apply_async.return_value).count(), 1, + ) + + @mock.patch("pipeline.engine.models.core.revoke", mock.MagicMock()) + def test_revoke(self): + from pipeline.engine.models.core import revoke + + node_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + NodeCeleryTask.objects.bind(node_id=node_id, celery_task_id=celery_task_id) + NodeCeleryTask.objects.revoke(node_id) + revoke.assert_called_with(celery_task_id) + self.assertRaises( + NodeCeleryTask.DoesNotExist, NodeCeleryTask.objects.get, node_id=node_id, celery_task_id=celery_task_id, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_relationship.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_relationship.py new file mode 100644 index 00000000..fdd48a11 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_node_relationship.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.engine.models import NodeRelationship + + +class TestNodeRelationship(TestCase): + def test_build_relationship(self): + NodeRelationship.objects.build_relationship("1", "1") + NodeRelationship.objects.build_relationship("1", "2") + NodeRelationship.objects.build_relationship("1", "3") + NodeRelationship.objects.build_relationship("2", "4") + NodeRelationship.objects.build_relationship("2", "5") + NodeRelationship.objects.build_relationship("3", "6") + + def count(ancestor_id, descendant_id): + return NodeRelationship.objects.filter(ancestor_id=ancestor_id, descendant_id=descendant_id).count() + + def distance(ancestor_id, descendant_id): + return NodeRelationship.objects.get(ancestor_id=ancestor_id, descendant_id=descendant_id).distance + + def get(ancestor_id, descendant_id): + return NodeRelationship.objects.get(ancestor_id=ancestor_id, descendant_id=descendant_id) + + # rebuild check + NodeRelationship.objects.build_relationship("1", "2") + self.assertEqual(count("1", "1"), 1) + self.assertEqual(count("1", "2"), 1) + self.assertEqual(count("1", "3"), 1) + self.assertEqual(count("2", "4"), 1) + self.assertEqual(count("2", "5"), 1) + self.assertEqual(count("3", "6"), 1) + + # distance check + self.assertEqual(distance("1", "1"), 0) + self.assertEqual(distance("2", "2"), 0) + self.assertEqual(distance("3", "3"), 0) + self.assertEqual(distance("4", "4"), 0) + self.assertEqual(distance("5", "5"), 0) + self.assertEqual(distance("6", "6"), 0) + self.assertEqual(distance("1", "2"), 1) + self.assertEqual(distance("1", "3"), 1) + self.assertEqual(distance("1", "4"), 2) + self.assertEqual(distance("1", "5"), 2) + self.assertEqual(distance("1", "6"), 2) + self.assertEqual(distance("2", "4"), 1) + self.assertEqual(distance("2", "5"), 1) + self.assertEqual(distance("3", "6"), 1) + + # invalid descendant check + self.assertRaises(NodeRelationship.DoesNotExist, get, "2", "6") + self.assertRaises(NodeRelationship.DoesNotExist, get, "3", "4") + self.assertRaises(NodeRelationship.DoesNotExist, get, "3", "5") diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_model.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_model.py new file mode 100644 index 00000000..236696ff --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_model.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import mock +from django.test import TestCase + +from pipeline.core.pipeline import Pipeline +from pipeline.django_signal_valve import valve +from pipeline.engine import signals +from pipeline.engine.models import PipelineModel, PipelineProcess + +from ..mock import * # noqa + +valve.unload_valve_function() + + +class TestPipelineModel(TestCase): + def test_prepare_for_pipeline(self): + pipeline = PipelineObject() + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + priority = 5 + pipeline_model = PipelineModel.objects.prepare_for_pipeline( + pipeline=pipeline, process=process, priority=priority + ) + self.assertEqual(pipeline_model.process.id, process.id) + self.assertEqual(pipeline_model.id, pipeline.id) + self.assertEqual(pipeline_model.priority, priority) + + def test_priority_for_pipeline(self): + pipeline = PipelineObject() + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + priority = 5 + PipelineModel.objects.prepare_for_pipeline(pipeline=pipeline, process=process, priority=priority) + self.assertEqual(PipelineModel.objects.priority_for_pipeline(pipeline_id=pipeline.id), priority) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + def test_pipeline_ready(self): + process_id = uniqid() + PipelineModel.objects.pipeline_ready(process_id=process_id) + valve.send.assert_called_with(signals, "pipeline_ready", sender=Pipeline, process_id=process_id) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_process.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_process.py new file mode 100644 index 00000000..e944766e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_pipeline_process.py @@ -0,0 +1,878 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import traceback + +from django.test import TestCase + +from pipeline.django_signal_valve import valve +from pipeline.engine import exceptions, signals, states +from pipeline.engine.models import Status +from pipeline.engine.models.core import PipelineModel, PipelineProcess, ProcessSnapshot, SubProcessRelationship +from pipeline.engine.utils import Stack +from pipeline.tests.mock_settings import * # noqa + +from ..mock import * # noqa + +valve.unload_valve_function() + + +class TestPipelineProcess(TestCase): + def test_prepare_for_pipeline(self): + pipeline = PipelineObject() + + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + self.assertEqual(len(process.id), 32) + self.assertEqual(process.root_pipeline_id, pipeline.id) + self.assertEqual(process.current_node_id, pipeline.start_event.id) + self.assertIsNotNone(process.snapshot) + self.assertEqual(process.top_pipeline.id, pipeline.id) + + def test_fork_child(self): + context = MockContext() + context.clear_change_keys = MagicMock() + pipeline = PipelineObject(context=context) + current_node_id = uniqid() + destination_id = uniqid() + + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + child = PipelineProcess.objects.fork_child( + parent=process, current_node_id=current_node_id, destination_id=destination_id + ) + self.assertEqual(len(child.id), 32) + self.assertEqual(process.root_pipeline_id, child.root_pipeline_id) + self.assertEqual(len(child.pipeline_stack), 1) + self.assertEqual(child.top_pipeline.id, process.top_pipeline.id) + self.assertEqual(process.children, child.children) + self.assertEqual(process.root_pipeline.id, child.root_pipeline.id) + self.assertEqual(process.subprocess_stack, child.subprocess_stack) + self.assertEqual(process.id, child.parent_id) + self.assertEqual(child.current_node_id, current_node_id) + self.assertEqual(child.destination_id, destination_id) + child.top_pipeline.prune.assert_called_once_with(current_node_id, destination_id) + + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_process_ready(self): + from pipeline.django_signal_valve.valve import send + + process_id = uniqid() + current_node_id = uniqid() + + PipelineProcess.objects.process_ready(process_id) + send.assert_called_with( + signals, + "process_ready", + sender=PipelineProcess, + process_id=process_id, + current_node_id=None, + call_from_child=False, + ) + PipelineProcess.objects.process_ready(process_id, current_node_id, False) + send.assert_called_with( + signals, + "process_ready", + sender=PipelineProcess, + process_id=process_id, + current_node_id=current_node_id, + call_from_child=False, + ) + PipelineProcess.objects.process_ready(process_id, current_node_id, True) + send.assert_called_with( + signals, + "process_ready", + sender=PipelineProcess, + process_id=process_id, + current_node_id=current_node_id, + call_from_child=True, + ) + + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_batch_process_ready(self): + from pipeline.django_signal_valve.valve import send + + process_id_list = [uniqid(), uniqid(), uniqid()] + pipeline_id = uniqid() + + PipelineProcess.objects.batch_process_ready(process_id_list, pipeline_id) + send.assert_called_with( + signals, + "batch_process_ready", + sender=PipelineProcess, + process_id_list=process_id_list, + pipeline_id=pipeline_id, + ) + + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def test_child_process_ready(self): + from pipeline.django_signal_valve.valve import send + + child_id = uniqid() + + PipelineProcess.objects.child_process_ready(child_id) + 
send.assert_called_with(signals, "child_process_ready", sender=PipelineProcess, child_id=child_id) + + def test_properties(self): + process = PipelineProcess.objects.create() + pipeline_stack = Stack(["pipeline1", "pipeline2"]) + subprocess_stack = Stack(["subprocess1", "subprocess2"]) + children = ["child1", "child2"] + root_pipeline = "root_pipeline" + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=pipeline_stack, + children=children, + root_pipeline=root_pipeline, + subprocess_stack=subprocess_stack, + ) + process.snapshot = mock_snapshot + self.assertEqual(process.pipeline_stack, pipeline_stack) + self.assertEqual(process.children, children) + self.assertEqual(process.root_pipeline, root_pipeline) + self.assertEqual(process.top_pipeline, pipeline_stack.top()) + self.assertEqual(process.subprocess_stack, subprocess_stack) + + def test_push_pipeline(self): + pipeline = "pipeline_%s" % uniqid() + subproc_pipeline = PipelineObject() + process = PipelineProcess.objects.create() + pipeline_stack = Stack(["pipeline1", "pipeline2"]) + subprocess_stack = Stack(["subprocess1", "subprocess2"]) + children = ["child1", "child2"] + root_pipeline = "root_pipeline" + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=pipeline_stack, + children=children, + root_pipeline=root_pipeline, + subprocess_stack=subprocess_stack, + ) + process.snapshot = mock_snapshot + process.id = uniqid() + + process.push_pipeline(pipeline, is_subprocess=False) + self.assertEqual(process.top_pipeline, pipeline) + + process.push_pipeline(subproc_pipeline, is_subprocess=True) + self.assertEqual(process.top_pipeline, subproc_pipeline) + self.assertTrue( + SubProcessRelationship.objects.filter(subprocess_id=subproc_pipeline.id, process_id=process.id).exists() + ) + + def test_pop_pipeline(self): + subproc_pipeline = PipelineObject() + process = PipelineProcess.objects.create() + pipeline_stack = Stack(["pipeline1", "pipeline2"]) + subprocess_stack = Stack(["subprocess1", "subprocess2"]) + children = ["child1", "child2"] + root_pipeline = "root_pipeline" + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=pipeline_stack, + children=children, + root_pipeline=root_pipeline, + subprocess_stack=subprocess_stack, + ) + process.snapshot = mock_snapshot + process.id = uniqid() + + process.push_pipeline(subproc_pipeline, is_subprocess=True) + self.assertEqual(process.top_pipeline, subproc_pipeline) + self.assertTrue( + SubProcessRelationship.objects.filter(subprocess_id=subproc_pipeline.id, process_id=process.id).exists() + ) + + pop_pipeline = process.pop_pipeline() + self.assertEqual(pop_pipeline.id, subproc_pipeline.id) + self.assertFalse( + SubProcessRelationship.objects.filter(subprocess_id=subproc_pipeline.id, process_id=process.id).exists() + ) + + pop_pipeline = process.pop_pipeline() + self.assertEqual(pop_pipeline, "pipeline2") + + pop_pipeline = process.pop_pipeline() + self.assertEqual(pop_pipeline, "pipeline1") + + def test_join(self): + children = [IdentifyObject(), IdentifyObject(), IdentifyObject()] + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[], root_pipeline="root_pipeline", subprocess_stack=Stack() + ) + process = PipelineProcess.objects.create() + process.snapshot = mock_snapshot + + process.join(children) + self.assertEqual(process.need_ack, len(children)) + for i in range(len(children)): + self.assertEqual(process.children[i], children[i].id) + + def test_root_sleep_check(self): + def 
return_suspended(*args, **kwargs): + return states.SUSPENDED + + def return_revoked(*args, **kwargs): + return states.REVOKED + + def return_blocked(*args, **kwargs): + return states.BLOCKED + + another_status = MagicMock() + status = [states.CREATED, states.READY, states.RUNNING, states.FINISHED, states.FAILED] + another_status.side_effect = status + + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[], root_pipeline=IdentifyObject(), subprocess_stack=Stack() + ) + process = PipelineProcess.objects.create() + process.snapshot = mock_snapshot + + with mock.patch(PIPELINE_STATUS_STATE_FOR, return_suspended): + self.assertEqual(process.root_sleep_check(), (True, states.SUSPENDED)) + + with mock.patch(PIPELINE_STATUS_STATE_FOR, return_revoked): + self.assertEqual(process.root_sleep_check(), (True, states.REVOKED)) + + with mock.patch(PIPELINE_STATUS_STATE_FOR, return_blocked): + self.assertEqual(process.root_sleep_check(), (True, states.BLOCKED)) + process.parent_id = "parent_id" + self.assertEqual(process.root_sleep_check(), (False, states.BLOCKED)) + + with mock.patch(PIPELINE_STATUS_STATE_FOR, another_status): + for s in status: + self.assertEqual(process.root_sleep_check(), (False, s)) + + def test_subproc_sleep_check(self): + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[], root_pipeline=IdentifyObject(), subprocess_stack=Stack([1, 2, 3, 4]) + ) + process = PipelineProcess.objects.create() + process.snapshot = mock_snapshot + + def return_all_running(*args, **kwargs): + return [ + StatusObject(id=1, state=states.RUNNING), + StatusObject(id=2, state=states.RUNNING), + StatusObject(id=3, state=states.RUNNING), + StatusObject(id=4, state=states.RUNNING), + ] + + def return_one_suspended(*args, **kwargs): + return [ + StatusObject(id=1, state=states.RUNNING), + StatusObject(id=2, state=states.SUSPENDED), + StatusObject(id=3, state=states.RUNNING), + StatusObject(id=4, state=states.RUNNING), + ] + + def return_first_suspended(*args, **kwargs): + return [ + StatusObject(id=1, state=states.SUSPENDED), + StatusObject(id=2, state=states.RUNNING), + StatusObject(id=3, state=states.RUNNING), + StatusObject(id=4, state=states.RUNNING), + ] + + def return_last_suspended(*args, **kwargs): + return [ + StatusObject(id=1, state=states.RUNNING), + StatusObject(id=2, state=states.RUNNING), + StatusObject(id=3, state=states.RUNNING), + StatusObject(id=4, state=states.SUSPENDED), + ] + + with mock.patch(PIPELINE_STATUS_FILTER, return_all_running): + self.assertEqual(process.subproc_sleep_check(), (False, [1, 2, 3, 4])) + + with mock.patch(PIPELINE_STATUS_FILTER, return_one_suspended): + self.assertEqual(process.subproc_sleep_check(), (True, [1])) + + with mock.patch(PIPELINE_STATUS_FILTER, return_first_suspended): + self.assertEqual(process.subproc_sleep_check(), (True, [])) + + with mock.patch(PIPELINE_STATUS_FILTER, return_last_suspended): + self.assertEqual(process.subproc_sleep_check(), (True, [1, 2, 3])) + + @patch(PIPELINE_CELERYTASK_UNBIND, MagicMock()) + def test_freeze(self): + from pipeline.engine.models import ProcessCeleryTask + + pipeline = PipelineObject() + + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + self.assertFalse(process.is_frozen) + + process.freeze() + self.assertTrue(process.is_frozen) + process.refresh_from_db() + self.assertTrue(process.is_frozen) + + ProcessCeleryTask.objects.unbind.assert_called_with(process.id) + + @patch(SIGNAL_VALVE_SEND, MagicMock()) + def 
test_unfreeze(self):
+ from pipeline.django_signal_valve.valve import send
+
+ pipeline = PipelineObject()
+ process = PipelineProcess.objects.prepare_for_pipeline(pipeline)
+
+ process.freeze()
+ process.unfreeze()
+ self.assertFalse(process.is_frozen)
+ process.refresh_from_db()
+ self.assertFalse(process.is_frozen)
+
+ send.assert_called_with(signals, "process_unfreeze", sender=PipelineProcess, process_id=process.id)
+
+ @patch(PIPELINE_PROCESS_ADJUST_STATUS, MagicMock())
+ @patch(PIPELINE_CELERYTASK_UNBIND, MagicMock())
+ def test_sleep(self):
+ from pipeline.engine.models import ProcessCeleryTask
+
+ pipeline = PipelineObject()
+ process = PipelineProcess.objects.prepare_for_pipeline(pipeline)
+
+ process.sleep(do_not_save=True, adjust_status=True)
+ process.adjust_status.assert_called_with(None)
+ ProcessCeleryTask.objects.unbind.assert_not_called()
+ process.adjust_status.reset_mock()
+
+ process.sleep(do_not_save=True, adjust_status=True, adjust_scope=[1, 2, 3, 4])
+ process.adjust_status.assert_called_with([1, 2, 3, 4])
+ ProcessCeleryTask.objects.unbind.assert_not_called()
+ process.adjust_status.reset_mock()
+
+ process.sleep(do_not_save=False, adjust_status=False)
+ process.adjust_status.assert_not_called()
+ self.assertTrue(process.is_sleep)  # check the sleep flag, not the bound method
+ ProcessCeleryTask.objects.unbind.assert_called_with(process.id)
+
+ with mock.patch(PIPELINE_PROCESS_CHILD_PROCESS_READY, MagicMock()):
+ process = PipelineProcess.objects.prepare_for_pipeline(pipeline)
+ mock_snapshot = ProcessSnapshot.objects.create_snapshot(
+ pipeline_stack=Stack(),
+ children=[1, 2, 3, 4],
+ root_pipeline=IdentifyObject(),
+ subprocess_stack=Stack([]),
+ )
+ process.snapshot = mock_snapshot
+ process.sleep(do_not_save=False, adjust_status=False)
+ PipelineProcess.objects.child_process_ready.assert_has_calls(
+ [mock.call(1), mock.call(2), mock.call(3), mock.call(4)]
+ )
+
+ @patch(PIPELINE_STATUS_BATCH_TRANSIT, MagicMock())
+ @patch(PIPELINE_STATUS_TRANSIT, MagicMock())
+ def test_adjust_status(self):
+ process = PipelineProcess.objects.create()
+ mock_snapshot = ProcessSnapshot.objects.create_snapshot(
+ pipeline_stack=Stack(),
+ children=[],
+ root_pipeline=IdentifyObject(id="root_pipeline_id"),
+ subprocess_stack=Stack([1, 2, 3, 4]),
+ )
+ process.snapshot = mock_snapshot
+ process.current_node_id = "current_node_id"
+
+ def return_suspended_for_node(id, may_not_exist=False):
+ if id == "current_node_id":
+ return states.SUSPENDED
+
+ def return_failed_for_node(id, may_not_exist=False):
+ if id == "current_node_id":
+ return states.FAILED
+
+ def return_suspended_for_root_pipeline(id, may_not_exist=False):
+ if id == "root_pipeline_id":
+ return states.SUSPENDED
+
+ def return_none_for_node(*args, **kwargs):
+ return None
+
+ def return_empty_list_for_subproc(subprocess_stack):
+ return []
+
+ def return_all_running_for_subproc(subprocess_stack):
+ return [states.RUNNING, states.RUNNING, states.RUNNING, states.RUNNING]
+
+ def return_last_suspended_for_subproc(subprocess_stack):
+ return [states.RUNNING, states.RUNNING, states.RUNNING, states.SUSPENDED]
+
+ def return_one_suspended_for_subproc(subprocess_stack):
+ return [states.RUNNING, states.SUSPENDED, states.RUNNING, states.RUNNING]
+
+ node_state_possibility = [return_suspended_for_node, return_failed_for_node]
+
+ with mock.patch(PIPELINE_STATUS_STATES_FOR, return_empty_list_for_subproc):
+ for case in node_state_possibility:
+ with mock.patch(PIPELINE_STATUS_STATE_FOR, case):
+ process.adjust_status()
+ Status.objects.batch_transit.assert_called_with(
+
id_list=[1, 2, 3, 4], state=states.BLOCKED, from_state=states.RUNNING + ) + Status.objects.transit.assert_called_with( + "root_pipeline_id", to_state=states.BLOCKED, is_pipeline=True + ) + Status.objects.batch_transit.reset_mock() + Status.objects.transit.reset_mock() + + with mock.patch(PIPELINE_STATUS_STATE_FOR, return_suspended_for_root_pipeline): + process.adjust_status() + Status.objects.batch_transit.assert_called_with( + id_list=[1, 2, 3, 4], state=states.SUSPENDED, from_state=states.RUNNING + ) + Status.objects.batch_transit.reset_mock() + + with mock.patch(PIPELINE_STATUS_STATE_FOR, return_none_for_node): + with mock.patch(PIPELINE_STATUS_STATES_FOR, return_all_running_for_subproc): + process.adjust_status() + Status.objects.batch_transit.assert_not_called() + + with mock.patch(PIPELINE_STATUS_STATES_FOR, return_last_suspended_for_subproc): + process.adjust_status(adjust_scope=[1, 2, 3]) + Status.objects.batch_transit.assert_called_with( + id_list=[1, 2, 3], state=states.BLOCKED, from_state=states.RUNNING + ) + Status.objects.batch_transit.reset_mock() + + with mock.patch(PIPELINE_STATUS_STATES_FOR, return_one_suspended_for_subproc): + process.adjust_status(adjust_scope=[1]) + Status.objects.batch_transit.assert_called_with( + id_list=[1], state=states.BLOCKED, from_state=states.RUNNING + ) + Status.objects.batch_transit.reset_mock() + + def test_wake_up(self): + process = PipelineProcess.objects.create() + process.is_sleep = True + process.save() + + self.assertTrue(process.is_sleep) + process.wake_up() + self.assertFalse(process.is_sleep) + + @patch(PIPELINE_CELERYTASK_DESTROY, MagicMock()) + def test_destroy(self): + from pipeline.engine.models import ProcessCeleryTask + + process = PipelineProcess.objects.create() + process.id = uniqid() + process.current_node_id = "current_node_id" + + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[1, 2, 3, 4], root_pipeline=IdentifyObject(), subprocess_stack=Stack([]) + ) + mock_snapshot.delete = MagicMock() + process.snapshot = mock_snapshot + + process.destroy() + self.assertFalse(process.is_alive) + self.assertEqual(process.current_node_id, "") + self.assertIsNone(process.snapshot) + mock_snapshot.delete.assert_called() + ProcessCeleryTask.objects.destroy.assert_called_with(process.id) + + def test_save(self): + process = PipelineProcess.objects.create() + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[1, 2, 3, 4], root_pipeline=IdentifyObject(), subprocess_stack=Stack([]) + ) + mock_snapshot.save = MagicMock() + process.snapshot = mock_snapshot + + process.save(save_snapshot=False) + mock_snapshot.save.assert_not_called() + process.save(save_snapshot=True) + mock_snapshot.save.assert_called() + mock_snapshot.save.reset_mock() + process.save() + mock_snapshot.save.assert_called() + + def test_blocked_by_failure_or_suspended(self): + process = PipelineProcess.objects.create() + mock_snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=Stack(), children=[], root_pipeline=IdentifyObject(), subprocess_stack=Stack([]) + ) + process.snapshot = mock_snapshot + + def return_suspended(*args, **kwargs): + return states.SUSPENDED + + def return_failed(*args, **kwargs): + return states.FAILED + + def return_none(*args, **kwargs): + return None + + class MockChild(object): + def __init__(self, failed=False, suspended=False): + self.failed = failed + self.suspended = suspended + + def blocked_by_failure_or_suspended(self): + return self.failed or 
self.suspended
+
+ def return_child_no_anomaly(*args, **kwargs):
+ return [MockChild(), MockChild(), MockChild()]
+
+ def return_child_has_failed(*args, **kwargs):
+ return [MockChild(), MockChild(), MockChild(failed=True)]
+
+ def return_child_has_suspended(*args, **kwargs):
+ return [MockChild(), MockChild(), MockChild(suspended=True)]
+
+ process.is_sleep = False
+ self.assertFalse(process.blocked_by_failure_or_suspended())
+
+ # the current node has already failed
+ with mock.patch(PIPELINE_STATUS_STATE_FOR, return_failed):
+ process.is_sleep = True
+ self.assertTrue(process.blocked_by_failure_or_suspended())
+
+ # the current node is suspended
+ with mock.patch(PIPELINE_STATUS_STATE_FOR, return_suspended):
+ process.is_sleep = True
+ self.assertTrue(process.blocked_by_failure_or_suspended())
+
+ # the whole pipeline entered the SUSPENDED state before the next node started
+ with mock.patch(PIPELINE_STATUS_STATE_FOR, return_none):
+ process.is_sleep = True
+ self.assertFalse(process.blocked_by_failure_or_suspended())
+
+ mock_snapshot = ProcessSnapshot.objects.create_snapshot(
+ pipeline_stack=Stack(), children=[1, 2, 3], root_pipeline=IdentifyObject(), subprocess_stack=Stack([])
+ )
+ process.snapshot = mock_snapshot
+
+ # no child process is in an abnormal state
+ with mock.patch(PIPELINE_PROCESS_FILTER, return_child_no_anomaly):
+ process.is_sleep = True
+ self.assertFalse(process.blocked_by_failure_or_suspended())
+
+ # one of the child processes has failed
+ with mock.patch(PIPELINE_PROCESS_FILTER, return_child_has_failed):
+ process.is_sleep = True
+ self.assertTrue(process.blocked_by_failure_or_suspended())
+
+ # one of the child processes is suspended
+ with mock.patch(PIPELINE_PROCESS_FILTER, return_child_has_suspended):
+ process.is_sleep = True
+ self.assertTrue(process.blocked_by_failure_or_suspended())
+
+ def test_sync_with_children(self):
+ outputs = {"output_key": "output_value"}
+ variables = {"variable_key": "variable_value"}
+
+ process = PipelineProcess.objects.create()
+ context = Object()
+ context.update_global_var = MagicMock()
+ context.sync_change = MagicMock()
+
+ data = Object()
+ data.update_outputs = MagicMock()
+
+ mock_snapshot = ProcessSnapshot(
+ data={
+ "_pipeline_stack": Stack([PipelineObject(context=context, data=data)]),
+ "_children": [1, 2, 3, 4],
+ "_root_pipeline": IdentifyObject(),
+ "_subprocess_stack": Stack([]),
+ }
+ )
+ process.snapshot = mock_snapshot
+ process.clean_children = MagicMock()
+
+ def return_none(*args, **kwargs):
+ return None
+
+ def return_mock(id):
+ if id.endswith("data"):
+ return DataObject(outputs=outputs)
+ if id.endswith("context"):
+ return ContextObject(variables=variables)
+
+ with mock.patch(PIPELINE_ENGINE_CORE_DATA_GET_OBJECT, return_none):
+ self.assertRaises(exceptions.ChildDataSyncError, process.sync_with_children)
+
+ with mock.patch(PIPELINE_ENGINE_CORE_DATA_GET_OBJECT, return_mock):
+ process.sync_with_children()
+ context.sync_change.assert_called()
+ data.update_outputs.assert_called_with(outputs)
+ process.clean_children.assert_called()
+
+ @patch(PIPELINE_ENGINE_CORE_DATA_SET_OBJECT, MagicMock())
+ @patch(PIPELINE_PROCESS_BLOCKED_BY_FAILURE, MagicMock())
+ @patch(PIPELINE_PROCESS_DESTROY, MagicMock())
+ @patch(PIPELINE_PROCESS_PROCESS_READY, MagicMock())
+ @patch(PIPELINE_STATUS_BATCH_TRANSIT, MagicMock())
+ @patch(PIPELINE_STATUS_TRANSIT, MagicMock())
+ def test_destroy_and_wake_up_parent(self):
+ context = MockContext()
+ context.clear_change_keys = MagicMock()
+ pipeline = PipelineObject(context=context)
+
+ process = PipelineProcess.objects.prepare_for_pipeline(pipeline)
+ children = []
+ for i in range(3):
+ children.append(process.__class__.objects.fork_child(process, "current_node_id", "destination_id"))
+ process.join(children)
+
+ for child in children:
+ child.destroy_and_wake_up_parent(child.destination_id)
+
+ process.refresh_from_db()
+ self.assertEqual(process.need_ack, -1)
+ self.assertEqual(process.ack_num, 0)
+ self.assertEqual(PipelineProcess.blocked_by_failure_or_suspended.call_count, 2)
+ PipelineProcess.objects.process_ready.assert_called_once()
+ self.assertEqual(PipelineProcess.destroy.call_count, 3)
+
+ def test__context_key(self):
+ process = PipelineProcess.objects.create()
+ process.id = uniqid()
+ self.assertEqual(process._context_key(), "{}_context".format(process.id))
+ self.assertEqual(process._context_key(process_id="another_id"), "{}_context".format("another_id"))
+
+ def test__data_key(self):
+ process = PipelineProcess.objects.create()
+ process.id = uniqid()
+ self.assertEqual(process._data_key(), "{}_data".format(process.id))
+ self.assertEqual(process._data_key(process_id="another_id"), "{}_data".format("another_id"))
+
+ def test_can_be_waked(self):
+ process = PipelineProcess.objects.create()
+
+ process.is_sleep = False
+ process.is_alive = False
+ self.assertFalse(process.can_be_waked())
+ process.is_sleep = True
+ process.is_alive = False
+ self.assertFalse(process.can_be_waked())
+ process.is_sleep = False
+ process.is_alive = True
+ self.assertFalse(process.can_be_waked())
+
+ process.is_sleep = True
+ process.is_alive = True
+ process.need_ack = 3
+ process.ack_num = 2
+ self.assertFalse(process.can_be_waked())
+
+ process.need_ack = 3
+ process.ack_num = 3
+ self.assertTrue(process.can_be_waked())
+ process.need_ack = -1
+ self.assertTrue(process.can_be_waked())
+
+ @patch(PIPELINE_ENGINE_CORE_DATA_DEL_OBJECT, MagicMock())
+ def test_clean_children(self):
+ from pipeline.engine.core.data import del_object
+
+ mock_snapshot = ProcessSnapshot(
+ data={
+ "_pipeline_stack": Stack(),
+ "_children": ["1", "2", "3"],
+ "_root_pipeline": IdentifyObject(),
+ "_subprocess_stack": Stack([]),
+ }
+ )
+ mock_snapshot.clean_children = MagicMock()
+ mock_snapshot.save = MagicMock()
+
+ process = PipelineProcess.objects.create()
+ process.snapshot = mock_snapshot
+
+ process.clean_children()
+ del_object.assert_has_calls(
+ [
+ mock.call(process._context_key("1")),
+ mock.call(process._data_key("1")),
+ mock.call(process._context_key("2")),
+ mock.call(process._data_key("2")),
+ mock.call(process._context_key("3")),
+ mock.call(process._data_key("3")),
+ ]
+ )
+ mock_snapshot.clean_children.assert_called()
+ mock_snapshot.save.assert_called()
+
+ @patch(PIPELINE_STATUS_FAIL, MagicMock())
+ @patch(PIPELINE_STATUS_RAW_FAIL, MagicMock())
+ def test_exit_gracefully(self):
+ mock_snapshot = ProcessSnapshot(
+ data={
+ "_pipeline_stack": Stack(),
+ "_children": ["1", "2", "3"],
+ "_root_pipeline": PipelineObject(),
+ "_subprocess_stack": Stack([]),
+ }
+ )
+
+ process = PipelineProcess.objects.create()
+ process.snapshot = mock_snapshot
+ process.sleep = MagicMock()
+ e = Exception("test")
+
+ process.current_node_id = uniqid()
+ process.exit_gracefully(e)
+ Status.objects.fail.assert_called_with(process.current_node_id, ex_data=traceback.format_exc())
+ Status.objects.raw_fail.assert_not_called()
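+ # whichever failure path is taken, the process must go back to sleep with its status adjusted
+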
process.sleep.assert_called_with(adjust_status=True) + + Status.objects.fail.reset_mock() + process.sleep.reset_mock() + + # when stack is not empty + mock_snapshot.data["_pipeline_stack"] = Stack([PipelineObject()]) + process.current_node_id = uniqid() + process.exit_gracefully(e) + Status.objects.fail.assert_called_with(process.current_node_id, ex_data=traceback.format_exc()) + Status.objects.raw_fail.assert_not_called() + process.sleep.assert_called_with(adjust_status=True) + + Status.objects.fail.reset_mock() + process.sleep.reset_mock() + + # when current_node is none + top_pipeline = PipelineObject() + top_pipeline.node = MagicMock(return_value=None) + mock_snapshot.data["_pipeline_stack"] = Stack([top_pipeline]) + process.current_node_id = uniqid() + process.exit_gracefully(e) + Status.objects.fail.assert_not_called() + Status.objects.raw_fail.assert_called_with(process.current_node_id, ex_data=traceback.format_exc()) + process.sleep.assert_called_with(adjust_status=True) + + def test_refresh_current_node(self): + node_id = uniqid() + process = PipelineProcess.objects.create() + process.refresh_current_node(node_id) + process.refresh_from_db() + self.assertEqual(process.current_node_id, node_id) + + @patch(PIPELINE_STATUS_BATCH_TRANSIT, MagicMock()) + def test_revoke_subprocess(self): + mock_snapshot = ProcessSnapshot( + data={ + "_pipeline_stack": Stack(), + "_children": [], + "_root_pipeline": PipelineObject(), + "_subprocess_stack": Stack([1, 2, 3, 4]), + } + ) + + process = PipelineProcess.objects.create(id=uniqid()) + process.snapshot = mock_snapshot + process.sleep = MagicMock() + + process.revoke_subprocess() + Status.objects.batch_transit.assert_called_with(id_list=[1, 2, 3, 4], state=states.REVOKED) + + child_1 = Object() + child_2 = Object() + child_3 = Object() + child_1.revoke_subprocess = MagicMock() + child_2.revoke_subprocess = MagicMock() + child_3.revoke_subprocess = MagicMock() + + def get_child(id): + return {1: child_1, 2: child_2, 3: child_3}[id] + + mock_snapshot.data["_children"] = [1, 2, 3] + + with mock.patch(PIPELINE_PROCESS_GET, get_child): + process.revoke_subprocess() + Status.objects.batch_transit.assert_called_with(id_list=[1, 2, 3, 4], state=states.REVOKED) + child_1.revoke_subprocess.assert_called() + child_2.revoke_subprocess.assert_called() + child_3.revoke_subprocess.assert_called() + + # test when subprocess_stack and children return None + process = PipelineProcess.objects.create(id=uniqid()) + self.assertIsNone(process.subprocess_stack) + self.assertIsNone(process.children) + process.revoke_subprocess() + + @patch(PIPELINE_PROCESS_DESTROY, MagicMock()) + def test_destroy_all(self): + mock_snapshot = ProcessSnapshot( + data={ + "_pipeline_stack": Stack(), + "_children": [], + "_root_pipeline": PipelineObject(), + "_subprocess_stack": Stack([]), + } + ) + process = PipelineProcess.objects.create() + process.snapshot = mock_snapshot + process.is_alive = False + process.destroy_all() + process.destroy.assert_not_called() + + process.is_alive = True + process.destroy_all() + process.destroy.assert_called() + process.destroy.reset_mock() + + mock_snapshot.data["_children"] = [1, 2, 3] + + child_1 = Object() + child_1.children = [] + child_1.destroy = MagicMock() + child_1.is_alive = True + child_2 = Object() + child_2.children = [] + child_2.destroy = MagicMock() + child_2.is_alive = False + child_3 = Object() + child_3.children = [1] + child_3.destroy = MagicMock() + child_3.is_alive = True + + def get_child(id): + return {1: child_1, 2: child_2, 3: 
child_3}[id] + + with mock.patch(PIPELINE_PROCESS_GET, get_child): + process.destroy_all() + child_1.destroy.assert_called() + child_2.destroy.assert_not_called() + child_3.destroy.assert_called() + self.assertEqual(child_1.destroy.call_count, 2) + + def test_in_subprocess__true(self): + snapshot = ProcessSnapshot(data={"_pipeline_stack": Stack([1, 2])}) + process = PipelineProcess() + process.snapshot = snapshot + + self.assertTrue(process.in_subprocess) + + def test_in_subprocess__false(self): + snapshot = ProcessSnapshot(data={"_pipeline_stack": Stack([1])}) + process = PipelineProcess() + process.snapshot = snapshot + + self.assertFalse(process.in_subprocess) + + def test_priority_for_process(self): + pipeline = PipelineObject() + process = PipelineProcess.objects.prepare_for_pipeline(pipeline) + priority = 5 + PipelineModel.objects.prepare_for_pipeline(pipeline=pipeline, process=process, priority=priority) + + self.assertEqual(PipelineProcess.objects.priority_for_process(process.id), priority) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_celery_task.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_celery_task.py new file mode 100644 index 00000000..5821db4d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_celery_task.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.engine.models import ProcessCeleryTask, SendFailedCeleryTask + +from ..mock import * # noqa + + +class TestProcessCeleryTask(TestCase): + def tearDown(self): + ProcessCeleryTask.objects.all().delete() + + def test_bind(self): + process_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + task = ProcessCeleryTask.objects.get(process_id=process_id, celery_task_id=celery_task_id) + self.assertEqual(task.process_id, process_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + task.refresh_from_db() + self.assertEqual(task.process_id, process_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + def test_unbind(self): + process_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + task = ProcessCeleryTask.objects.get(process_id=process_id, celery_task_id=celery_task_id) + ProcessCeleryTask.objects.unbind(process_id) + task.refresh_from_db() + self.assertEqual(task.celery_task_id, "") + + def test_destroy(self): + process_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + ProcessCeleryTask.objects.destroy(process_id) + self.assertRaises( + ProcessCeleryTask.DoesNotExist, ProcessCeleryTask.objects.get, process_id=process_id, + ) + + def test_start_task__record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + process_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + ProcessCeleryTask.objects.start_task(process_id, task=task, kwargs=kwargs) + + mock_watch.assert_called_once_with( + name=task.name, + kwargs=kwargs, + type=SendFailedCeleryTask.TASK_TYPE_PROCESS, + extra_kwargs={"process_id": process_id}, + ) + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + ProcessCeleryTask.objects.filter( + process_id=process_id, celery_task_id=task.apply_async.return_value + ).count(), + 1, + ) + + def test_start_task__no_record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + process_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + ProcessCeleryTask.objects.start_task(process_id, task=task, kwargs=kwargs, record_error=False) + + mock_watch.assert_not_called() + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + ProcessCeleryTask.objects.filter( + process_id=process_id, celery_task_id=task.apply_async.return_value + ).count(), + 1, + ) + + @patch("pipeline.engine.models.core.revoke", MagicMock()) + def test_revoke(self): + from pipeline.engine.models.core import revoke + + process_id = uniqid() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + 
ProcessCeleryTask.objects.revoke(process_id) + revoke.assert_called_with(celery_task_id, terminate=True) + self.assertRaises( + ProcessCeleryTask.DoesNotExist, + ProcessCeleryTask.objects.get, + process_id=process_id, + celery_task_id=celery_task_id, + ) + + revoke.reset_mock() + + ProcessCeleryTask.objects.bind(process_id=process_id, celery_task_id=celery_task_id) + ProcessCeleryTask.objects.revoke(process_id, kill=True) + revoke.assert_called_with(celery_task_id, terminate=True, signal="SIGKILL") + self.assertRaises( + ProcessCeleryTask.DoesNotExist, + ProcessCeleryTask.objects.get, + process_id=process_id, + celery_task_id=celery_task_id, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_snapshot.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_snapshot.py new file mode 100644 index 00000000..1f99b360 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_process_snapshot.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine.models.core import ProcessSnapshot +from pipeline.engine.utils import Stack + + +class TestProcessSnapshot(TestCase): + def setUp(self): + self.pipeline_stack = Stack(["pipeline1", "pipeline2"]) + self.subprocess_stack = Stack(["subprocess1", "subprocess2"]) + self.children = ["child1", "child2"] + self.root_pipeline = "root_pipeline" + self.snapshot = ProcessSnapshot.objects.create_snapshot( + pipeline_stack=self.pipeline_stack, + children=self.children, + root_pipeline=self.root_pipeline, + subprocess_stack=self.subprocess_stack, + ) + + def test_properties(self): + self.assertEqual(self.snapshot.pipeline_stack, self.pipeline_stack) + self.assertEqual(self.snapshot.children, self.children) + self.assertEqual(self.snapshot.root_pipeline, self.root_pipeline) + self.assertEqual(self.snapshot.subprocess_stack, self.subprocess_stack) + + def test_clean_children(self): + self.snapshot.clean_children() + self.assertEqual(len(self.snapshot.children), 0) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_celery_task.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_celery_task.py new file mode 100644 index 00000000..313fa417 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_celery_task.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine.models import ScheduleCeleryTask, SendFailedCeleryTask + +from ..mock import * # noqa + + +class TestScheduleCeleryTask(TestCase): + def test_bind(self): + schedule_id = "{}{}".format(uniqid(), uniqid()) + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ScheduleCeleryTask.objects.bind(schedule_id=schedule_id, celery_task_id=celery_task_id) + task = ScheduleCeleryTask.objects.get(schedule_id=schedule_id, celery_task_id=celery_task_id) + self.assertEqual(task.schedule_id, schedule_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + ScheduleCeleryTask.objects.bind(schedule_id=schedule_id, celery_task_id=celery_task_id) + task.refresh_from_db() + self.assertEqual(task.schedule_id, schedule_id) + self.assertEqual(task.celery_task_id, celery_task_id) + + def test_unbind(self): + schedule_id = "{}{}".format(uniqid(), uniqid()) + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ScheduleCeleryTask.objects.bind(schedule_id=schedule_id, celery_task_id=celery_task_id) + task = ScheduleCeleryTask.objects.get(schedule_id=schedule_id, celery_task_id=celery_task_id) + ScheduleCeleryTask.objects.unbind(schedule_id) + task.refresh_from_db() + self.assertEqual(task.celery_task_id, "") + + def test_destroy(self): + schedule_id = "{}{}".format(uniqid(), uniqid()) + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + + ScheduleCeleryTask.objects.bind(schedule_id=schedule_id, celery_task_id=celery_task_id) + ScheduleCeleryTask.objects.destroy(schedule_id) + self.assertRaises( + ScheduleCeleryTask.DoesNotExist, ScheduleCeleryTask.objects.get, schedule_id=schedule_id, + ) + + def test_start_task__record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + schedule_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + ScheduleCeleryTask.objects.start_task(schedule_id, task=task, kwargs=kwargs) + + mock_watch.assert_called_once_with( + name=task.name, + kwargs=kwargs, + type=SendFailedCeleryTask.TASK_TYPE_SCHEDULE, + extra_kwargs={"schedule_id": schedule_id}, + ) + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + ScheduleCeleryTask.objects.filter( + schedule_id=schedule_id, celery_task_id=task.apply_async.return_value + ).count(), + 1, + ) + + def test_start_task__no_record_error(self): + task = MagicMock() + celery_task_id = "{}{}".format(uniqid(), uniqid())[:40] + task.apply_async = MagicMock(return_value=celery_task_id) + task.name = "name_token" + schedule_id = uniqid() + kwargs = {"a": "1", "b": 2} + mock_watch = MagicMock() + + with patch("pipeline.engine.models.core.SendFailedCeleryTask.watch", mock_watch): + ScheduleCeleryTask.objects.start_task(schedule_id, task=task, kwargs=kwargs, record_error=False) + + mock_watch.assert_not_called() + task.apply_async.assert_called_with(a="1", b=2) + self.assertEqual( + 
ScheduleCeleryTask.objects.filter(
+                schedule_id=schedule_id, celery_task_id=task.apply_async.return_value
+            ).count(),
+            1,
+        )
diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_service.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_service.py
new file mode 100644
index 00000000..904b58ad
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_schedule_service.py
@@ -0,0 +1,260 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.test import TestCase
+
+from pipeline.django_signal_valve import valve
+from pipeline.engine import signals
+from pipeline.engine.exceptions import InvalidOperationException
+from pipeline.engine.models import ScheduleCeleryTask, ScheduleService
+
+from ..mock import *  # noqa
+
+valve.unload_valve_function()
+
+
+class TestScheduleService(TestCase):
+    @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock())
+    @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock())
+    def test_set_schedule(self):
+        from pipeline.django_signal_valve.valve import send
+        from pipeline.engine.core.data import set_schedule_data
+
+        service_act = ServiceActObject(interval=None)
+        process_id = uniqid()
+        version = uniqid()
+        parent_data = "parent_data"
+        schedule = ScheduleService.objects.set_schedule(
+            activity_id=service_act.id,
+            service_act=service_act,
+            process_id=process_id,
+            version=version,
+            parent_data=parent_data,
+        )
+        self.assertEqual(schedule.id, "{}{}".format(service_act.id, version))
+        self.assertEqual(schedule.activity_id, service_act.id)
+        self.assertEqual(schedule.process_id, process_id)
+        self.assertEqual(schedule.wait_callback, True)
+        self.assertEqual(schedule.version, version)
+        set_schedule_data.assert_called_with(schedule.id, parent_data)
+
+        # service with a schedule interval does not wait for a callback
+        set_schedule_data.reset_mock()
+        interval = StaticIntervalObject(interval=3)
+        service_act = ServiceActObject(interval=interval)
+        process_id = uniqid()
+        version = uniqid()
+        parent_data = "new_parent_data"
+        schedule = ScheduleService.objects.set_schedule(
+            activity_id=service_act.id,
+            service_act=service_act,
+            process_id=process_id,
+            version=version,
+            parent_data=parent_data,
+        )
+        self.assertEqual(schedule.id, "{}{}".format(service_act.id, version))
+        self.assertEqual(schedule.activity_id, service_act.id)
+        self.assertEqual(schedule.process_id, process_id)
+        self.assertEqual(schedule.wait_callback, False)
+        self.assertEqual(schedule.version, version)
+        set_schedule_data.assert_called_with(schedule.id, parent_data)
+        send.assert_called_with(
+            signals,
+            "schedule_ready",
+            sender=ScheduleService,
+            process_id=process_id,
+            schedule_id=schedule.id,
+            countdown=interval.interval,
+        )
+
+    @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock())
+
@mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + def test_schedule_for(self): + service_act = ServiceActObject(interval=None) + process_id = uniqid() + version = uniqid() + parent_data = "parent_data" + ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + schedule = ScheduleService.objects.schedule_for(activity_id=service_act.id, version=version) + self.assertEqual(schedule.id, "{}{}".format(service_act.id, version)) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + def test_delete_schedule(self): + service_act = ServiceActObject(interval=None) + process_id = uniqid() + version = uniqid() + parent_data = "parent_data" + ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + ScheduleService.objects.delete_schedule(activity_id=service_act.id, version=version) + self.assertRaises( + ScheduleService.DoesNotExist, + ScheduleService.objects.schedule_for, + activity_id=service_act.id, + version=version, + ) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + @mock.patch("pipeline.engine.models.ScheduleCeleryTask.objects.unbind", mock.MagicMock()) + def test_set_next_schedule(self): + from pipeline.django_signal_valve.valve import send + + interval = StaticIntervalObject(interval=3) + service_act = ServiceActObject(interval=interval) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + + schedule.is_scheduling = True + schedule.save() + schedule.set_next_schedule() + schedule.refresh_from_db() + self.assertFalse(schedule.is_scheduling) + send.assert_called_with( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=process_id, + schedule_id=schedule.id, + countdown=interval.interval, + ) + ScheduleCeleryTask.objects.unbind.assert_called_with(schedule.id) + + # test invalid call + service_act = ServiceActObject(interval=None) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + self.assertRaises(InvalidOperationException, schedule.set_next_schedule) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.delete_parent_data", mock.MagicMock()) + @mock.patch("pipeline.engine.models.ScheduleCeleryTask.objects.destroy", mock.MagicMock()) + def test_destroy(self): + from pipeline.engine.core.data import delete_parent_data + + interval = StaticIntervalObject(interval=3) + service_act = ServiceActObject(interval=interval) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + 
parent_data=parent_data, + ) + + schedule_id = schedule.id + schedule.destroy() + self.assertRaises(ScheduleService.DoesNotExist, ScheduleService.objects.get, id=schedule_id) + delete_parent_data.assert_called_with(schedule_id) + ScheduleCeleryTask.objects.destroy.assert_called_with(schedule_id) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + @mock.patch("pipeline.engine.models.ScheduleCeleryTask.objects.destroy", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + def test_finish(self): + interval = StaticIntervalObject(interval=3) + service_act = ServiceActObject(interval=interval) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + schedule.finish() + + self.assertTrue(schedule.is_finished) + self.assertIsNone(schedule.service_act) + self.assertFalse(schedule.is_scheduling) + ScheduleCeleryTask.objects.destroy.assert_called_with(schedule.id) + + @mock.patch("pipeline.django_signal_valve.valve.send", mock.MagicMock()) + @mock.patch("pipeline.engine.core.data.set_schedule_data", mock.MagicMock()) + def test_callback(self): + from pipeline.django_signal_valve.valve import send + + service_act = ServiceActObject(interval=None) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + callback_data = "callback_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + schedule.callback(callback_data, process_id) + + self.assertEqual(schedule.callback_data, callback_data) + send.assert_called_with( + signals, + "schedule_ready", + sender=ScheduleService, + process_id=process_id, + schedule_id=schedule.id, + countdown=0, + ) + + # test invalid callback + service_act = ServiceActObject(interval=StaticIntervalObject(interval=1)) + process_id = uniqid() + version = uniqid() + parent_data = "new_parent_data" + callback_data = "callback_data" + schedule = ScheduleService.objects.set_schedule( + activity_id=service_act.id, + service_act=service_act, + process_id=process_id, + version=version, + parent_data=parent_data, + ) + self.assertRaises( + InvalidOperationException, schedule.callback, callback_data=callback_data, process_id=process_id + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_send_failed_celery_task.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_send_failed_celery_task.py new file mode 100644 index 00000000..19ebede2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_send_failed_celery_task.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +import json +from mock import patch, MagicMock + +from django.test import TestCase + +from pipeline.engine.exceptions import CeleryFailedTaskCatchException +from pipeline.engine.models import SendFailedCeleryTask + + +class SendFailedCeleryTaskTestCase(TestCase): + def tearDown(self): + SendFailedCeleryTask.objects.all().delete() + + def test_kwargs_dict(self): + kwargs = {"task_kwargs": "token"} + task = SendFailedCeleryTask.objects.create( + name="name", + kwargs=json.dumps(kwargs), + type=SendFailedCeleryTask.TASK_TYPE_EMPTY, + extra_kwargs="extra_kwargs_token", + exec_trace="trace_token", + ) + + self.assertEqual(task.kwargs_dict, kwargs) + + def test_extra_kwargs_dict(self): + extra_kwargs = {"extra_kwargs": "token"} + task = SendFailedCeleryTask.objects.create( + name="name", + kwargs="kwargs_token", + type=SendFailedCeleryTask.TASK_TYPE_EMPTY, + extra_kwargs=json.dumps(extra_kwargs), + exec_trace="trace_token", + ) + + self.assertEqual(task.extra_kwargs_dict, extra_kwargs) + + def test_resend__type_error(self): + task = SendFailedCeleryTask( + name="name", kwargs="kwargs_token", type=-1, extra_kwargs="extra_kwargs_token", exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + mock_task.apply_async = MagicMock(side_effect=Exception) + current_app = MagicMock() + current_app.tasks = MagicMock() + + with patch("pipeline.engine.models.core.current_app", current_app): + self.assertRaises(TypeError, task.resend) + + task.delete.assert_not_called() + + def test_resend__empty_send_error(self): + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_EMPTY, + extra_kwargs="extra_kwargs_token", + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + mock_task.apply_async = MagicMock(side_effect=RuntimeError) + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + + with patch("pipeline.engine.models.core.current_app", current_app): + self.assertRaises(RuntimeError, task.resend) + + task.delete.assert_not_called() + + def test_resend__process_send_error(self): + process_id = "pid" + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_PROCESS, + extra_kwargs=json.dumps({"process_id": process_id}), + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + mock_start_task = MagicMock(side_effect=RuntimeError) + + with patch("pipeline.engine.models.core.current_app", current_app): + with patch( + "pipeline.engine.models.core.ProcessCeleryTask.objects.start_task", mock_start_task, + ): + self.assertRaises(RuntimeError, task.resend) + + mock_start_task.assert_called_once_with( + process_id=process_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False, + ) + task.delete.assert_not_called() + + def test_resend__node_send_error(self): + node_id = "nid" + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_NODE, + extra_kwargs=json.dumps({"node_id": node_id}), + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + mock_start_task = MagicMock(side_effect=RuntimeError) + + with 
patch("pipeline.engine.models.core.current_app", current_app): + with patch( + "pipeline.engine.models.core.NodeCeleryTask.objects.start_task", mock_start_task, + ): + self.assertRaises(RuntimeError, task.resend) + + mock_start_task.assert_called_once_with( + node_id=node_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False + ) + task.delete.assert_not_called() + + def test_resend__schedule_send_error(self): + schedule_id = "sid" + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_SCHEDULE, + extra_kwargs=json.dumps({"schedule_id": schedule_id}), + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + mock_start_task = MagicMock(side_effect=RuntimeError) + + with patch("pipeline.engine.models.core.current_app", current_app): + with patch( + "pipeline.engine.models.core.ScheduleCeleryTask.objects.start_task", mock_start_task, + ): + self.assertRaises(RuntimeError, task.resend) + + mock_start_task.assert_called_once_with( + schedule_id=schedule_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False, + ) + task.delete.assert_not_called() + + def test_resend__empty_send_success(self): + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_EMPTY, + extra_kwargs="extra_kwargs_token", + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + mock_task.apply_async = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + + with patch("pipeline.engine.models.core.current_app", current_app): + task.resend() + + mock_task.apply_async.assert_called_once_with(**task.kwargs_dict) + task.delete.assert_called_once() + + def test_resend__process_send_success(self): + process_id = "pid" + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_PROCESS, + extra_kwargs=json.dumps({"process_id": process_id}), + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + mock_start_task = MagicMock() + + with patch("pipeline.engine.models.core.current_app", current_app): + with patch( + "pipeline.engine.models.core.ProcessCeleryTask.objects.start_task", mock_start_task, + ): + task.resend() + + mock_start_task.assert_called_once_with( + process_id=process_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False, + ) + task.delete.assert_called_once() + + def test_resend__node_send_success(self): + node_id = "nid" + task = SendFailedCeleryTask( + name="name", + kwargs=json.dumps({"task_kargs": "token"}), + type=SendFailedCeleryTask.TASK_TYPE_NODE, + extra_kwargs=json.dumps({"node_id": node_id}), + exec_trace="trace_token", + ) + task.delete = MagicMock() + + mock_task = MagicMock() + current_app = MagicMock() + current_app.tasks = {"name": mock_task} + mock_start_task = MagicMock() + + with patch("pipeline.engine.models.core.current_app", current_app): + with patch( + "pipeline.engine.models.core.NodeCeleryTask.objects.start_task", mock_start_task, + ): + task.resend() + + mock_start_task.assert_called_once_with( + node_id=node_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False + ) + task.delete.assert_called_once() + + def test_resend__schedule_send_success(self): + schedule_id = "sid" + task = 
SendFailedCeleryTask(
+            name="name",
+            kwargs=json.dumps({"task_kargs": "token"}),
+            type=SendFailedCeleryTask.TASK_TYPE_SCHEDULE,
+            extra_kwargs=json.dumps({"schedule_id": schedule_id}),
+            exec_trace="trace_token",
+        )
+        task.delete = MagicMock()
+
+        mock_task = MagicMock()
+        current_app = MagicMock()
+        current_app.tasks = {"name": mock_task}
+        mock_start_task = MagicMock()
+
+        with patch("pipeline.engine.models.core.current_app", current_app):
+            with patch(
+                "pipeline.engine.models.core.ScheduleCeleryTask.objects.start_task", mock_start_task,
+            ):
+                task.resend()
+
+        mock_start_task.assert_called_once_with(
+            schedule_id=schedule_id, task=mock_task, kwargs=task.kwargs_dict, record_error=False,
+        )
+        task.delete.assert_called_once()
+
+    def test_watch__no_exception(self):
+        with SendFailedCeleryTask.watch(1, 2, 3, 4):
+            pass
+
+        self.assertFalse(SendFailedCeleryTask.objects.all().exists())
+
+    def test_watch__catch_exception(self):
+        name = "name_token"
+        kwargs = "kwargs_token"
+        type = SendFailedCeleryTask.TASK_TYPE_EMPTY
+        extra_kwargs = "extra_kwargs_token"
+        try:
+            with SendFailedCeleryTask.watch(name, kwargs, type, extra_kwargs):
+                raise RuntimeError()
+        except CeleryFailedTaskCatchException as e:
+            self.assertEqual(e.task_name, name)
+
+        task = SendFailedCeleryTask.objects.all()[0]
+        self.assertEqual(task.name, name)
+        self.assertEqual(task.kwargs, kwargs)
+        self.assertEqual(task.type, type)
+        self.assertEqual(task.extra_kwargs, extra_kwargs)
+        self.assertNotEqual(len(task.exec_trace), 0)
+
+    def test_record__with_dict_kwargs(self):
+        name = "name_token"
+        kwargs = {"1": "1"}
+        type = SendFailedCeleryTask.TASK_TYPE_EMPTY
+        extra_kwargs = {"2": "2"}
+        exec_trace = "exec_trace_token"
+
+        task = SendFailedCeleryTask.objects.record(name, kwargs, type, extra_kwargs, exec_trace)
+        self.assertEqual(task.name, name)
+        self.assertEqual(task.kwargs_dict, kwargs)
+        self.assertEqual(task.type, type)
+        self.assertEqual(task.extra_kwargs_dict, extra_kwargs)
+        self.assertEqual(task.exec_trace, exec_trace)
+
+    def test_record__with_str_kwargs(self):
+        name = "name_token"
+        kwargs = {"1": "1"}
+        type = SendFailedCeleryTask.TASK_TYPE_EMPTY
+        extra_kwargs = {"2": "2"}
+        exec_trace = "exec_trace_token"
+
+        task = SendFailedCeleryTask.objects.record(name, json.dumps(kwargs), type, json.dumps(extra_kwargs), exec_trace)
+        self.assertEqual(task.name, name)
+        self.assertEqual(task.kwargs_dict, kwargs)
+        self.assertEqual(task.type, type)
+        self.assertEqual(task.extra_kwargs_dict, extra_kwargs)
+        self.assertEqual(task.exec_trace, exec_trace)
diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_status.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_status.py
new file mode 100644
index 00000000..332b1647
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_status.py
@@ -0,0 +1,518 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the +specific language governing permissions and limitations under the License. +""" + +import copy + +from django.test import TestCase + +from pipeline.engine import states +from pipeline.engine.models import Data, LogEntry, Status, SubProcessRelationship +from pipeline.tests.mock_settings import * # noqa + +from ..mock import * # noqa + + +class TestStatus(TestCase): + def test_transit(self): + mock_record = MagicMock() + mock_record.return_value = IdentifyObject() + + mock_link_history = MagicMock() + + # start test + id_1 = uniqid() + result = Status.objects.transit(id=id_1, to_state=states.RUNNING, start=True, name=id_1) + state = Status.objects.get(id=id_1) + self.assertTrue(result.result) + self.assertEqual(state.state, states.RUNNING) + self.assertEqual(state.name, id_1) + self.assertIsNotNone(state.started_time) + + # transit test + with patch(PIPELINE_HISTORY_RECORD, mock_record): + with patch(PIPELINE_HISTORY_LINK_HISTORY, mock_link_history): + for is_pipeline, appoint_map in list(states.TRANSITION_MAP.items()): + for is_appoint, state_map in list(appoint_map.items()): + for from_state, to_state_set in list(state_map.items()): + + # valid transit + for to_state in to_state_set: + state_id = uniqid() + result = Status.objects.transit(id=state_id, to_state=from_state, start=True) + self.assertTrue(result.result) + state = Status.objects.get(id=state_id) + self.assertIsNotNone(state.state_refresh_at) + state.state_refresh_at = None + state.save() + result = Status.objects.transit( + id=state_id, is_pipeline=is_pipeline, appoint=is_appoint, to_state=to_state + ) + self.assertTrue(result.result, "valid: from {} to {}".format(from_state, to_state)) + state.refresh_from_db() + self.assertIsNotNone(state.state_refresh_at) + self.assertEqual(state.state, to_state) + if to_state in states.ARCHIVED_STATES: + self.assertIsNotNone(state.archived_time) + else: + self.assertIsNone(state.archived_time) + + # invalid transit + invalid_to_state_set = states.ALL_STATES.difference(to_state_set) + for invalid_to_state in invalid_to_state_set: + state_id = uniqid() + Status.objects.transit(id=state_id, to_state=from_state, start=True) + result = Status.objects.transit( + id=state_id, is_pipeline=is_pipeline, appoint=is_appoint, to_state=invalid_to_state + ) + self.assertFalse( + result.result, "invalid: from {} to {}".format(from_state, invalid_to_state) + ) + state = Status.objects.get(id=state_id) + self.assertEqual(state.state, from_state) + + # transit when process is frozen + def return_a_frozen_process(*args, **kwargs): + obj = Object() + obj.is_frozen = True + return obj + + with patch(PIPELINE_PROCESS_GET, return_a_frozen_process): + id_2 = uniqid() + SubProcessRelationship.objects.create(process_id=uniqid(), subprocess_id=id_2) + result = Status.objects.transit(id=id_2, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_2, to_state=states.FINISHED, is_pipeline=True) + self.assertFalse(result.result) + result = Status.objects.transit(id=id_2, to_state=states.FINISHED, is_pipeline=False) + self.assertTrue(result.result) + + def return_a_frozen_process_list(*args, **kwargs): + obj = Object() + obj.is_frozen = True + return [obj] + + with patch(PIPELINE_PROCESS_FILTER, return_a_frozen_process_list): + id_3 = uniqid() + result = Status.objects.transit(id=id_3, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_3, to_state=states.FINISHED, is_pipeline=True) + 
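# pipeline-level transit should be rejected here: a frozen parent process blocks it
+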
self.assertFalse(result.result) + result = Status.objects.transit(id=id_3, to_state=states.FINISHED, is_pipeline=False) + self.assertTrue(result.result) + + # test special treat when transit from FINISHED to RUNNING + + mock_record.reset_mock() + mock_link_history.reset_mock() + + with patch(PIPELINE_HISTORY_RECORD, mock_record): + with patch(PIPELINE_HISTORY_LINK_HISTORY, mock_link_history): + id_4 = uniqid() + version = uniqid() + result = Status.objects.transit(id=id_4, to_state=states.FINISHED, start=True, version=version) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_4, to_state=states.RUNNING, appoint=True) + self.assertFalse(result.result) + result = Status.objects.transit(id=id_4, to_state=states.RUNNING) + self.assertTrue(result.result) + mock_record.assert_called() + mock_link_history.assert_called() + state = Status.objects.get(id=id_4) + self.assertNotEqual(state.version, version) + self.assertEqual(state.loop, 2) + + # test transit old version state + + id_6 = uniqid() + result = Status.objects.transit(id=id_6, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_6, to_state=states.FINISHED, version=uniqid()) + self.assertFalse(result.result) + state = Status.objects.get(id=id_6) + self.assertEqual(state.state, states.RUNNING) + + # test unchanged_pass is true + id_7 = uniqid() + result = Status.objects.transit(id=id_7, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_7, to_state=states.RUNNING, unchanged_pass=True) + self.assertTrue(result.result) + state = Status.objects.get(id=id_7) + self.assertEqual(state.state, states.RUNNING) + + # test unchanged_pass is false + id_8 = uniqid() + result = Status.objects.transit(id=id_8, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.transit(id=id_8, to_state=states.RUNNING) + self.assertFalse(result.result) + state = Status.objects.get(id=id_8) + self.assertEqual(state.state, states.RUNNING) + + def test_batch_transit(self): + status_id_list = {uniqid() for _ in range(5)} + for sid in status_id_list: + result = Status.objects.transit(id=sid, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + Status.objects.batch_transit(id_list=status_id_list, state=states.BLOCKED) + for sid in status_id_list: + self.assertEqual(Status.objects.state_for(sid), states.BLOCKED) + + # test exclude param + status_id_list = [uniqid() for _ in range(3)] + exclude = [uniqid() for _ in range(2)] + all_id_list = copy.deepcopy(status_id_list) + all_id_list.extend(exclude) + for sid in all_id_list: + result = Status.objects.transit(id=sid, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + Status.objects.batch_transit(id_list=status_id_list, state=states.BLOCKED, exclude=exclude) + for sid in status_id_list: + self.assertEqual(Status.objects.state_for(sid), states.BLOCKED) + for sid in exclude: + self.assertEqual(Status.objects.state_for(sid), states.RUNNING) + + def test_state_for(self): + status_id = uniqid() + result = Status.objects.transit(id=status_id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + self.assertEqual(Status.objects.state_for(status_id), states.RUNNING) + self.assertIsNone(Status.objects.state_for(uniqid(), may_not_exist=True)) + self.assertRaises(Status.DoesNotExist, Status.objects.state_for, id=uniqid()) + + # test version param + status_id = uniqid() + result = 
Status.objects.transit(id=status_id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + status = Status.objects.get(id=status_id) + version = status.version + + self.assertEqual(Status.objects.state_for(status_id, version=version), states.RUNNING) + self.assertIsNone(Status.objects.state_for(uniqid(), may_not_exist=True, version=version)) + self.assertIsNone(Status.objects.state_for(status_id, may_not_exist=True, version=uniqid())) + self.assertRaises(Status.DoesNotExist, Status.objects.state_for, id=uniqid(), version=version) + self.assertRaises(Status.DoesNotExist, Status.objects.state_for, id=uniqid(), version=uniqid()) + + def test_version_for(self): + status_id = uniqid() + result = Status.objects.transit(id=status_id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + status = Status.objects.get(id=status_id) + self.assertEqual(Status.objects.version_for(status_id), status.version) + + def test_states_for(self): + status_id_list = {uniqid() for _ in range(5)} + for sid in status_id_list: + result = Status.objects.transit(id=sid, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + state_list = Status.objects.states_for(status_id_list) + self.assertEqual(len(status_id_list), len(state_list)) + for s in state_list: + self.assertEqual(s, states.RUNNING) + + # test not exist id + status_id_list = [uniqid() for _ in range(3)] + not_exist = [uniqid() for _ in range(2)] + for sid in status_id_list: + result = Status.objects.transit(id=sid, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + status_id_list.extend(not_exist) + state_list = Status.objects.states_for(status_id_list) + self.assertEqual(len(status_id_list) - len(not_exist), len(state_list)) + for s in state_list: + self.assertEqual(s, states.RUNNING) + + def test_prepare_for_pipeline(self): + pipeline = PipelineObject() + Status.objects.prepare_for_pipeline(pipeline) + status = Status.objects.get(id=pipeline.id) + self.assertEqual(status.state, states.READY) + + cls_str = str(pipeline.__class__) + cls_name = pipeline.__class__.__name__[:64] + self.assertEqual(status.name, cls_str if len(cls_str) <= 64 else cls_name) + + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + def test_fail(self): + + # success call test + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.fail(node, "ex_data") + self.assertTrue(result.result) + Data.objects.write_node_data.assert_called_with(node, "ex_data") + status = Status.objects.get(id=node.id) + self.assertEqual(status.state, states.FAILED) + + Data.objects.write_node_data.reset_mock() + + # test call failed + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.BLOCKED, start=True) + self.assertTrue(result.result) + result = Status.objects.fail(node, "ex_data") + self.assertFalse(result.result) + Data.objects.write_node_data.assert_not_called() + status = Status.objects.get(id=node.id) + self.assertEqual(status.state, states.BLOCKED) + + @patch(PIPELINE_DATA_WIRTE_EX_DATA, MagicMock()) + def test_raw_fail(self): + # success call test + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.raw_fail(node.id, "ex_data") + self.assertTrue(result.result) + Data.objects.write_ex_data.assert_called_with(node.id, "ex_data") + status = 
Status.objects.get(id=node.id) + self.assertEqual(status.state, states.FAILED) + + Data.objects.write_ex_data.reset_mock() + + # test call failed + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.BLOCKED, start=True) + self.assertTrue(result.result) + result = Status.objects.raw_fail(node.id, "ex_data") + self.assertFalse(result.result) + Data.objects.write_ex_data.assert_not_called() + status = Status.objects.get(id=node.id) + self.assertEqual(status.state, states.BLOCKED) + + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + def test_finish(self): + + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.finish(node) + self.assertTrue(result.result) + Data.objects.write_node_data.assert_called_with(node) + status = Status.objects.get(id=node.id) + self.assertFalse(status.error_ignorable) + self.assertEqual(status.state, states.FINISHED) + + # test error_ignorable param + Data.objects.write_node_data.reset_mock() + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + result = Status.objects.finish(node, error_ignorable=True) + self.assertTrue(result.result) + Data.objects.write_node_data.assert_called_with(node) + status = Status.objects.get(id=node.id) + self.assertTrue(status.error_ignorable) + self.assertEqual(status.state, states.FINISHED) + + # test call failed + Data.objects.write_node_data.reset_mock() + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.READY, start=True) + self.assertTrue(result.result) + result = Status.objects.finish(node) + self.assertFalse(result.result) + Data.objects.write_node_data.assert_not_called() + status = Status.objects.get(id=node.id) + self.assertFalse(status.error_ignorable) + self.assertEqual(status.state, states.READY) + + Data.objects.write_node_data.reset_mock() + node = IdentifyObject() + result = Status.objects.transit(id=node.id, to_state=states.READY, start=True) + self.assertTrue(result.result) + result = Status.objects.finish(node, error_ignorable=True) + self.assertFalse(result.result) + Data.objects.write_node_data.assert_not_called() + status = Status.objects.get(id=node.id) + self.assertFalse(status.error_ignorable) + self.assertEqual(status.state, states.READY) + + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + @patch(PIPELINE_HISTORY_LINK_HISTORY, MagicMock()) + @patch(PIPELINE_STATUS_RECOVER_FROM_BLOCK, MagicMock()) + @patch(ENGINE_SIGNAL_NODE_SKIP_CALL, MagicMock()) + def test_skip(self): + + from pipeline.engine.signals import node_skip_call + + mock_record = MagicMock() + mock_record.return_value = IdentifyObject() + node_skip_call.send = MagicMock() + + node = IdentifyObject() + node.skip = MagicMock() + process = Object() + process.root_pipeline = IdentifyObject() + process.subprocess_stack = "subprocess_stack" + result = Status.objects.transit(id=node.id, to_state=states.FAILED, start=True) + self.assertTrue(result.result) + + with patch(PIPELINE_HISTORY_RECORD, mock_record): + result = Status.objects.skip(process, node) + self.assertTrue(result.result) + mock_record.assert_called() + LogEntry.objects.link_history.assert_called_with(node_id=node.id, history_id=mock_record.return_value.id) + status = Status.objects.get(id=node.id) + self.assertTrue(status.skip) + self.assertEqual(status.started_time, status.archived_time) + 
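# a skipped node should be archived immediately and land in FINISHED with skip=True
+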
self.assertEqual(status.state, states.FINISHED) + node.skip.assert_called() + Data.objects.write_node_data.assert_called_with(node) + Status.objects.recover_from_block.assert_called_with(process.root_pipeline.id, process.subprocess_stack) + node_skip_call.send.assert_called_once() + + mock_record.reset_mock() + LogEntry.objects.link_history.reset_mock() + node.skip.reset_mock() + Data.objects.write_node_data.reset_mock() + Status.objects.recover_from_block.reset_mock() + node_skip_call.send.reset_mock() + node.id = uniqid() + result = Status.objects.transit(id=node.id, to_state=states.RUNNING, start=True) + self.assertTrue(result.result) + + # test skip failed + with patch(PIPELINE_HISTORY_RECORD, mock_record): + result = Status.objects.skip(process, node) + self.assertFalse(result.result) + mock_record.assert_not_called() + LogEntry.objects.link_history.assert_not_called() + status = Status.objects.get(id=node.id) + self.assertFalse(status.skip) + self.assertNotEqual(status.started_time, status.archived_time) + self.assertEqual(status.state, states.RUNNING) + node.skip.assert_not_called() + Data.objects.write_node_data.assert_not_called() + Status.objects.recover_from_block.assert_not_called() + node_skip_call.send.assert_not_called() + + @patch(PIPELINE_DATA_WRITE_NODE_DATA, MagicMock()) + @patch(PIPELINE_HISTORY_LINK_HISTORY, MagicMock()) + @patch(PIPELINE_STATUS_RECOVER_FROM_BLOCK, MagicMock()) + @patch(ENGINE_SIGNAL_NODE_RETRY_READY, MagicMock()) + def test_retry(self): + + from pipeline.engine.signals import node_retry_ready + + mock_record = MagicMock() + mock_record.return_value = IdentifyObject() + node_retry_ready.send = MagicMock() + + node = IdentifyObject() + node.skip = MagicMock() + node.next_exec_is_retry = MagicMock() + process = Object() + process.root_pipeline = IdentifyObject() + process.subprocess_stack = "subprocess_stack" + process.save = MagicMock() + result = Status.objects.transit(id=node.id, to_state=states.FAILED, start=True) + self.assertTrue(result.result) + + with patch(PIPELINE_HISTORY_RECORD, mock_record): + status = Status.objects.get(id=node.id) + result = Status.objects.retry(process, node, None) + self.assertTrue(result.result) + self.assertNotEqual(status.version, Status.objects.version_for(node.id)) + status.refresh_from_db() + self.assertEqual(status.retry, 1) + self.assertEqual(status.state, states.READY) + mock_record.assert_called() + LogEntry.objects.link_history.assert_called_with(node_id=node.id, history_id=mock_record.return_value.id) + Status.objects.recover_from_block.assert_called_with(process.root_pipeline.id, process.subprocess_stack) + Data.objects.write_node_data.assert_not_called() + node_retry_ready.send.assert_called_once() + process.save.assert_called_once() + node.next_exec_is_retry.assert_called() + + process.save.reset_mock() + mock_record.reset_mock() + LogEntry.objects.link_history.reset_mock() + node.skip.reset_mock() + node.next_exec_is_retry = MagicMock() + Data.objects.write_node_data.reset_mock() + Status.objects.recover_from_block.reset_mock() + node_retry_ready.send.reset_mock() + node.id = uniqid() + inputs = {"key": "value"} + result = Status.objects.transit(id=node.id, to_state=states.FAILED, start=True) + self.assertTrue(result.result) + + # test retry with inputs + with patch(PIPELINE_HISTORY_RECORD, mock_record): + status = Status.objects.get(id=node.id) + result = Status.objects.retry(process, node, inputs) + self.assertTrue(result.result) + self.assertNotEqual(status.version, Status.objects.version_for(node.id)) 
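+            # retry should bump the status version and reset the node to READY for re-execution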
+ status.refresh_from_db() + self.assertEqual(status.retry, 1) + self.assertEqual(status.state, states.READY) + mock_record.assert_called() + node.next_exec_is_retry.assert_called() + LogEntry.objects.link_history.assert_called_with(node_id=node.id, history_id=mock_record.return_value.id) + Status.objects.recover_from_block.assert_called_with(process.root_pipeline.id, process.subprocess_stack) + Data.objects.write_node_data.assert_called_with(node) + node_retry_ready.send.assert_called_once() + process.save.assert_called_once() + + process.save.reset_mock() + mock_record.reset_mock() + LogEntry.objects.link_history.reset_mock() + node.skip.reset_mock() + Data.objects.write_node_data.reset_mock() + Status.objects.recover_from_block.reset_mock() + node_retry_ready.send.reset_mock() + node.id = uniqid() + result = Status.objects.transit(id=node.id, to_state=states.FINISHED, start=True) + self.assertTrue(result.result) + + # test retry failed + with patch(PIPELINE_HISTORY_RECORD, mock_record): + status = Status.objects.get(id=node.id) + result = Status.objects.retry(process, node, inputs) + self.assertFalse(result.result) + self.assertEqual(status.version, Status.objects.version_for(node.id)) + status.refresh_from_db() + self.assertEqual(status.retry, 0) + self.assertEqual(status.state, states.FINISHED) + mock_record.assert_not_called() + LogEntry.objects.link_history.assert_not_called() + Status.objects.recover_from_block.assert_not_called() + Data.objects.write_node_data.assert_not_called() + node_retry_ready.send.assert_not_called() + process.save.assert_not_called() + + # test retry node reach max run limit + Status.objects.filter(id=node.id).update(loop=11) + with patch("pipeline.engine.models.core.RERUN_MAX_LIMIT", 10): + result = Status.objects.retry(process, node, inputs) + self.assertFalse(result.result) + self.assertEqual(result.message, "rerun times exceed max limit: 10, can not retry") + + @patch(PIPELINE_STATUS_BATCH_TRANSIT, MagicMock()) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock()) + def test_recover_from_block(self): + root_pipeline_id = uniqid() + subprocess_stack = [] + + Status.objects.recover_from_block(root_pipeline_id, []) + Status.objects.batch_transit.assert_called_with( + id_list=subprocess_stack, state=states.RUNNING, from_state=states.BLOCKED + ) + Status.objects.transit.assert_called_with(id=root_pipeline_id, to_state=states.READY, is_pipeline=True) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_subprocess_relationship.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_subprocess_relationship.py new file mode 100644 index 00000000..fc252ab2 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/core/test_subprocess_relationship.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.engine.models import SubProcessRelationship +from pipeline.utils.uniqid import uniqid + + +class TestSubprocessRelationship(TestCase): + def setUp(self): + self.subprocess_id = uniqid() + self.process_id = uniqid() + + def test_add_relation(self): + rel_id = SubProcessRelationship.objects.add_relation( + subprocess_id=self.subprocess_id, process_id=self.process_id + ).id + + self.assertTrue( + SubProcessRelationship.objects.filter(subprocess_id=self.subprocess_id, process_id=self.process_id).exists() + ) + + rel = SubProcessRelationship.objects.get(id=rel_id) + self.assertEqual(rel.subprocess_id, self.subprocess_id) + self.assertEqual(rel.process_id, self.process_id) + + def test_delete_relationship(self): + SubProcessRelationship.objects.add_relation(subprocess_id=self.subprocess_id, process_id="1") + SubProcessRelationship.objects.add_relation(subprocess_id=self.subprocess_id, process_id="2") + SubProcessRelationship.objects.add_relation(subprocess_id=self.subprocess_id, process_id="3") + SubProcessRelationship.objects.delete_relation(subprocess_id=self.subprocess_id, process_id=None) + self.assertFalse(SubProcessRelationship.objects.filter(subprocess_id=self.subprocess_id)) + + SubProcessRelationship.objects.add_relation(subprocess_id="1", process_id=self.process_id) + SubProcessRelationship.objects.add_relation(subprocess_id="2", process_id=self.process_id) + SubProcessRelationship.objects.add_relation(subprocess_id="3", process_id=self.process_id) + SubProcessRelationship.objects.delete_relation(subprocess_id=None, process_id=self.process_id) + self.assertFalse(SubProcessRelationship.objects.filter(process_id=self.process_id)) + + SubProcessRelationship.objects.add_relation(subprocess_id=self.subprocess_id, process_id=self.process_id) + SubProcessRelationship.objects.delete_relation(subprocess_id=self.subprocess_id, process_id=self.process_id) + self.assertFalse( + SubProcessRelationship.objects.filter(process_id=self.process_id, subprocess_id=self.subprocess_id) + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/test_data_snapshot.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/test_data_snapshot.py new file mode 100644 index 00000000..4b87804f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/data/test_data_snapshot.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine.models import DataSnapshot + + +class DataSnapshotTestCase(TestCase): + def setUp(self): + self.key_1 = "test_key_1" + self.key_2 = "test_key_2" + self.not_exist_key = "not_exist" + self.obj_1 = {"a": "a", 1: "1", 2: "2", "list": [1, 2, 3]} + self.obj_2 = [1, 5, 3] + self.obj_3 = [1, 2, 3] + + def test_set_object(self): + set_result = DataSnapshot.objects.set_object(self.key_1, self.obj_1) + self.assertTrue(set_result) + obj_1 = DataSnapshot.objects.get_object(self.key_1) + self.assertEqual(self.obj_1, obj_1) + + # override + set_result = DataSnapshot.objects.set_object(self.key_1, self.obj_2) + self.assertTrue(set_result) + obj_2 = DataSnapshot.objects.get_object(self.key_1) + self.assertEqual(self.obj_2, obj_2) + + # new obj + set_result = DataSnapshot.objects.set_object(self.key_2, self.obj_3) + self.assertTrue(set_result) + obj_3 = DataSnapshot.objects.get_object(self.key_2) + self.assertEqual(self.obj_3, obj_3) + + def test_get_object(self): + DataSnapshot.objects.set_object(self.key_1, self.obj_1) + obj_1 = DataSnapshot.objects.get_object(self.key_1) + self.assertEqual(self.obj_1, obj_1) + + # none + none = DataSnapshot.objects.get_object(self.not_exist_key) + self.assertIsNone(none) + + def test_del_object(self): + DataSnapshot.objects.set_object(self.key_1, self.obj_1) + del_result = DataSnapshot.objects.del_object(self.key_1) + self.assertTrue(del_result) + none = DataSnapshot.objects.get_object(self.key_1) + self.assertIsNone(none) + del_result = DataSnapshot.objects.del_object(self.key_1) + self.assertFalse(del_result) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/test_function_switch.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/test_function_switch.py new file mode 100644 index 00000000..2586d891 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/function/test_function_switch.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine.conf import function_switch as fs +from pipeline.engine.models import FunctionSwitch + +origin_switch_list = fs.switch_list + + +class TestFunctionSwitch(TestCase): + def setUp(self): + fs.switch_list = origin_switch_list + FunctionSwitch.objects.init_db() + + def test_init_db(self): + fs.switch_list = [ + {"name": "test_1", "description": "unit_test_switch_1", "is_active": False}, + {"name": "test_2", "description": "unit_test_switch_2", "is_active": False}, + {"name": "test_3", "description": "unit_test_switch_3", "is_active": True}, + ] + FunctionSwitch.objects.init_db() + for switch_config in fs.switch_list: + switch = FunctionSwitch.objects.get(name=switch_config["name"]) + self.assertEqual(switch.name, switch_config["name"]) + self.assertEqual(switch.description, switch_config["description"]) + self.assertEqual(switch.is_active, switch_config["is_active"]) + + fs.switch_list = [ + {"name": "test_1", "description": "unit_test_switch_1_1", "is_active": False}, + {"name": "test_2", "description": "unit_test_switch_2_2", "is_active": False}, + {"name": "test_3", "description": "unit_test_switch_3_3", "is_active": True}, + {"name": "test_4", "description": "unit_test_switch_3", "is_active": True}, + ] + FunctionSwitch.objects.init_db() + for switch_config in fs.switch_list: + switch = FunctionSwitch.objects.get(name=switch_config["name"]) + self.assertEqual(switch.name, switch_config["name"]) + self.assertEqual(switch.description, switch_config["description"]) + self.assertEqual(switch.is_active, switch_config["is_active"]) + + def test_is_frozen(self): + FunctionSwitch.objects.filter(name=fs.FREEZE_ENGINE).update(is_active=False) + self.assertFalse(FunctionSwitch.objects.is_frozen()) + FunctionSwitch.objects.filter(name=fs.FREEZE_ENGINE).update(is_active=True) + self.assertTrue(FunctionSwitch.objects.is_frozen()) + + def test_freeze_engine(self): + FunctionSwitch.objects.filter(name=fs.FREEZE_ENGINE).update(is_active=False) + FunctionSwitch.objects.freeze_engine() + 
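# freeze_engine should persist is_active=True on the FREEZE_ENGINE switch record +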
is_active = FunctionSwitch.objects.get(name=fs.FREEZE_ENGINE).is_active + self.assertTrue(is_active) + + def test_unfreeze_engine(self): + FunctionSwitch.objects.filter(name=fs.FREEZE_ENGINE).update(is_active=True) + FunctionSwitch.objects.unfreeze_engine() + is_active = FunctionSwitch.objects.get(name=fs.FREEZE_ENGINE).is_active + self.assertFalse(is_active) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/models/mock.py b/runtime/bamboo-pipeline/pipeline/tests/engine/models/mock.py new file mode 100644 index 00000000..b9a4e178 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/models/mock.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.tests.engine.mock import * # noqa diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/test_api.py b/runtime/bamboo-pipeline/pipeline/tests/engine/test_api.py new file mode 100644 index 00000000..28395846 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/test_api.py @@ -0,0 +1,885 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from datetime import timedelta + +from django.test import TestCase +from django.utils import timezone +from redis.exceptions import ConnectionError as RedisConnectionError + +from pipeline.constants import PIPELINE_DEFAULT_PRIORITY, PIPELINE_MAX_PRIORITY, PIPELINE_MIN_PRIORITY +from pipeline.core.flow.activity import ServiceActivity +from pipeline.core.flow.gateway import ExclusiveGateway, ParallelGateway +from pipeline.engine import api, exceptions, states +from pipeline.engine.models import ( + Data, + NodeRelationship, + PipelineModel, + PipelineProcess, + ProcessCeleryTask, + ScheduleService, + Status, +) +from pipeline.engine.utils import calculate_elapsed_time +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +def dummy_wrapper(func): + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + +class TestEngineAPIDecorator(TestCase): + def test__node_existence_check(self): + @api._node_existence_check + def test_func(id): + return True + + with patch(PIPELINE_STATUS_GET, MagicMock()): + self.assertTrue(test_func("id")) + + with patch(PIPELINE_STATUS_GET, MagicMock(side_effect=Status.DoesNotExist)): + act_result = test_func("id") + self.assertFalse(act_result.result) + + def test__frozen_check(self): + @api._frozen_check + def test_func(): + return True + + with patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)): + self.assertTrue(test_func()) + + with patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=True)): + act_result = test_func() + self.assertFalse(act_result.result) + + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test__worker_check(self): + @api._worker_check + def test_func(): + return True + + with patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=[1, 2, 3])): + self.assertTrue(test_func()) + + with patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=[])): + act_result = test_func() + self.assertFalse(act_result.result) + + with patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(side_effect=exceptions.RabbitMQConnectionError)): + act_result = test_func() + self.assertFalse(act_result.result) + + with patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(side_effect=RedisConnectionError)): + act_result = test_func() + self.assertFalse(act_result.result) + + +class TestEngineAPI(TestCase): + def setUp(self): + self.pipeline_id = uniqid() + self.node_id = uniqid() + self.version = uniqid() + self.dummy_return = uniqid() + self.maxDiff = None + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_PREPARE_FOR_PIPELINE, MagicMock()) + @patch(PIPELINE_PIPELINE_MODEL_PREPARE_FOR_PIPELINE, MagicMock()) + @patch(PIPELINE_PIPELINE_MODEL_PIPELINE_READY, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_start_pipeline(self): + process = MockPipelineProcess() + pipeline_instance = "pipeline_instance" + with patch(PIPELINE_PROCESS_PREPARE_FOR_PIPELINE, MagicMock(return_value=process)): + act_result = api.start_pipeline(pipeline_instance) + + self.assertTrue(act_result.result) + + Status.objects.prepare_for_pipeline.assert_called_once_with(pipeline_instance) + + PipelineProcess.objects.prepare_for_pipeline.assert_called_once_with(pipeline_instance) + + PipelineModel.objects.prepare_for_pipeline.assert_called_once_with( + pipeline_instance, process, 
PIPELINE_DEFAULT_PRIORITY, queue="" + ) + + PipelineModel.objects.pipeline_ready.assert_called_once_with(process_id=process.id) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_start_pipeline__raise_invalid_operation(self): + pipeline_instance = "pipeline_instance" + + self.assertRaises( + exceptions.InvalidOperationException, + api.start_pipeline, + pipeline_instance, + priority=PIPELINE_MAX_PRIORITY + 1, + ) + self.assertRaises( + exceptions.InvalidOperationException, + api.start_pipeline, + pipeline_instance, + priority=PIPELINE_MIN_PRIORITY - 1, + ) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + def test_pause_pipeline(self): + act_result = api.pause_pipeline(self.pipeline_id) + + Status.objects.transit.assert_called_once_with( + id=self.pipeline_id, to_state=states.SUSPENDED, is_pipeline=True, appoint=True + ) + + self.assertTrue(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_pipeline__transit_fail(self): + act_result = api.resume_pipeline(self.pipeline_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_STATUS_FILTER, mock.MagicMock(return_value=MockQuerySet(qs=["test"]))) + @patch(PIPELINE_PROCESS_BATCH_PROCESS_READY, MagicMock()) + @patch(PIPELINE_ENGINE_API_GET_PROCESS_TO_BE_WAKED, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_pipeline__transit_success(self): + pipeline_model = MockPipelineModel() + + with patch(PIPELINE_PIPELINE_MODEL_GET, MagicMock(return_value=pipeline_model)): + act_result = api.resume_pipeline(self.pipeline_id) + + self.assertTrue(act_result.result) + + api._get_process_to_be_waked.assert_called_once_with(pipeline_model.process, []) + + PipelineProcess.objects.batch_process_ready.assert_called_once_with( + process_id_list=[], pipeline_id=self.pipeline_id + ) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))) + def test_revoke_pipeline__transit_fail(self): + act_result = api.revoke_pipeline(self.pipeline_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + def test_revoke_pipeline__process_is_none(self): + pipeline_model = MockPipelineModel(process=None) + + with patch(PIPELINE_PIPELINE_MODEL_GET, MagicMock(return_value=pipeline_model)): + act_result = api.revoke_pipeline(self.pipeline_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + 
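# happy path: the transit succeeds, subprocesses are revoked and the whole process tree is destroyed +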
@mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_revoke_pipeline__transit_success(self): + pipeline_model = MockPipelineModel() + + with patch(PIPELINE_PIPELINE_MODEL_GET, MagicMock(return_value=pipeline_model)): + with mock.patch( + PIPELINE_PROCESS_SELECT_FOR_UPDATE, + mock.MagicMock(return_value=MockQuerySet(get_return=pipeline_model.process)), + ): + act_result = api.revoke_pipeline(self.pipeline_id) + + self.assertTrue(act_result.result) + + pipeline_model.process.revoke_subprocess.assert_called_once() + pipeline_model.process.destroy_all.assert_called_once() + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + def test_pause_node_appointment(self): + act_result = api.pause_node_appointment(self.node_id) + + self.assertTrue(act_result.result) + + Status.objects.transit.assert_called_once_with(id=self.node_id, to_state=states.SUSPENDED, appoint=True) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_PROCESS_FILTER, MagicMock(return_value=MockQuerySet(exists_return=False))) + @patch( + PIPELINE_SUBPROCESS_RELATIONSHIP_GET_RELATE_PROCESS, MagicMock(return_value=MockQuerySet(exists_return=False)) + ) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_node_appointment__fail_with_invalid_node(self): + act_result = api.resume_node_appointment(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))) + @patch(PIPELINE_PROCESS_FILTER, MagicMock(return_value=MockQuerySet(exists_return=True))) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_node_appointment__resume_not_subprocess_transit_fail(self): + act_result = api.resume_node_appointment(self.node_id) + + Status.objects.transit.assert_called_once_with(id=self.node_id, to_state=states.READY, appoint=True) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_STATUS_RECOVER_FROM_BLOCK, MagicMock()) + @patch(PIPELINE_PROCESS_PROCESS_READY, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_node_appointment__resume_not_subprocess(self): + process = MockPipelineProcess() + + with patch( + PIPELINE_PROCESS_FILTER, MagicMock(return_value=MockQuerySet(exists_return=True, first_return=process)) + ): + act_result = api.resume_node_appointment(self.node_id) + + self.assertTrue(act_result.result) + + Status.objects.transit.assert_called_once_with(id=self.node_id, to_state=states.READY, appoint=True) + + Status.objects.recover_from_block.assert_called_once_with( + process.root_pipeline.id, process.subprocess_stack + ) + + PipelineProcess.objects.process_ready.assert_called_once_with(process_id=process.id) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + 
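# the node belongs to a subprocess, but the pipeline-level transit fails, so the resume fails too +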
@patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False))) + @patch(PIPELINE_PROCESS_FILTER, MagicMock(return_value=MockQuerySet(exists_return=False))) + @patch( + PIPELINE_SUBPROCESS_RELATIONSHIP_GET_RELATE_PROCESS, MagicMock(return_value=MockQuerySet(exists_return=True)) + ) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_node_appointment__resume_subprocess_transit_fail(self): + act_result = api.resume_node_appointment(self.node_id) + + Status.objects.transit.assert_called_once_with( + id=self.node_id, to_state=states.RUNNING, is_pipeline=True, appoint=True + ) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_PROCESS_FILTER, MagicMock(return_value=MockQuerySet(exists_return=False))) + @patch(PIPELINE_STATUS_RECOVER_FROM_BLOCK, MagicMock()) + @patch(PIPELINE_PROCESS_BATCH_PROCESS_READY, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_resume_node_appointment__resume_subprocess(self): + root_pipeline = PipelineObject() + + can_be_wake_process_1 = MockPipelineProcess(can_be_waked=True, root_pipeline=root_pipeline) + can_be_wake_process_2 = MockPipelineProcess(can_be_waked=True, root_pipeline=root_pipeline) + can_be_wake_process_3 = MockPipelineProcess(can_be_waked=True, root_pipeline=root_pipeline) + can_not_be_wake_process_1 = MockPipelineProcess(root_pipeline=root_pipeline) + can_not_be_wake_process_2 = MockPipelineProcess(root_pipeline=root_pipeline) + + exists_return = [ + can_be_wake_process_1, + can_be_wake_process_2, + can_be_wake_process_3, + can_not_be_wake_process_1, + can_not_be_wake_process_2, + ] + + subprocess_to_be_transit = {can_be_wake_process_1.id, can_be_wake_process_2.id, can_be_wake_process_3.id} + + can_be_waked_ids = [can_be_wake_process_1.id, can_be_wake_process_2.id, can_be_wake_process_3.id] + + with patch( + PIPELINE_SUBPROCESS_RELATIONSHIP_GET_RELATE_PROCESS, + MagicMock( + return_value=MockQuerySet( + exists_return=exists_return, first_return=can_be_wake_process_1, qs=exists_return + ) + ), + ): + act_result = api.resume_node_appointment(self.node_id) + + self.assertTrue(act_result.result) + + Status.objects.recover_from_block.assert_called_once_with(root_pipeline.id, subprocess_to_be_transit) + + PipelineProcess.objects.batch_process_ready.assert_called_once_with( + process_id_list=can_be_waked_ids, pipeline_id=root_pipeline.id + ) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_PROCESS_GET, MagicMock(side_effect=PipelineProcess.DoesNotExist)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_retry_node__fail_with_can_not_get_process(self): + act_result = api.retry_node(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + 
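# retry_node fails when the target node is not a valid flow node type +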
@mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_retry_node__fail_with_invalid_node_type(self): + top_pipeline = PipelineObject(nodes={self.node_id: ServiceActObject()}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.retry_node(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_retry_node__with_node_can_not_retry(self): + # with service activity + top_pipeline = PipelineObject( + nodes={self.node_id: ServiceActivity(id=self.node_id, service=None, retryable=False)} + ) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.retry_node(self.node_id) + + self.assertFalse(act_result.result) + + # with parallel gateway + pg = ParallelGateway(id=self.node_id, converge_gateway_id=uniqid()) + setattr(pg, "retryable", False) + top_pipeline = PipelineObject(nodes={self.node_id: pg}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.retry_node(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_RETRY, MagicMock(return_value=MockActionResult(result=False, message="retry fail"))) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_retry_node__with_retry_fail(self): + node = ServiceActivity(id=self.node_id, service=None) + top_pipeline = PipelineObject(nodes={self.node_id: node}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.retry_node(self.node_id) + + Status.objects.retry.assert_called_once_with(process, node, None) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_RETRY, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_PROCESS_PROCESS_READY, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_retry_node__success(self): + node = ServiceActivity(id=self.node_id, service=None) + top_pipeline = PipelineObject(nodes={self.node_id: node}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + retry_inputs = {"id": self.node_id} + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.retry_node(self.node_id, inputs=retry_inputs) + + self.assertTrue(act_result.result) + + Status.objects.retry.assert_called_once_with(process, node, retry_inputs) + + PipelineProcess.objects.process_ready.assert_called_once_with(process_id=process.id) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_PROCESS_GET, 
MagicMock(side_effect=PipelineProcess.DoesNotExist))
+ @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock())
+ def test_skip_node__fail_with_can_not_get_process(self):
+ act_result = api.skip_node(self.node_id)
+
+ self.assertFalse(act_result.result)
+
+ @patch(PIPELINE_STATUS_GET, MagicMock())
+ @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False))
+ @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True))
+ @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock())
+ def test_skip_node__fail_with_invalid_node_type(self):
+ top_pipeline = PipelineObject(nodes={self.node_id: ServiceActObject()})
+ process = MockPipelineProcess(top_pipeline=top_pipeline)
+
+ with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
+ act_result = api.skip_node(self.node_id)
+
+ self.assertFalse(act_result.result)
+
+ @patch(PIPELINE_STATUS_GET, MagicMock())
+ @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False))
+ @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True))
+ @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock())
+ def test_skip_node__fail_with_node_can_not_skip(self):
+ top_pipeline = PipelineObject(
+ nodes={self.node_id: ServiceActivity(id=self.node_id, service=None, skippable=False)}
+ )
+ process = MockPipelineProcess(top_pipeline=top_pipeline)
+
+ with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
+ act_result = api.skip_node(self.node_id)
+
+ self.assertFalse(act_result.result)
+
+ @patch(PIPELINE_STATUS_GET, MagicMock())
+ @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False))
+ @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True))
+ @patch(PIPELINE_STATUS_SKIP, MagicMock(return_value=MockActionResult(result=False, message="skip fail")))
+ @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock())
+ def test_skip_node__fail_with_skip_fail(self):
+ node = ServiceActivity(id=self.node_id, service=None)
+ top_pipeline = PipelineObject(nodes={self.node_id: node})
+ process = MockPipelineProcess(top_pipeline=top_pipeline)
+
+ with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
+ act_result = api.skip_node(self.node_id)
+
+ Status.objects.skip.assert_called_once_with(process, node)
+
+ self.assertFalse(act_result.result)
+
+ @patch(PIPELINE_STATUS_GET, MagicMock())
+ @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False))
+ @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True))
+ @patch(PIPELINE_STATUS_SKIP, MagicMock(return_value=MockActionResult(result=True)))
+ @patch(PIPELINE_PROCESS_PROCESS_READY, MagicMock())
+ @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock())
+ def test_skip_node__success(self):
+ node = ServiceActivity(id=self.node_id, service=None)
+ mock_next = IdentifyObject()
+
+ def _next():
+ return mock_next
+
+ setattr(node, "next", _next)
+ top_pipeline = PipelineObject(nodes={self.node_id: node}, context=MockContext())
+ process = MockPipelineProcess(top_pipeline=top_pipeline)
+
+ with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)):
+ act_result = api.skip_node(self.node_id)
+
+ self.assertTrue(act_result.result)
+
+ process.top_pipeline.context.extract_output.assert_called_once_with(node)
+
+ process.save.assert_called_once()
+
+ # after a successful skip the process should be handed over to the node following the skipped one
+ PipelineProcess.objects.process_ready.assert_called_once_with(process_id=process.id, current_node_id=mock_next.id)
+
+ @patch(PIPELINE_STATUS_GET, MagicMock())
+ @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) +
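# skip_exclusive_gateway should fail when the owning process can not be found +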
@patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_PROCESS_GET, MagicMock(side_effect=PipelineProcess.DoesNotExist)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_skip_exclusive_gateway__fail_with_can_not_get_process(self): + act_result = api.skip_exclusive_gateway(self.node_id, uniqid()) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_skip_exclusive_gateway__fail_with_invalid_node_type(self): + top_pipeline = PipelineObject(nodes={self.node_id: ServiceActObject()}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.skip_exclusive_gateway(self.node_id, uniqid()) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_SKIP, MagicMock(return_value=MockActionResult(result=False, message="skip fail"))) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_skip_exclusive_gateway__fail_with_skip_fail(self): + eg = ExclusiveGateway(id=uniqid()) + next_node = IdentifyObject() + setattr(eg, "target_for_sequence_flow", MagicMock(return_value=next_node)) + top_pipeline = PipelineObject(nodes={self.node_id: eg}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.skip_exclusive_gateway(self.node_id, uniqid()) + + Status.objects.skip.assert_called_once_with(process, eg) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_STATUS_SKIP, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_PROCESS_PROCESS_READY, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_skip_exclusive_gateway__success(self): + eg = ExclusiveGateway(id=uniqid()) + next_node = IdentifyObject() + setattr(eg, "target_for_sequence_flow", MagicMock(return_value=next_node)) + top_pipeline = PipelineObject(nodes={self.node_id: eg}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.skip_exclusive_gateway(self.node_id, uniqid()) + + self.assertTrue(act_result.result) + + Status.objects.skip.assert_called_once_with(process, eg) + + PipelineProcess.objects.process_ready.assert_called_once_with( + process_id=process.id, current_node_id=next_node.id + ) + + @patch(PIPELINE_NODE_RELATIONSHIP_FILTER, MagicMock(return_value=MockQuerySet(exists_return=False))) + def test_status_tree__with_not_exist_node(self): + self.assertRaises(exceptions.InvalidOperationException, api.get_status_tree, self.node_id) + + def test_status_tree(self): + s1 = Status.objects.create( + id=uniqid(), + name="s1", + state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + s2 = Status.objects.create( + id=uniqid(), + name="s2", + 
state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + s3 = Status.objects.create( + id=uniqid(), + name="s3", + state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + s4 = Status.objects.create( + id=uniqid(), + name="s4", + state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + s5 = Status.objects.create( + id=uniqid(), + name="s5", + state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + s6 = Status.objects.create( + id=uniqid(), + name="s6", + state=states.FINISHED, + started_time=timezone.now(), + archived_time=timezone.now() + timedelta(seconds=3), + ) + + NodeRelationship.objects.build_relationship(s1.id, s1.id) + NodeRelationship.objects.build_relationship(s2.id, s2.id) + NodeRelationship.objects.build_relationship(s3.id, s3.id) + NodeRelationship.objects.build_relationship(s4.id, s4.id) + NodeRelationship.objects.build_relationship(s5.id, s5.id) + NodeRelationship.objects.build_relationship(s6.id, s6.id) + + NodeRelationship.objects.build_relationship(s1.id, s2.id) + NodeRelationship.objects.build_relationship(s1.id, s3.id) + NodeRelationship.objects.build_relationship(s2.id, s4.id) + NodeRelationship.objects.build_relationship(s4.id, s5.id) + NodeRelationship.objects.build_relationship(s4.id, s6.id) + + # refresh from db, sync datetime + s1.refresh_from_db() + s2.refresh_from_db() + s3.refresh_from_db() + s4.refresh_from_db() + s5.refresh_from_db() + s6.refresh_from_db() + + def get_status_dict_with_children(s, children): + return { + "archived_time": s.archived_time, + "created_time": s.created_time, + "elapsed_time": calculate_elapsed_time(s.started_time, s.archived_time), + "error_ignorable": s.error_ignorable, + "id": s.id, + "loop": s.loop, + "name": s.name, + "retry": s.retry, + "skip": s.skip, + "started_time": s.started_time, + "state": s.state, + "version": s.version, + "children": children, + "state_refresh_at": None, + } + + tree_depth_1 = get_status_dict_with_children( + s1, + children={ + s2.id: get_status_dict_with_children(s2, children={}), + s3.id: get_status_dict_with_children(s3, children={}), + }, + ) + + tree = api.get_status_tree(s1.id, 1) + self.assertDictEqual(tree, tree_depth_1) + + tree_depth_2 = get_status_dict_with_children( + s1, + children={ + s2.id: get_status_dict_with_children(s2, children={s4.id: get_status_dict_with_children(s4, {})}), + s3.id: get_status_dict_with_children(s3, {}), + }, + ) + + tree = api.get_status_tree(s1.id, 2) + self.assertDictEqual(tree, tree_depth_2) + + tree_depth_3 = get_status_dict_with_children( + s1, + children={ + s2.id: get_status_dict_with_children( + s2, + children={ + s4.id: get_status_dict_with_children( + s4, + children={ + s5.id: get_status_dict_with_children(s5, {}), + s6.id: get_status_dict_with_children(s6, {}), + }, + ) + }, + ), + s3.id: get_status_dict_with_children(s3, children={}), + }, + ) # noqa + + tree = api.get_status_tree(s1.id, 3) + self.assertDictEqual(tree, tree_depth_3) + + tree = api.get_status_tree(s1.id, 4) + self.assertDictEqual(tree, tree_depth_3) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(side_effect=ScheduleService.DoesNotExist)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + 
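# activity_callback retries the ScheduleService lookup up to three times before raising +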
def test_activity_callback__fail_with_schedule_not_exist(self): + with patch(PIPELINE_STATUS_VERSION_FOR, MagicMock(return_value=self.version)): + self.assertRaises(ScheduleService.DoesNotExist, api.activity_callback, self.node_id, None) + + ScheduleService.objects.schedule_for.assert_has_calls([mock.call(self.node_id, self.version)] * 3) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock()) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_activity_callback__fail_with_process_not_exist(self): + with patch(PIPELINE_STATUS_VERSION_FOR, MagicMock(return_value=self.version)): + act_result = api.activity_callback(self.node_id, None) + + ScheduleService.objects.schedule_for.assert_called_once_with(self.node_id, self.version) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @patch(PIPELINE_PROCESS_GET, MagicMock(return_value=IdentifyObject())) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_activity_callback__fail_with_schedule_finished(self): + with patch(PIPELINE_STATUS_VERSION_FOR, MagicMock(return_value=self.version)): + with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=MockScheduleService(is_finished=True))): + self.assertRaises(exceptions.InvalidOperationException, api.activity_callback, self.node_id, None) + + ScheduleService.objects.schedule_for.assert_called_once_with(self.node_id, self.version) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_ENGINE_API_WORKERS, MagicMock(return_value=True)) + @mock.patch(DJCELERY_APP_CURRENT_APP_CONNECTION, mock.MagicMock()) + def test_activity_callback__success(self): + process = MockPipelineProcess() + callback_data = uniqid() + + with patch(PIPELINE_STATUS_VERSION_FOR, MagicMock(return_value=self.version)): + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + # schedule service get once + service = MockScheduleService() + with patch(PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(return_value=service)): + act_result = api.activity_callback(self.node_id, callback_data) + + self.assertTrue(act_result.result) + + ScheduleService.objects.schedule_for.assert_called_once_with(self.node_id, self.version) + + service.callback.assert_called_once_with(callback_data, process.id) + + # schedule service get twice + service = MockScheduleService() + with patch( + PIPELINE_SCHEDULE_SCHEDULE_FOR, MagicMock(side_effect=[ScheduleService.DoesNotExist, service]) + ): + act_result = api.activity_callback(self.node_id, callback_data) + + self.assertTrue(act_result.result) + + ScheduleService.objects.schedule_for.assert_has_calls([mock.call(self.node_id, self.version)] * 2) + + service.callback.assert_called_once_with(callback_data, process.id) + + def test_get_inputs(self): + data = MockData(get_inputs_return=uniqid()) + with patch(PIPELINE_DATA_GET, MagicMock(return_value=data)): + inputs = api.get_inputs(self.node_id) + self.assertEqual(inputs, data.inputs) + + def test_get_outputs(self): + data = MockData(get_inputs_return=uniqid(), get_outputs_return=uniqid()) + with patch(PIPELINE_DATA_GET, MagicMock(return_value=data)): + outputs = api.get_outputs(self.node_id) + self.assertEqual(outputs, {"outputs": data.outputs, "ex_data": data.ex_data}) + + def 
test_get_batch_outputs(self): + data1 = MockData(get_inputs_return=uniqid(), get_outputs_return=uniqid()) + data2 = MockData(get_inputs_return=uniqid(), get_outputs_return=uniqid()) + data3 = MockData(get_inputs_return=uniqid(), get_outputs_return=uniqid()) + with patch( + PIPELINE_DATA_FILTER, MagicMock(return_value=MockQuerySet(exists_return=True, qs=[data1, data2, data3])) + ): + outputs = api.get_batch_outputs([data1.id, data2.id, data3.id]) + self.assertEqual( + outputs, + { + data1.id: {"outputs": data1.outputs, "ex_data": data1.ex_data}, + data2.id: {"outputs": data2.outputs, "ex_data": data2.ex_data}, + data3.id: {"outputs": data3.outputs, "ex_data": data3.ex_data}, + }, + ) + + def test_get_activity_histories(self): + with patch(PIPELINE_HISTORY_GET_HISTORY, MagicMock(return_value=self.dummy_return)): + history = api.get_activity_histories(self.node_id) + self.assertEqual(history, self.dummy_return) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_PROCESS_GET, MagicMock(side_effect=PipelineProcess.DoesNotExist)) + def test_forced_fail__fail_with_process_do_not_exist(self): + act_result = api.forced_fail(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + def test_forced_fail__fail_with_invalid_node_type(self): + top_pipeline = PipelineObject(nodes={self.node_id: ServiceActObject()}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.forced_fail(self.node_id, uniqid()) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_STATUS_GET, MagicMock()) + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=False, message="transit fail"))) + def test_forced_fail__fail_with_transit_fail(self): + top_pipeline = PipelineObject(nodes={self.node_id: ServiceActivity(id=self.node_id, service=None)}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.forced_fail(self.node_id) + + self.assertFalse(act_result.result) + + @patch(PIPELINE_FUNCTION_SWITCH_IS_FROZEN, MagicMock(return_value=False)) + @patch(PIPELINE_STATUS_TRANSIT, MagicMock(return_value=MockActionResult(result=True))) + @patch(PIPELINE_SCHEDULE_DELETE_SCHEDULE, MagicMock()) + @patch(PIPELINE_CELERYTASK_REVOKE, MagicMock()) + @patch(PIPELINE_DATA_FORCED_FAIL, MagicMock()) + def test_forced_fail__success(self): + node = ServiceActivity(id=self.node_id, service=None) + setattr(node, "failure_handler", MagicMock()) + + top_pipeline = PipelineObject(nodes={self.node_id: node}) + process = MockPipelineProcess(top_pipeline=top_pipeline) + status = MockStatus() + old_version = status.version + kill = True + ex_data = "ex_data" + + with patch(PIPELINE_STATUS_GET, MagicMock(return_value=status)): + with patch(PIPELINE_PROCESS_GET, MagicMock(return_value=process)): + act_result = api.forced_fail(self.node_id, kill, ex_data) + + self.assertTrue(act_result.result) + + node.failure_handler.assert_called_once_with(process.root_pipeline.data) + + ScheduleService.objects.delete_schedule.assert_called_once_with(status.id, old_version) + + Data.objects.forced_fail.assert_called_once_with(self.node_id, ex_data) + + 
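# the node's running celery task must be revoked so the forced-failed node stops executing +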
ProcessCeleryTask.objects.revoke.assert_called_once_with(process.id, kill) + + process.adjust_status.assert_called_once() + + self.assertTrue(process.is_sleep) + + process.save.assert_called_once() + + self.assertNotEqual(old_version, status.version) + + status.save.assert_called_once() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/test_apps.py b/runtime/bamboo-pipeline/pipeline/tests/engine/test_apps.py new file mode 100644 index 00000000..f2e922db --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/test_apps.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import mock +from django.test import TestCase + +from pipeline.engine.apps import EngineConfig + + +class EngineConfigTestCase(TestCase): + @mock.patch("pipeline.django_signal_valve.valve.set_valve_function", mock.MagicMock()) + @mock.patch("pipeline.engine.models.FunctionSwitch.objects.init_db", mock.MagicMock()) + @mock.patch("pipeline.engine.signals.dispatch.dispatch", mock.MagicMock()) + def test_ready(self): + from pipeline.engine.signals import dispatch # noqa + from pipeline.django_signal_valve import valve # noqa + from pipeline.engine.models import FunctionSwitch # noqa + + EngineConfig.path = "test" + + config = EngineConfig("", "") + + config.ready() + + dispatch.dispatch.assert_called() + + valve.set_valve_function.assert_called_with(FunctionSwitch.objects.is_frozen) + + FunctionSwitch.objects.init_db.assert_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/test_states.py b/runtime/bamboo-pipeline/pipeline/tests/engine/test_states.py new file mode 100644 index 00000000..81931d23 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/test_states.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import itertools + +from django.test import TestCase + +from pipeline.engine import states +from pipeline.engine.states import * # noqa + + +class StatesTestCase(TestCase): + def test_constants(self): + self.assertEqual(CREATED, "CREATED") + self.assertEqual(READY, "READY") + self.assertEqual(RUNNING, "RUNNING") + self.assertEqual(SUSPENDED, "SUSPENDED") + self.assertEqual(BLOCKED, "BLOCKED") + self.assertEqual(FINISHED, "FINISHED") + self.assertEqual(FAILED, "FAILED") + self.assertEqual(REVOKED, "REVOKED") + self.assertEqual(EXPIRED, "EXPIRED") + + self.assertEqual(ALL_STATES, frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED])) + self.assertEqual(SLEEP_STATES, frozenset([SUSPENDED, REVOKED])) + self.assertEqual(CHILDREN_IGNORE_STATES, frozenset([BLOCKED])) + self.assertEqual( + states._NODE_TRANSITION, + ConstantDict( + { + READY: frozenset([RUNNING, SUSPENDED]), + RUNNING: frozenset([FINISHED, FAILED]), + SUSPENDED: frozenset([READY, REVOKED]), + BLOCKED: frozenset([]), + FINISHED: frozenset([RUNNING, FAILED]), + FAILED: frozenset([]), + REVOKED: frozenset([]), + } + ), + ) + self.assertEqual( + states._PIPELINE_TRANSITION, + ConstantDict( + { + READY: frozenset([RUNNING, SUSPENDED, BLOCKED]), + RUNNING: frozenset([SUSPENDED, BLOCKED, FINISHED, FAILED]), + SUSPENDED: frozenset([READY, REVOKED, BLOCKED]), + BLOCKED: frozenset([READY, REVOKED]), + FINISHED: frozenset([RUNNING]), + FAILED: frozenset([]), + REVOKED: frozenset([]), + } + ), + ) + self.assertEqual( + states._APPOINT_PIPELINE_TRANSITION, + ConstantDict( + { + READY: frozenset([SUSPENDED, REVOKED]), + RUNNING: frozenset([SUSPENDED, REVOKED]), + SUSPENDED: frozenset([READY, REVOKED, RUNNING]), + BLOCKED: frozenset([REVOKED]), + FINISHED: frozenset([]), + FAILED: frozenset([REVOKED]), + REVOKED: frozenset([]), + } + ), + ) + self.assertEqual( + states._APPOINT_NODE_TRANSITION, + ConstantDict( + { + READY: frozenset([SUSPENDED]), + RUNNING: frozenset([]), + SUSPENDED: frozenset([READY]), + BLOCKED: frozenset([]), + FINISHED: frozenset([]), + FAILED: frozenset([READY, FINISHED]), + REVOKED: frozenset([]), + } + ), + ) + self.assertEqual( + TRANSITION_MAP, + { + # first level: is_pipeline + True: { + # second level: appoint + True: states._APPOINT_PIPELINE_TRANSITION, + False: states._PIPELINE_TRANSITION, + }, + False: {True: states._APPOINT_NODE_TRANSITION, False: states._NODE_TRANSITION}, + }, + ) + + def test_can_transit(self): + + for is_pipeline, appoint_case in list(TRANSITION_MAP.items()): + for is_appoint, from_to_map in list(appoint_case.items()): + for from_, to_set in list(from_to_map.items()): + valid_transit = to_set + invalid_transit = ALL_STATES.difference(to_set) + + for valid_to in valid_transit: + self.assertTrue( + can_transit( + from_state=from_, to_state=valid_to, is_pipeline=is_pipeline, appoint=is_appoint + ) + ) + + for invalid_to in invalid_transit: + self.assertFalse( + can_transit( + from_state=from_, to_state=invalid_to, is_pipeline=is_pipeline, appoint=is_appoint + ) + ) + + def test_is_rerunning(self): + for (f, t) in itertools.product(ALL_STATES, ALL_STATES): + if f == FINISHED and t == RUNNING: + self.assertTrue(is_rerunning(f, t)) + else: + self.assertFalse(is_rerunning(f, t)) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/test_tasks.py b/runtime/bamboo-pipeline/pipeline/tests/engine/test_tasks.py new file mode 100644 index 00000000..e123fd90 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/test_tasks.py @@ -0,0 +1,399 @@ +# -*- coding: 
utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import itertools + +from django.test import TestCase + +from pipeline.core.pipeline import Pipeline +from pipeline.engine import api, signals, states, tasks +from pipeline.engine.core import runtime, schedule +from pipeline.engine.models import NodeCeleryTask, NodeRelationship, ProcessCeleryTask, Status +from pipeline.tests.engine.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +class EngineTaskTestCase(TestCase): + def setUp(self): + self.alive_process = MockPipelineProcess(top_pipeline=PipelineObject(node=ServiceActObject(interval=None))) + self.not_alive_process = MockPipelineProcess(is_alive=False) + self.get_alive_process = mock.MagicMock(return_value=self.alive_process) + self.get_not_alive_process = mock.MagicMock(return_value=self.not_alive_process) + self.transit_success = mock.MagicMock(return_value=MockActionResult(result=True)) + self.transit_fail = mock.MagicMock(return_value=MockActionResult(result=False)) + self.transit_fail_and_return_suspended = mock.MagicMock( + return_value=MockActionResult(result=False, extra=FancyDict({"state": states.SUSPENDED})) + ) + self.transit_fail_and_return_blocked = MagicMock( + return_value=MockActionResult(result=False, extra=FancyDict({"state": states.BLOCKED})) + ) + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + def test_process_unfreeze(self): + # alive process + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + tasks.process_unfreeze(self.alive_process.id) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + runtime.run_loop.assert_called_with(self.alive_process) + + runtime.run_loop.reset_mock() + + # dead process + with mock.patch(PIPELINE_PROCESS_GET, self.get_not_alive_process): + tasks.process_unfreeze(self.not_alive_process.id) + + self.get_not_alive_process.assert_called_with(id=self.not_alive_process.id) + + runtime.run_loop.assert_not_called() + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock()) + @mock.patch(PIPELINE_NODE_RELATIONSHIP_BUILD, mock.MagicMock()) + def test_start(self): + # dead process + with mock.patch(PIPELINE_PROCESS_GET, self.get_not_alive_process): + tasks.start(self.not_alive_process.id) + + self.get_not_alive_process.assert_called_with(id=self.not_alive_process.id) + + Status.objects.transit.assert_not_called() + + NodeRelationship.objects.build_relationship.assert_not_called() + + runtime.run_loop.assert_not_called() + + # alive process + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + # transit success + with mock.patch(PIPELINE_STATUS_TRANSIT, self.transit_success): + tasks.start(self.alive_process.id) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.transit_success.assert_called_with( + 
self.alive_process.root_pipeline.id, states.RUNNING, is_pipeline=True, start=True + ) + + NodeRelationship.objects.build_relationship.assert_called_with( + self.alive_process.root_pipeline.id, self.alive_process.root_pipeline.id + ) + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_alive_process.reset_mock() + self.transit_success.reset_mock() + NodeRelationship.objects.build_relationship.reset_mock() + runtime.run_loop.reset_mock() + + # transit failed + with mock.patch(PIPELINE_STATUS_TRANSIT, self.transit_fail): + tasks.start(self.alive_process.id) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.transit_fail.assert_called_with( + self.alive_process.root_pipeline.id, states.RUNNING, is_pipeline=True, start=True + ) + + NodeRelationship.objects.build_relationship.assert_not_called() + + runtime.run_loop.assert_not_called() + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + def test_dispatch(self): + # alive process + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + tasks.dispatch(self.alive_process.id) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_not_alive_process.reset_mock() + runtime.run_loop.reset_mock() + + # dead process + with mock.patch(PIPELINE_PROCESS_GET, self.get_not_alive_process): + tasks.dispatch(self.not_alive_process.id) + + self.get_not_alive_process.assert_called_with(id=self.not_alive_process.id) + + runtime.run_loop.assert_not_called() + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + @mock.patch(PIPELINE_STATUS_TRANSIT, mock.MagicMock()) + def test_process_wake_up(self): + # dead process + with mock.patch(PIPELINE_PROCESS_GET, self.get_not_alive_process): + for current_node_id, call_from_child in itertools.product((uniqid(), None), (True, False)): + self.get_not_alive_process.reset_mock() + tasks.process_wake_up( + self.not_alive_process.id, current_node_id=current_node_id, call_from_child=call_from_child + ) + + self.get_not_alive_process.assert_called_with(id=self.not_alive_process.id) + + Status.objects.transit.assert_not_called() + + self.not_alive_process.wake_up.assert_not_called() + + runtime.run_loop.assert_not_called() + + # alive process + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + # call from child + tasks.process_wake_up(self.alive_process.id, current_node_id=None, call_from_child=True) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + Status.objects.transit.assert_not_called() + + self.alive_process.wake_up.assert_called() + + self.assertIsNone(self.alive_process.current_node_id) + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_alive_process.reset_mock() + self.alive_process.wake_up.reset_mock() + runtime.run_loop.reset_mock() + + # has current_node_id + current_node_id = uniqid() + tasks.process_wake_up(self.alive_process.id, current_node_id=current_node_id, call_from_child=True) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + Status.objects.transit.assert_not_called() + + self.alive_process.wake_up.assert_called() + + self.assertEqual(self.alive_process.current_node_id, current_node_id) + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_alive_process.reset_mock() + self.alive_process.wake_up.reset_mock() + runtime.run_loop.reset_mock() + self.alive_process.current_node_id = None + + # not call from child + + with mock.patch(PIPELINE_STATUS_TRANSIT, 
self.transit_success): + # transit success + tasks.process_wake_up(self.alive_process.id, current_node_id=None, call_from_child=False) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.transit_success.assert_called_with( + self.alive_process.root_pipeline.id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True + ) + + self.alive_process.wake_up.assert_called() + + self.assertIsNone(self.alive_process.current_node_id) + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_alive_process.reset_mock() + self.alive_process.wake_up.reset_mock() + runtime.run_loop.reset_mock() + + with mock.patch(PIPELINE_STATUS_TRANSIT, self.transit_fail_and_return_suspended): + # transit failed + tasks.process_wake_up(self.alive_process.id, current_node_id=None, call_from_child=False) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.transit_fail_and_return_suspended.assert_called_with( + self.alive_process.root_pipeline.id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True + ) + + self.alive_process.wake_up.assert_not_called() + + self.assertIsNone(self.alive_process.current_node_id) + + runtime.run_loop.assert_not_called() + + with mock.patch(PIPELINE_STATUS_TRANSIT, self.transit_fail_and_return_blocked): + # transit failed but in blocked state + tasks.process_wake_up(self.alive_process.id, current_node_id=None, call_from_child=False) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.transit_fail_and_return_blocked.assert_called_with( + self.alive_process.root_pipeline.id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True + ) + + self.alive_process.wake_up.assert_called() + + self.assertIsNone(self.alive_process.current_node_id) + + runtime.run_loop.assert_called_with(self.alive_process) + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + def test_wake_up(self): + # alive process + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + tasks.wake_up(self.alive_process.id) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.alive_process.wake_up.assert_called() + + runtime.run_loop.assert_called_with(self.alive_process) + + self.get_not_alive_process.reset_mock() + self.alive_process.wake_up.reset_mock() + runtime.run_loop.reset_mock() + + # dead process + with mock.patch(PIPELINE_PROCESS_GET, self.get_not_alive_process): + tasks.wake_up(self.not_alive_process.id) + + self.get_not_alive_process.assert_called_with(id=self.not_alive_process.id) + + self.not_alive_process.wake_up.assert_not_called() + + runtime.run_loop.assert_not_called() + + @mock.patch(ENGINE_TASKS_WAKE_UP_APPLY, mock.MagicMock(return_value=IdentifyObject(id="task_id"))) + @mock.patch(PIPELINE_CELERYTASK_BIND, mock.MagicMock()) + def test_batch_wake_up(self): + process_id_list = [uniqid() for _ in range(5)] + + # transit success + with mock.patch(PIPELINE_STATUS_TRANSIT, self.transit_success): + tasks.batch_wake_up(process_id_list, self.alive_process.root_pipeline.id) + + self.transit_success.assert_called_with( + self.alive_process.root_pipeline.id, to_state=states.RUNNING, is_pipeline=True + ) + + tasks.wake_up.apply_async.assert_has_calls([mock.call(args=[pid]) for pid in process_id_list]) + + ProcessCeleryTask.objects.bind.assert_has_calls([mock.call(pid, "task_id") for pid in process_id_list]) + + tasks.wake_up.apply_async.reset_mock() + ProcessCeleryTask.objects.bind.reset_mock() + + # transit fail + with mock.patch(PIPELINE_STATUS_TRANSIT, 
self.transit_fail): + tasks.batch_wake_up(process_id_list, self.alive_process.root_pipeline.id) + + self.transit_fail.assert_called_with( + self.alive_process.root_pipeline.id, to_state=states.RUNNING, is_pipeline=True + ) + + tasks.wake_up.apply_async.assert_not_called() + + ProcessCeleryTask.objects.bind.assert_not_called() + + @mock.patch(ENGINE_RUN_LOOP, mock.MagicMock()) + def test_wake_from_schedule(self): + with mock.patch(PIPELINE_PROCESS_GET, self.get_alive_process): + tasks.wake_from_schedule(self.alive_process.id, None) + + self.get_alive_process.assert_called_with(id=self.alive_process.id) + + self.alive_process.wake_up.assert_called() + + self.assertEqual(self.alive_process.current_node_id, self.alive_process.top_pipeline.node(None).next().id) + + runtime.run_loop.assert_called_with(self.alive_process) + + @mock.patch(ENGINE_SCHEDULE, mock.MagicMock()) + def test_service_schedule(self): + process_id = uniqid() + schedule_id = uniqid() + data_id = None + tasks.service_schedule(process_id, schedule_id, data_id) + schedule.schedule.assert_called_with(process_id, schedule_id, data_id) + + @mock.patch(PIPELINE_NODE_CELERYTASK_DESTROY, mock.MagicMock()) + @mock.patch(ENGINE_API_FORCED_FAIL, mock.MagicMock()) + @mock.patch(ENGINE_ACTIVITY_FAIL_SIGNAL, mock.MagicMock()) + def test_node_timeout_check(self): + + # state for return None + with mock.patch(PIPELINE_STATUS_STATE_FOR, mock.MagicMock(return_value=None)): + node_id = uniqid() + version = uniqid() + root_pipeline_id = uniqid() + tasks.node_timeout_check(node_id, version, root_pipeline_id) + + NodeCeleryTask.objects.destroy.assert_called_with(node_id) + + Status.objects.state_for.assert_called_with(node_id, version=version, may_not_exist=True) + + api.forced_fail.assert_not_called() + + for state_not_running in states.ALL_STATES.difference({states.RUNNING}): + NodeCeleryTask.objects.destroy.reset_mock() + api.forced_fail.reset_mock() + # state for return other values + with mock.patch(PIPELINE_STATUS_STATE_FOR, mock.MagicMock(return_value=state_not_running)): + node_id = uniqid() + version = uniqid() + root_pipeline_id = uniqid() + tasks.node_timeout_check(node_id, version, root_pipeline_id) + + NodeCeleryTask.objects.destroy.assert_called_with(node_id) + + Status.objects.state_for.assert_called_with(node_id, version=version, may_not_exist=True) + + api.forced_fail.assert_not_called() + + NodeCeleryTask.objects.destroy.reset_mock() + api.forced_fail.reset_mock() + + # state for return RUNNING + with mock.patch(PIPELINE_STATUS_STATE_FOR, mock.MagicMock(return_value=states.RUNNING)): + # force fail success + with mock.patch(ENGINE_API_FORCED_FAIL, mock.MagicMock(return_value=MockActionResult(result=True))): + node_id = uniqid() + version = uniqid() + root_pipeline_id = uniqid() + tasks.node_timeout_check(node_id, version, root_pipeline_id) + + NodeCeleryTask.objects.destroy.assert_called_with(node_id) + + Status.objects.state_for.assert_called_with(node_id, version=version, may_not_exist=True) + + api.forced_fail.assert_called_with(node_id, kill=True, ex_data="node execution timeout") + + signals.activity_failed.send.assert_called_with( + sender=Pipeline, pipeline_id=root_pipeline_id, pipeline_activity_id=node_id + ) + + NodeCeleryTask.objects.destroy.reset_mock() + Status.objects.state_for.reset_mock() + api.forced_fail.reset_mock() + signals.activity_failed.send.reset_mock() + + # force fail failed + with mock.patch(ENGINE_API_FORCED_FAIL, mock.MagicMock(return_value=MockActionResult(result=False))): + node_id = uniqid() + 
version = uniqid() + root_pipeline_id = uniqid() + tasks.node_timeout_check(node_id, version, root_pipeline_id) + + NodeCeleryTask.objects.destroy.assert_called_with(node_id) + + Status.objects.state_for.assert_called_with(node_id, version=version, may_not_exist=True) + + api.forced_fail.assert_called_with(node_id, kill=True, ex_data="node execution timeout") + + signals.activity_failed.send.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/utils/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_stack.py b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_stack.py new file mode 100644 index 00000000..140048e9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_stack.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.engine.utils import Stack + + +class TestStack(TestCase): + def test_push(self): + stack = Stack() + self.assertEqual(stack, []) + stack.push(1) + stack.push(2) + self.assertEqual(stack, [1, 2]) + + def test_pop(self): + stack = Stack() + self.assertRaises(IndexError, stack.pop) + stack.push(1) + stack.push(2) + r = stack.pop() + self.assertEqual(r, 2) + self.assertEqual(stack, [1]) + r = stack.pop() + self.assertEqual(r, 1) + self.assertEqual(stack, []) + + def test_top(self): + stack = Stack() + self.assertRaises(IndexError, stack.top) + stack.push(1) + self.assertEqual(stack.top(), 1) + self.assertEqual(stack, [1]) + stack.push(2) + self.assertEqual(stack.top(), 2) + self.assertEqual(stack, [1, 2]) diff --git a/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_utils_func.py b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_utils_func.py new file mode 100644 index 00000000..233e8a02 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/engine/utils/test_utils_func.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import datetime + +from django.test import TestCase +from django.utils import timezone + +from pipeline.engine.utils import calculate_elapsed_time + + +class EngineUtilsFuncTestCase(TestCase): + def test_calculate_elapsed_time(self): + self.assertEqual(calculate_elapsed_time(None, None), 0) + + self.assertEqual(calculate_elapsed_time(started_time=None, archived_time=timezone.now()), 0) + + self.assertNotEqual( + calculate_elapsed_time(started_time=timezone.now() - datetime.timedelta(seconds=1), archived_time=None), 0 + ) + + # seconds + start = timezone.now() + archive = start + datetime.timedelta(seconds=59) + + self.assertEqual(calculate_elapsed_time(started_time=start, archived_time=archive), 59) + + # minutes + start = timezone.now() + archive = start + datetime.timedelta(minutes=3) + + self.assertEqual(calculate_elapsed_time(started_time=start, archived_time=archive), 3 * 60) + + # hours + start = timezone.now() + archive = start + datetime.timedelta(hours=3) + + self.assertEqual(calculate_elapsed_time(started_time=start, archived_time=archive), 3 * 60 * 60) + + # days + start = timezone.now() + archive = start + datetime.timedelta(days=3) + + self.assertEqual(calculate_elapsed_time(started_time=start, archived_time=archive), 3 * 24 * 60 * 60) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/eri/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. 
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_context.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_context.py new file mode 100644 index 00000000..b945eb38 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_context.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +import json + +from django.test import TransactionTestCase + +from bamboo_engine.eri import ContextValue, ContextValueType + +from pipeline.eri.imp.context import ContextMixin +from pipeline.eri.models import ContextValue as DBContextValue +from pipeline.eri.models import ContextOutputs +from bamboo_engine.utils.string import unique_id + + +class ContextMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = ContextMixin() + self.pipeline_id = unique_id("p") + self.outputs = ["a", "b", "c", "d"] + DBContextValue.objects.create( + pipeline_id=self.pipeline_id, + key="${var_1}", + type=ContextValueType.PLAIN.value, + serializer=ContextMixin.JSON_SERIALIZER, + value=json.dumps("123"), + references="[]", + ) + DBContextValue.objects.create( + pipeline_id=self.pipeline_id, + key="${var_2}", + type=ContextValueType.PLAIN.value, + serializer=ContextMixin.JSON_SERIALIZER, + value=json.dumps(123), + references="[]", + ) + DBContextValue.objects.create( + pipeline_id=self.pipeline_id, + key="${var_3}", + type=ContextValueType.SPLICE.value, + serializer=ContextMixin.JSON_SERIALIZER, + value=json.dumps("${var_1}_${var_2}"), + references='["${var_1}", "${var_2}"]', + ) + DBContextValue.objects.create( + pipeline_id=self.pipeline_id, + key="${var_4}", + type=ContextValueType.COMPUTE.value, + serializer=ContextMixin.JSON_SERIALIZER, + value=json.dumps({"attr1": "a", "attr2": "${var_3}"}), + references='["${var_1}", "${var_2}", "${var_3}"]', + code="cv", + ) + ContextOutputs.objects.create(pipeline_id=self.pipeline_id, outputs=json.dumps(self.outputs)) + + def test_get_context_values(self): + context_values = self.mixin.get_context_values(self.pipeline_id, {"${var_1}", "${var_2}"}) + self.assertEqual(len(context_values), 2) + self.assertEqual(context_values[0].key, "${var_1}") + self.assertEqual(context_values[0].type, ContextValueType.PLAIN) + self.assertEqual(context_values[0].value, "123") + self.assertIsNone(context_values[0].code) + self.assertEqual(context_values[1].key, "${var_2}") + self.assertEqual(context_values[1].type, ContextValueType.PLAIN) + self.assertEqual(context_values[1].value, 123) + self.assertIsNone(context_values[1].code) + + context_values = self.mixin.get_context_values( + self.pipeline_id, {"${var_1}", "${var_2}", "${var_3}", "${var_4}"} + ) + self.assertEqual(len(context_values), 4) + self.assertEqual(context_values[0].key, "${var_1}") + self.assertEqual(context_values[0].type, ContextValueType.PLAIN) + self.assertEqual(context_values[0].value, "123") + self.assertIsNone(context_values[0].code) + self.assertEqual(context_values[1].key, "${var_2}") + self.assertEqual(context_values[1].type, ContextValueType.PLAIN) + self.assertEqual(context_values[1].value, 123) + self.assertIsNone(context_values[1].code) + self.assertEqual(context_values[2].key, "${var_3}") + self.assertEqual(context_values[2].type, ContextValueType.SPLICE) + self.assertEqual(context_values[2].value, "${var_1}_${var_2}") + self.assertIsNone(context_values[2].code) + self.assertEqual(context_values[3].key, "${var_4}") + self.assertEqual(context_values[3].type, ContextValueType.COMPUTE) + self.assertEqual(context_values[3].value, {"attr1": "a", "attr2": "${var_3}"}) + self.assertEqual(context_values[3].code, "cv") + + def test_get_context_key_references(self): + references = self.mixin.get_context_key_references(self.pipeline_id, {"${var_1}", "${var_2}"}) + self.assertEqual(references, set()) + references = self.mixin.get_context_key_references( + self.pipeline_id, {"${var_1}", "${var_2}", 
"${var_3}", "${var_4}"} + ) + self.assertEqual(references, {"${var_1}", "${var_2}", "${var_3}"}) + + def test_get_context(self): + context_values = self.mixin.get_context(self.pipeline_id) + self.assertEqual(len(context_values), 4) + self.assertEqual(context_values[0].key, "${var_1}") + self.assertEqual(context_values[0].type, ContextValueType.PLAIN) + self.assertEqual(context_values[0].value, "123") + self.assertIsNone(context_values[0].code) + self.assertEqual(context_values[1].key, "${var_2}") + self.assertEqual(context_values[1].type, ContextValueType.PLAIN) + self.assertEqual(context_values[1].value, 123) + self.assertIsNone(context_values[1].code) + self.assertEqual(context_values[2].key, "${var_3}") + self.assertEqual(context_values[2].type, ContextValueType.SPLICE) + self.assertEqual(context_values[2].value, "${var_1}_${var_2}") + self.assertIsNone(context_values[2].code) + self.assertEqual(context_values[3].key, "${var_4}") + self.assertEqual(context_values[3].type, ContextValueType.COMPUTE) + self.assertEqual(context_values[3].value, {"attr1": "a", "attr2": "${var_3}"}) + self.assertEqual(context_values[3].code, "cv") + + def test_get_context_outputs(self): + outputs = self.mixin.get_context_outputs(self.pipeline_id) + self.assertEqual(outputs, set(self.outputs)) + + def test_upsert_plain_context_values(self): + update = { + "${var_3}": ContextValue(key="${var_3}", type=ContextValueType.PLAIN, value="123_123"), + "${var_4}": ContextValue(key="${var_4}", type=ContextValueType.PLAIN, value="compute_val"), + "${var_5}": ContextValue(key="${var_5}", type=ContextValueType.PLAIN, value="5_val"), + "${var_6}": ContextValue(key="${var_6}", type=ContextValueType.PLAIN, value="6_val"), + } + self.mixin.upsert_plain_context_values(self.pipeline_id, update) + + context_values = self.mixin.get_context(self.pipeline_id) + self.assertEqual(len(context_values), 6) + context_values = {cv.key: cv for cv in context_values} + self.assertEqual(context_values["${var_1}"].key, "${var_1}") + self.assertEqual(context_values["${var_1}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_1}"].value, "123") + self.assertIsNone(context_values["${var_1}"].code) + self.assertEqual(context_values["${var_2}"].key, "${var_2}") + self.assertEqual(context_values["${var_2}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_2}"].value, 123) + self.assertIsNone(context_values["${var_2}"].code) + self.assertEqual(context_values["${var_3}"].key, "${var_3}") + self.assertEqual(context_values["${var_3}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_3}"].value, "123_123") + self.assertIsNone(context_values["${var_3}"].code) + self.assertEqual(context_values["${var_4}"].key, "${var_4}") + self.assertEqual(context_values["${var_4}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_4}"].value, "compute_val") + self.assertIsNone(context_values["${var_4}"].code) + self.assertEqual(context_values["${var_5}"].key, "${var_5}") + self.assertEqual(context_values["${var_5}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_5}"].value, "5_val") + self.assertIsNone(context_values["${var_5}"].code) + self.assertEqual(context_values["${var_6}"].key, "${var_6}") + self.assertEqual(context_values["${var_6}"].type, ContextValueType.PLAIN) + self.assertEqual(context_values["${var_6}"].value, "6_val") + self.assertIsNone(context_values["${var_6}"].code) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_data.py 
b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_data.py new file mode 100644 index 00000000..be9d669c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_data.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import json + +from django.test import TransactionTestCase + +from bamboo_engine import exceptions +from bamboo_engine.eri import Data, DataInput, ExecutionData, CallbackData + +from pipeline.eri.imp.data import DataMixin +from pipeline.eri.models import Data as DBData +from pipeline.eri.models import ExecutionData as DBExecutionData +from pipeline.eri.models import CallbackData as DBCallbackData +from bamboo_engine.utils.string import unique_id + + +class Obj: + def __init__(self, attr1, attr2): + self.attr1 = attr1 + self.attr2 = attr2 + + def __eq__(self, other): + return self.attr1 == other.attr1 and self.attr2 == other.attr2 + + +class DataMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = DataMixin() + self.node_id = unique_id("n") + self.version = unique_id("v") + self.data_inputs = {"a": {"need_render": True, "value": 1}, "b": {"need_render": True, "value": 2}} + self.data_outputs = {"c": 3, "d": 4} + self.data = DBData.objects.create( + node_id=self.node_id, inputs=json.dumps(self.data_inputs), outputs=json.dumps(self.data_outputs) + ) + + self.pickle_node_id = unique_id("n") + self.mix_node_id = unique_id("n") + self.json_exec_data_inputs = {"a": 1, "b": 2} + self.json_exec_data_outputs = {"c": 3, "d": 4} + self.pickle_exec_data_inputs = {"a": 1, "b": Obj(1, Obj(2, 3))} + self.pickle_exec_data_outputs = {"c": 3, "d": Obj(4, Obj(5, 6))} + self.json_exec_data = DBExecutionData.objects.create( + node_id=self.node_id, + inputs=self.mixin._serialize(self.json_exec_data_inputs)[0], + inputs_serializer=self.mixin._serialize(self.json_exec_data_inputs)[1], + outputs=self.mixin._serialize(self.json_exec_data_outputs)[0], + outputs_serializer=self.mixin._serialize(self.json_exec_data_outputs)[1], + ) + self.pickle_exec_data = DBExecutionData.objects.create( + node_id=self.pickle_node_id, + inputs=self.mixin._serialize(self.pickle_exec_data_inputs)[0], + inputs_serializer=self.mixin._serialize(self.pickle_exec_data_inputs)[1], + outputs=self.mixin._serialize(self.pickle_exec_data_outputs)[0], + outputs_serializer=self.mixin._serialize(self.pickle_exec_data_outputs)[1], + ) + self.mix_exec_data = DBExecutionData.objects.create( + node_id=self.mix_node_id, + inputs=self.mixin._serialize(self.json_exec_data_inputs)[0], + inputs_serializer=self.mixin._serialize(self.json_exec_data_inputs)[1], + outputs=self.mixin._serialize(self.pickle_exec_data_outputs)[0], + outputs_serializer=self.mixin._serialize(self.pickle_exec_data_outputs)[1], + ) + + self.raw_callback_data = {"callback": 1} + self.callback_data = DBCallbackData.objects.create( + 
node_id=self.node_id, version=self.version, data=json.dumps(self.raw_callback_data)
+ )
+
+ def test_get_data(self):
+ data = self.mixin.get_data(self.node_id)
+ self.assertTrue(isinstance(data, Data))
+ self.assertEqual(data.outputs, self.data_outputs)
+ self.assertEqual(data.inputs["a"].value, 1)
+ self.assertTrue(data.inputs["a"].need_render)
+ self.assertEqual(data.inputs["b"].value, 2)
+ self.assertTrue(data.inputs["b"].need_render)
+
+ def test_get_data_inputs(self):
+ inputs = self.mixin.get_data_inputs(self.node_id)
+ self.assertEqual(inputs["a"].value, 1)
+ self.assertTrue(inputs["a"].need_render)
+ self.assertEqual(inputs["b"].value, 2)
+ self.assertTrue(inputs["b"].need_render)
+
+ def test_get_data_inputs__not_exist(self):
+ self.assertRaises(exceptions.NotFoundError, self.mixin.get_data_inputs, "not_exist")
+
+ def test_get_data_outputs(self):
+ outputs = self.mixin.get_data_outputs(self.node_id)
+ self.assertEqual(outputs, self.data_outputs)
+
+ def test_get_data_outputs__not_exist(self):
+ self.assertRaises(exceptions.NotFoundError, self.mixin.get_data_outputs, "not_exist")
+
+ def test_set_data_inputs(self):
+ self.mixin.set_data_inputs(self.node_id, {"l": DataInput(need_render=True, value=[1, 2, 3])})
+ actual = self.mixin.get_data_inputs(self.node_id)
+ self.assertEqual(actual["l"].need_render, True)
+ self.assertEqual(actual["l"].value, [1, 2, 3])
+
+ def test_set_data_inputs__new(self):
+ node_id = unique_id("n")
+ inputs = {"a": DataInput(need_render=True, value=1), "b": DataInput(need_render=True, value=2)}
+ self.mixin.set_data_inputs(node_id, inputs)
+ data = self.mixin.get_data(node_id)
+ self.assertEqual(data.outputs, {})
+ self.assertEqual(data.inputs["a"].value, 1)
+ self.assertTrue(data.inputs["a"].need_render)
+ self.assertEqual(data.inputs["b"].value, 2)
+ self.assertTrue(data.inputs["b"].need_render)
+
+ def test_get_execution_data(self):
+ json_exec_data = self.mixin.get_execution_data(self.node_id)
+ self.assertTrue(isinstance(json_exec_data, ExecutionData))
+ self.assertEqual(json_exec_data.inputs, self.json_exec_data_inputs)
+ self.assertEqual(json_exec_data.outputs, self.json_exec_data_outputs)
+
+ pickle_exec_data = self.mixin.get_execution_data(self.pickle_node_id)
+ self.assertTrue(isinstance(pickle_exec_data, ExecutionData))
+ self.assertEqual(pickle_exec_data.inputs, self.pickle_exec_data_inputs)
+ self.assertEqual(pickle_exec_data.outputs, self.pickle_exec_data_outputs)
+
+ mix_exec_data = self.mixin.get_execution_data(self.mix_node_id)
+ self.assertTrue(isinstance(mix_exec_data, ExecutionData))
+ self.assertEqual(mix_exec_data.inputs, self.json_exec_data_inputs)
+ self.assertEqual(mix_exec_data.outputs, self.pickle_exec_data_outputs)
+
+ def test_get_execution_data_inputs(self):
+ self.assertEqual(self.mixin.get_execution_data_inputs(self.node_id), self.json_exec_data_inputs)
+ self.assertEqual(self.mixin.get_execution_data_inputs(self.pickle_node_id), self.pickle_exec_data_inputs)
+
+ def test_get_execution_data_inputs__not_exist(self):
+ self.assertEqual(self.mixin.get_execution_data_inputs("not_exist"), {})
+
+ def test_get_execution_data_outputs(self):
+ self.assertEqual(self.mixin.get_execution_data_outputs(self.node_id), self.json_exec_data_outputs)
+ self.assertEqual(self.mixin.get_execution_data_outputs(self.pickle_node_id), self.pickle_exec_data_outputs)
+
+ def test_get_execution_data_outputs__not_exist(self):
+ self.assertEqual(self.mixin.get_execution_data_outputs("not_exist"), {})
+
+ def test_set_execution_data(self):
+ json_node_id = unique_id("n")
+ pickle_node_id = 
unique_id("n") + mix_node_id = unique_id("n") + DBExecutionData.objects.create( + node_id=json_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + DBExecutionData.objects.create( + node_id=pickle_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + DBExecutionData.objects.create( + node_id=mix_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + + self.mixin.set_execution_data( + json_node_id, ExecutionData(self.json_exec_data_inputs, self.json_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.json_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data( + pickle_node_id, ExecutionData(self.pickle_exec_data_inputs, self.pickle_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.pickle_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.PICKLE_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + self.mixin.set_execution_data( + mix_node_id, ExecutionData(self.json_exec_data_inputs, self.pickle_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=mix_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + def test_set_execution_data__new(self): + json_node_id = unique_id("n") + pickle_node_id = unique_id("n") + mix_node_id = unique_id("n") + + self.mixin.set_execution_data( + json_node_id, ExecutionData(self.json_exec_data_inputs, self.json_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.json_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data( + pickle_node_id, ExecutionData(self.pickle_exec_data_inputs, self.pickle_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.pickle_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.PICKLE_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + 
self.mixin.set_execution_data( + mix_node_id, ExecutionData(self.json_exec_data_inputs, self.pickle_exec_data_outputs) + ) + data = DBExecutionData.objects.get(node_id=mix_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + def test_set_execution_data_inputs(self): + json_node_id = unique_id("n") + pickle_node_id = unique_id("n") + + DBExecutionData.objects.create( + node_id=json_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + DBExecutionData.objects.create( + node_id=pickle_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + + self.mixin.set_execution_data_inputs(json_node_id, self.json_exec_data_inputs) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), {}) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data_inputs(pickle_node_id, self.pickle_exec_data_inputs) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.pickle_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.PICKLE_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), {}) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + def test_set_execution_data_inputs__new(self): + json_node_id = unique_id("n") + pickle_node_id = unique_id("n") + + self.mixin.set_execution_data_inputs(json_node_id, self.json_exec_data_inputs) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.json_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), {}) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data_inputs(pickle_node_id, self.pickle_exec_data_inputs) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), self.pickle_exec_data_inputs) + self.assertEqual(data.inputs_serializer, self.mixin.PICKLE_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), {}) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + def test_set_execution_data_outputs(self): + json_node_id = unique_id("n") + pickle_node_id = unique_id("n") + + DBExecutionData.objects.create( + node_id=json_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + DBExecutionData.objects.create( + node_id=pickle_node_id, + inputs="{}", + inputs_serializer=self.mixin.JSON_SERIALIZER, + outputs="{}", + 
outputs_serializer=self.mixin.JSON_SERIALIZER, + ) + + self.mixin.set_execution_data_outputs(json_node_id, self.json_exec_data_outputs) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), {}) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.json_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data_outputs(pickle_node_id, self.pickle_exec_data_outputs) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), {}) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + def test_set_execution_data_outputs__new(self): + json_node_id = unique_id("n") + pickle_node_id = unique_id("n") + + self.mixin.set_execution_data_outputs(json_node_id, self.json_exec_data_outputs) + data = DBExecutionData.objects.get(node_id=json_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), {}) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.json_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.JSON_SERIALIZER) + + self.mixin.set_execution_data_outputs(pickle_node_id, self.pickle_exec_data_outputs) + data = DBExecutionData.objects.get(node_id=pickle_node_id) + self.assertEqual(self.mixin._deserialize(data.inputs, data.inputs_serializer), {}) + self.assertEqual(data.inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(data.outputs, data.outputs_serializer), self.pickle_exec_data_outputs) + self.assertEqual(data.outputs_serializer, self.mixin.PICKLE_SERIALIZER) + + def test_set_callback_data(self): + self.raw_callback_data["c"] = 1 + data_id = self.mixin.set_callback_data(node_id=self.node_id, version=self.version, data=self.raw_callback_data) + data_model = DBCallbackData.objects.get(id=data_id) + self.assertEqual(data_model.node_id, self.node_id) + self.assertEqual(data_model.version, self.version) + self.assertEqual(json.loads(data_model.data), self.raw_callback_data) + + def test_get_callback_data(self): + data = self.mixin.get_callback_data(self.callback_data.id) + self.assertIsInstance(data, CallbackData) + self.assertEqual(data.id, self.callback_data.id) + self.assertEqual(data.node_id, self.node_id) + self.assertEqual(data.version, self.version) + self.assertEqual(data.data, self.raw_callback_data) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_execution_history.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_execution_history.py new file mode 100644 index 00000000..978736f6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_execution_history.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.utils import timezone +from django.test import TransactionTestCase + +from bamboo_engine.eri import ExecutionHistory, ExecutionShortHistory + +from pipeline.eri.imp.execution_history import ExecutionHistoryMixin +from pipeline.eri.models import ExecutionHistory as DBExecutionHistory +from bamboo_engine.utils.string import unique_id + + +class Obj: + def __init__(self, attr1, attr2): + self.attr1 = attr1 + self.attr2 = attr2 + + def __eq__(self, other): + return self.attr1 == other.attr1 and self.attr2 == other.attr2 + + +class ExecutionHistoryMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = ExecutionHistoryMixin() + self.node_id = unique_id("n") + self.version = unique_id("v") + self.started_time = timezone.now() + self.archived_time = timezone.now() + self.json_data = {"a": 1, "b": 2} + self.pickle_data = {"c": 3, "d": Obj(4, Obj(5, 6))} + + def test_add_history(self): + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=1, + skip=True, + retry=3, + version=self.version, + inputs=self.json_data, + outputs=self.pickle_data, + ) + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=2, + skip=False, + retry=0, + version=self.version, + inputs=self.json_data, + outputs=self.json_data, + ) + qs = DBExecutionHistory.objects.filter(node_id=self.node_id) + + self.assertEqual(len(qs), 2) + self.assertEqual(qs[0].node_id, self.node_id) + self.assertEqual(qs[0].started_time, self.started_time) + self.assertEqual(qs[0].archived_time, self.archived_time) + self.assertEqual(qs[0].loop, 1) + self.assertTrue(qs[0].skip) + self.assertEqual(qs[0].retry, 3) + self.assertEqual(qs[0].version, self.version) + self.assertEqual(qs[0].inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(qs[0].inputs, qs[0].inputs_serializer), self.json_data) + self.assertEqual(qs[0].outputs_serializer, self.mixin.PICKLE_SERIALIZER) + self.assertEqual(self.mixin._deserialize(qs[0].outputs, qs[0].outputs_serializer), self.pickle_data) + + self.assertEqual(qs[1].node_id, self.node_id) + self.assertEqual(qs[1].started_time, self.started_time) + self.assertEqual(qs[1].archived_time, self.archived_time) + self.assertEqual(qs[1].loop, 2) + self.assertFalse(qs[1].skip) + self.assertEqual(qs[1].retry, 0) + self.assertEqual(qs[1].version, self.version) + self.assertEqual(qs[1].inputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(qs[1].inputs, qs[1].inputs_serializer), self.json_data) + self.assertEqual(qs[1].outputs_serializer, self.mixin.JSON_SERIALIZER) + self.assertEqual(self.mixin._deserialize(qs[1].outputs, qs[1].outputs_serializer), self.json_data) + + def test_get_histories(self): + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=1, + skip=True, + retry=3, + version=self.version, + inputs=self.json_data, + 
outputs=self.pickle_data, + ) + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=2, + skip=False, + retry=0, + version=self.version, + inputs=self.json_data, + outputs=self.json_data, + ) + + histories = self.mixin.get_histories(node_id=self.node_id) + self.assertEqual(len(histories), 2) + self.assertTrue(isinstance(histories[0], ExecutionHistory)) + self.assertEqual(histories[0].node_id, self.node_id) + self.assertEqual(histories[0].started_time, self.started_time) + self.assertEqual(histories[0].archived_time, self.archived_time) + self.assertEqual(histories[0].loop, 1) + self.assertTrue(histories[0].skip) + self.assertEqual(histories[0].retry, 3) + self.assertEqual(histories[0].version, self.version) + self.assertEqual(histories[0].inputs, self.json_data) + self.assertEqual(histories[0].outputs, self.pickle_data) + + self.assertTrue(isinstance(histories[1], ExecutionHistory)) + self.assertEqual(histories[1].node_id, self.node_id) + self.assertEqual(histories[1].started_time, self.started_time) + self.assertEqual(histories[1].archived_time, self.archived_time) + self.assertEqual(histories[1].loop, 2) + self.assertFalse(histories[1].skip) + self.assertEqual(histories[1].retry, 0) + self.assertEqual(histories[1].version, self.version) + self.assertEqual(histories[1].inputs, self.json_data) + self.assertEqual(histories[1].outputs, self.json_data) + + def test_get_histories__with_loop(self): + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=1, + skip=True, + retry=3, + version=self.version, + inputs=self.json_data, + outputs=self.pickle_data, + ) + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=2, + skip=False, + retry=0, + version=self.version, + inputs=self.json_data, + outputs=self.json_data, + ) + + histories = self.mixin.get_histories(node_id=self.node_id, loop=1) + self.assertEqual(len(histories), 1) + self.assertTrue(isinstance(histories[0], ExecutionHistory)) + self.assertEqual(histories[0].node_id, self.node_id) + self.assertEqual(histories[0].started_time, self.started_time) + self.assertEqual(histories[0].archived_time, self.archived_time) + self.assertEqual(histories[0].loop, 1) + self.assertTrue(histories[0].skip) + self.assertEqual(histories[0].retry, 3) + self.assertEqual(histories[0].version, self.version) + self.assertEqual(histories[0].inputs, self.json_data) + self.assertEqual(histories[0].outputs, self.pickle_data) + + def test_get_short_histories(self): + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=1, + skip=True, + retry=3, + version=self.version, + inputs=self.json_data, + outputs=self.pickle_data, + ) + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=2, + skip=False, + retry=0, + version=self.version, + inputs=self.json_data, + outputs=self.json_data, + ) + + histories = self.mixin.get_short_histories(node_id=self.node_id) + self.assertEqual(len(histories), 2) + self.assertTrue(isinstance(histories[0], ExecutionShortHistory)) + self.assertEqual(histories[0].node_id, self.node_id) + self.assertEqual(histories[0].started_time, self.started_time) + self.assertEqual(histories[0].archived_time, self.archived_time) + self.assertEqual(histories[0].loop, 1) + self.assertTrue(histories[0].skip) 
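+ # note: unlike ExecutionHistory in test_get_histories above, ExecutionShortHistory carries no inputs/outputs payload, so only the metadata fields are asserted here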
+ self.assertEqual(histories[0].retry, 3) + self.assertEqual(histories[0].version, self.version) + + self.assertTrue(isinstance(histories[1], ExecutionShortHistory)) + self.assertEqual(histories[1].node_id, self.node_id) + self.assertEqual(histories[1].started_time, self.started_time) + self.assertEqual(histories[1].archived_time, self.archived_time) + self.assertEqual(histories[1].loop, 2) + self.assertFalse(histories[1].skip) + self.assertEqual(histories[1].retry, 0) + self.assertEqual(histories[1].version, self.version) + + def test_get_short_histories__with_loop(self): + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=1, + skip=True, + retry=3, + version=self.version, + inputs=self.json_data, + outputs=self.pickle_data, + ) + self.mixin.add_history( + node_id=self.node_id, + started_time=self.started_time, + archived_time=self.archived_time, + loop=2, + skip=False, + retry=0, + version=self.version, + inputs=self.json_data, + outputs=self.json_data, + ) + + histories = self.mixin.get_short_histories(node_id=self.node_id, loop=1) + self.assertEqual(len(histories), 1) + self.assertTrue(isinstance(histories[0], ExecutionShortHistory)) + self.assertEqual(histories[0].node_id, self.node_id) + self.assertEqual(histories[0].started_time, self.started_time) + self.assertEqual(histories[0].archived_time, self.archived_time) + self.assertEqual(histories[0].loop, 1) + self.assertTrue(histories[0].skip) + self.assertEqual(histories[0].retry, 3) + self.assertEqual(histories[0].version, self.version) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_node.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_node.py new file mode 100644 index 00000000..5c0dbcde --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_node.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +import json + +from django.test import TestCase + +from bamboo_engine.eri import ( + NodeType, + ServiceActivity, + SubProcess, + ExclusiveGateway, + ParallelGateway, + ConditionalParallelGateway, + ConvergeGateway, + EmptyStartEvent, + EmptyEndEvent, + ExecutableEndEvent, +) + +from pipeline.eri.imp.node import NodeMixin +from pipeline.eri.models import Node as DBNode + + +class ProcessMixinTestCase(TestCase): + def setUp(self): + self.mixin = NodeMixin() + + def test_get_node(self): + nodes = { + "n1": { + "id": "n1", + "type": NodeType.ServiceActivity.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + "code": "test_code", + "version": "legacy", + "timeout": None, + "error_ignorable": True, + }, + "n2": { + "id": "n2", + "type": NodeType.SubProcess.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "start_event_id": "s1", + "can_skip": True, + "can_retry": True, + }, + "n3": { + "id": "n3", + "type": NodeType.ExclusiveGateway.value, + "targets": {"f1": "t1", "f2": "t2"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + "conditions": [ + {"name": "c1", "evaluation": "${k} == 1", "target_id": "t1", "flow_id": "f1"}, + {"name": "c2", "evaluation": "${k} == 2", "target_id": "t2", "flow_id": "f2"}, + ], + }, + "n4": { + "id": "n4", + "type": NodeType.ParallelGateway.value, + "targets": {"f1": "t1", "f2": "t2"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "converge_gateway_id": "c1", + "can_skip": True, + "can_retry": True, + }, + "n5": { + "id": "n5", + "type": NodeType.ConditionalParallelGateway.value, + "targets": {"f1": "t1", "f2": "t2"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + "conditions": [ + {"name": "c1", "evaluation": "${k} == 1", "target_id": "t1", "flow_id": "f1"}, + {"name": "c2", "evaluation": "${k} == 2", "target_id": "t2", "flow_id": "f2"}, + ], + "converge_gateway_id": "c1", + }, + "n6": { + "id": "n6", + "type": NodeType.ConvergeGateway.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + }, + "n7": { + "id": "n7", + "type": NodeType.EmptyStartEvent.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + }, + "n8": { + "id": "n8", + "type": NodeType.EmptyEndEvent.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "can_skip": True, + "can_retry": True, + }, + "n9": { + "id": "n9", + "type": NodeType.ExecutableEndEvent.value, + "targets": {"f1": "t1"}, + "root_pipeline_id": "root", + "parent_pipeline_id": "parent", + "code": "", + "can_skip": True, + "can_retry": True, + }, + } + for node_id, detail in nodes.items(): + DBNode.objects.create(node_id=node_id, detail=json.dumps(detail)) + + node = self.mixin.get_node("n1") + self.assertTrue(isinstance(node, ServiceActivity)) + self.assertEqual(node.id, "n1") + self.assertEqual(node.type, NodeType.ServiceActivity) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, 
True) + self.assertEqual(node.code, "test_code") + self.assertEqual(node.version, "legacy") + self.assertEqual(node.timeout, None) + self.assertEqual(node.error_ignorable, True) + + node = self.mixin.get_node("n2") + self.assertTrue(isinstance(node, SubProcess)) + self.assertEqual(node.id, "n2") + self.assertEqual(node.type, NodeType.SubProcess) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + self.assertEqual(node.start_event_id, "s1") + + node = self.mixin.get_node("n3") + self.assertTrue(isinstance(node, ExclusiveGateway)) + self.assertEqual(node.id, "n3") + self.assertEqual(node.type, NodeType.ExclusiveGateway) + self.assertEqual(node.target_flows, ["f1", "f2"]) + self.assertEqual(node.target_nodes, ["t1", "t2"]) + self.assertEqual(node.targets, {"f1": "t1", "f2": "t2"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + self.assertEqual( + [(c.name, c.evaluation, c.target_id, c.flow_id) for c in node.conditions], + [("c1", "${k} == 1", "t1", "f1"), ("c2", "${k} == 2", "t2", "f2")], + ) + + node = self.mixin.get_node("n4") + self.assertTrue(isinstance(node, ParallelGateway)) + self.assertEqual(node.id, "n4") + self.assertEqual(node.type, NodeType.ParallelGateway) + self.assertEqual(node.target_flows, ["f1", "f2"]) + self.assertEqual(node.target_nodes, ["t1", "t2"]) + self.assertEqual(node.targets, {"f1": "t1", "f2": "t2"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + self.assertEqual(node.converge_gateway_id, "c1") + + node = self.mixin.get_node("n5") + self.assertTrue(isinstance(node, ConditionalParallelGateway)) + self.assertEqual(node.id, "n5") + self.assertEqual(node.type, NodeType.ConditionalParallelGateway) + self.assertEqual(node.target_flows, ["f1", "f2"]) + self.assertEqual(node.target_nodes, ["t1", "t2"]) + self.assertEqual(node.targets, {"f1": "t1", "f2": "t2"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + self.assertEqual( + [(c.name, c.evaluation, c.target_id, c.flow_id) for c in node.conditions], + [("c1", "${k} == 1", "t1", "f1"), ("c2", "${k} == 2", "t2", "f2")], + ) + self.assertEqual(node.converge_gateway_id, "c1") + + node = self.mixin.get_node("n6") + self.assertTrue(isinstance(node, ConvergeGateway)) + self.assertEqual(node.id, "n6") + self.assertEqual(node.type, NodeType.ConvergeGateway) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + + node = self.mixin.get_node("n7") + self.assertTrue(isinstance(node, EmptyStartEvent)) + self.assertEqual(node.id, "n7") + self.assertEqual(node.type, NodeType.EmptyStartEvent) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + 
self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + + node = self.mixin.get_node("n8") + self.assertTrue(isinstance(node, EmptyEndEvent)) + self.assertEqual(node.id, "n8") + self.assertEqual(node.type, NodeType.EmptyEndEvent) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + + node = self.mixin.get_node("n9") + self.assertTrue(isinstance(node, ExecutableEndEvent)) + self.assertEqual(node.id, "n9") + self.assertEqual(node.type, NodeType.ExecutableEndEvent) + self.assertEqual(node.target_flows, ["f1"]) + self.assertEqual(node.target_nodes, ["t1"]) + self.assertEqual(node.targets, {"f1": "t1"}) + self.assertEqual(node.root_pipeline_id, "root") + self.assertEqual(node.parent_pipeline_id, "parent") + self.assertEqual(node.can_skip, True) + self.assertEqual(node.can_retry, True) + self.assertEqual(node.code, "") diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_process.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_process.py new file mode 100644 index 00000000..6249a019 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_process.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
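+
+Unit tests for the ProcessMixin implementation of the engine runtime
+interface: process lifecycle (beat, sleep, wake_up, suspend, resume, kill,
+die), fork/join bookkeeping and the concurrent child_process_finish
+acknowledgement.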
+""" + +import uuid +import threading + +from django.test import TransactionTestCase + +from pipeline.eri.models import Process +from pipeline.eri.imp.process import ProcessMixin + + +class ProcessMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = ProcessMixin() + self.process = Process.objects.create(priority=1, queue="queue") + + def test_beat(self): + last_heartbeat = self.process.last_heartbeat + self.mixin.beat(self.process.id) + self.process.refresh_from_db() + self.assertTrue(last_heartbeat < self.process.last_heartbeat) + + def test_wake_up(self): + self.assertTrue(self.process.asleep) + self.mixin.wake_up(self.process.id) + self.process.refresh_from_db() + self.assertFalse(self.process.asleep) + + def test_sleep(self): + self.process.asleep = False + self.process.save() + self.mixin.sleep(self.process.id) + self.process.refresh_from_db() + self.assertTrue(self.process.asleep) + + def test_suspend(self): + self.assertFalse(self.process.suspended) + self.assertEqual(self.process.suspended_by, "") + self.mixin.suspend(self.process.id, "123") + self.process.refresh_from_db() + self.assertTrue(self.process.suspended) + self.assertEqual(self.process.suspended_by, "123") + + def test_kill(self): + self.process.asleep = False + self.process.save() + self.mixin.kill(self.process.id) + self.process.refresh_from_db() + self.assertTrue(self.process.asleep) + + def test_resume(self): + self.mixin.suspend(self.process.id, "123") + self.mixin.resume(self.process.id) + self.process.refresh_from_db() + self.assertFalse(self.process.suspended) + self.assertEqual(self.process.suspended_by, "") + + def test_batch_resume(self): + p1 = Process.objects.create(priority=1, queue="queue") + p2 = Process.objects.create(priority=1, queue="queue") + p3 = Process.objects.create(priority=1, queue="queue") + self.mixin.suspend(p1.id, "123") + self.mixin.suspend(p2.id, "123") + self.mixin.suspend(p3.id, "123") + self.mixin.batch_resume([p1.id, p2.id, p3.id]) + p1.refresh_from_db() + p2.refresh_from_db() + p3.refresh_from_db() + self.assertFalse(p1.suspended) + self.assertFalse(p2.suspended) + self.assertFalse(p3.suspended) + self.assertEqual(p1.suspended_by, "") + self.assertEqual(p2.suspended_by, "") + self.assertEqual(p3.suspended_by, "") + + def test_die(self): + self.assertFalse(self.process.dead) + self.mixin.die(self.process.id) + self.process.refresh_from_db() + self.assertTrue(self.process.dead) + + def test_get_process_info(self): + process = Process.objects.create( + priority=1, queue="queue", destination_id="d", root_pipeline_id="r", pipeline_stack="[]", parent_id=2 + ) + process_info = self.mixin.get_process_info(process.id) + self.assertEqual(process_info.process_id, process.id) + self.assertEqual(process_info.destination_id, process.destination_id) + self.assertEqual(process_info.root_pipeline_id, process.root_pipeline_id) + self.assertEqual(process_info.pipeline_stack, []) + self.assertEqual(process_info.parent_id, process.parent_id) + + def test_get_suspended_process_info(self): + p1 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + p2 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + p3 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + self.mixin.suspend(p1.id, "123") + self.mixin.suspend(p2.id, "123") + self.mixin.suspend(p3.id, "123") + spi_list = self.mixin.get_suspended_process_info("123") + actual = [(spi.process_id, spi.current_node) for spi in spi_list] + 
self.assertEqual( + actual, [(p1.id, p1.current_node_id), (p2.id, p2.current_node_id), (p3.id, p3.current_node_id)] + ) + + def test_get_sleep_process_with_current_node_id(self): + process = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + self.mixin.sleep(process.id) + self.assertEqual(self.mixin.get_sleep_process_with_current_node_id(process.current_node_id), process.id) + + def test_get_sleep_process_with_current_node_id__not_exist(self): + self.assertIsNone(self.mixin.get_sleep_process_with_current_node_id("not_exist")) + + def test_get_sleep_process_with_current_node_id__more_than_one(self): + p1 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + p2 = Process.objects.create(priority=1, queue="queue", current_node_id=p1.current_node_id) + self.mixin.sleep(p1.id) + self.mixin.sleep(p2.id) + self.assertRaises(ValueError, self.mixin.get_sleep_process_with_current_node_id, p1.current_node_id) + + def test_get_process_id_with_current_node_id(self): + p1 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex, dead=True) + self.assertEqual(self.mixin.get_process_id_with_current_node_id(p1.current_node_id), p1.id) + + def test_get_process_id_with_current_node_id__not_exist(self): + self.assertIsNone(self.mixin.get_process_id_with_current_node_id("not_exist")) + + def test_get_process_id_with_current_node_id_more_than_one(self): + p1 = Process.objects.create(priority=1, queue="queue", current_node_id=uuid.uuid1().hex) + p2 = Process.objects.create(priority=1, queue="queue", current_node_id=p1.current_node_id) + self.mixin.sleep(p1.id) + self.mixin.sleep(p2.id) + self.assertRaises(ValueError, self.mixin.get_process_id_with_current_node_id, p1.current_node_id) + + def test_set_current_node(self): + node_id = uuid.uuid1().hex + self.mixin.set_current_node(self.process.id, node_id) + self.process.refresh_from_db() + self.assertEqual(self.process.current_node_id, node_id) + + def test_child_process_finish(self): + need_ack = 30 + + process = Process.objects.create(priority=1, queue="queue", ack_num=0, need_ack=need_ack) + + lock = threading.Lock() + res = {False: 0, True: 0} + + def target(parent_id, process_id): + success = self.mixin.child_process_finish(parent_id, process_id) + lock.acquire() + res[success] += 1 + lock.release() + + threads = [threading.Thread(target=target, args=(process.id, i)) for i in range(need_ack)] + + for t in threads: + t.start() + + for t in threads: + t.join(1) + + process.refresh_from_db() + self.assertEqual(process.ack_num, 0) + self.assertEqual(process.need_ack, -1) + self.assertEqual(res, {True: 1, False: need_ack - 1}) + + def test_is_frozen(self): + self.assertFalse(self.mixin.is_frozen(self.process.id)) + self.process.frozen = True + self.process.save() + self.assertTrue(self.mixin.is_frozen(self.process.id)) + + def test_freeze(self): + self.assertFalse(self.process.frozen) + self.mixin.freeze(self.process.id) + self.process.refresh_from_db() + self.assertTrue(self.process.frozen) + + def test_fork(self): + from_to = {} + for i in range(10): + from_to[str(i)] = str(i + 1) + + dps = self.mixin.fork(parent_id=self.process.id, root_pipeline_id="r", pipeline_stack=[1, 2], from_to=from_to) + self.assertEqual(len(dps), 10) + actual = [dp.node_id for dp in dps] + self.assertEqual(actual, [str(i) for i in range(10)]) + + def test_fork__parent_does_not_exist(self): + 
self.assertRaises( + Process.DoesNotExist, + self.mixin.fork, + parent_id=self.process.id + 1, + root_pipeline_id="r", + pipeline_stack=[1, 2], + from_to={}, + ) + + def test_join(self): + self.mixin.join(self.process.id, list(range(100))) + self.process.refresh_from_db() + self.assertEqual(self.process.ack_num, 0) + self.assertEqual(self.process.need_ack, 100) + + def test_set_pipeline_stack(self): + self.assertEqual(self.process.pipeline_stack, "[]") + self.mixin.set_pipeline_stack(self.process.id, ["1", "2", "3"]) + self.process.refresh_from_db() + self.assertEqual(self.process.pipeline_stack, '["1", "2", "3"]') + + def test_get_process_info_with_root_pipeline(self): + self.process.root_pipeline_id = "root" + self.process.save() + p = self.mixin.get_process_info_with_root_pipeline("root") + self.assertEqual(1, len(p)) + self.assertEqual(p[0].root_pipeline_id, "root") + self.assertEqual(p[0].process_id, self.process.id) + self.assertEqual(p[0].destination_id, self.process.destination_id) + self.assertEqual(p[0].pipeline_stack, []) + self.assertEqual(p[0].parent_id, self.process.parent_id) + + p = self.mixin.get_process_info_with_root_pipeline("not_exist") + self.assertEqual(0, len(p)) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_schedule.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_schedule.py new file mode 100644 index 00000000..2f31279c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_schedule.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
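+
+Unit tests for the ScheduleMixin implementation of the engine runtime
+interface: schedule creation and lookup, the mutually exclusive schedule
+lock, and the expire/finish/schedule_times bookkeeping.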
+""" + + +import threading + +from django.test import TransactionTestCase + +from bamboo_engine.eri.models import Schedule, ScheduleType + +from pipeline.eri.models import Schedule as DBSchedule +from pipeline.eri.imp.schedule import ScheduleMixin + + +class ScheduleMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = ScheduleMixin() + self.process_id = 99 + self.node_id = "nid" + self.version = "v1" + self.schedule_type = ScheduleType.POLL + self.schedule = DBSchedule.objects.create( + process_id=self.process_id, node_id=self.node_id, version="v2", type=self.schedule_type.value + ) + + def test_set_schedule(self): + schedule = self.mixin.set_schedule( + process_id=self.process_id, node_id=self.node_id, version=self.version, schedule_type=self.schedule_type + ) + schedule_model = DBSchedule.objects.get(id=schedule.id) + + self.assertTrue(schedule, Schedule) + self.assertEqual(schedule.id, schedule_model.id) + self.assertEqual(schedule.type, self.schedule_type) + self.assertEqual(schedule.process_id, self.process_id) + self.assertEqual(schedule.node_id, self.node_id) + self.assertEqual(schedule.finished, False) + self.assertEqual(schedule.expired, False) + self.assertEqual(schedule.version, self.version) + self.assertEqual(schedule.times, 0) + + self.assertEqual(schedule_model.type, self.schedule_type.value) + self.assertEqual(schedule_model.process_id, self.process_id) + self.assertEqual(schedule_model.node_id, self.node_id) + self.assertEqual(schedule_model.finished, False) + self.assertEqual(schedule_model.expired, False) + self.assertEqual(schedule_model.scheduling, False) + self.assertEqual(schedule_model.version, self.version) + self.assertEqual(schedule_model.schedule_times, 0) + + def test_get_schedule(self): + schedule = self.mixin.get_schedule(self.schedule.id) + + self.assertTrue(isinstance(schedule, Schedule)) + self.assertEqual(schedule.id, self.schedule.id) + self.assertEqual(schedule.type, ScheduleType(self.schedule.type)) + self.assertEqual(schedule.process_id, self.schedule.process_id) + self.assertEqual(schedule.node_id, self.schedule.node_id) + self.assertEqual(schedule.finished, self.schedule.finished) + self.assertEqual(schedule.expired, self.schedule.expired) + self.assertEqual(schedule.version, self.schedule.version) + self.assertEqual(schedule.times, self.schedule.schedule_times) + + def test_get_schedule_with_node_and_version(self): + schedule = self.mixin.get_schedule_with_node_and_version(self.schedule.node_id, self.schedule.version) + + self.assertTrue(isinstance(schedule, Schedule)) + self.assertEqual(schedule.id, self.schedule.id) + self.assertEqual(schedule.type, ScheduleType(self.schedule.type)) + self.assertEqual(schedule.process_id, self.schedule.process_id) + self.assertEqual(schedule.node_id, self.schedule.node_id) + self.assertEqual(schedule.finished, self.schedule.finished) + self.assertEqual(schedule.expired, self.schedule.expired) + self.assertEqual(schedule.version, self.schedule.version) + self.assertEqual(schedule.times, self.schedule.schedule_times) + + def test_get_schedule_with_node_and_version_not_exist(self): + self.assertRaises( + DBSchedule.DoesNotExist, self.mixin.get_schedule_with_node_and_version, self.schedule.node_id, "not_exist", + ) + + def test_apply_schedule_lock(self): + schedule_count = 10 + + lock = threading.Lock() + res = {False: 0, True: 0} + + def target(schedule_id): + success = self.mixin.apply_schedule_lock(schedule_id) + lock.acquire() + res[success] += 1 + lock.release() + + threads = 
[threading.Thread(target=target, args=(self.schedule.id,)) for i in range(schedule_count)] + + for t in threads: + t.start() + + for t in threads: + t.join(1) + + self.schedule.refresh_from_db() + + self.assertTrue(self.schedule.scheduling) + self.assertEqual(res[False], schedule_count - 1) + self.assertEqual(res[True], 1) + + def test_apply_schedule_lock__all_fail(self): + self.schedule.scheduling = True + self.schedule.save() + schedule_count = 10 + + lock = threading.Lock() + res = {False: 0, True: 0} + + def target(schedule_id): + success = self.mixin.apply_schedule_lock(schedule_id) + lock.acquire() + res[success] += 1 + lock.release() + + threads = [threading.Thread(target=target, args=(self.schedule.id,)) for i in range(schedule_count)] + + for t in threads: + t.start() + + for t in threads: + t.join(1) + + self.schedule.refresh_from_db() + + self.assertTrue(self.schedule.scheduling) + self.assertEqual(res[False], schedule_count) + self.assertEqual(res[True], 0) + + def test_release_schedule_lock(self): + self.schedule.scheduling = True + self.schedule.save() + schedule_count = 10 + + def target(schedule_id): + self.mixin.release_schedule_lock(schedule_id) + + threads = [threading.Thread(target=target, args=(self.schedule.id,)) for i in range(schedule_count)] + + for t in threads: + t.start() + + for t in threads: + t.join(1) + + self.schedule.refresh_from_db() + + self.assertFalse(self.schedule.scheduling) + + def test_expire_schedule(self): + self.assertFalse(self.schedule.expired) + self.mixin.expire_schedule(self.schedule.id) + self.schedule.refresh_from_db() + self.assertTrue(self.schedule.expired) + + def test_finish_schedule(self): + self.assertFalse(self.schedule.finished) + self.mixin.finish_schedule(self.schedule.id) + self.schedule.refresh_from_db() + self.assertTrue(self.schedule.finished) + + def test_add_schedule_times(self): + self.assertEqual(self.schedule.schedule_times, 0) + self.mixin.add_schedule_times(self.schedule.id) + self.mixin.add_schedule_times(self.schedule.id) + self.mixin.add_schedule_times(self.schedule.id) + self.schedule.refresh_from_db() + self.assertEqual(self.schedule.schedule_times, 3) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_service.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_service.py new file mode 100644 index 00000000..8dc4995e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_service.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
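+
+Unit tests for ServiceWrapper, which adapts a pipeline Service to the
+engine's execute/schedule contract. The general shape of a service under
+test is (a minimal sketch, the class name is illustrative):
+
+    class MyService(Service):
+        __need_schedule__ = True               # opt in to scheduling
+        interval = StaticIntervalGenerator(5)  # poll every 5 seconds
+
+        def execute(self, data, parent_data):
+            return True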
+""" + +from mock import MagicMock + +from django.test import TestCase + +from bamboo_engine.eri import ExecutionData, CallbackData, ScheduleType + +from pipeline.core.flow.activity import Service, StaticIntervalGenerator, SquareIntervalGenerator +from pipeline.eri.imp.service import ServiceWrapper + + +class ServiceWrapperTestCase(TestCase): + def test_pre_execute(self): + service = MagicMock() + service.pre_execute = MagicMock() + data = ExecutionData(inputs={"1": 1}, outputs={"2": 2}) + root_pipeline_data = ExecutionData(inputs={"3": 3}, outputs={"4": 4}) + + w = ServiceWrapper(service) + w.pre_execute(data, root_pipeline_data) + + self.assertEqual(service.pre_execute.call_args[0][0].inputs, data.inputs) + self.assertEqual(service.pre_execute.call_args[0][0].outputs, data.outputs) + + def test_pre_execute__no_define(self): + service = MagicMock() + service.pre_execute = None + data = ExecutionData({}, {}) + root_pipeline_data = ExecutionData({}, {}) + + w = ServiceWrapper(service) + w.pre_execute(data, root_pipeline_data) + + def test_execute(self): + class S(Service): + def execute(self, data, parent_data): + data.inputs.a = 1 + data.inputs.b = 2 + data.outputs.c = 3 + data.outputs.d = 4 + assert data.get_one_of_inputs("1") == 1 + assert data.get_one_of_outputs("2") == 2 + assert parent_data.get_one_of_inputs("3") == 3 + assert parent_data.get_one_of_outputs("4") == 4 + parent_data.inputs.e = 5 + parent_data.outputs.f = 6 + return False + + data = ExecutionData(inputs={"1": 1}, outputs={"2": 2}) + root_pipeline_data = ExecutionData(inputs={"3": 3}, outputs={"4": 4}) + + w = ServiceWrapper(S()) + execute_res = w.execute(data, root_pipeline_data) + + self.assertFalse(execute_res) + self.assertEqual(data.inputs, {"1": 1, "a": 1, "b": 2}) + self.assertEqual(data.outputs, {"2": 2, "c": 3, "d": 4}) + self.assertEqual(root_pipeline_data.inputs, {"3": 3}) + self.assertEqual(root_pipeline_data.outputs, {"4": 4}) + + def test_schedule(self): + class S(Service): + def execute(self, data, parent_data): + pass + + def schedule(self, data, parent_data, callback_data=None): + data.inputs.a = 1 + data.inputs.b = 2 + data.outputs.c = 3 + data.outputs.d = 4 + assert data.get_one_of_inputs("1") == 1 + assert data.get_one_of_outputs("2") == 2 + assert parent_data.get_one_of_inputs("3") == 3 + assert parent_data.get_one_of_outputs("4") == 4 + assert callback_data == {"callback_data": "callback_data"} + parent_data.inputs.e = 5 + parent_data.outputs.f = 6 + return False + + data = ExecutionData(inputs={"1": 1}, outputs={"2": 2}) + root_pipeline_data = ExecutionData(inputs={"3": 3}, outputs={"4": 4}) + callback_data = CallbackData(1, "", "", data={"callback_data": "callback_data"}) + schedule = MagicMock() + + w = ServiceWrapper(S()) + schedule_res = w.schedule(schedule, data, root_pipeline_data, callback_data) + + self.assertFalse(schedule_res) + self.assertEqual(data.inputs, {"1": 1, "a": 1, "b": 2}) + self.assertEqual(data.outputs, {"2": 2, "c": 3, "d": 4}) + self.assertEqual(root_pipeline_data.inputs, {"3": 3}) + self.assertEqual(root_pipeline_data.outputs, {"4": 4}) + + def test_need_schedule(self): + class S1(Service): + __need_schedule__ = True + + def execute(self, data, parent_data): + pass + + class S2(Service): + def execute(self, data, parent_data): + pass + + self.assertTrue(ServiceWrapper(S1()).need_schedule()) + self.assertFalse(ServiceWrapper(S2()).need_schedule()) + + def test_schedule_type(self): + class S1(Service): + def execute(self, data, parent_data): + pass + + class S2(Service): + 
__need_schedule__ = True + interval = StaticIntervalGenerator(5) + + def execute(self, data, parent_data): + pass + + class S3(Service): + __need_schedule__ = True + + def execute(self, data, parent_data): + pass + + class S4(Service): + __need_schedule__ = True + __multi_callback_enabled__ = True + + def execute(self, data, parent_data): + pass + + self.assertEqual(ServiceWrapper(S1()).schedule_type(), None) + self.assertEqual(ServiceWrapper(S2()).schedule_type(), ScheduleType.POLL) + self.assertEqual(ServiceWrapper(S3()).schedule_type(), ScheduleType.CALLBACK) + self.assertEqual(ServiceWrapper(S4()).schedule_type(), ScheduleType.MULTIPLE_CALLBACK) + + def test_is_schedule_done(self): + class S(Service): + __need_schedule__ = True + + def execute(self, data, parent_data): + pass + + s = S() + w = ServiceWrapper(s) + + self.assertFalse(w.is_schedule_done()) + s.finish_schedule() + self.assertTrue(w.is_schedule_done()) + + def test_schedule_after(self): + class S1(Service): + __need_schedule__ = True + interval = StaticIntervalGenerator(5) + + def execute(self, data, parent_data): + pass + + class S2(Service): + __need_schedule__ = True + interval = SquareIntervalGenerator() + + def execute(self, data, parent_data): + pass + + schedule = MagicMock() + + w1 = ServiceWrapper(S1()) + w2 = ServiceWrapper(S2()) + data = MagicMock() + root_pipeline_data = MagicMock() + + self.assertEqual(w1.schedule_after(None, data, root_pipeline_data), 5) + self.assertEqual(w2.schedule_after(None, data, root_pipeline_data), 1) + schedule.times = 10 + self.assertEqual(w1.schedule_after(schedule, data, root_pipeline_data), 5) + self.assertEqual(w2.schedule_after(schedule, data, root_pipeline_data), 100) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_state.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_state.py new file mode 100644 index 00000000..9b29dc30 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_state.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
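+
+Unit tests for the StateMixin implementation of the engine runtime
+interface: state queries by node, root and parent, and set_state
+transitions including version checks and the post_set_state signal.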
+""" + +from mock import patch, MagicMock + +from django.test import TransactionTestCase +from django.utils import timezone + +from bamboo_engine.eri.models import State +from bamboo_engine.exceptions import StateVersionNotMatchError + +from pipeline.eri.models import State as DBState +from pipeline.eri.imp.state import StateMixin, states +from bamboo_engine.utils.string import unique_id + + +class StateMixinTestCase(TransactionTestCase): + def setUp(self): + self.mixin = StateMixin() + self.started_time = timezone.now() + self.archived_time = timezone.now() + self.state = DBState.objects.create( + node_id=unique_id("n"), + root_id=unique_id("n"), + parent_id=unique_id("n"), + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + + def test_get_state(self): + state = self.mixin.get_state(self.state.node_id) + self.assertTrue(isinstance(state, State)) + self.assertEqual(state.node_id, self.state.node_id) + self.assertEqual(state.root_id, self.state.root_id) + self.assertEqual(state.parent_id, self.state.parent_id) + self.assertEqual(state.name, self.state.name) + self.assertEqual(state.version, self.state.version) + self.assertEqual(state.loop, self.state.loop) + self.assertEqual(state.retry, self.state.retry) + self.assertEqual(state.skip, self.state.skip) + self.assertEqual(state.created_time, self.state.created_time) + self.assertEqual(state.started_time, self.state.started_time) + self.assertEqual(state.archived_time, self.state.archived_time) + + def test_get_state__not_exist(self): + self.assertRaises(DBState.DoesNotExist, self.mixin.get_state, "not_exist") + + def test_get_state_or_none(self): + state = self.mixin.get_state_or_none(self.state.node_id) + self.assertTrue(isinstance(state, State)) + self.assertEqual(state.node_id, self.state.node_id) + self.assertEqual(state.root_id, self.state.root_id) + self.assertEqual(state.parent_id, self.state.parent_id) + self.assertEqual(state.name, self.state.name) + self.assertEqual(state.version, self.state.version) + self.assertEqual(state.loop, self.state.loop) + self.assertEqual(state.retry, self.state.retry) + self.assertEqual(state.skip, self.state.skip) + self.assertEqual(state.created_time, self.state.created_time) + self.assertEqual(state.started_time, self.state.started_time) + self.assertEqual(state.archived_time, self.state.archived_time) + + def test_get_state_or_none__not_exist(self): + self.assertIsNone(self.mixin.get_state_or_none("not_exist")) + + def test_get_state_by_root(self): + s1 = DBState.objects.create( + node_id=unique_id("n"), + root_id=unique_id("n"), + parent_id="", + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + s2 = DBState.objects.create( + node_id=unique_id("n"), + root_id=s1.root_id, + parent_id="", + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + s3 = DBState.objects.create( + node_id=unique_id("n"), + root_id=s1.root_id, + parent_id="", + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + + state_list = self.mixin.get_state_by_root(s1.root_id) + self.assertEqual(len(state_list), 3) + + self.assertTrue(isinstance(state_list[0], State)) + self.assertEqual(state_list[0].node_id, s1.node_id) + self.assertEqual(state_list[0].root_id, s1.root_id) + self.assertEqual(state_list[0].parent_id, s1.parent_id) + 
self.assertEqual(state_list[0].name, s1.name)
+        self.assertEqual(state_list[0].version, s1.version)
+        self.assertEqual(state_list[0].loop, s1.loop)
+        self.assertEqual(state_list[0].retry, s1.retry)
+        self.assertEqual(state_list[0].skip, s1.skip)
+        self.assertEqual(state_list[0].created_time, s1.created_time)
+        self.assertEqual(state_list[0].started_time, s1.started_time)
+        self.assertEqual(state_list[0].archived_time, s1.archived_time)
+
+        self.assertTrue(isinstance(state_list[1], State))
+        self.assertEqual(state_list[1].node_id, s2.node_id)
+        self.assertEqual(state_list[1].root_id, s2.root_id)
+        self.assertEqual(state_list[1].parent_id, s2.parent_id)
+        self.assertEqual(state_list[1].name, s2.name)
+        self.assertEqual(state_list[1].version, s2.version)
+        self.assertEqual(state_list[1].loop, s2.loop)
+        self.assertEqual(state_list[1].retry, s2.retry)
+        self.assertEqual(state_list[1].skip, s2.skip)
+        self.assertEqual(state_list[1].created_time, s2.created_time)
+        self.assertEqual(state_list[1].started_time, s2.started_time)
+        self.assertEqual(state_list[1].archived_time, s2.archived_time)
+
+        self.assertTrue(isinstance(state_list[2], State))
+        self.assertEqual(state_list[2].node_id, s3.node_id)
+        self.assertEqual(state_list[2].root_id, s3.root_id)
+        self.assertEqual(state_list[2].parent_id, s3.parent_id)
+        self.assertEqual(state_list[2].name, s3.name)
+        self.assertEqual(state_list[2].version, s3.version)
+        self.assertEqual(state_list[2].loop, s3.loop)
+        self.assertEqual(state_list[2].retry, s3.retry)
+        self.assertEqual(state_list[2].skip, s3.skip)
+        self.assertEqual(state_list[2].created_time, s3.created_time)
+        self.assertEqual(state_list[2].started_time, s3.started_time)
+        self.assertEqual(state_list[2].archived_time, s3.archived_time)
+
+    def test_get_state_by_root__not_exist(self):
+        state_list = self.mixin.get_state_by_root("not_exist")
+        self.assertEqual(state_list, [])
+
+    def test_get_state_by_parent(self):
+        s1 = DBState.objects.create(
+            node_id=unique_id("n"),
+            root_id=unique_id("n"),
+            parent_id=unique_id("n"),
+            name=states.RUNNING,
+            version=unique_id("v"),
+            started_time=self.started_time,
+            archived_time=self.archived_time,
+        )
+        s2 = DBState.objects.create(
+            node_id=unique_id("n"),
+            root_id=s1.root_id,
+            parent_id=s1.parent_id,
+            name=states.RUNNING,
+            version=unique_id("v"),
+            started_time=self.started_time,
+            archived_time=self.archived_time,
+        )
+        s3 = DBState.objects.create(
+            node_id=unique_id("n"),
+            root_id=s1.root_id,
+            parent_id=s1.parent_id,
+            name=states.RUNNING,
+            version=unique_id("v"),
+            started_time=self.started_time,
+            archived_time=self.archived_time,
+        )
+
+        state_list = self.mixin.get_state_by_parent(s1.parent_id)
+        self.assertEqual(len(state_list), 3)
+
+        self.assertTrue(isinstance(state_list[0], State))
+        self.assertEqual(state_list[0].node_id, s1.node_id)
+        self.assertEqual(state_list[0].root_id, s1.root_id)
+        self.assertEqual(state_list[0].parent_id, s1.parent_id)
+        self.assertEqual(state_list[0].name, s1.name)
+        self.assertEqual(state_list[0].version, s1.version)
+        self.assertEqual(state_list[0].loop, s1.loop)
+        self.assertEqual(state_list[0].retry, s1.retry)
+        self.assertEqual(state_list[0].skip, s1.skip)
+        self.assertEqual(state_list[0].created_time, s1.created_time)
+        self.assertEqual(state_list[0].started_time, s1.started_time)
+        self.assertEqual(state_list[0].archived_time, s1.archived_time)
+
+        self.assertTrue(isinstance(state_list[1], State))
+        self.assertEqual(state_list[1].node_id, s2.node_id)
+        self.assertEqual(state_list[1].root_id, 
s2.root_id) + self.assertEqual(state_list[1].parent_id, s2.parent_id) + self.assertEqual(state_list[1].name, s2.name) + self.assertEqual(state_list[1].version, s2.version) + self.assertEqual(state_list[1].loop, s2.loop) + self.assertEqual(state_list[1].retry, s2.retry) + self.assertEqual(state_list[1].skip, s2.skip) + self.assertEqual(state_list[1].created_time, s2.created_time) + self.assertEqual(state_list[1].started_time, s2.started_time) + self.assertEqual(state_list[1].archived_time, s2.archived_time) + + self.assertTrue(isinstance(state_list[2], State)) + self.assertEqual(state_list[2].node_id, s3.node_id) + self.assertEqual(state_list[2].root_id, s3.root_id) + self.assertEqual(state_list[2].parent_id, s3.parent_id) + self.assertEqual(state_list[2].name, s3.name) + self.assertEqual(state_list[2].version, s3.version) + self.assertEqual(state_list[2].loop, s3.loop) + self.assertEqual(state_list[2].retry, s3.retry) + self.assertEqual(state_list[2].skip, s3.skip) + self.assertEqual(state_list[2].created_time, s3.created_time) + self.assertEqual(state_list[2].started_time, s3.started_time) + self.assertEqual(state_list[2].archived_time, s3.archived_time) + + def test_get_state_by_parent__not_exist(self): + state_list = self.mixin.get_state_by_parent("not_exist") + self.assertEqual(state_list, []) + + def test_batch_get_state_name(self): + s1 = DBState.objects.create( + node_id=unique_id("n"), + root_id=unique_id("n"), + parent_id=unique_id("n"), + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + s2 = DBState.objects.create( + node_id=unique_id("n"), + root_id=s1.root_id, + parent_id=s1.parent_id, + name=states.READY, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + s3 = DBState.objects.create( + node_id=unique_id("n"), + root_id=s1.root_id, + parent_id=s1.parent_id, + name=states.FINISHED, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + s4 = DBState.objects.create( + node_id=unique_id("n"), + root_id=s1.root_id, + parent_id=s1.parent_id, + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + + state_names = self.mixin.batch_get_state_name([s1.node_id, s2.node_id, s3.node_id, s4.node_id]) + self.assertEqual( + state_names, {s1.node_id: s1.name, s2.node_id: s2.name, s3.node_id: s3.name, s4.node_id: s4.name} + ) + + def test_has_state(self): + self.assertTrue(self.mixin.has_state(self.state.node_id)) + + def test_has_state__not_exist(self): + self.assertFalse(self.mixin.has_state("not_exist")) + + def test_set_state_root_and_parent(self): + s = DBState.objects.create( + node_id=unique_id("n"), + name=states.RUNNING, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + self.assertEqual(s.root_id, "") + self.assertEqual(s.parent_id, "") + + self.mixin.set_state_root_and_parent(s.node_id, self.state.root_id, self.state.parent_id) + s.refresh_from_db() + self.assertEqual(s.root_id, self.state.root_id) + self.assertEqual(s.parent_id, self.state.parent_id) + + def test_set_state__not_exist(self): + node_id = unique_id("n") + root_id = unique_id("n") + parent_id = root_id + to_state = states.RUNNING + + post_set_state = MagicMock() + with patch("pipeline.eri.imp.state.post_set_state", post_set_state): + version = self.mixin.set_state( + node_id=node_id, + to_state=to_state, + loop=-1, + root_id=root_id, 
+ parent_id=parent_id, + is_retry=True, + is_skip=True, + reset_retry=True, + reset_skip=True, + error_ignored=True, + reset_error_ignored=True, + refresh_version=False, + clear_started_time=True, + set_started_time=True, + clear_archived_time=True, + set_archive_time=False, + ) + + state = DBState.objects.get(node_id=node_id) + self.assertEqual(len(version), 33) + self.assertEqual(state.node_id, node_id) + self.assertEqual(state.root_id, root_id) + self.assertEqual(state.parent_id, parent_id) + self.assertEqual(state.name, to_state) + self.assertEqual(len(state.version), 33) + self.assertEqual(state.loop, 1) + self.assertEqual(state.retry, 0) + self.assertEqual(state.skip, False) + self.assertEqual(state.error_ignored, False) + self.assertIsNotNone(state.created_time) + self.assertIsNotNone(state.started_time) + self.assertIsNone(state.archived_time) + post_set_state.send.assert_called_once_with( + sender=DBState, + node_id=node_id, + to_state=to_state, + version=state.version, + root_id=state.root_id, + parent_id=state.parent_id, + loop=-1, + ) + + def test_set_state__exist(self): + to_state = states.FINISHED + + post_set_state = MagicMock() + with patch("pipeline.eri.imp.state.post_set_state", post_set_state): + version = self.mixin.set_state( + node_id=self.state.node_id, + to_state=to_state, + loop=2, + is_retry=True, + is_skip=True, + reset_retry=False, + reset_skip=False, + error_ignored=True, + reset_error_ignored=False, + refresh_version=True, + clear_started_time=True, + set_started_time=True, + clear_archived_time=True, + set_archive_time=True, + ) + + state = DBState.objects.get(node_id=self.state.node_id) + self.assertEqual(len(version), 33) + self.assertNotEqual(version, self.state.version) + self.assertEqual(version, state.version) + self.assertEqual(state.node_id, self.state.node_id) + self.assertEqual(state.root_id, self.state.root_id) + self.assertEqual(state.parent_id, self.state.parent_id) + self.assertEqual(state.name, to_state) + self.assertEqual(len(state.version), 33) + self.assertNotEqual(state.version, self.state.version) + self.assertEqual(state.loop, 2) + self.assertEqual(state.retry, 1) + self.assertEqual(state.skip, True) + self.assertEqual(state.error_ignored, True) + self.assertIsNotNone(state.created_time) + self.assertNotEqual(state.started_time, self.started_time) + self.assertIsNotNone(state.archived_time) + self.assertNotEqual(state.archived_time, self.archived_time) + post_set_state.send.assert_called_once_with( + sender=DBState, + node_id=state.node_id, + to_state=to_state, + version=state.version, + root_id=state.root_id, + parent_id=state.parent_id, + loop=state.loop, + ) + + def test_set_state__raise(self): + post_set_state = MagicMock() + with patch("pipeline.eri.imp.state.post_set_state", post_set_state): + self.assertRaises( + RuntimeError, + self.mixin.set_state, + node_id=self.state.node_id, + to_state=states.READY, + loop=2, + is_retry=True, + is_skip=True, + reset_retry=True, + reset_skip=True, + reset_error_ignored=False, + error_ignored=True, + refresh_version=False, + clear_started_time=True, + set_started_time=True, + clear_archived_time=True, + set_archive_time=True, + ) + post_set_state.send.assert_not_called() + + def test_set_state__raise_version_not_match(self): + post_set_state = MagicMock() + with patch("pipeline.eri.imp.state.post_set_state", post_set_state): + self.assertRaises( + StateVersionNotMatchError, + self.mixin.set_state, + node_id=self.state.node_id, + version=unique_id("v"), + to_state=states.SUSPENDED, + loop=2, + 
is_retry=True, + is_skip=True, + reset_retry=True, + reset_skip=True, + reset_error_ignored=False, + error_ignored=True, + refresh_version=False, + clear_started_time=True, + set_started_time=True, + clear_archived_time=True, + set_archive_time=True, + ) + post_set_state.send.assert_not_called() + + def test_reset_state_inner_loop(self): + s = DBState.objects.create( + node_id=unique_id("n"), + name=states.RUNNING, + inner_loop=1, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + self.assertEqual(s.inner_loop, 1) + + self.mixin.reset_state_inner_loop(s.node_id) + s.refresh_from_db() + self.assertEqual(s.inner_loop, 0) + + def test_reset_children_state_inner_loop(self): + parent_node_id = unique_id("n") + parent_state = DBState.objects.create( + node_id=parent_node_id, + name=states.RUNNING, + inner_loop=1, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + first_child_state = DBState.objects.create( + node_id=unique_id("n"), + name=states.RUNNING, + parent_id=parent_node_id, + inner_loop=1, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + second_child_state = DBState.objects.create( + node_id=unique_id("n"), + name=states.RUNNING, + parent_id=parent_node_id, + inner_loop=1, + version=unique_id("v"), + started_time=self.started_time, + archived_time=self.archived_time, + ) + + self.assertEqual(parent_state.inner_loop, 1) + self.assertEqual(first_child_state.inner_loop, 1) + self.assertEqual(second_child_state.inner_loop, 1) + + self.mixin.reset_children_state_inner_loop(parent_state.node_id) + parent_state.refresh_from_db() + first_child_state.refresh_from_db() + second_child_state.refresh_from_db() + + self.assertEqual(parent_state.inner_loop, 1) + self.assertEqual(first_child_state.inner_loop, 0) + self.assertEqual(second_child_state.inner_loop, 0) diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_variable.py b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_variable.py new file mode 100644 index 00000000..6a208f96 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/imp/test_variable.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
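+
+Unit tests for VariableWrapper, which resolves a SPLICE expression through
+the engine Context before handing the rendered value to a legacy
+LazyVariable implementation.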
+""" + +from mock import MagicMock + +from django.test import TestCase + +from bamboo_engine.eri import ContextValue, ContextValueType +from bamboo_engine.context import Context, SpliceVariable + +from pipeline.core.data.var import LazyVariable +from pipeline.eri.imp.variable import VariableWrapper + + +class TestVariable(LazyVariable): + code = "wrapper_test" + name = "wrapper_test" + + def get_value(self): + assert self.value == "1-2" + assert self.pipeline_data == {"1": 1, "2": 2} + return "heihei" + + +class VariableWrapperTestCase(TestCase): + def test_get(self): + runtime = MagicMock() + + values = [ + ContextValue("${a}", type=ContextValueType.PLAIN, value="1"), + ContextValue("${b}", type=ContextValueType.PLAIN, value="2"), + ContextValue("${c}", type=ContextValueType.SPLICE, value="${a}-${b}"), + ] + + context = Context(runtime, values, {"id": 1}) + + w = VariableWrapper( + original_value=SpliceVariable(key="${c}", value="${a}-${b}", pool=context.pool), + var_cls=TestVariable, + additional_data={"1": 1, "2": 2}, + ) + + self.assertEqual(w.get(), "heihei") diff --git a/runtime/bamboo-pipeline/pipeline/tests/eri/test_runtime.py b/runtime/bamboo-pipeline/pipeline/tests/eri/test_runtime.py new file mode 100644 index 00000000..2912db48 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/eri/test_runtime.py @@ -0,0 +1,828 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import json + +from django.test import TransactionTestCase + +from bamboo_engine.eri import NodeType +from bamboo_engine import builder +from bamboo_engine import validator +from bamboo_engine.builder import * # noqa + +from pipeline.eri.models import Process, Node, Data, ContextValue, ContextOutputs +from pipeline.eri.runtime import BambooDjangoRuntime + + +class BambooDjangoRuntimeTestCase(TransactionTestCase): + def setUp(self): + self.maxDiff = None + self.runtime = BambooDjangoRuntime() + + def test_node_rerun_limit(self): + self.assertEqual(self.runtime.node_rerun_limit("1", "2"), 100) + + def test_prepare_run_pipeline_simple(self): + + # struct + start = EmptyStartEvent(id="start") + pg = ParallelGateway(id="pg") + act1 = ServiceActivity(id="act1", component_code="debug_node") + act2 = ServiceActivity( + id="act2", component_code="debug_node", error_ignorable=True, timeout=5, skippable=True, retryable=True + ) + cg1 = ConvergeGateway(id="cg1") + eg = ExclusiveGateway(id="eg", conditions={0: "True == True", 1: "True == False"}) + act3 = ServiceActivity(id="act3", component_code="debug_node") + act4 = ServiceActivity(id="act4", component_code="debug_node") + cg2 = ConvergeGateway(id="cg2") + cpg = ConditionalParallelGateway(id="cpg", conditions={0: "True == True", 1: "True == True"}) + act5 = ServiceActivity(id="act5", component_code="debug_node") + act6 = ServiceActivity(id="act6", component_code="debug_node") + cg3 = ConvergeGateway(id="cg3") + + sub_start = EmptyStartEvent(id="sub_start") + sub_act1 = ServiceActivity(id="sub_act1", component_code="sub_debug_node") + sub_act2 = ServiceActivity(id="sub_act2", component_code="sub_debug_node") + sub_act3 = ServiceActivity(id="sub_act3", component_code="sub_debug_node") + sub_end = EmptyEndEvent(id="sub_end") + sub_start.extend(sub_act1).extend(sub_act2).extend(sub_act3).extend(sub_end) + + subproc = SubProcess(id="subproc", start=sub_start) + end = EmptyEndEvent(id="end") + + start.extend(pg).connect(act1, act2).converge(cg1).extend(eg).connect(act3, act4).converge(cg2).extend( + cpg + ).connect(act5, act6).converge(cg3).extend(subproc).extend(end) + + # data + act1.component.inputs.key1 = Var(type=Var.SPLICE, value="${a}") + act1.component.inputs.key2 = Var(type=Var.SPLICE, value="${b}") + act1.component.inputs.key3 = Var(type=Var.LAZY, value="${a}-${b}", custom_type="ip") + + act2.component.inputs.key2 = Var(type=Var.SPLICE, value="${a}") + act2.component.inputs.key3 = Var(type=Var.SPLICE, value="${b}") + + act3.component.inputs.key3 = Var(type=Var.SPLICE, value="${a}") + act3.component.inputs.key4 = Var(type=Var.SPLICE, value="${b}") + + act4.component.inputs.key4 = Var(type=Var.SPLICE, value="${a}") + act4.component.inputs.key5 = Var(type=Var.SPLICE, value="${b}") + + act5.component.inputs.key5 = Var(type=Var.SPLICE, value="${a}") + act5.component.inputs.key6 = Var(type=Var.SPLICE, value="${b}") + + act6.component.inputs.key6 = Var(type=Var.SPLICE, value="${a}") + act6.component.inputs.key7 = Var(type=Var.SPLICE, value="${b}") + + sub_act1.component.inputs.key7 = Var(type=Var.SPLICE, value="${c}") + sub_act1.component.inputs.key8 = Var(type=Var.SPLICE, value="${d}") + + sub_act2.component.inputs.key8 = Var(type=Var.SPLICE, value="${c}") + sub_act2.component.inputs.key9 = Var(type=Var.SPLICE, value="${d}") + + sub_act3.component.inputs.key9 = Var(type=Var.SPLICE, value="${c}") + sub_act3.component.inputs.key10 = Var(type=Var.SPLICE, value="${d}") + + sub_data = builder.Data() + sub_data.inputs["${sub_a}"] = 
Var(type=Var.LAZY, value={"a": "${b}"}, custom_type="ip") + sub_data.inputs["${sub_b}"] = Var(type=Var.SPLICE, value="${c}") + sub_data.inputs["${sub_c}"] = Var(type=Var.PLAIN, value="c") + sub_data.inputs["${sub_d}"] = Var(type=Var.PLAIN, value="") + sub_data.inputs["${sub_e}"] = Var(type=Var.PLAIN, value="") + sub_data.inputs["${sub_output1}"] = NodeOutput( + source_act=sub_act1.id, source_key="key7", type=Var.PLAIN, value="" + ) + sub_data.inputs["${sub_output2}"] = NodeOutput( + source_act=sub_act2.id, source_key="key8", type=Var.PLAIN, value="" + ) + sub_data.outputs = ["${sub_a}", "${sub_b}"] + sub_params = Params( + {"${sub_d}": Var(type=Var.SPLICE, value="${a}"), "${sub_e}": Var(type=Var.SPLICE, value="${b}")} + ) + + pipeline_data = builder.Data() + pipeline_data.inputs["${a}"] = Var(type=Var.LAZY, value=["${b}", "${c}_${d}"], custom_type="ip") + pipeline_data.inputs["${b}"] = Var(type=Var.SPLICE, value="${e}_2") + pipeline_data.inputs["${c}"] = Var(type=Var.SPLICE, value="${e}_${f}") + pipeline_data.inputs["${d}"] = Var(type=Var.PLAIN, value="ab") + pipeline_data.inputs["${e}"] = Var(type=Var.PLAIN, value="cd") + pipeline_data.inputs["${f}"] = Var(type=Var.PLAIN, value="ef") + pipeline_data.inputs["${g}"] = Var(type=Var.SPLICE, value="1 + ${h}") + pipeline_data.inputs["${h}"] = Var(type=Var.SPLICE, value="${f}-${f}") + pipeline_data.inputs["${output1}"] = NodeOutput(source_act=act1.id, source_key="key1", type=Var.PLAIN, value="") + pipeline_data.inputs["${output2}"] = NodeOutput(source_act=act2.id, source_key="key2", type=Var.PLAIN, value="") + pipeline_data.outputs = ["${a}", "${d}", "${g}"] + pipeline_data.pre_render_keys = ["${d}"] + + subproc.data = sub_data + subproc.params = sub_params + pipeline = build_tree(start, id="pipeline", data=pipeline_data) + validator.validate_and_process_pipeline(pipeline) + + # assertion + process_id = self.runtime.prepare_run_pipeline(pipeline, {"k": "v"}, {}, {"${k1}": "v1", "${k2}": "v2"}) + process = Process.objects.get(id=process_id) + self.assertEqual(process.root_pipeline_id, pipeline["id"]) + self.assertEqual(process.queue, "") + self.assertEqual(process.priority, 100) + + nodes = {node.node_id: node for node in Node.objects.all()} + datas = {data.node_id: data for data in Data.objects.all()} + context_values = {} + for cv in ContextValue.objects.all(): + context_values.setdefault(cv.pipeline_id, {})[cv.key] = cv + context_outputs = {co.pipeline_id: co for co in ContextOutputs.objects.all()} + + self.assertEqual(len(nodes), 20) + self.assertEqual(len(datas), 12) + self.assertEqual(len(context_values["pipeline"]), 9) + self.assertEqual(len(context_values["subproc"]), 7) + self.assertEqual(len(context_outputs), 2) + + # node + self.assertEqual( + json.loads(nodes["start"].detail), + { + "id": "start", + "type": NodeType.EmptyStartEvent.value, + "targets": {pipeline["start_event"]["outgoing"]: "pg"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["pg"].detail), + { + "id": "pg", + "type": NodeType.ParallelGateway.value, + "targets": { + flow_id: pipeline["flows"][flow_id]["target"] for flow_id in pipeline["gateways"]["pg"]["outgoing"] + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "converge_gateway_id": "cg1", + "can_skip": False, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["act1"].detail), + { + "id": "act1", + "type": NodeType.ServiceActivity.value, + "targets": 
{pipeline["activities"]["act1"]["outgoing"]: "cg1"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["act2"].detail), + { + "id": "act2", + "type": NodeType.ServiceActivity.value, + "targets": {pipeline["activities"]["act2"]["outgoing"]: "cg1"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": 5, + "error_ignorable": True, + }, + ) + self.assertEqual( + json.loads(nodes["cg1"].detail), + { + "id": "cg1", + "type": NodeType.ConvergeGateway.value, + "targets": {pipeline["gateways"]["cg1"]["outgoing"]: "eg"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["eg"].detail), + { + "id": "eg", + "type": NodeType.ExclusiveGateway.value, + "targets": { + flow_id: pipeline["flows"][flow_id]["target"] for flow_id in pipeline["gateways"]["eg"]["outgoing"] + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "conditions": [ + { + "name": flow_id, + "evaluation": cond["evaluate"], + "target_id": pipeline["flows"][flow_id]["target"], + "flow_id": flow_id, + } + for flow_id, cond in pipeline["gateways"]["eg"]["conditions"].items() + ], + }, + ) + self.assertEqual( + json.loads(nodes["act3"].detail), + { + "id": "act3", + "type": NodeType.ServiceActivity.value, + "targets": {pipeline["activities"]["act3"]["outgoing"]: "cg2"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["act4"].detail), + { + "id": "act4", + "type": NodeType.ServiceActivity.value, + "targets": {pipeline["activities"]["act4"]["outgoing"]: "cg2"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["cg2"].detail), + { + "id": "cg2", + "type": NodeType.ConvergeGateway.value, + "targets": {pipeline["gateways"]["cg2"]["outgoing"]: "cpg"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["cpg"].detail), + { + "id": "cpg", + "type": NodeType.ConditionalParallelGateway.value, + "targets": { + flow_id: pipeline["flows"][flow_id]["target"] for flow_id in pipeline["gateways"]["cpg"]["outgoing"] + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + "conditions": [ + { + "name": flow_id, + "evaluation": cond["evaluate"], + "target_id": pipeline["flows"][flow_id]["target"], + "flow_id": flow_id, + } + for flow_id, cond in pipeline["gateways"]["cpg"]["conditions"].items() + ], + "converge_gateway_id": "cg3", + }, + ) + self.assertEqual( + json.loads(nodes["act5"].detail), + { + "id": "act5", + "type": NodeType.ServiceActivity.value, + "targets": {pipeline["activities"]["act5"]["outgoing"]: "cg3"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + 
"can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["act6"].detail), + { + "id": "act6", + "type": NodeType.ServiceActivity.value, + "targets": {pipeline["activities"]["act6"]["outgoing"]: "cg3"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": True, + "can_retry": True, + "code": "debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["cg3"].detail), + { + "id": "cg3", + "type": NodeType.ConvergeGateway.value, + "targets": {pipeline["gateways"]["cg3"]["outgoing"]: "subproc"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["subproc"].detail), + { + "id": "subproc", + "type": NodeType.SubProcess.value, + "targets": {pipeline["activities"]["subproc"]["outgoing"]: "end"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + "start_event_id": "sub_start", + }, + ) + self.assertEqual( + json.loads(nodes["sub_start"].detail), + { + "id": "sub_start", + "type": NodeType.EmptyStartEvent.value, + "targets": {pipeline["activities"]["subproc"]["pipeline"]["start_event"]["outgoing"]: "sub_act1"}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "subproc", + "can_skip": True, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["sub_act1"].detail), + { + "id": "sub_act1", + "type": NodeType.ServiceActivity.value, + "targets": { + pipeline["activities"]["subproc"]["pipeline"]["activities"]["sub_act1"]["outgoing"]: "sub_act2" + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "subproc", + "can_skip": True, + "can_retry": True, + "code": "sub_debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["sub_act2"].detail), + { + "id": "sub_act2", + "type": NodeType.ServiceActivity.value, + "targets": { + pipeline["activities"]["subproc"]["pipeline"]["activities"]["sub_act2"]["outgoing"]: "sub_act3" + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "subproc", + "can_skip": True, + "can_retry": True, + "code": "sub_debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["sub_act3"].detail), + { + "id": "sub_act3", + "type": NodeType.ServiceActivity.value, + "targets": { + pipeline["activities"]["subproc"]["pipeline"]["activities"]["sub_act3"]["outgoing"]: "sub_end" + }, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "subproc", + "can_skip": True, + "can_retry": True, + "code": "sub_debug_node", + "version": "legacy", + "timeout": None, + "error_ignorable": False, + }, + ) + self.assertEqual( + json.loads(nodes["sub_end"].detail), + { + "id": "sub_end", + "type": NodeType.EmptyEndEvent.value, + "targets": {}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "subproc", + "can_skip": False, + "can_retry": True, + }, + ) + self.assertEqual( + json.loads(nodes["end"].detail), + { + "id": "end", + "type": NodeType.EmptyEndEvent.value, + "targets": {}, + "root_pipeline_id": "pipeline", + "parent_pipeline_id": "pipeline", + "can_skip": False, + "can_retry": True, + }, + ) + + # data + self.assertEqual(json.loads(datas["pipeline"].inputs), {"k": {"need_render": False, "value": "v"}}) + + self.assertEqual( + 
json.loads(datas["act1"].inputs), + { + "key1": {"need_render": True, "value": "${a}"}, + "key2": {"need_render": True, "value": "${b}"}, + "key3": {"need_render": True, "value": "${key3_act1}"}, + }, + ) + self.assertEqual(json.loads(datas["act1"].outputs), {"key1": "${output1}"}) + + self.assertEqual( + json.loads(datas["act2"].inputs), + {"key2": {"need_render": True, "value": "${a}"}, "key3": {"need_render": True, "value": "${b}"}}, + ) + self.assertEqual(json.loads(datas["act2"].outputs), {"key2": "${output2}"}) + + self.assertEqual( + json.loads(datas["act3"].inputs), + {"key3": {"need_render": True, "value": "${a}"}, "key4": {"need_render": True, "value": "${b}"}}, + ) + + self.assertEqual( + json.loads(datas["act4"].inputs), + {"key4": {"need_render": True, "value": "${a}"}, "key5": {"need_render": True, "value": "${b}"}}, + ) + + self.assertEqual( + json.loads(datas["act5"].inputs), + {"key5": {"need_render": True, "value": "${a}"}, "key6": {"need_render": True, "value": "${b}"}}, + ) + + self.assertEqual( + json.loads(datas["act6"].inputs), + {"key6": {"need_render": True, "value": "${a}"}, "key7": {"need_render": True, "value": "${b}"}}, + ) + + self.assertEqual( + json.loads(datas["sub_act1"].inputs), + {"key7": {"need_render": True, "value": "${c}"}, "key8": {"need_render": True, "value": "${d}"}}, + ) + self.assertEqual(json.loads(datas["sub_act1"].outputs), {"key7": "${sub_output1}"}) + + self.assertEqual( + json.loads(datas["sub_act2"].inputs), + {"key8": {"need_render": True, "value": "${c}"}, "key9": {"need_render": True, "value": "${d}"}}, + ) + self.assertEqual(json.loads(datas["sub_act2"].outputs), {"key8": "${sub_output2}"}) + + self.assertEqual( + json.loads(datas["sub_act3"].inputs), + {"key9": {"need_render": True, "value": "${c}"}, "key10": {"need_render": True, "value": "${d}"}}, + ) + + self.assertEqual( + json.loads(datas["subproc"].inputs), + {"${sub_d}": {"need_render": True, "value": "${a}"}, "${sub_e}": {"need_render": True, "value": "${b}"}}, + ) + self.assertEqual(json.loads(datas["subproc"].outputs), {}) + + # context outputs + self.assertEqual(json.loads(context_outputs["pipeline"].outputs), ["${a}", "${d}", "${g}"]) + self.assertEqual(json.loads(context_outputs["subproc"].outputs), ["${sub_a}", "${sub_b}"]) + + # context values + key = "${a}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 3, + "serializer": "json", + "value": ["${b}", "${c}_${d}"], + "references": {"${b}", "${c}", "${d}", "${e}", "${f}"}, + }, + ) + + key = "${b}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 2, + "serializer": "json", + "value": "${e}_2", + "references": {"${e}"}, + }, + ) + + key = "${c}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": 
context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 2, + "serializer": "json", + "value": "${e}_${f}", + "references": {"${e}", "${f}"}, + }, + ) + + key = "${d}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 1, + "serializer": "json", + "value": "ab", + "references": set(), + }, + ) + + key = "${e}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 1, + "serializer": "json", + "value": "cd", + "references": set(), + }, + ) + + key = "${f}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 1, + "serializer": "json", + "value": "ef", + "references": set(), + }, + ) + + key = "${g}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 2, + "serializer": "json", + "value": "1 + ${h}", + "references": {"${h}", "${f}"}, + }, + ) + + key = "${h}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 2, + "serializer": "json", + "value": "${f}-${f}", + "references": {"${f}"}, + }, + ) + + key = "${key3_act1}" + self.assertEqual( + { + "pipeline_id": "pipeline", + "key": context_values["pipeline"][key].key, + "type": context_values["pipeline"][key].type, + "serializer": context_values["pipeline"][key].serializer, + "value": json.loads(context_values["pipeline"][key].value), + "references": set(json.loads(context_values["pipeline"][key].references)), + }, + { + "pipeline_id": "pipeline", + "key": key, + "type": 3, + "serializer": "json", + "value": "${a}-${b}", + "references": {"${b}", "${f}", "${a}", "${d}", "${c}", "${e}"}, + }, + ) + + key = "${sub_a}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + 
"type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + { + "pipeline_id": "subproc", + "key": key, + "type": 3, + "serializer": "json", + "value": {"a": "${b}"}, + "references": {"${b}"}, + }, + ) + + key = "${sub_b}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + { + "pipeline_id": "subproc", + "key": key, + "type": 2, + "serializer": "json", + "value": "${c}", + "references": {"${c}"}, + }, + ) + + key = "${sub_c}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + {"pipeline_id": "subproc", "key": key, "type": 1, "serializer": "json", "value": "c", "references": set()}, + ) + + key = "${sub_d}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + {"pipeline_id": "subproc", "key": key, "type": 1, "serializer": "json", "value": "", "references": set()}, + ) + + key = "${sub_e}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + {"pipeline_id": "subproc", "key": key, "type": 1, "serializer": "json", "value": "", "references": set()}, + ) + + key = "${k1}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + { + "pipeline_id": "subproc", + "key": key, + "type": 1, + "serializer": "json", + "value": "v1", + "references": set(), + }, + ) + + key = "${k2}" + self.assertEqual( + { + "pipeline_id": "subproc", + "key": context_values["subproc"][key].key, + "type": context_values["subproc"][key].type, + "serializer": context_values["subproc"][key].serializer, + "value": json.loads(context_values["subproc"][key].value), + "references": set(json.loads(context_values["subproc"][key].references)), + }, + { + "pipeline_id": "subproc", + "key": key, + "type": 1, + "serializer": "json", + "value": "v2", + "references": set(), + }, + ) diff --git a/runtime/bamboo-pipeline/pipeline/tests/mock.py b/runtime/bamboo-pipeline/pipeline/tests/mock.py new file mode 100644 index 00000000..98b9ebf5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/mock.py @@ -0,0 
+1,403 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import mock +from mock import MagicMock, patch # noqa + +from pipeline.utils.collections import FancyDict +from pipeline.utils.uniqid import uniqid + + +def __reduce__(self): + return (mock.MagicMock, ()) + + +mock.mock.MagicMock.__reduce__ = __reduce__ +mock.MagicMock.__reduce__ = __reduce__ +MagicMock.__reduce__ = __reduce__ + + +class Object(object): + pass + + +class MockResponse(object): + def __init__(self, **kwargs): + self.content = kwargs.get("content") + self.ok = kwargs.get("ok", True) + + +class ContextObject(object): + def __init__(self, variables): + self.variables = variables + + +class DataObject(object): + def __init__(self, outputs=None): + self._outputs = outputs or {} + + def get_outputs(self): + return self._outputs + + +class IdentifyObject(object): + def __init__(self, id=None, name=""): + self.id = id or uniqid() + self.name = name + + +class StartEventObject(IdentifyObject): + def __init__(self, id=None, node=None, data=None): + self.next = mock.MagicMock(return_value=node or uniqid()) + self.data = data or MockData() + super(StartEventObject, self).__init__(id=id) + + +class EndEventObject(IdentifyObject): + def __init__(self, id=None): + self.pipeline_finish = mock.MagicMock() + super(EndEventObject, self).__init__(id=id) + + +class ExecutableEndEventObject(IdentifyObject): + def __init__(self, id=None): + self.pipeline_finish = mock.MagicMock() + self.execute = mock.MagicMock() + self.data = mock.MagicMock() + self.data.outputs = mock.MagicMock() + super(ExecutableEndEventObject, self).__init__(id=id) + + +class PipelineSpecObject(object): + def __init__(self, activities=None): + self.activities = activities or {} + + +class PipelineObject(IdentifyObject): + def __init__(self, context=None, data=None, node=None, nodes=None, start_event=None, spec=None): + self._start_event = StartEventObject() + self.context = context + self.data = data + self.real_node = node + self.nodes = nodes or {} + self.start_event = start_event or StartEventObject() + self.spec = spec or PipelineSpecObject() + self.prune = MagicMock() + super(PipelineObject, self).__init__() + + def node(self, node_id): + return self.nodes.get(node_id) or self.real_node or node_id + + def shell(self): + return self + + +class StatusObject(IdentifyObject): + def __init__(self, state, id=None): + self.state = state + super(StatusObject, self).__init__(id=id) + + +class StaticIntervalObject(Object): + def __init__(self, interval): + self.interval = interval + + def next(self): + return self.interval + + +class ServiceActObject(IdentifyObject): + def __init__( + self, + interval=None, + id=None, + schedule_return=True, + execute_return=True, + execute_pre_process_return=True, + schedule_exception=None, + execute_exception=None, + execute_pre_process_exception=None, 
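+        # note: the remaining kwargs below only seed plain attributes or MagicMock
+        # return values on this stand-in object; no real engine service is constructed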
+        timeout=None,
+        error_ignorable=False,
+        is_schedule_done=False,
+        result_bit=True,
+        data=None,
+        need_schedule=False,
+        multi_callback_enabled=False,
+        on_retry=False,
+    ):
+        self.service = Object()
+        self.service.interval = interval
+        self.service.multi_callback_enabled = mock.MagicMock(return_value=multi_callback_enabled)
+        self.schedule = (
+            mock.MagicMock(return_value=schedule_return)
+            if not schedule_exception
+            else mock.MagicMock(side_effect=schedule_exception)
+        )
+        self.execute_pre_process = (
+            mock.MagicMock(return_value=execute_pre_process_return)
+            if not execute_pre_process_exception
+            else mock.MagicMock(side_effect=execute_pre_process_exception)
+        )
+        self.execute = (
+            mock.MagicMock(return_value=execute_return)
+            if not execute_exception
+            else mock.MagicMock(side_effect=execute_exception)
+        )
+        self.timeout = timeout
+        self._next = IdentifyObject()
+        self.schedule_fail = mock.MagicMock()
+        self.error_ignorable = error_ignorable
+        self.ignore_error = mock.MagicMock()
+        self.finish_schedule = mock.MagicMock()
+        self.is_schedule_done = mock.MagicMock(return_value=is_schedule_done)
+        self.get_result_bit = mock.MagicMock(return_value=result_bit)
+        self.prepare_rerun_data = mock.MagicMock()
+        self.data = data or MockData()
+        self.failure_handler = mock.MagicMock()
+        self.need_schedule = mock.MagicMock(return_value=need_schedule)
+        self.shell = mock.MagicMock(return_value=self)
+        self.on_retry = mock.MagicMock(return_value=on_retry)
+        self.retry_at_current_exec = mock.MagicMock()
+        self.setup_runtime_attrs = mock.MagicMock()
+        super(ServiceActObject, self).__init__(id)
+
+    def next(self):
+        return self._next
+
+
+class SubprocessObject(IdentifyObject):
+    def __init__(self, id=None, pipeline=None):
+        self.pipeline = pipeline or PipelineObject()
+        super(SubprocessObject, self).__init__(id)
+
+
+class MockPipelineModel(IdentifyObject):
+    def __init__(self, **kwargs):
+        self.process = kwargs.get("process", MockPipelineProcess())
+        super(MockPipelineModel, self).__init__(kwargs.get("id"))
+
+
+class MockPipelineProcess(IdentifyObject):
+    def __init__(self, *args, **kwargs):
+        super(MockPipelineProcess, self).__init__(id=kwargs.get("id"))
+
+        self.is_alive = kwargs.get("is_alive", True)
+        self.root_pipeline = kwargs.get(
+            "root_pipeline",
+            PipelineObject(data=kwargs.get("root_pipeline_data"), context=kwargs.get("root_pipeline_context")),
+        )
+        self.root_pipeline_id = self.root_pipeline.id
+        self.top_pipeline = kwargs.get(
+            "top_pipeline",
+            PipelineObject(
+                data=kwargs.get("top_pipeline_data"),
+                context=kwargs.get("top_pipeline_context"),
+                spec=kwargs.get("top_pipeline_spec"),
+            ),
+        )
+        self.current_node_id = kwargs.get("current_node_id")
+        self.destination_id = kwargs.get("destination_id")
+        self.wake_up = mock.MagicMock()
+        self.destroy_and_wake_up_parent = mock.MagicMock()
+        self.root_sleep_check = mock.MagicMock()
+        self.refresh_current_node = mock.MagicMock()
+        self.sleep = mock.MagicMock()
+        self.freeze = mock.MagicMock()
+        self.exit_gracefully = mock.MagicMock()
+        self.adjust_status = mock.MagicMock()
+        self.save = mock.MagicMock()
+        self.push_pipeline = mock.MagicMock()
+        self.join = mock.MagicMock()
+        self.destroy = mock.MagicMock()
+        self.pipeline_stack = [self.top_pipeline] + kwargs.get("pipeline_stack", [])
+        self.sync_with_children = mock.MagicMock(**{"side_effect": kwargs.get("sync_exception")})
+        self.children
= kwargs.get("children", []) + self.clean_children = mock.MagicMock() + self.revoke_subprocess = mock.MagicMock() + self.destroy_all = mock.MagicMock() + self.subprocess_stack = kwargs.get("subprocess_stack", []) + self.can_be_waked = mock.MagicMock(return_value=kwargs.get("can_be_waked", False)) + self.subproc_sleep_check = mock.MagicMock( + return_value=kwargs.get("subproc_sleep_check_return", (False, [self.id])) + ) + self.in_subprocess = mock.MagicMock(return_value=kwargs.get("in_subprocess_return", False)) + self.take_snapshot = mock.MagicMock() + + def pop_pipeline(self): + return self.pipeline_stack.pop() + + +class MockActionResult(object): + def __init__(self, result, message=None, extra=None): + self.result = result + self.message = message or "" + self.extra = extra + + +class MockHandlerResult(object): + def __init__(self, should_return, should_sleep, next_node=None, after_sleep_call=None, args=[], kwargs={}): + self.should_return = should_return + self.should_sleep = should_sleep + self.next_node = next_node or IdentifyObject() + self.after_sleep_call = after_sleep_call + self.args = args + self.kwargs = kwargs + + +class MockScheduleService(object): + def __init__(self, id=None, **kwargs): + self.id = id or ("{}{}".format(uniqid(), uniqid())) + self.activity_id = self.id[:32] + self.version = self.id[32:] + self.destroy = mock.MagicMock() + self.service_act = ServiceActObject( + interval=None, + id=self.activity_id, + schedule_return=kwargs.get("schedule_return"), + schedule_exception=kwargs.get("schedule_exception"), + timeout=kwargs.get("service_timeout"), + error_ignorable=kwargs.get("service_err_ignore", False), + is_schedule_done=kwargs.get("schedule_done", False), + result_bit=kwargs.get("result_bit", True), + ) + self.callback_data = kwargs.get("callback_data", "callback_data") + self.wait_callback = kwargs.get("wait_callback", False) + self.multi_callback_enabled = kwargs.get("multi_callback_enabled", False) + self.process_id = kwargs.get("process_id", uniqid()) + self.is_finished = kwargs.get("is_finished", False) + self.schedule_times = 0 + self.finish = mock.MagicMock() + self.set_next_schedule = mock.MagicMock() + self.callback = mock.MagicMock() + self.save = mock.MagicMock() + self.is_one_time_callback = mock.MagicMock(return_value=self.wait_callback and not self.multi_callback_enabled) + + +class MockQuerySet(object): + def __init__(self, exists_return=True, get_return=None, first_return=None, qs=None, update_return=1): + self.update = mock.MagicMock(return_value=update_return) + self.exists = mock.MagicMock(return_value=exists_return) + self.get = mock.MagicMock(return_value=get_return) + self.first = mock.MagicMock(return_value=first_return) + self.qs = qs + + def __iter__(self): + return self.qs.__iter__() + + def __len__(self): + return len(self.qs) + + +class MockEngineModelStatus(object): + def __init__(self, error_ignorable): + self.error_ignorable = error_ignorable + self.save = mock.MagicMock() + + +class MockData(object): + def __init__(self, get_outputs_return=None, get_inputs_return=None, get_one_of_outputs_return=None, ex_data=None): + self.id = uniqid() + get_inputs_return = get_inputs_return or FancyDict() + get_outputs_return = get_outputs_return or FancyDict() + self.update_outputs = mock.MagicMock() + self.get_outputs = mock.MagicMock(return_value=get_outputs_return) + self.set_outputs = mock.MagicMock() + self.get_inputs = mock.MagicMock(return_value=get_inputs_return) + self.inputs = get_inputs_return + self.outputs = get_outputs_return + 
self.ex_data = ex_data + + if isinstance(get_one_of_outputs_return, dict): + + def side_effect(arg): + return get_one_of_outputs_return[arg] + + self.get_one_of_outputs = mock.MagicMock(side_effect=side_effect) + else: + self.get_one_of_outputs = mock.MagicMock(return_value=get_one_of_outputs_return) + + +class MockContext(object): + def __init__(self, **kwargs): + self.extract_output = mock.MagicMock() + self.set_global_var = mock.MagicMock() + self.update_global_var = mock.MagicMock() + self.write_output = mock.MagicMock() + self.clear = mock.MagicMock() + self.recover_variable = mock.MagicMock() + self.variables = kwargs.get("variables", "variables") + + +class MockStatus(IdentifyObject): + def __init__(self, loop=0, id=None, state=None, started_time=None, archived_time=None, retry=False, skip=False): + super(MockStatus, self).__init__(id=id) + self.version = uniqid() + self.loop = loop + self.state = state + self.started_time = started_time + self.archived_time = archived_time + self.retry = retry + self.skip = skip + + self.save = MagicMock() + + +class MockSubprocessActivity(IdentifyObject): + def __init__(self, **kwargs): + self.pipeline = kwargs.get( + "pipeline", PipelineObject(data=kwargs.get("pipeline_data"), context=kwargs.get("pipeline_context")) + ) + self.next = mock.MagicMock(return_value=kwargs.get("next", uniqid())) + self.data = kwargs.get("data", MockData()) + self.prepare_rerun_data = mock.MagicMock() + super(MockSubprocessActivity, self).__init__(kwargs.get("id")) + + +class MockSequenceCollection(object): + def __init__(self, **kwargs): + self.all_target_node = mock.MagicMock( + return_value=[IdentifyObject() for _ in range(kwargs.get("target_num", 3))] + ) + + +class MockParallelGateway(object): + def __init__(self, **kwargs): + self.outgoing = kwargs.get("outgoing", MockSequenceCollection()) + self.converge_gateway_id = kwargs.get("converge_gateway_id", uniqid()) + + +class MockExclusiveGateway(object): + def __init__(self, **kwargs): + self.id = "id" + side_effect = kwargs.get("next_exception") + if side_effect: + self.next = mock.MagicMock(side_effect=side_effect) + else: + self.next = mock.MagicMock(return_value=kwargs.get("node", IdentifyObject())) + + +class MockConvergeGateway(object): + def __init__(self, **kwargs): + self.next = mock.MagicMock(return_value=kwargs.get("next", IdentifyObject())) + + +class MockParser(object): + def __init__(self, parse_return="pipeline"): + self.parse_return = parse_return + + def parse(self): + return self.parse_return diff --git a/runtime/bamboo-pipeline/pipeline/tests/mock_settings.py b/runtime/bamboo-pipeline/pipeline/tests/mock_settings.py new file mode 100644 index 00000000..ae3efbc3 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/mock_settings.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +PIPELINE_CORE_GATEWAY_DEFORMAT = "pipeline.core.flow.gateway.deformat_constant_key" +PIPELINE_CORE_CONSTANT_RESOLVE = "pipeline.core.data.expression.ConstantTemplate.resolve_data" + +PIPELINE_STATUS_GET = "pipeline.engine.models.Status.objects.get" +PIPELINE_STATUS_FAIL = "pipeline.engine.models.Status.objects.fail" +PIPELINE_STATUS_RAW_FAIL = "pipeline.engine.models.Status.objects.raw_fail" +PIPELINE_STATUS_RETRY = "pipeline.engine.models.Status.objects.retry" +PIPELINE_STATUS_SKIP = "pipeline.engine.models.Status.objects.skip" +PIPELINE_STATUS_FINISH = "pipeline.engine.models.Status.objects.finish" +PIPELINE_STATUS_FILTER = "pipeline.engine.models.Status.objects.filter" +PIPELINE_STATUS_TRANSIT = "pipeline.engine.models.Status.objects.transit" +PIPELINE_STATUS_STATE_FOR = "pipeline.engine.models.Status.objects.state_for" +PIPELINE_STATUS_STATES_FOR = "pipeline.engine.models.Status.objects.states_for" +PIPELINE_STATUS_SELECT_FOR_UPDATE = "pipeline.engine.models.Status.objects.select_for_update" +PIPELINE_STATUS_PREPARE_FOR_PIPELINE = "pipeline.engine.models.Status.objects.prepare_for_pipeline" +PIPELINE_STATUS_RECOVER_FROM_BLOCK = "pipeline.engine.models.Status.objects.recover_from_block" +PIPELINE_STATUS_VERSION_FOR = "pipeline.engine.models.Status.objects.version_for" +PIPELINE_STATUS_BATCH_TRANSIT = "pipeline.engine.models.Status.objects.batch_transit" + +PIPELINE_PROCESS_GET = "pipeline.engine.models.PipelineProcess.objects.get" +PIPELINE_PROCESS_FILTER = "pipeline.engine.models.PipelineProcess.objects.filter" +PIPELINE_PROCESS_SELECT_FOR_UPDATE = "pipeline.engine.models.PipelineProcess.objects.select_for_update" +PIPELINE_PROCESS_FORK_CHILD = "pipeline.engine.models.PipelineProcess.objects.fork_child" +PIPELINE_PROCESS_PREPARE_FOR_PIPELINE = "pipeline.engine.models.PipelineProcess.objects.prepare_for_pipeline" +PIPELINE_PROCESS_BATCH_PROCESS_READY = "pipeline.engine.models.PipelineProcess.objects.batch_process_ready" +PIPELINE_PROCESS_PROCESS_READY = "pipeline.engine.models.PipelineProcess.objects.process_ready" +PIPELINE_PROCESS_ADJUST_STATUS = "pipeline.engine.models.PipelineProcess.adjust_status" +PIPELINE_PROCESS_CHILD_PROCESS_READY = "pipeline.engine.models.PipelineProcess.objects.child_process_ready" +PIPELINE_PROCESS_DESTROY = "pipeline.engine.models.PipelineProcess.destroy" +PIPELINE_PROCESS_BLOCKED_BY_FAILURE = "pipeline.engine.models.PipelineProcess.blocked_by_failure_or_suspended" + +PIPELINE_SCHEDULE_SERVICE_FILTER = "pipeline.engine.models.ScheduleService.objects.filter" +PIPELINE_SCHEDULE_SERVICE_GET = "pipeline.engine.models.ScheduleService.objects.get" +PIPELINE_SCHEDULE_SERVICE_SET_SCHEDULE = "pipeline.engine.models.ScheduleService.objects.set_schedule" +PIPELINE_SCHEDULE_SCHEDULE_FOR = "pipeline.engine.models.ScheduleService.objects.schedule_for" +PIPELINE_SCHEDULE_DELETE_SCHEDULE = "pipeline.engine.models.ScheduleService.objects.delete_schedule" + +PIPELINE_DATA_GET = "pipeline.engine.models.Data.objects.get" +PIPELINE_DATA_FILTER = "pipeline.engine.models.Data.objects.filter" +PIPELINE_DATA_WRITE_NODE_DATA = "pipeline.engine.models.Data.objects.write_node_data" +PIPELINE_DATA_FORCED_FAIL = "pipeline.engine.models.Data.objects.forced_fail" +PIPELINE_DATA_WIRTE_EX_DATA = "pipeline.engine.models.Data.objects.write_ex_data" + +PIPELINE_NODE_RELATIONSHIP_BUILD = "pipeline.engine.models.NodeRelationship.objects.build_relationship" +PIPELINE_NODE_RELATIONSHIP_FILTER = "pipeline.engine.models.NodeRelationship.objects.filter" + +PIPELINE_CELERYTASK_BIND = 
"pipeline.engine.models.ProcessCeleryTask.objects.bind" +PIPELINE_CELERYTASK_UNBIND = "pipeline.engine.models.ProcessCeleryTask.objects.unbind" +PIPELINE_CELERYTASK_REVOKE = "pipeline.engine.models.ProcessCeleryTask.objects.revoke" +PIPELINE_CELERYTASK_DESTROY = "pipeline.engine.models.ProcessCeleryTask.objects.destroy" + +PIPELINE_NODE_CELERYTASK_DESTROY = "pipeline.engine.models.NodeCeleryTask.objects.destroy" + +PIPELINE_FUNCTION_SWITCH_IS_FROZEN = "pipeline.engine.models.FunctionSwitch.objects.is_frozen" + +PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE = "pipeline.models.task_service.run_pipeline" +PIPELINE_MODELS_POST_PIPELINE_FINISH = "pipeline.models.post_pipeline_finish" +PIPELINE_MODELS_POST_PIPELINE_REVOKE = "pipeline.models.post_pipeline_revoke" + +PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO = "pipeline.models.PipelineInstance.calculate_tree_info" +PIPELINE_PIPELINE_INSTANCE_IMPORT_STRING = "pipeline.models.import_string" + +PIPELINE_PIPELINE_MODEL_GET = "pipeline.engine.models.PipelineModel.objects.get" +PIPELINE_PIPELINE_MODEL_PREPARE_FOR_PIPELINE = "pipeline.engine.models.PipelineModel.objects.prepare_for_pipeline" +PIPELINE_PIPELINE_MODEL_PIPELINE_READY = "pipeline.engine.models.PipelineModel.objects.pipeline_ready" + +PIPELINE_SUBPROCESS_RELATIONSHIP_GET_RELATE_PROCESS = ( + "pipeline.engine.models.SubProcessRelationship.objects." "get_relate_process" +) + +PIPELINE_HISTORY_GET_HISTORY = "pipeline.engine.models.History.objects.get_histories" +PIPELINE_HISTORY_RECORD = "pipeline.engine.models.History.objects.record" +PIPELINE_HISTORY_LINK_HISTORY = "pipeline.engine.models.LogEntry.objects.link_history" + +PIPELINE_ENGINE_API_WORKERS = "pipeline.engine.api.workers" +PIPELINE_ENGINE_API_GET_PROCESS_TO_BE_WAKED = "pipeline.engine.api._get_process_to_be_waked" + +PIPELINE_ENGINE_CORE_DATA_DEL_OBJECT = "pipeline.engine.core.data.del_object" +PIPELINE_ENGINE_CORE_DATA_GET_OBJECT = "pipeline.engine.core.data.get_object" +PIPELINE_ENGINE_CORE_DATA_SET_OBJECT = "pipeline.engine.core.data.set_object" + +PIPELINE_ENGINE_CORE_API_WORKERS = "pipeline.engine.core.api.workers" + +SCHEDULE_GET_SCHEDULE_PARENT_DATA = "pipeline.engine.core.schedule.get_schedule_parent_data" +SCHEDULE_DELETE_PARENT_DATA = "pipeline.engine.core.schedule.delete_parent_data" +SCHEDULE_SET_SCHEDULE_DATA = "pipeline.engine.core.schedule.set_schedule_data" + +ENGINE_ACTIVITY_FAIL_SIGNAL = "pipeline.engine.signals.activity_failed.send" +ENGINE_SIGNAL_TIMEOUT_START_SEND = "pipeline.engine.signals.service_activity_timeout_monitor_start.send" +ENGINE_SIGNAL_TIMEOUT_END_SEND = "pipeline.engine.signals.service_activity_timeout_monitor_end.send" +ENGINE_SIGNAL_ACT_SCHEDULE_FAIL_SEND = "pipeline.engine.signals.service_schedule_fail.send" +ENGINE_SIGNAL_ACT_SCHEDULE_SUCCESS_SEND = "pipeline.engine.signals.service_schedule_success.send" + +ENGINE_SIGNAL_NODE_RETRY_READY = "pipeline.engine.signals.node_retry_ready" +ENGINE_SIGNAL_NODE_SKIP_CALL = "pipeline.engine.signals.node_skip_call" + +ENGINE_SCHEDULE = "pipeline.engine.core.schedule.schedule" +ENGINE_API_FORCED_FAIL = "pipeline.engine.api.forced_fail" +ENGINE_RUN_LOOP = "pipeline.engine.core.runtime.run_loop" +ENGINE_TASKS_WAKE_UP_APPLY = "pipeline.engine.tasks.wake_up.apply_async" + +SIGNAL_VALVE_SEND = "pipeline.django_signal_valve.valve.send" + +SUBPROCESS_HYDRATE_NODE_DATA = "pipeline.engine.core.handlers.subprocess.hydrate_node_data" +SUBPROCESS_HYDRATE_DATA = "pipeline.engine.core.handlers.subprocess.hydrate_data" + +SERVICE_ACT_HYDRATE_NODE_DATA = 
"pipeline.engine.core.handlers.service_activity.hydrate_node_data" +SERVICE_ACT_HYDRATE_DATA = "pipeline.engine.core.handlers.service_activity.hydrate_data" + +EXG_HYDRATE_NODE_DATA = "pipeline.engine.core.handlers.exclusive_gateway.hydrate_node_data" +EXG_HYDRATE_DATA = "pipeline.engine.core.handlers.exclusive_gateway.hydrate_data" + +CPG_HYDRATE_DATA = "pipeline.engine.core.handlers.conditional_parallel.hydrate_data" + +ENGINE_HANDLERS_END_EVENT_HANDLE = "pipeline.engine.core.handlers.endevent.base.EndEventHandler.handle" +UTILS_IMPORTER_BASE_EXECUTE_SRC_CODE = "pipeline.utils.importer.base.NonstandardModuleImporter._execute_src_code" +UTILS_IMPORTER_GIT__FETCH_REPO_FILE = "pipeline.utils.importer.git.GitRepoModuleImporter._fetch_repo_file" +UTILS_IMPORTER_GIT__FILE_URL = "pipeline.utils.importer.git.GitRepoModuleImporter._file_url" +UTILS_IMPORTER_GIT_GET_SOURCE = "pipeline.utils.importer.git.GitRepoModuleImporter.get_source" +UTILS_IMPORTER_GIT_GET_FILE = "pipeline.utils.importer.git.GitRepoModuleImporter.get_file" +UTILS_IMPORTER_GIT_IS_PACKAGE = "pipeline.utils.importer.git.GitRepoModuleImporter.is_package" + +APPS_SETTINGS = "pipeline.apps.settings" +APPS_SENTINEL = "pipeline.apps.Sentinel" + + +ENGINE_DATA_API_SETTINGS = "pipeline.engine.core.data.api.settings" +ENGINE_DATA_API_IMPORT_BACKEND = "pipeline.engine.core.data.api._import_backend" +ENGINE_DATA_API_BACKEND = "pipeline.engine.core.data.api._backend" +ENGINE_DATA_API_CANDIDATE_BACKEND = "pipeline.engine.core.data.api._candidate_backend" + +ENGINE_HEALTH_ZOMBIE_HEAL_DEFAULT_SETTINGS = "pipeline.engine.health.zombie.heal.default_settings" + +DJCELERY_APP_CURRENT_APP_CONNECTION = "celery.current_app.connection" diff --git a/runtime/bamboo-pipeline/pipeline/tests/models/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/models/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/models/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/models/test_instance.py b/runtime/bamboo-pipeline/pipeline/tests/models/test_instance.py new file mode 100644 index 00000000..9809b947 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/models/test_instance.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.engine import states +from pipeline.engine.models import NodeRelationship, Status +from pipeline.engine.utils import ActionResult +from pipeline.models import PipelineInstance, PipelineTemplate +from pipeline.service import task_service +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +class TestPipelineInstance(TestCase): + def setUp(self): + self.data = { + "activities": { + "node8fe2bb234d29860981a2bc7e6077": { + "retryable": True, + "component": {"code": "sleep_timer", "data": {"bk_timing": {"hook": False, "value": "3"}}}, + "error_ignorable": False, + "id": "node8fe2bb234d29860981a2bc7e6077", + "incoming": "line67b0e8cc895b1b9f9e0413dc50d1", + "isSkipped": True, + "loop": None, + "name": "\u5b9a\u65f6", + "optional": False, + "outgoing": "line73943da9f6f17601a40dc46bd229", + "stage_name": "\u6b65\u9aa41", + "type": "ServiceActivity", + } + }, + "constants": { + "${ip}": { + "custom_type": "input", + "desc": "", + "index": 0, + "key": "${ip}", + "name": "ip", + "show_type": "show", + "source_info": {}, + "source_tag": "", + "source_type": "custom", + "validation": "^.+$", + "validator": [], + "value": "", + } + }, + "end_event": { + "id": "nodeade2061fe6e69dc5b64a588480a7", + "incoming": "line73943da9f6f17601a40dc46bd229", + "name": "", + "outgoing": "", + "type": "EmptyEndEvent", + }, + "flows": { + "line67b0e8cc895b1b9f9e0413dc50d1": { + "id": "line67b0e8cc895b1b9f9e0413dc50d1", + "is_default": False, + "source": "nodedee24d10226c975f4d2c659cc29d", + "target": "node8fe2bb234d29860981a2bc7e6077", + }, + "line73943da9f6f17601a40dc46bd229": { + "id": "line73943da9f6f17601a40dc46bd229", + "is_default": False, + "source": "node8fe2bb234d29860981a2bc7e6077", + "target": "nodeade2061fe6e69dc5b64a588480a7", + }, + }, + "gateways": {}, + "outputs": [], + "start_event": { + "id": "nodedee24d10226c975f4d2c659cc29d", + "incoming": "", + "name": "", + "outgoing": "line67b0e8cc895b1b9f9e0413dc50d1", + "type": "EmptyStartEvent", + }, + } + self.creator = "start" + self.template = PipelineTemplate.objects.create_model(self.data, creator=self.creator, template_id="1") + self.instance, no_use = PipelineInstance.objects.create_instance( + self.template, exec_data=self.data, creator=self.creator, instance_id="1" + ) + self.instance_2, no_use = PipelineInstance.objects.create_instance( + self.template, exec_data=self.data, creator=self.creator, instance_id="2" + ) + self.instance_3, no_use = PipelineInstance.objects.create_instance( + self.template, exec_data=self.data, creator=self.creator, instance_id="3" + ) + + @mock.patch("pipeline.models.PipelineTemplate.objects.unfold_subprocess", mock.MagicMock()) + def test_create_instance(self): + creator = self.creator + instance = self.instance + self.assertIsNotNone(instance.snapshot) + self.assertEqual(instance.snapshot.data, instance.data) + self.assertEqual(creator, instance.creator) + self.assertFalse(instance.is_started) + self.assertFalse(instance.is_finished) + self.assertFalse(instance.is_deleted) + + # test spread + PipelineInstance.objects.create_instance( + 
self.template, exec_data=self.data, creator=self.creator, instance_id="1"
+        )
+
+        PipelineTemplate.objects.unfold_subprocess.assert_called_with(self.data)
+
+        PipelineTemplate.objects.unfold_subprocess.reset_mock()
+
+        PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator, instance_id="1", spread=True
+        )
+
+        PipelineTemplate.objects.unfold_subprocess.assert_not_called()
+
+    def test_create_instance__without_template(self):
+        self.instance_4, no_use = PipelineInstance.objects.create_instance(
+            template=None, exec_data=self.data, creator=self.creator, instance_id="4"
+        )
+        self.assertIsNone(self.instance_4.template)
+        self.assertIsNone(self.instance_4.snapshot)
+        self.assertIsNotNone(self.instance_4.execution_snapshot)
+
+    def test_set_started(self):
+        PipelineInstance.objects.set_started(self.instance.instance_id, self.creator)
+        self.instance.refresh_from_db()
+        self.assertTrue(self.instance.is_started)
+
+    def test_set_finished(self):
+        NodeRelationship.objects.build_relationship(self.instance.instance_id, self.instance.instance_id)
+        Status.objects.create(id=self.instance.instance_id, state=states.FINISHED)
+        for act_id in self.data["activities"]:
+            NodeRelationship.objects.build_relationship(self.instance.instance_id, act_id)
+            Status.objects.create(id=act_id, state=states.FINISHED)
+        NodeRelationship.objects.build_relationship(self.instance.instance_id, self.data["start_event"]["id"])
+        Status.objects.create(id=self.data["start_event"]["id"], state=states.FINISHED)
+        NodeRelationship.objects.build_relationship(self.instance.instance_id, self.data["end_event"]["id"])
+        Status.objects.create(id=self.data["end_event"]["id"], state=states.FINISHED)
+
+        post_pipeline_finish = MagicMock()
+        with patch(PIPELINE_MODELS_POST_PIPELINE_FINISH, post_pipeline_finish):
+            PipelineInstance.objects.set_finished(self.instance.instance_id)
+
+        self.instance.refresh_from_db()
+        self.assertTrue(self.instance.is_finished)
+        post_pipeline_finish.send.assert_called_once_with(
+            sender=PipelineInstance, instance_id=self.instance.instance_id
+        )
+
+    def test_set_revoked(self):
+        NodeRelationship.objects.build_relationship(self.instance.instance_id, self.instance.instance_id)
+        Status.objects.create(id=self.instance.instance_id, state=states.REVOKED)
+
+        post_pipeline_revoke = MagicMock()
+        with patch(PIPELINE_MODELS_POST_PIPELINE_REVOKE, post_pipeline_revoke):
+            PipelineInstance.objects.set_revoked(self.instance.instance_id)
+
+        self.instance.refresh_from_db()
+        self.assertTrue(self.instance.is_revoked)
+        post_pipeline_revoke.send.assert_called_once_with(
+            sender=PipelineInstance, instance_id=self.instance.instance_id
+        )
+
+    def test_delete_instance(self):
+        PipelineInstance.objects.delete_model(self.instance.instance_id)
+        i = PipelineInstance.objects.get(instance_id=self.instance.instance_id)
+        self.assertTrue(i.is_deleted)
+        PipelineInstance.objects.delete_model([self.instance_2.instance_id, self.instance_3.instance_id])
+        i2 = PipelineInstance.objects.get(instance_id=self.instance_2.instance_id)
+        i3 = PipelineInstance.objects.get(instance_id=self.instance_3.instance_id)
+        self.assertTrue(i2.is_deleted)
+        self.assertTrue(i3.is_deleted)
+
+    @patch(PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE, MagicMock(return_value=ActionResult(result=True, message="")))
+    @patch(PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO, MagicMock())
+    @patch(PIPELINE_PIPELINE_INSTANCE_IMPORT_STRING, MagicMock(return_value=MockParser))
+    def test_start__success(self):
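+        # with the patches above, import_string() hands back MockParser and
+        # task_service.run_pipeline() reports success, so this covers the happy path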
+        instance, no_use = PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator
+        )
+        executor = "token_1"
+        instance.start(executor)
+
+        instance.refresh_from_db()
+
+        instance.calculate_tree_info.assert_called_once()
+        self.assertTrue(instance.is_started)
+        self.assertEqual(instance.executor, executor)
+        self.assertIsNotNone(instance.start_time)
+
+        task_service.run_pipeline.assert_called_once()
+
+    @patch(PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE, MagicMock(return_value=ActionResult(result=False, message="")))
+    @patch(PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO, MagicMock())
+    def test_start__already_started(self):
+        instance, no_use = PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator
+        )
+        instance.is_started = True
+        instance.save()
+        executor = "token_1"
+
+        instance.start(executor)
+
+        instance.calculate_tree_info.assert_not_called()
+        task_service.run_pipeline.assert_not_called()
+
+    @patch(PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE, MagicMock(return_value=ActionResult(result=False, message="")))
+    @patch(PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO, MagicMock())
+    @patch(PIPELINE_PIPELINE_INSTANCE_IMPORT_STRING, MagicMock(side_effect=ImportError()))
+    def test_start__parser_cls_error(self):
+        instance, no_use = PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator
+        )
+        executor = "token_1"
+
+        instance.start(executor)
+
+        instance.refresh_from_db()
+
+        self.assertFalse(instance.is_started)
+        self.assertEqual(instance.executor, "")
+        self.assertIsNone(instance.start_time)
+
+        instance.calculate_tree_info.assert_not_called()
+        task_service.run_pipeline.assert_not_called()
+
+    @patch(PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE, MagicMock(return_value=ActionResult(result=False, message="")))
+    @patch(PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO, MagicMock())
+    @patch(PIPELINE_PIPELINE_INSTANCE_IMPORT_STRING, MagicMock(return_value=MockParser))
+    def test_start__task_service_call_fail(self):
+        instance, no_use = PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator
+        )
+        executor = "token_1"
+        instance.start(executor)
+
+        instance.refresh_from_db()
+
+        instance.calculate_tree_info.assert_called_once()
+        task_service.run_pipeline.assert_called_once()
+
+        self.assertFalse(instance.is_started)
+        self.assertEqual(instance.executor, "")
+        self.assertIsNone(instance.start_time)
+
+    @patch(PIPELINE_MODELS_TASK_SERVICE_RUN_PIPELINE, MagicMock(return_value=ActionResult(result=False, message="")))
+    @patch(PIPELINE_PIPELINE_INSTANCE_CALCULATE_TREE_INFO, MagicMock(side_effect=Exception()))
+    def test_start__error_occurred_before_task_service_call(self):
+        instance, no_use = PipelineInstance.objects.create_instance(
+            self.template, exec_data=self.data, creator=self.creator
+        )
+        executor = "token_1"
+
+        try:
+            instance.start(executor)
+        except Exception:
+            pass
+
+        instance.refresh_from_db()
+
+        self.assertFalse(instance.is_started)
+        self.assertEqual(instance.executor, "")
+        self.assertIsNone(instance.start_time)
+
+        task_service.run_pipeline.assert_not_called() diff --git a/runtime/bamboo-pipeline/pipeline/tests/models/test_snapshot.py b/runtime/bamboo-pipeline/pipeline/tests/models/test_snapshot.py new file mode 100644 index 00000000..73a8e42c --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/models/test_snapshot.py @@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source
community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.models import Snapshot + + +class TestSnapshot(TestCase): + def test_create_snapshot(self): + data = {"a": 1, "b": [1, 2, 3], "c": {"d": "d"}} + snapshot = Snapshot.objects.create_snapshot(data) + Snapshot.objects.create_snapshot(data) + self.assertEqual(snapshot.data, data) + self.assertEqual(len(snapshot.md5sum), 32) + self.assertIsNotNone(snapshot.create_time) + + def test_no_change(self): + data = {"a": 1, "b": [1, 2, 3], "c": {"d": "d"}} + snapshot = Snapshot.objects.create_snapshot(data) + md5, changed = snapshot.has_change(data) + self.assertFalse(changed) + self.assertEqual(md5, snapshot.md5sum) + data = {"a": 2, "b": [1, 2, 3], "c": {"d": "d"}} + md5, changed = snapshot.has_change(data) + self.assertTrue(changed) + self.assertNotEqual(md5, snapshot.md5sum) diff --git a/runtime/bamboo-pipeline/pipeline/tests/models/test_template.py b/runtime/bamboo-pipeline/pipeline/tests/models/test_template.py new file mode 100644 index 00000000..314f60c9 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/models/test_template.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline.core.constants import PE +from pipeline.models import PipelineTemplate, Snapshot + + +class TestPipelineTemplate(TestCase): + def setUp(self): + self.data = { + "activities": { + "act_1": { + "outgoing": "line_2", + "incoming": "line_1", + "name": "loop", + "error_ignorable": False, + "component": { + "global_outputs": {}, + "inputs": {"i": {"type": "splice", "value": "${loop_i}"}}, + "code": "loop_test_comp", + }, + "optional": False, + "type": "ServiceActivity", + "loop_times": 4, + "id": "act_1", + "loop": {}, + } + }, + "end_event": { + "incoming": "line_2", + "outgoing": "", + "type": "EmptyEndEvent", + "id": "end_event_id", + "name": "", + }, + "flows": { + "line_1": {"is_default": False, "source": "start_event_id", "id": "line_1", "target": "act_1"}, + "line_2": {"is_default": False, "source": "act_1", "id": "line_2", "target": "end_event_id"}, + }, + "id": "pipeline_0", + "gateways": {}, + "data": {"inputs": {"${loop_i}": {"type": "plain", "value": 1}}, "outputs": {}}, + "start_event": { + "incoming": "", + "outgoing": "line_1", + "type": "EmptyStartEvent", + "id": "start_event_id", + "name": "", + }, + } + self.creator = "start" + self.template = PipelineTemplate.objects.create_model(self.data, creator=self.creator, template_id="1") + self.template_2 = PipelineTemplate.objects.create_model(self.data, creator=self.creator, template_id="2") + self.template_3 = PipelineTemplate.objects.create_model(self.data, creator=self.creator, template_id="3") + + def test_create_template(self): + template = self.template + data = self.data + creator = self.creator + self.assertEqual(template.creator, creator) + self.assertFalse(template.is_deleted) + self.assertIsNotNone(template.snapshot) + self.assertEqual(template.data, data) + + def test_delete_template(self): + PipelineTemplate.objects.delete_model(self.template.template_id) + t = PipelineTemplate.objects.get(template_id=self.template.template_id) + self.assertTrue(t.is_deleted) + PipelineTemplate.objects.delete_model([self.template_2.template_id, self.template_3.template_id]) + t2 = PipelineTemplate.objects.get(template_id=self.template_2.template_id) + t3 = PipelineTemplate.objects.get(template_id=self.template_3.template_id) + self.assertTrue(t2.is_deleted) + self.assertTrue(t3.is_deleted) + + def test_set_has_subprocess_bit(self): + template_do_not_has_subprocess = PipelineTemplate( + snapshot=Snapshot( + data={PE.activities: {"1": {"type": PE.ServiceActivity}, "2": {"type": PE.ServiceActivity}}} + ) + ) + + template_do_not_has_subprocess.set_has_subprocess_bit() + self.assertFalse(template_do_not_has_subprocess.has_subprocess) + + template_has_1_subprocess = PipelineTemplate( + snapshot=Snapshot(data={PE.activities: {"1": {"type": PE.SubProcess}, "2": {"type": PE.ServiceActivity}}}) + ) + template_has_1_subprocess.set_has_subprocess_bit() + self.assertTrue(template_has_1_subprocess.has_subprocess) + + template_has_3_subprocess = PipelineTemplate( + snapshot=Snapshot( + data={ + PE.activities: { + "1": {"type": PE.SubProcess}, + "2": {"type": PE.SubProcess}, + "3": {"type": PE.SubProcess}, + "4": {"type": PE.ServiceActivity}, + } + } + ) + ) + template_has_3_subprocess.set_has_subprocess_bit() + self.assertTrue(template_has_3_subprocess.has_subprocess) diff --git a/runtime/bamboo-pipeline/pipeline/tests/pipeline_data.py b/runtime/bamboo-pipeline/pipeline/tests/pipeline_data.py new file mode 100644 index 00000000..0b4aae81 --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/tests/pipeline_data.py @@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+# A dict that stores a pipeline structure
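+# (n1's component data below illustrates input hooking: fields with "hook": True
+# read their value from the global variable named in "constant", e.g. "${_env_id}")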
diff --git a/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/__init__.py
new file mode 100644
index 00000000..40097292
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/__init__.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/data.py b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/data.py
new file mode 100644
index 00000000..9c7ad53c
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/data.py
@@ -0,0 +1,544 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import copy
+
+from pipeline.utils.uniqid import node_uniqid
+
+id_list = [node_uniqid() for i in range(10)]
+PIPELINE_DATA = {
+    "id": id_list[0],
+    "name": "name",
+    "start_event": {
+        "id": id_list[1],
+        "name": "start",
+        "type": "EmptyStartEvent",
+        "incoming": None,
+        "outgoing": id_list[5],
+    },
+    "end_event": {"id": id_list[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list[7], "outgoing": None},
+    "activities": {
+        id_list[3]: {
+            "id": id_list[3],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[5],
+            "outgoing": id_list[6],
+            "component": {
+                "code": "test",
+                "inputs": {
+                    "input_test": {"type": "plain", "value": "custom2"},
+                    "radio_test": {"type": "plain", "value": "1"},
+                },
+                "global_outputs": {"key1": "${global_key1}"},
+            },
+        },
+        id_list[4]: {
+            "id": id_list[4],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[6],
+            "outgoing": id_list[7],
+            "component": {
+                "code": "test",
+                "inputs": {
+                    "input_test": {"type": "plain", "value": "value1"},
+                    "radio_test": {"type": "splice", "value": "before_${global_key1}"},
+                },
+                "global_outputs": {},
+            },
+        },
+    },
+    "flows": {  # holds all flows (directed edges) in this pipeline
+        id_list[5]: {"id": id_list[5], "source": id_list[1], "target": id_list[3]},
+        id_list[6]: {"id": id_list[6], "source": id_list[3], "target": id_list[4]},
+        id_list[7]: {"id": id_list[7], "source": id_list[4], "target": id_list[2]},
+    },
+    "gateways": {},  # detailed information about the gateways lives here
+    "data": {
+        "inputs": {
+            "${demo_input_test}": {"type": "plain", "value": "value1"},
+            "${global_key1}": {"type": "splice", "source_act": id_list[3], "source_key": "key1", "value": ""},
+            "${custom_key1}": {"type": "splice", "value": "aaa_${global_key1}"},
+            "${custom_key2}": {"type": "plain", "value": "custom2"},
+        },
+        "outputs": {"${demo_input_test}": "${demo_input_test}", "${global_key1}": "${global_key1}"},
+    },
+}
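+
+
+# In PIPELINE_DATA above, inputs typed "plain" are used verbatim, while inputs
+# typed "splice" embed ${...} references that the engine resolves against the
+# pipeline data context at runtime. The sketch below only imitates that
+# substitution with string.Template to illustrate the format; the engine's real
+# resolver also handles output variables (source_act/source_key) and is not
+# this simple.
+from string import Template
+
+
+def naive_splice(value, context):
+    """Resolve ${key} references in a splice value against a plain dict."""
+    # Template understands ${identifier}; context keys here omit the ${ } wrapper.
+    return Template(value).safe_substitute(context)
+
+
+# naive_splice("before_${global_key1}", {"global_key1": "out"}) -> "before_out"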
"type": "EmptyStartEvent", + "incoming": None, + "outgoing": id_list[5], + }, + "end_event": {"id": id_list[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list[7], "outgoing": None}, + "activities": { + id_list[3]: { + "id": id_list[3], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list[5], + "outgoing": id_list[6], + "component": { + "code": "test", + "data": { + "input_test": {"hook": False, "value": "${custom_key2}"}, + "radio_test": {"hook": False, "value": "1"}, + }, + }, + }, + id_list[4]: { + "id": id_list[4], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list[6], + "outgoing": id_list[7], + "component": { + "code": "test", + "data": { + "input_test": {"hook": True, "value": "${demo_input_test}"}, + "radio_test": {"hook": False, "value": "before_${global_key1}"}, + }, + }, + }, + }, + "flows": { # 存放该 Pipeline 中所有的线 + id_list[5]: {"id": id_list[5], "source": id_list[1], "target": id_list[3]}, + id_list[6]: {"id": id_list[6], "source": id_list[3], "target": id_list[4]}, + id_list[7]: {"id": id_list[7], "source": id_list[4], "target": id_list[2]}, + }, + "gateways": {}, # 这里存放着网关的详细信息 + "constants": { + "${demo_input_test}": { + "name": "input", + "key": "${demo_input_test}", + "desc": "", + "validation": "^.*$", + "show_type": "show", + "value": "value1", + "source_type": "component_inputs", + "source_tag": "demo.input_test", + # 'source_step': [id_list[4], ], + "source_info": {id_list[4]: ["input_test"]}, + # 'source_key': '', + "custom_type": "", + }, + "${custom_key1}": { + "name": "input", + "key": "${custom_key1}", + "desc": "", + "validation": "", + "show_type": "show", + "value": "aaa_${global_key1}", + "source_type": "custom", + "source_tag": "", + "source_info": {}, + # 'source_key': '', + "custom_type": "simple_input", + }, + "${custom_key2}": { + "name": "input", + "key": "${custom_key2}", + "desc": "", + "validation": "", + "show_type": "hide", + "value": "custom2", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "custom_type": "simple_input", + }, + "${global_key1}": { + "name": "input", + "key": "${global_key1}", + "desc": "", + "validation": "^.*$", + "show_type": "hide", + "value": "", + "source_type": "component_outputs", + "source_tag": "", + # 'source_step': id_list[3], + # 'source_key': 'key1', + "source_info": {id_list[3]: ["key1"]}, + "custom_type": "", + }, + }, + "outputs": ["${demo_input_test}", "${global_key1}"], +} + +id_list3 = [node_uniqid() for i in range(10)] +sub_pipeline = { + "id": id_list3[0], + "name": "name", + "start_event": { + "id": id_list3[1], + "name": "start", + "type": "EmptyStartEvent", + "incoming": None, + "outgoing": id_list3[5], + }, + "end_event": {"id": id_list3[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list3[7], "outgoing": None}, + "activities": { + id_list3[3]: { + "id": id_list3[3], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list3[5], + "outgoing": id_list3[6], + "component": { + "code": "test", + "inputs": { + "input_test": {"type": "plain", "value": "before_after"}, + "radio_test": {"type": "plain", "value": "1"}, + }, + "global_outputs": {"key1": "${global_key1}"}, + }, + }, + id_list3[4]: { + "id": id_list3[4], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list3[6], + "outgoing": id_list3[7], + "component": { + "code": "test", + "inputs": { + "input_test": {"type": "plain", "value": "value1"}, + "radio_test": {"type": "splice", "value": 
"before_${global_key1}"}, + }, + "global_outputs": {}, + }, + }, + }, + "flows": { # 存放该 Pipeline 中所有的线 + id_list3[5]: {"id": id_list3[5], "source": id_list3[1], "target": id_list3[3]}, + id_list3[6]: {"id": id_list3[6], "source": id_list3[3], "target": id_list3[4]}, + id_list3[7]: {"id": id_list3[7], "source": id_list3[4], "target": id_list3[2]}, + }, + "gateways": {}, # 这里存放着网关的详细信息 + "data": { + "inputs": { + "${demo_input_test}": {"type": "plain", "value": "value2"}, + "${global_key1}": {"type": "splice", "source_act": id_list3[3], "source_key": "key1", "value": ""}, + "${custom_key2}": {"type": "splice", "value": "aaa_${global_key1}"}, + }, + "outputs": {"${demo_input_test}": "${demo_input_test_sub}", "${global_key1}": "${global_key1_sub}"}, + }, +} + +sub_web_pipeline = { + "id": id_list3[0], + "name": "name", + "start_event": { + "id": id_list3[1], + "name": "start", + "type": "EmptyStartEvent", + "incoming": None, + "outgoing": id_list3[5], + }, + "end_event": {"id": id_list3[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list3[7], "outgoing": None}, + "activities": { + id_list3[3]: { + "id": id_list3[3], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list3[5], + "outgoing": id_list3[6], + "component": { + "code": "test", + "data": { + "input_test": {"hook": False, "value": "${custom_key2}"}, + "radio_test": {"hook": False, "value": "1"}, + }, + }, + }, + id_list3[4]: { + "id": id_list3[4], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list3[6], + "outgoing": id_list3[7], + "component": { + "code": "test", + "data": { + "input_test": {"hook": True, "value": "${demo_input_test}"}, + "radio_test": {"hook": False, "value": "before_${global_key1}_${custom_key2}"}, + }, + }, + }, + }, + "flows": { # 存放该 Pipeline 中所有的线 + id_list3[5]: {"id": id_list3[5], "source": id_list3[1], "target": id_list3[3]}, + id_list3[6]: {"id": id_list3[6], "source": id_list3[3], "target": id_list3[4]}, + id_list3[7]: {"id": id_list3[7], "source": id_list3[4], "target": id_list3[2]}, + }, + "gateways": {}, # 这里存放着网关的详细信息 + "constants": { + "${demo_input_test}": { + "name": "input", + "key": "${demo_input_test}", + "desc": "", + "validation": "^.*$", + "show_type": "hide", + "value": "value2", + "source_type": "component_inputs", + "source_tag": "demo.input_test", + # 'source_step': [id_list3[4], ], + # 'source_key': '', + "source_info": {id_list3[4]: ["input_test"], id_list3[5]: ["input_test"]}, + "custom_type": "", + }, + "${custom_key1}": { + "name": "input", + "key": "${custom_key1}", + "desc": "", + "validation": "", + "show_type": "hide", + "value": "aaa_${global_key1}", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + "${custom_key2}": { + "name": "input", + "key": "${custom_key2}", + "desc": "", + "validation": "", + "show_type": "hide", + "value": "custom2", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + "${global_key1}": { + "name": "input", + "key": "${global_key1}", + "desc": "", + "validation": "^.*$", + "show_type": "hide", + "value": "", + "source_type": "component_outputs", + "source_tag": "", + # 'source_step': id_list3[3], + # 'source_key': 'key1', + "source_info": {id_list3[3]: ["key1"]}, + "custom_type": "", + }, + }, + "outputs": ["${demo_input_test}", "${global_key1}"], +} + +PIPELINE_WITH_SUB_PROCESS = 
copy.deepcopy(PIPELINE_DATA) +PIPELINE_WITH_SUB_PROCESS["activities"][id_list[4]] = { + "id": id_list[4], + "type": "SubProcess", + "name": "second_task", + "incoming": id_list[6], + "outgoing": id_list[7], + "pipeline": sub_pipeline, + "exposed_constants": [], + "hooked_constants": [], + "params": {}, +} + +CONDITIONAL_PARALLEL = { + "activities": { + "3adc3e38891233e1b0bf9cdf62dbfd5d": { + "component": {"code": "test", "global_outputs": {}, "inputs": {}}, + "error_ignorable": False, + "id": "3adc3e38891233e1b0bf9cdf62dbfd5d", + "incoming": ["0b5bd2783ba93667b9f197f2fb7a6488"], + "name": None, + "optional": False, + "outgoing": "eb739c39bf6f3394b6db8f6b9220e0c1", + "type": "ServiceActivity", + }, + "40b176a1a92d307b9f8a2ec08d2c47ec": { + "component": {"code": "test", "global_outputs": {}, "inputs": {}}, + "error_ignorable": False, + "id": "40b176a1a92d307b9f8a2ec08d2c47ec", + "incoming": ["9395729e7b9d3f1a9396d1dfc3881ab8"], + "name": None, + "optional": False, + "outgoing": "b61b2d9c59ee3243947d337372a99ea7", + "type": "ServiceActivity", + }, + "a80da787752f3094a244537788e129b1": { + "component": {"code": "test", "global_outputs": {}, "inputs": {}}, + "error_ignorable": False, + "id": "a80da787752f3094a244537788e129b1", + "incoming": ["6bafc9fd6d6e395aba808725340f15e9"], + "name": None, + "optional": False, + "outgoing": "3f273abe5f3038caa777682b1b62bbde", + "type": "ServiceActivity", + }, + "b54bf29c856e36e68b89fb3fe3cabdfd": { + "component": {"code": "test", "global_outputs": {}, "inputs": {}}, + "error_ignorable": False, + "id": "b54bf29c856e36e68b89fb3fe3cabdfd", + "incoming": ["d3f70e28042f3b03ad15e96b69a3f3fc"], + "name": None, + "optional": False, + "outgoing": "33762dce859437f2908129956db280da", + "type": "ServiceActivity", + }, + "fd40b66e751733129e854d1e5070c3f1": { + "component": {"code": "test", "global_outputs": {}, "inputs": {}}, + "error_ignorable": False, + "id": "fd40b66e751733129e854d1e5070c3f1", + "incoming": ["a5f1d27c5a873c6ea0a2ec922542f2b0"], + "name": None, + "optional": False, + "outgoing": "b767de1b435a313d8e3da4c111f24fb0", + "type": "ServiceActivity", + }, + }, + "data": {"inputs": {}, "outputs": {}}, + "end_event": { + "id": "60d7c4ec44343e43a89fa12e506641f6", + "incoming": ["4775f07b91b638b9a705e579955636e4"], + "name": None, + "outgoing": "", + "type": "EmptyEndEvent", + }, + "flows": { + "0b5bd2783ba93667b9f197f2fb7a6488": { + "id": "0b5bd2783ba93667b9f197f2fb7a6488", + "is_default": False, + "source": "ea73c396788d37b8beb8df4f79798a09", + "target": "3adc3e38891233e1b0bf9cdf62dbfd5d", + }, + "33762dce859437f2908129956db280da": { + "id": "33762dce859437f2908129956db280da", + "is_default": False, + "source": "b54bf29c856e36e68b89fb3fe3cabdfd", + "target": "757fcd23b18238bfa570993f0429d0d3", + }, + "3f273abe5f3038caa777682b1b62bbde": { + "id": "3f273abe5f3038caa777682b1b62bbde", + "is_default": False, + "source": "a80da787752f3094a244537788e129b1", + "target": "757fcd23b18238bfa570993f0429d0d3", + }, + "4775f07b91b638b9a705e579955636e4": { + "id": "4775f07b91b638b9a705e579955636e4", + "is_default": False, + "source": "757fcd23b18238bfa570993f0429d0d3", + "target": "60d7c4ec44343e43a89fa12e506641f6", + }, + "6bafc9fd6d6e395aba808725340f15e9": { + "id": "6bafc9fd6d6e395aba808725340f15e9", + "is_default": False, + "source": "ea73c396788d37b8beb8df4f79798a09", + "target": "a80da787752f3094a244537788e129b1", + }, + "739716dfe0353ce8a590408d6452165e": { + "id": "739716dfe0353ce8a590408d6452165e", + "is_default": False, + "source": 
"ebe56dd469a93067983ea847e2b61978", + "target": "ea73c396788d37b8beb8df4f79798a09", + }, + "9395729e7b9d3f1a9396d1dfc3881ab8": { + "id": "9395729e7b9d3f1a9396d1dfc3881ab8", + "is_default": False, + "source": "ea73c396788d37b8beb8df4f79798a09", + "target": "40b176a1a92d307b9f8a2ec08d2c47ec", + }, + "a5f1d27c5a873c6ea0a2ec922542f2b0": { + "id": "a5f1d27c5a873c6ea0a2ec922542f2b0", + "is_default": False, + "source": "ea73c396788d37b8beb8df4f79798a09", + "target": "fd40b66e751733129e854d1e5070c3f1", + }, + "b61b2d9c59ee3243947d337372a99ea7": { + "id": "b61b2d9c59ee3243947d337372a99ea7", + "is_default": False, + "source": "40b176a1a92d307b9f8a2ec08d2c47ec", + "target": "757fcd23b18238bfa570993f0429d0d3", + }, + "b767de1b435a313d8e3da4c111f24fb0": { + "id": "b767de1b435a313d8e3da4c111f24fb0", + "is_default": False, + "source": "fd40b66e751733129e854d1e5070c3f1", + "target": "757fcd23b18238bfa570993f0429d0d3", + }, + "d3f70e28042f3b03ad15e96b69a3f3fc": { + "id": "d3f70e28042f3b03ad15e96b69a3f3fc", + "is_default": False, + "source": "ea73c396788d37b8beb8df4f79798a09", + "target": "b54bf29c856e36e68b89fb3fe3cabdfd", + }, + "eb739c39bf6f3394b6db8f6b9220e0c1": { + "id": "eb739c39bf6f3394b6db8f6b9220e0c1", + "is_default": False, + "source": "3adc3e38891233e1b0bf9cdf62dbfd5d", + "target": "757fcd23b18238bfa570993f0429d0d3", + }, + }, + "gateways": { + "757fcd23b18238bfa570993f0429d0d3": { + "id": "757fcd23b18238bfa570993f0429d0d3", + "incoming": [ + "b61b2d9c59ee3243947d337372a99ea7", + "eb739c39bf6f3394b6db8f6b9220e0c1", + "33762dce859437f2908129956db280da", + "b767de1b435a313d8e3da4c111f24fb0", + "3f273abe5f3038caa777682b1b62bbde", + ], + "name": None, + "outgoing": "4775f07b91b638b9a705e579955636e4", + "type": "ConvergeGateway", + }, + "ea73c396788d37b8beb8df4f79798a09": { + "conditions": { + "0b5bd2783ba93667b9f197f2fb7a6488": {"evaluate": "True == True"}, + "6bafc9fd6d6e395aba808725340f15e9": {"evaluate": "True == False"}, + "9395729e7b9d3f1a9396d1dfc3881ab8": {"evaluate": "True == True"}, + "a5f1d27c5a873c6ea0a2ec922542f2b0": {"evaluate": "True == False"}, + "d3f70e28042f3b03ad15e96b69a3f3fc": {"evaluate": "True == False"}, + }, + "id": "ea73c396788d37b8beb8df4f79798a09", + "incoming": ["739716dfe0353ce8a590408d6452165e"], + "name": None, + "outgoing": [ + "9395729e7b9d3f1a9396d1dfc3881ab8", + "0b5bd2783ba93667b9f197f2fb7a6488", + "d3f70e28042f3b03ad15e96b69a3f3fc", + "a5f1d27c5a873c6ea0a2ec922542f2b0", + "6bafc9fd6d6e395aba808725340f15e9", + ], + "type": "ConditionalParallelGateway", + }, + }, + "id": "28b3413186dd3cd48310531354bc897a", + "start_event": { + "id": "ebe56dd469a93067983ea847e2b61978", + "incoming": "", + "name": None, + "outgoing": "739716dfe0353ce8a590408d6452165e", + "type": "EmptyStartEvent", + }, +} diff --git a/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/new_data_for_test.py b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/new_data_for_test.py new file mode 100644 index 00000000..2fafae38 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/new_data_for_test.py @@ -0,0 +1,700 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import copy
+
+from pipeline.utils.uniqid import node_uniqid
+
+id_list = [node_uniqid() for i in range(10)]
+PIPELINE_DATA = {
+    "id": id_list[0],
+    "name": "name",
+    "start_event": {
+        "id": id_list[1],
+        "name": "start",
+        "type": "EmptyStartEvent",
+        "incoming": None,
+        "outgoing": id_list[5],
+    },
+    "end_event": {"id": id_list[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list[7], "outgoing": None},
+    "activities": {
+        id_list[3]: {
+            "id": id_list[3],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[5],
+            "outgoing": id_list[6],
+            "component": {
+                "code": "test",
+                "inputs": {
+                    "input_test": {"type": "plain", "value": "custom2"},
+                    "radio_test": {"type": "plain", "value": "1"},
+                },
+                "global_outputs": {"key1": "${global_key1}"},
+            },
+        },
+        id_list[4]: {
+            "id": id_list[4],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[6],
+            "outgoing": id_list[7],
+            "component": {
+                "code": "test",
+                "inputs": {
+                    "input_test": {"type": "plain", "value": "value1"},
+                    "radio_test": {"type": "splice", "value": "before_${global_key1}"},
+                },
+                "global_outputs": {},
+            },
+        },
+    },
+    "flows": {  # holds all flows (directed edges) in this pipeline
+        id_list[5]: {"id": id_list[5], "source": id_list[1], "target": id_list[3]},
+        id_list[6]: {"id": id_list[6], "source": id_list[3], "target": id_list[4]},
+        id_list[7]: {"id": id_list[7], "source": id_list[4], "target": id_list[2]},
+    },
+    "gateways": {},  # detailed information about the gateways lives here
+    "data": {
+        "inputs": {
+            "${demo_input_test}": {"type": "plain", "value": "value1"},
+            "${global_key1}": {"type": "splice", "source_act": id_list[3], "source_key": "key1", "value": ""},
+            "${custom_key1}": {"type": "splice", "value": "aaa_${global_key1}"},
+            "${custom_key2}": {"type": "plain", "value": "custom2"},
+        },
+        "outputs": {"${demo_input_test}": "${demo_input_test}", "${global_key1}": "${global_key1}"},
+    },
+}
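+
+
+# "${global_key1}" in PIPELINE_DATA's data.inputs carries no literal value: it is
+# an output variable that points at the "key1" output of activity id_list[3]
+# (declared in that activity's "global_outputs"). Sketch of the lookup this
+# wiring implies; get_output is a hypothetical accessor, not engine API:
+def resolve_output_var(var, get_output):
+    """Fetch an output variable's value from its source activity's outputs."""
+    return get_output(var["source_act"], var["source_key"])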
+
+WEB_PIPELINE_DATA = {
+    "id": id_list[0],
+    "name": "name",
+    "start_event": {
+        "id": id_list[1],
+        "name": "start",
+        "type": "EmptyStartEvent",
+        "incoming": None,
+        "outgoing": id_list[5],
+    },
+    "end_event": {"id": id_list[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list[7], "outgoing": None},
+    "activities": {
+        id_list[3]: {
+            "id": id_list[3],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[5],
+            "outgoing": id_list[6],
+            "component": {
+                "code": "test",
+                "data": {
+                    "input_test": {"hook": False, "value": "${custom_key2}"},
+                    "radio_test": {"hook": False, "value": "1"},
+                },
+            },
+        },
+        id_list[4]: {
+            "id": id_list[4],
+            "type": "ServiceActivity",
+            "name": "first_task",
+            "incoming": id_list[6],
+            "outgoing": id_list[7],
+            "component": {
+                "code": "test",
+                "data": {
+                    "input_test": {"hook": True, "value": "${demo_input_test}"},
+                    "radio_test": {"hook": False, "value": "before_${global_key1}"},
+                },
+            },
+        },
+    },
+    "flows": {  # holds all flows (directed edges) in this pipeline
+        id_list[5]: {"id": id_list[5], "source": id_list[1], "target": id_list[3]},
+        id_list[6]: {"id": id_list[6], "source": id_list[3], "target": id_list[4]},
+        id_list[7]: {"id": id_list[7], "source": id_list[4], "target": id_list[2]},
+    },
+    "gateways": {},  # detailed information about the gateways lives here
+    "constants": {
+        "${demo_input_test}": {
+            "name": "input",
+            "key": "${demo_input_test}",
+            "desc": "",
+            "validation": "^.*$",
+            "show_type": "show",
+            "value": "value1",
+            "source_type": "component_inputs",
+            "source_tag": "demo.input_test",
+            # 'source_step': [id_list[4], ],
+            "source_info": {id_list[4]: ["input_test"]},
+            # 'source_key': '',
+            "custom_type": "",
+        },
+        "${custom_key1}": {
+            "name": "input",
+            "key": "${custom_key1}",
+            "desc": "",
+            "validation": "",
+            "show_type": "show",
+            "value": "aaa_${global_key1}",
+            "source_type": "custom",
+            "source_tag": "",
+            "source_info": {},
+            # 'source_key': '',
+            "custom_type": "simple_input",
+        },
+        "${custom_key2}": {
+            "name": "input",
+            "key": "${custom_key2}",
+            "desc": "",
+            "validation": "",
+            "show_type": "hide",
+            "value": "custom2",
+            "source_type": "custom",
+            "source_tag": "",
+            # 'source_step': '',
+            # 'source_key': '',
+            "custom_type": "simple_input",
+        },
+        "${global_key1}": {
+            "name": "input",
+            "key": "${global_key1}",
+            "desc": "",
+            "validation": "^.*$",
+            "show_type": "hide",
+            "value": "",
+            "source_type": "component_outputs",
+            "source_tag": "",
+            # 'source_step': id_list[3],
+            # 'source_key': 'key1',
+            "source_info": {id_list[3]: ["key1"]},
+            "custom_type": "",
+        },
+    },
+    "outputs": ["${demo_input_test}", "${global_key1}"],
+}
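+
+
+# WEB_PIPELINE_DATA is the web (frontend) format: component inputs carry a
+# "hook" flag instead of a type. A hooked input's value is the key of a global
+# constant, and that constant's "source_info" records which activities hook it.
+# Illustrative sketch of the lookup (the real translation to the engine format
+# happens in the web pipeline parser):
+def effective_input(act_input, constants):
+    """Return the constant value for hooked inputs, the raw value otherwise."""
+    if act_input.get("hook"):
+        return constants[act_input["value"]]["value"]
+    return act_input["value"]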
"ServiceActivity", + "name": "first_task", + "incoming": id_list3[5], + "outgoing": id_list3[6], + "component": { + "code": "test", + "data": { + "input_test": {"hook": False, "value": "${custom_key2}"}, + "radio_test": {"hook": False, "value": "1"}, + }, + }, + }, + id_list3[4]: { + "id": id_list3[4], + "type": "ServiceActivity", + "name": "first_task", + "incoming": id_list3[6], + "outgoing": id_list3[7], + "component": { + "code": "test", + "data": { + "input_test": {"hook": True, "value": "${demo_input_test}"}, + "radio_test": {"hook": False, "value": "before_${global_key1}_${custom_key2}"}, + }, + }, + }, + }, + "flows": { # 存放该 Pipeline 中所有的线 + id_list3[5]: {"id": id_list3[5], "source": id_list3[1], "target": id_list3[3]}, + id_list3[6]: {"id": id_list3[6], "source": id_list3[3], "target": id_list3[4]}, + id_list3[7]: {"id": id_list3[7], "source": id_list3[4], "target": id_list3[2]}, + }, + "gateways": {}, # 这里存放着网关的详细信息 + "constants": { + "${demo_input_test}": { + "name": "input", + "key": "${demo_input_test}", + "desc": "", + "validation": "^.*$", + "show_type": "hide", + "value": "value2", + "source_type": "component_inputs", + "source_tag": "demo.input_test", + # 'source_step': [id_list3[4], ], + # 'source_key': '', + "source_info": {id_list3[4]: ["input_test"], id_list3[5]: ["input_test"]}, + "custom_type": "", + }, + "${custom_key1}": { + "name": "input", + "key": "${custom_key1}", + "desc": "", + "validation": "", + "show_type": "hide", + "value": "aaa_${global_key1}", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + "${custom_key2}": { + "name": "input", + "key": "${custom_key2}", + "desc": "", + "validation": "", + "show_type": "hide", + "value": "custom2", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + "${global_key1}": { + "name": "input", + "key": "${global_key1}", + "desc": "", + "validation": "^.*$", + "show_type": "hide", + "value": "", + "source_type": "component_outputs", + "source_tag": "", + # 'source_step': id_list3[3], + # 'source_key': 'key1', + "source_info": {id_list3[3]: ["key1"]}, + "custom_type": "", + }, + }, + "outputs": ["${demo_input_test}", "${global_key1}"], +} + +PIPELINE_WITH_SUB_PROCESS = copy.deepcopy(PIPELINE_DATA) +PIPELINE_WITH_SUB_PROCESS["activities"][id_list[4]] = { + "id": id_list[4], + "type": "SubProcess", + "name": "second_task", + "incoming": id_list[6], + "outgoing": id_list[7], + "pipeline": sub_pipeline, + "exposed_constants": [], + "hooked_constants": [], +} + +# 子流程 全局输入全部hide 输出全部无引用到父流程 +WEB_PIPELINE_WITH_SUB_PROCESS = copy.deepcopy(WEB_PIPELINE_DATA) +WEB_PIPELINE_WITH_SUB_PROCESS["activities"][id_list[4]] = { + "id": id_list[4], + "type": "SubProcess", + "name": "second_task", + "incoming": id_list[6], + "outgoing": id_list[7], + "pipeline": sub_web_pipeline, + "hooked_constants": [], +} + +# 子流程 全局输入部分show,并且引用了父流程的全局变量,无引用到父流程 输出全部无引用到父流程 +id_list2 = [node_uniqid() for i in range(20)] +sub_web_pipeline2 = { + "id": id_list2[0], + "name": "name", + "start_event": { + "id": id_list2[1], + "name": "start", + "type": "EmptyStartEvent", + "incoming": None, + "outgoing": id_list2[5], + }, + "end_event": {"id": id_list2[2], "name": "end", "type": "EmptyEndEvent", "incoming": id_list2[7], "outgoing": None}, + "activities": { + id_list2[3]: { + "id": id_list2[3], + "type": "ServiceActivity", + "name": "first_task", + "incoming": 
+
+PIPELINE_WITH_SUB_PROCESS = copy.deepcopy(PIPELINE_DATA)
+PIPELINE_WITH_SUB_PROCESS["activities"][id_list[4]] = {
+    "id": id_list[4],
+    "type": "SubProcess",
+    "name": "second_task",
+    "incoming": id_list[6],
+    "outgoing": id_list[7],
+    "pipeline": sub_pipeline,
+    "exposed_constants": [],
+    "hooked_constants": [],
+}
+
+# Subprocess: all global inputs hidden; no outputs referenced by the parent pipeline
+WEB_PIPELINE_WITH_SUB_PROCESS = copy.deepcopy(WEB_PIPELINE_DATA)
+WEB_PIPELINE_WITH_SUB_PROCESS["activities"][id_list[4]] = {
+    "id": id_list[4],
+    "type": "SubProcess",
+    "name": "second_task",
+    "incoming": id_list[6],
+    "outgoing": id_list[7],
+    "pipeline": sub_web_pipeline,
+    "hooked_constants": [],
+}
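+
+
+# Both *_WITH_SUB_PROCESS structures swap one ServiceActivity for a "SubProcess"
+# activity embedding a whole child pipeline; tests such as
+# TestPipelineTemplate.test_set_has_subprocess_bit depend on exactly this shape.
+# A recursive walk over it might look like this sketch (illustration only, not
+# the model's implementation):
+def count_subprocesses(pipeline):
+    """Count SubProcess activities in a pipeline tree, recursing into children."""
+    total = 0
+    for act in pipeline["activities"].values():
+        if act["type"] == "SubProcess":
+            total += 1 + count_subprocesses(act["pipeline"])
+    return total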
"", + "validation": "", + "show_type": "hide", + "value": "aaa_${root_key2}", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + "${root_key2}": { + "name": "root_key2", + "key": "${root_key2}", + "desc": "", + "validation": "", + "show_type": "show", + "value": "root_value2", + "source_type": "custom", + "source_tag": "", + # 'source_step': '', + # 'source_key': '', + "source_info": {}, + "custom_type": "simple_input", + }, + }, + "outputs": [], +} + +# 子流程 全局输入部分show,部分引用到父流程 全部输出无引用到父流程 +WEB_PIPELINE_WITH_SUB_PROCESS3 = { + "id": node_uniqid(), + "activities": { + "a69a41785c7b30afbd46c532a6f466a7": { + "outgoing": "ca1aab540d4c35f5a2f60a05ed80181d", + "incoming": "199fda538c9e39d3876465f8925774f2", + "name": "\u8282\u70b9_1", + "optional": False, + "pipeline": { + "activities": { + "8f7428b073963641bcf8ce01b447e17d": { + "outgoing": "d5bafd9b95d739a892ded011e70708ac", + "incoming": "fd2c65c4b4e7313abf8fb26ddb6b406b", + "name": "\u8282\u70b9_1", + "type": "ServiceActivity", + "component": { + "code": "test", + "data": { + "input_test": {"hook": True, "is_valid": True, "value": "${input_test}"}, + "radio_test": {"hook": False, "is_valid": True, "value": "1"}, + }, + }, + "ignore": False, + "optional": False, + "id": "8f7428b073963641bcf8ce01b447e17d", + "loop": None, + } + }, + "end_event": { + "incoming": "d5bafd9b95d739a892ded011e70708ac", + "outgoing": "", + "type": "EmptyEndEvent", + "id": "4a93549e80c83bd293d4c4be658cb99f", + "name": "", + }, + "outputs": [], + "flows": { + "fd2c65c4b4e7313abf8fb26ddb6b406b": { + "is_default": False, + "source": "b3a6c3e168c83d6a951b962935ebc10e", + "id": "fd2c65c4b4e7313abf8fb26ddb6b406b", + "target": "8f7428b073963641bcf8ce01b447e17d", + }, + "d5bafd9b95d739a892ded011e70708ac": { + "is_default": False, + "source": "8f7428b073963641bcf8ce01b447e17d", + "id": "d5bafd9b95d739a892ded011e70708ac", + "target": "4a93549e80c83bd293d4c4be658cb99f", + }, + }, + "start_event": { + "incoming": "", + "outgoing": "fd2c65c4b4e7313abf8fb26ddb6b406b", + "type": "EmptyStartEvent", + "id": "b3a6c3e168c83d6a951b962935ebc10e", + "name": "", + }, + "id": "a69a41785c7b30afbd46c532a6f466a7", + "constants": { + "${input_test}": { + "source_tag": "demo.input_test", + "name": "\u8f93\u5165\u6846", + "custom_type": "input", + # u'source_key': u'', + "value": "${input_test}", + "show_type": "show", + "source_type": "component_inputs", + "is_valid": True, + "key": "${input_test}", + "desc": "", + "validation": "^.*$", + # u'source_step': [u'8f7428b073963641bcf8ce01b447e17d'], + "source_info": {"8f7428b073963641bcf8ce01b447e17d": ["input_test"]}, + } + }, + "gateways": {}, + }, + "id": "a69a41785c7b30afbd46c532a6f466a7", + "ignore": False, + "type": "SubProcess", + "template_id": "dd210a6ecf0e374985ed87bcc087d447", + "loop": None, + "hooked_constants": ["${input_test}"], + } + }, + "end_event": { + "type": "EmptyEndEvent", + "outgoing": "", + "incoming": "ca1aab540d4c35f5a2f60a05ed80181d", + "id": "e02925dc56c7354faf5b55c4b8afe691", + "name": "", + }, + "outputs": [], + "flows": { + "ca1aab540d4c35f5a2f60a05ed80181d": { + "is_default": False, + "source": "a69a41785c7b30afbd46c532a6f466a7", + "id": "ca1aab540d4c35f5a2f60a05ed80181d", + "target": "e02925dc56c7354faf5b55c4b8afe691", + }, + "199fda538c9e39d3876465f8925774f2": { + "is_default": False, + "source": "48a815abbadf3845ac5283036c064a9a", + "id": "199fda538c9e39d3876465f8925774f2", + "target": 
"a69a41785c7b30afbd46c532a6f466a7", + }, + }, + "start_event": { + "type": "EmptyStartEvent", + "outgoing": "199fda538c9e39d3876465f8925774f2", + "incoming": "", + "id": "48a815abbadf3845ac5283036c064a9a", + "name": "", + }, + "constants": { + "${input_test}": { + "source_tag": "demo.input_test", + "name": "\u8f93\u5165\u6846", + "custom_type": "input", + # u'source_key': u'', + "value": "1", + "show_type": "show", + "source_type": "component_inputs", + "key": "${input_test}", + "desc": "", + "validation": "^.*$", + # u'source_step': [u'a69a41785c7b30afbd46c532a6f466a7'], + "source_info": {"8f7428b073963641bcf8ce01b447e17d": ["input_test"]}, + } + }, + "gateways": {}, +} +# 子流程 全局输入部分show,并且引用了父流程的全局变量,无引用到父流程 输出全部无引用到父流程 + +# 子流程 全局输入部分show,并且引用到父流程 输出全部无引用到父流程 diff --git a/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/test_pipeline_parser.py b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/test_pipeline_parser.py new file mode 100644 index 00000000..e08fa3ea --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/pipeline_parser/test_pipeline_parser.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import unittest + +from pipeline.core.pipeline import Pipeline +from pipeline.parser.pipeline_parser import PipelineParser + +from .data import CONDITIONAL_PARALLEL, PIPELINE_DATA, PIPELINE_WITH_SUB_PROCESS + + +class TestPipelineParser(unittest.TestCase): + def setUp(self): + from pipeline.component_framework.component import Component + from pipeline.core.flow.activity import Service + + class TestService(Service): + def execute(self, data, parent_data): + return True + + def outputs_format(self): + return [] + + class TestComponent(Component): + name = "test" + code = "test" + bound_service = TestService + form = "test.js" + + def test_pipeline_parser(self): + parser_obj = PipelineParser(PIPELINE_DATA) + self.assertIsInstance(parser_obj.parse(), Pipeline) + + def test_sub_process_parser(self): + parser_obj = PipelineParser(PIPELINE_WITH_SUB_PROCESS) + self.assertIsInstance(parser_obj.parse(), Pipeline) + + def test_conditional_parallel_parser(self): + parser_obj = PipelineParser(CONDITIONAL_PARALLEL) + self.assertIsInstance(parser_obj.parse(), Pipeline) diff --git a/runtime/bamboo-pipeline/pipeline/tests/readme.md b/runtime/bamboo-pipeline/pipeline/tests/readme.md new file mode 100644 index 00000000..008164b5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/readme.md @@ -0,0 +1,2 @@ +python manage.py test +python manage.py test pipeline.tests.core.data.test_expression --keepdb \ No newline at end of file diff --git a/runtime/bamboo-pipeline/pipeline/tests/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/signals/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/signals/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/signals/test_handlers.py b/runtime/bamboo-pipeline/pipeline/tests/signals/test_handlers.py new file mode 100644 index 00000000..8f72d324 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/signals/test_handlers.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import mock +from django.test import TestCase + +from pipeline.models import PipelineTemplate +from pipeline.signals import handlers + + +class MockPipelineTemplate(object): + def __init__(self, is_deleted): + self.is_deleted = is_deleted + self.set_has_subprocess_bit = mock.MagicMock() + + +class PipelineSignalHandlerTestCase(TestCase): + def test_template_pre_save_handler(self): + template_to_be_delete = MockPipelineTemplate(is_deleted=True) + handlers.pipeline_template_pre_save_handler(sender=PipelineTemplate, instance=template_to_be_delete) + template_to_be_delete.set_has_subprocess_bit.assert_not_called() + + template_to_be_save = MockPipelineTemplate(is_deleted=False) + handlers.pipeline_template_pre_save_handler(sender=PipelineTemplate, instance=template_to_be_save) + template_to_be_save.set_has_subprocess_bit.assert_called_once() diff --git a/runtime/bamboo-pipeline/pipeline/tests/test_apps.py b/runtime/bamboo-pipeline/pipeline/tests/test_apps.py new file mode 100644 index 00000000..a373f5fd --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/test_apps.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.test import TestCase + +from pipeline import apps +from pipeline.tests.mock import * # noqa +from pipeline.tests.mock_settings import * # noqa + + +class TestApps(TestCase): + def test_get_client_through_sentinel__single_sentinel(self): + settings = MagicMock() + settings.REDIS = { + "host": "1.1.1.1", + "port": "123456", + } + rs = MagicMock() + sentinel = MagicMock(return_value=rs) + + with patch(APPS_SETTINGS, settings): + with patch(APPS_SENTINEL, sentinel): + r = apps.get_client_through_sentinel() + + sentinel.assert_called_once_with([("1.1.1.1", "123456")], sentinel_kwargs={}) + rs.master_for.assert_called_once_with("mymaster") + self.assertIsNotNone(r) + + @patch(APPS_SENTINEL, MagicMock(return_value=MagicMock())) + def test_get_client_through_sentinel__mutiple_sentinel(self): + settings = MagicMock() + settings.REDIS = { + "host": "1.1.1.1,2.2.2.2, 3.3.3.3 , 4.4.4.4", + "port": "123456,45678,11111", + "password": "password_token", + "service_name": "name_token", + } + rs = MagicMock() + sentinel = MagicMock(return_value=rs) + + with patch(APPS_SETTINGS, settings): + with patch(APPS_SENTINEL, sentinel): + r = apps.get_client_through_sentinel() + + sentinel.assert_called_once_with( + [("1.1.1.1", "123456"), ("2.2.2.2", "45678"), ("3.3.3.3", "11111")], + password="password_token", + sentinel_kwargs={}, + ) + rs.master_for.assert_called_once_with("name_token") + self.assertIsNotNone(r) diff --git a/runtime/bamboo-pipeline/pipeline/tests/utils/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/tests.py b/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/tests.py new file mode 100644 index 00000000..faa292e7 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/utils/boolrule/tests.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.utils.boolrule import BoolRule + + +class BoolRuleTests(TestCase): + def test_eq(self): + self.assertTrue(BoolRule("1 == 1").test()) + self.assertTrue(BoolRule('"1" == 1').test()) + + self.assertTrue(BoolRule("True == true").test()) + self.assertTrue(BoolRule("False == false").test()) + + self.assertTrue(BoolRule("1 == True").test()) + self.assertTrue(BoolRule("0 == False").test()) + self.assertTrue(BoolRule('"1" == True').test()) + self.assertTrue(BoolRule('"0" == False').test()) + self.assertTrue(BoolRule('"3.14" == 3.14').test()) + + self.assertTrue(BoolRule('"abc" == "abc"').test()) + + self.assertFalse(BoolRule("1 == 2").test()) + self.assertFalse(BoolRule('123 == "123a"').test()) + self.assertFalse(BoolRule('1 == "2"').test()) + + self.assertFalse(BoolRule('True == "true"').test()) + self.assertFalse(BoolRule('False == "false"').test()) + + def test_ne(self): + self.assertTrue(BoolRule("1 != 2").test()) + self.assertTrue(BoolRule('"1" != 2').test()) + + self.assertTrue(BoolRule('True != "true"').test()) + + self.assertTrue(BoolRule('"abc" != "cba"').test()) + + self.assertFalse(BoolRule("1 != 1").test()) + + def test_gt(self): + self.assertTrue(BoolRule("2 > 1").test()) + self.assertTrue(BoolRule('"2" > 1').test()) + + self.assertFalse(BoolRule("1 > 2").test()) + self.assertFalse(BoolRule('"1" > 2').test()) + + def test_lt(self): + self.assertTrue(BoolRule("1 < 2").test()) + self.assertTrue(BoolRule('"1" < 2').test()) + + self.assertFalse(BoolRule("2 < 1").test()) + self.assertFalse(BoolRule("2 < 2").test()) + + def test_in(self): + self.assertTrue(BoolRule("1 in (1, 2)").test()) + self.assertTrue(BoolRule('1 in ("1", "2")').test()) + self.assertTrue(BoolRule('"1" in (1, 2)').test()) + self.assertTrue(BoolRule('"1" in ("1", "2")').test()) + + self.assertFalse(BoolRule("1 in (0, 2)").test()) + self.assertFalse(BoolRule('1 in ("11", 2)').test()) + + def test_notin(self): + self.assertTrue(BoolRule("1 notin (0, 2)").test()) + self.assertTrue(BoolRule('1 notin ("0", "2")').test()) + self.assertTrue(BoolRule('"abc" notin (0, 2)').test()) + + def test_and(self): + self.assertTrue(BoolRule("1 < 2 and 2 < 3").test()) + self.assertTrue(BoolRule('"a" < "s" and 2 < 3').test()) + + self.assertFalse(BoolRule("1 > 2 and 2 > 1").test()) + self.assertFalse(BoolRule("2 > 1 and 1 > 2").test()) + self.assertFalse(BoolRule("2 > 1 and 1 > 2").test()) + self.assertFalse(BoolRule('"s" > "s" and 2 < 3').test()) + 
self.assertFalse(BoolRule('"s" < "s" and 2 < 3').test()) + + def test_or(self): + self.assertTrue(BoolRule("1 < 2 or 2 < 3").test()) + self.assertTrue(BoolRule("1 < 2 or 2 < 1").test()) + self.assertTrue(BoolRule("1 > 2 or 2 > 1").test()) + self.assertTrue(BoolRule('"s" > "s" or "su" > "st"').test()) + + self.assertFalse(BoolRule("1 > 2 or 2 > 3").test()) + self.assertFalse(BoolRule('"a" > "s" or "s" > "st"').test()) + + def test_context(self): + context = {"${v1}": 1, "${v2}": "1"} + self.assertTrue(BoolRule("${v1} == ${v2}").test(context)) + self.assertTrue(BoolRule("${v1} == 1").test(context)) + self.assertTrue(BoolRule('${v1} == "1"').test(context)) + self.assertTrue(BoolRule('${v2} == "1"').test(context)) + self.assertTrue(BoolRule('${v2} == "1"').test(context)) + + self.assertTrue(BoolRule('${v1} in ("1")').test(context)) + + def test_gt_or_equal(self): + context = {"${v1}": 1, "${v2}": "1"} + self.assertTrue(BoolRule("${v1} >= ${v2}").test(context)) + self.assertTrue(BoolRule("${v1} >= 1").test(context)) + self.assertTrue(BoolRule('${v1} >= "1"').test(context)) + self.assertTrue(BoolRule("${v1} >= 0").test(context)) + self.assertTrue(BoolRule('${v1} >= "0"').test(context)) + + # self.assertTrue(BoolRule('${v1} >= 2').test(context)) + self.assertFalse(BoolRule('${v2} >= "2"').test(context)) + + def test_lt_or_equal(self): + context = {"${v1}": 1, "${v2}": "1"} + self.assertTrue(BoolRule("${v1} <= ${v2}").test(context)) + self.assertTrue(BoolRule("${v1} <= 1").test(context)) + self.assertTrue(BoolRule('${v1} <= "2"').test(context)) + self.assertTrue(BoolRule('${v1} <= "123456789111"').test(context)) + self.assertTrue(BoolRule("${v1} <= 123456789111").test(context)) + self.assertFalse(BoolRule("${v1} <= 0").test(context)) + self.assertFalse(BoolRule('${v1} <= "0"').test(context)) + self.assertTrue(BoolRule('"a" <= "b"').test(context)) + self.assertFalse(BoolRule('"a" <= "49"').test(context)) + + def test_true_equal(self): + context = {"${v1}": True, "${v2}": "True"} + # 下面的表达式测试不符合预期 + # self.assertTrue(BoolRule('${v1} == ${v2}').test(context)) + self.assertTrue(BoolRule("${v1} == True").test(context)) + self.assertTrue(BoolRule("${v1} == true").test(context)) + self.assertTrue(BoolRule("${v1} == ${v1}").test(context)) + self.assertTrue(BoolRule("${v1} == 1").test(context)) + self.assertTrue(BoolRule('${v1} == "1"').test(context)) + + self.assertFalse(BoolRule('${v1} == "s"').test(context)) + self.assertFalse(BoolRule("${v1} == 0").test(context)) + self.assertFalse(BoolRule('${v1} == "0"').test(context)) + self.assertFalse(BoolRule("${v1} == false").test(context)) + self.assertFalse(BoolRule("${v1} == False").test(context)) + self.assertFalse(BoolRule('${v1} == "false"').test(context)) + self.assertFalse(BoolRule('${v1} == "False"').test(context)) + + def test_false_equal(self): + context = {"${v1}": False, "${v2}": "False"} + # 下面的表达式测试不符合预期 + # self.assertTrue(BoolRule('${v1} == "False"').test(context)) + self.assertTrue(BoolRule("${v1} == ${v1}").test(context)) + self.assertTrue(BoolRule("${v1} == false").test(context)) + self.assertTrue(BoolRule("${v1} == False").test(context)) + self.assertTrue(BoolRule('${v1} == "0"').test(context)) + self.assertTrue(BoolRule("${v1} == 0").test(context)) + self.assertTrue(BoolRule('${v1} == "0"').test(context)) + + self.assertFalse(BoolRule('${v1} == "1"').test(context)) + self.assertFalse(BoolRule("${v1} == true").test(context)) + self.assertFalse(BoolRule('${v1} == "true"').test(context)) + self.assertFalse(BoolRule("${v1} == 
True").test(context)) + self.assertFalse(BoolRule('${v1} == "True"').test(context)) + self.assertFalse(BoolRule('${v1} == "s"').test(context)) + + def test_multi_or(self): + self.assertTrue(BoolRule('("s" > "s" or "su" > "st") or (1 > 3 and 2 < 3)').test()) + self.assertTrue(BoolRule('(1 > 3 and 2 < 3) or ("s" > "s" or "su" > "st")').test()) + self.assertTrue(BoolRule('(1 < 3 and 2 < 3) or ("s" > "s" or "su" > "st")').test()) + self.assertTrue(BoolRule('(1 > 2 or 2 > 3) or ("s" > "s" or "su" > "st") or (4 > 5 and 5 < 6)').test()) + + self.assertFalse(BoolRule('(1 > 2 or 2 > 3) or ("s" > "s" or "su" < "st")').test()) + self.assertFalse(BoolRule('(1 > 2 or 2 > 3) or ("s" > "s" or "su" < "st") or (4 > 5 and 5 < 6)').test()) + + def test_multi_and(self): + self.assertTrue(BoolRule('("s" > "s" or "su" > "st") and (1 < 3 and 2 < 3)').test()) + + self.assertFalse(BoolRule('(1 < 2 or 2 > 3) and ("s" > "s" or "su" < "st")').test()) + self.assertFalse(BoolRule('(1 > 2 or 2 > 3) and ("s" > "s" or "su" > "st")').test()) + self.assertFalse(BoolRule('(1 > 2 or 2 > 3) and ("s" > "s" or "su" < "st")').test()) + self.assertFalse(BoolRule('(1 < 3 and 2 < 3) and ("s" > "s" or "su" > "st") and (4 > 5 and 5 < 6)').test()) diff --git a/runtime/bamboo-pipeline/pipeline/tests/validators/__init__.py b/runtime/bamboo-pipeline/pipeline/tests/validators/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/validators/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/tests/validators/cases.py b/runtime/bamboo-pipeline/pipeline/tests/validators/cases.py new file mode 100644 index 00000000..496d2b51 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/validators/cases.py @@ -0,0 +1,1191 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from pipeline.builder import build_tree +from pipeline.builder.flow import * # noqa +from pipeline.tests.validators.utils import * # noqa +from pipeline.validators.gateway import * # noqa + + +def flow_valid_case(testcase): + def _(num): + return num - 1 + + def out_assert_case(length, out_set): + return {"len": length, "outgoing": out_set} + + outgoing_assert = { + start_event_id: out_assert_case(1, {act_id(1)}), + act_id(1): out_assert_case(1, {parallel_gw_id(1)}), + parallel_gw_id(1): out_assert_case(3, {parallel_gw_id(2), act_id(5), act_id(6)}), + parallel_gw_id(2): out_assert_case(3, {act_id(2), act_id(3), act_id(4)}), + act_id(2): out_assert_case(1, {converge_gw_id(1)}), + act_id(3): out_assert_case(1, {converge_gw_id(1)}), + act_id(4): out_assert_case(1, {converge_gw_id(1)}), + converge_gw_id(1): out_assert_case(1, {act_id(7)}), + act_id(7): out_assert_case(1, {exclusive_gw_id(1)}), + exclusive_gw_id(1): out_assert_case(2, {parallel_gw_id(2), converge_gw_id(3)}), + act_id(5): out_assert_case(1, {exclusive_gw_id(7)}), + exclusive_gw_id(7): out_assert_case(2, {act_id(8), converge_gw_id(3)}), + act_id(8): out_assert_case(1, {exclusive_gw_id(8)}), + exclusive_gw_id(8): out_assert_case(2, {act_id(8), act_id(11)}), + act_id(11): out_assert_case(1, {converge_gw_id(3)}), + act_id(6): out_assert_case(1, {exclusive_gw_id(2)}), + exclusive_gw_id(2): out_assert_case(3, {act_id(6), act_id(9), act_id(10)}), + act_id(9): out_assert_case(1, {converge_gw_id(2)}), + act_id(10): out_assert_case(1, {converge_gw_id(2)}), + converge_gw_id(2): out_assert_case(1, {act_id(12)}), + act_id(12): out_assert_case(1, {exclusive_gw_id(6)}), + exclusive_gw_id(6): out_assert_case(3, {act_id(6), converge_gw_id(3), converge_gw_id(2)}), + converge_gw_id(3): out_assert_case(1, {act_id(13)}), + act_id(13): out_assert_case(1, {exclusive_gw_id(3)}), + exclusive_gw_id(3): out_assert_case(4, {end_event_id, act_id(14), parallel_gw_id(3), act_id(1)}), + act_id(14): out_assert_case(1, {exclusive_gw_id(4)}), + exclusive_gw_id(4): out_assert_case(2, {act_id(13), converge_gw_id(4)}), + parallel_gw_id(3): out_assert_case(3, {act_id(15), act_id(16), act_id(17)}), + act_id(15): out_assert_case(1, {act_id(18)}), + act_id(18): out_assert_case(1, {converge_gw_id(4)}), + act_id(16): out_assert_case(1, {converge_gw_id(4)}), + act_id(17): out_assert_case(1, {exclusive_gw_id(5)}), + exclusive_gw_id(5): out_assert_case(2, {act_id(19), act_id(20)}), + act_id(19): out_assert_case(1, {converge_gw_id(4)}), + act_id(20): out_assert_case(1, {converge_gw_id(4)}), + converge_gw_id(4): out_assert_case(1, {end_event_id}), + end_event_id: out_assert_case(0, set()), + } + + stream_assert = { + start_event_id: MAIN_STREAM, + act_id(1): MAIN_STREAM, + parallel_gw_id(1): MAIN_STREAM, + parallel_gw_id(2): "pg_1_0", + act_id(2): "pg_2_0", + act_id(3): "pg_2_1", + act_id(4): "pg_2_2", + converge_gw_id(1): "pg_1_0", + act_id(7): "pg_1_0", + exclusive_gw_id(1): "pg_1_0", + act_id(5): "pg_1_1", + exclusive_gw_id(7): "pg_1_1", + act_id(8): "pg_1_1", + exclusive_gw_id(8): "pg_1_1", + act_id(11): "pg_1_1", + act_id(6): "pg_1_2", + exclusive_gw_id(2): "pg_1_2", + act_id(9): "pg_1_2", + act_id(10): "pg_1_2", + converge_gw_id(2): "pg_1_2", + act_id(12): "pg_1_2", + exclusive_gw_id(6): "pg_1_2", + converge_gw_id(3): MAIN_STREAM, + act_id(13): MAIN_STREAM, + exclusive_gw_id(3): MAIN_STREAM, + act_id(14): MAIN_STREAM, + exclusive_gw_id(4): MAIN_STREAM, + parallel_gw_id(3): MAIN_STREAM, + act_id(15): "pg_3_0", + act_id(18): "pg_3_0", + act_id(16): "pg_3_1", + 
act_id(17): "pg_3_2", + exclusive_gw_id(5): "pg_3_2", + act_id(19): "pg_3_2", + act_id(20): "pg_3_2", + converge_gw_id(4): MAIN_STREAM, + end_event_id: MAIN_STREAM, + } + + gateway_validation_assert = { + converge_gw_id(1): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 1, + "converged": {parallel_gw_id(2)}, + "distance": 5, + }, + converge_gw_id(2): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 1, + "converged": {exclusive_gw_id(2)}, + "distance": 6, + }, + converge_gw_id(3): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 5, + "converged": { + parallel_gw_id(1), + exclusive_gw_id(1), + exclusive_gw_id(7), + exclusive_gw_id(8), + exclusive_gw_id(6), + }, + "distance": 9, + }, + converge_gw_id(4): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 4, + "converged": {parallel_gw_id(3), exclusive_gw_id(3), exclusive_gw_id(4), exclusive_gw_id(5)}, + "distance": 16, + }, + exclusive_gw_id(1): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 7, + }, + exclusive_gw_id(2): { + "match": None, + "match_assert": converge_gw_id(2), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(3): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": True, + "distance": 11, + }, + exclusive_gw_id(4): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 13, + }, + exclusive_gw_id(5): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 14, + }, + exclusive_gw_id(6): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 8, + }, + exclusive_gw_id(7): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(8): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + parallel_gw_id(1): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 2, + }, + parallel_gw_id(2): { + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 3, + }, + parallel_gw_id(3): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 12, + }, + } + + start = EmptyStartEvent(id=start_event_id) + acts = [ServiceActivity(id=act_id(i)) for i in range(1, 21)] + pgs = [ParallelGateway(id=parallel_gw_id(i)) for i in range(1, 3)] + pgs.append(ConditionalParallelGateway(id=parallel_gw_id(3), conditions={0: "123", 1: "456", 2: "789"})) + egs = [ + ExclusiveGateway(id=exclusive_gw_id(i), conditions={0: "123", 1: "456", 2: "789", 3: "101112"}) + for i in range(1, 9) + ] + cgs = [ConvergeGateway(id=converge_gw_id(i)) for i in range(1, 5)] + end = EmptyEndEvent(id=end_event_id) + + nodes = [start, end] + nodes.extend(acts) + nodes.extend(pgs) + nodes.extend(egs) + nodes.extend(cgs) + + 
start.extend(acts[_(1)]).extend(pgs[_(1)]).connect(pgs[_(2)], acts[_(5)], acts[_(6)]) + + pgs[_(2)].connect(acts[_(2)], acts[_(3)], acts[_(4)]).converge(cgs[_(1)]).extend(acts[_(7)]).extend( + egs[_(1)] + ).connect(pgs[_(2)], cgs[_(3)]) + acts[_(5)].extend(egs[_(7)]).connect(cgs[_(3)], acts[_(8)]).to(acts[_(8)]).extend(egs[_(8)]).connect( + acts[_(8)], acts[_(11)] + ).to(acts[_(11)]).extend(cgs[_(3)]) + acts[_(6)].extend(egs[_(2)]).connect(acts[_(9)], acts[_(10)],).converge(cgs[_(2)]).extend(acts[_(12)]).extend( + egs[_(6)] + ).connect(acts[_(6)], cgs[_(3)], cgs[_(2)]).to(egs[_(2)]).connect(acts[_(6)]) + + cgs[_(3)].extend(acts[_(13)]).extend(egs[_(3)]).connect(end, acts[_(14)], pgs[_(3)], acts[_(1)]) + + acts[_(14)].extend(egs[_(4)]).connect(acts[_(13)], cgs[_(4)]) + pgs[_(3)].connect(acts[_(15)], acts[_(16)], acts[_(17)]).to(acts[_(15)]).extend(acts[_(18)]).extend(cgs[_(4)]).to( + acts[_(17)] + ).extend(egs[_(5)]).connect(acts[_(19)], acts[_(20)]).to(acts[_(19)]).extend(cgs[_(4)]).to(acts[_(20)]).extend( + cgs[_(4)] + ).to( + acts[_(16)] + ).extend( + cgs[_(4)] + ).extend( + end + ) + + for node in nodes: + a = outgoing_assert[node.id] + out = {out.id for out in node.outgoing} + testcase.assertEqual( + a["len"], + len(node.outgoing), + msg="{id} actual: {a}, expect: {e}".format(id=node.id, a=len(node.outgoing), e=a["len"]), + ) + testcase.assertEqual( + a["outgoing"], out, msg="{id} actual: {a}, expect: {e}".format(id=node.id, a=out, e=a["outgoing"]) + ) + + return build_tree(start), gateway_validation_assert, stream_assert + + +def flow_valid_edge_case_1(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + act_2 = ServiceActivity(id=act_id(2)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"}) + act_3 = ServiceActivity(id=act_id(3)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(act_2).extend(eg).connect(act_1, act_2, act_3).to(act_3).extend(end) + + return build_tree(start) + + +def flow_valid_edge_case_2(): + return { + "activities": { + "act_1": { + "component": {"inputs": {}, "code": None}, + "outgoing": "82b12b6aae533e55bdcc5bccfb014c2d", + "incoming": ["3fc89273786a36b8a6e7beac8301274d"], + "name": None, + "error_ignorable": False, + "type": "ServiceActivity", + "id": "act_1", + "optional": False, + }, + "act_2": { + "component": {"inputs": {}, "code": None}, + "outgoing": "3368add44347310eaef1f26f25909026", + "incoming": ["76caeed0e6053fea9db84a89f56a74a8"], + "name": None, + "error_ignorable": False, + "type": "ServiceActivity", + "id": "act_2", + "optional": False, + }, + }, + "end_event": { + "type": "EmptyEndEvent", + "outgoing": "", + "incoming": ["05f91b45a15b37d7b0c96d3ff94bff80"], + "id": "end_event_id", + "name": None, + }, + "flows": { + "27a9cdeaef623d37834ac6917d05eac5": { + "is_default": False, + "source": "start_event_id", + "target": "pg_1", + "id": "27a9cdeaef623d37834ac6917d05eac5", + }, + "82b12b6aae533e55bdcc5bccfb014c2d": { + "is_default": False, + "source": "act_1", + "target": "cg_1", + "id": "82b12b6aae533e55bdcc5bccfb014c2d", + }, + "3368add44347310eaef1f26f25909026": { + "is_default": False, + "source": "act_2", + "target": "cg_1", + "id": "3368add44347310eaef1f26f25909026", + }, + "05f91b45a15b37d7b0c96d3ff94bff80": { + "is_default": False, + "source": "cg_1", + "target": "end_event_id", + "id": "05f91b45a15b37d7b0c96d3ff94bff80", + }, + "3fc89273786a36b8a6e7beac8301274d": { + "is_default": False, + "source": "pg_1", + "target": "act_1", + "id": 
"3fc89273786a36b8a6e7beac8301274d", + }, + "76caeed0e6053fea9db84a89f56a74a8": { + "is_default": False, + "source": "pg_1", + "target": "act_2", + "id": "76caeed0e6053fea9db84a89f56a74a8", + }, + "76casdgd0e6053ea9db84a89f56a1234": { + "is_default": False, + "source": "pg_1", + "target": "cg_1", + "id": "76caeed0e6053fea9db84a89f56a74a8", + }, + }, + "gateways": { + "cg_1": { + "type": "ConvergeGateway", + "outgoing": "05f91b45a15b37d7b0c96d3ff94bff80", + "incoming": [ + "82b12b6aae533e55bdcc5bccfb014c2d", + "3368add44347310eaef1f26f25909026", + "76casdgd0e6053ea9db84a89f56a1234", + ], + "id": "cg_1", + "name": None, + }, + "pg_1": { + "outgoing": [ + "3fc89273786a36b8a6e7beac8301274d", + "76caeed0e6053fea9db84a89f56a74a8", + "76casdgd0e6053ea9db84a89f56a1234", + ], + "incoming": ["27a9cdeaef623d37834ac6917d05eac5"], + "name": None, + "converge_gateway_id": "cg_1", + "type": "ParallelGateway", + "id": "pg_1", + }, + }, + "start_event": { + "type": "EmptyStartEvent", + "outgoing": "27a9cdeaef623d37834ac6917d05eac5", + "incoming": "", + "id": "start_event_id", + "name": None, + }, + "data": {"inputs": {}, "outputs": {}}, + "id": "c986802cd1e23a5f920c85b005f16dc3", + } + + +def flow_valid_edge_case_3(): + + start = EmptyStartEvent() + end = EmptyEndEvent() + eg_1 = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + eg_2 = ExclusiveGateway(id=exclusive_gw_id(2), conditions={0: "123", 1: "456"}) + eg_3 = ExclusiveGateway(id=exclusive_gw_id(3), conditions={0: "123", 1: "456"}) + eg_4 = ExclusiveGateway(id=exclusive_gw_id(4), conditions={0: "123", 1: "456"}) + pg_1 = ParallelGateway(id=parallel_gw_id(1)) + cg = ConvergeGateway(id=converge_gw_id(1)) + + start.connect(eg_1) + eg_1.connect(pg_1, end) + pg_1.connect(eg_2, eg_3) + eg_2.connect(eg_2, cg) + eg_3.connect(eg_4, eg_4) + eg_4.connect(eg_4, cg) + cg.connect(end) + + return build_tree(start) + + +def flow_valid_edge_case_4(): + start = EmptyStartEvent(id=start_event_id) + pg = ParallelGateway(id=parallel_gw_id(1)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"}) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(pg).connect(cg, eg) + eg.connect(eg, cg) + cg.connect(end) + + return build_tree(start) + + +def flow_valid_edge_case_5(): + start = EmptyStartEvent(id=start_event_id) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"}) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(eg).connect(cg, cg, end) + cg.connect(eg) + + return build_tree(start) + + +def flow_invalid_case_1(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + pg = ParallelGateway(id=parallel_gw_id(1)) + act_2 = ServiceActivity(id=act_id(2)) + act_3 = ServiceActivity(id=act_id(3)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + act_4 = ServiceActivity(id=act_id(4)) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(pg).connect(act_2, act_3, eg).to(eg).connect(act_3, act_4) + + act_2.connect(cg) + act_3.connect(cg) + act_4.connect(cg) + cg.extend(end) + + return build_tree(start) + + +def flow_invalid_case_2(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + act_2 = ServiceActivity(id=act_id(2)) + pg = 
ParallelGateway(id=parallel_gw_id(1)) + act_3 = ServiceActivity(id=act_id(3)) + act_4 = ServiceActivity(id=act_id(4)) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(eg).connect(act_3, act_2).to(act_2).extend(pg).connect(act_3, act_4).converge(cg).extend( + end + ) + + return build_tree(start) + + +flow_valid_edge_cases = [ + {"case": flow_valid_edge_case_1}, + {"case": flow_valid_edge_case_2}, + {"case": flow_valid_edge_case_3}, + {"case": flow_valid_edge_case_4}, + {"case": flow_valid_edge_case_5}, +] + +flow_invalid_cases = [ + {"case": flow_invalid_case_1, "assert_invalid": act_id(3)}, + {"case": flow_invalid_case_2, "assert_invalid": act_id(3)}, +] + + +def gateway_valid_case(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [exclusive_gw_id(1)], + PE.id: converge_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 5, + "in_len": 3, + }, + converge_gw_id(2): { + PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [exclusive_gw_id(6)], + PE.id: converge_gw_id(2), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 5, + "in_len": 1, + }, + converge_gw_id(3): { + PE.incoming: [1, 2, 3, 4], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [exclusive_gw_id(3)], + PE.id: converge_gw_id(3), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 7, + "in_len": 4, + }, + converge_gw_id(4): { + PE.incoming: [1, 2, 3, 4, 5], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [end_event_id], + PE.id: converge_gw_id(4), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 9, + "in_len": 5, + }, + converge_gw_id(5): { + PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [parallel_gw_id(1)], + PE.id: converge_gw_id(5), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 2, + "in_len": 3, + }, + } + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [parallel_gw_id(2), converge_gw_id(3)], + PE.id: exclusive_gw_id(1), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + exclusive_gw_id(2): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [exclusive_gw_id(2), converge_gw_id(2), converge_gw_id(2)], + PE.id: exclusive_gw_id(2), + "match": None, + "match_assert": converge_gw_id(2), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(3): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [parallel_gw_id(4), end_event_id, exclusive_gw_id(4), parallel_gw_id(3), parallel_gw_id(1)], + PE.id: exclusive_gw_id(3), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": True, + "distance": 8, + }, + exclusive_gw_id(4): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [exclusive_gw_id(3), converge_gw_id(4)], + PE.id: exclusive_gw_id(4), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + 
"distance": 8, + }, + exclusive_gw_id(5): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [converge_gw_id(4), converge_gw_id(4)], + PE.id: exclusive_gw_id(5), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 9, + }, + exclusive_gw_id(6): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [converge_gw_id(2), converge_gw_id(3)], + PE.id: exclusive_gw_id(6), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + exclusive_gw_id(7): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [converge_gw_id(3), exclusive_gw_id(8)], + PE.id: exclusive_gw_id(7), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(8): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [exclusive_gw_id(7), converge_gw_id(3)], + PE.id: exclusive_gw_id(8), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 5, + }, + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [parallel_gw_id(2), exclusive_gw_id(7), exclusive_gw_id(2)], + PE.id: parallel_gw_id(1), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 3, + }, + parallel_gw_id(2): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), converge_gw_id(1)], + PE.id: parallel_gw_id(2), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + parallel_gw_id(3): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [converge_gw_id(4), converge_gw_id(4), exclusive_gw_id(5)], + PE.id: parallel_gw_id(3), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 9, + }, + parallel_gw_id(4): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(5), converge_gw_id(5), converge_gw_id(5)], + PE.id: parallel_gw_id(4), + "match": None, + "match_assert": converge_gw_id(5), + "converge_end": None, + "converge_end_assert": False, + "distance": 1, + }, + } + stack = Stack() + converge_in = {} + distances = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(4), distances, converge_in + + +def gateway_valid_edge_case_1(): + converge = {} + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [exclusive_gw_id(1), exclusive_gw_id(1), end_event_id], + PE.id: exclusive_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": True, + "distance": 2, + } + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, 
stack, end_event_id, exclusive_gw_id(1), distances, converge_in + + +def gateway_valid_edge_case_2(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [end_event_id], + PE.id: converge_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 3, + "in_len": 2, + }, + } + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [exclusive_gw_id(1), converge_gw_id(1)], + PE.id: exclusive_gw_id(1), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 2, + }, + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), exclusive_gw_id(1)], + PE.id: parallel_gw_id(1), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_1(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 3, + }, + } + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [converge_gw_id(1), end_event_id], + PE.id: exclusive_gw_id(1), + "match": None, + "distance": 2, + }, + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), exclusive_gw_id(1)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_2(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2, 3, 4], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 4, + }, + } + gateway = { + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(2)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + parallel_gw_id(2): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 2, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_3(): + converge = { + converge_gw_id(1): { + 
PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 4, + } + } + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [parallel_gw_id(1), converge_gw_id(1)], + PE.id: exclusive_gw_id(1), + "match": None, + "distance": 2, + }, + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), exclusive_gw_id(1)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_4(): + converge = {} + gateway = { + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [end_event_id, end_event_id, end_event_id], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_5(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 2, + }, + converge_gw_id(2): { + PE.incoming: [3, 4], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(2), + "match": None, + "distance": 2, + "in_len": 2, + }, + } + gateway = { + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ConditionalParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), converge_gw_id(2), converge_gw_id(2)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_6(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 2, + }, + converge_gw_id(2): { + PE.incoming: [3, 4], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(2), + "match": None, + "distance": 2, + "in_len": 2, + }, + } + gateway = { + exclusive_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ExclusiveGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), converge_gw_id(2), converge_gw_id(2)], + PE.id: exclusive_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = 
c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, exclusive_gw_id(1), distances, converge_in + + +def gateway_invalid_case_7(): + converge = { + converge_gw_id(1): { + PE.incoming: [1, 2], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 2, + }, + converge_gw_id(2): { + PE.incoming: [1, 2, 3], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(2), + "match": None, + "distance": 4, + "in_len": 3, + }, + } + gateway = { + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(2), converge_gw_id(2), parallel_gw_id(2)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + parallel_gw_id(2): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(1)], + PE.id: parallel_gw_id(2), + "match": None, + "distance": 2, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +def gateway_invalid_case_8(): + converge = { + converge_gw_id(1): { + PE.incoming: [1], + PE.outgoing: [], + PE.type: PE.ConvergeGateway, + PE.target: [], + PE.id: converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 1, + }, + } + gateway = { + parallel_gw_id(1): { + PE.incoming: [], + PE.outgoing: [], + PE.type: PE.ParallelGateway, + PE.target: [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(1)], + PE.id: parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = Stack() + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return converge, gateway, stack, end_event_id, parallel_gw_id(1), distances, converge_in + + +gateway_valid_cases = [ + {"case": gateway_valid_case}, + {"case": gateway_valid_edge_case_1}, + {"case": gateway_valid_edge_case_2}, +] + +gateway_invalid_cases = [ + {"case": gateway_invalid_case_1, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_2, "invalid_assert": converge_gw_id(1)}, + {"case": gateway_invalid_case_3, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_4, "invalid_assert": parallel_gw_id(1)}, + {"case": gateway_invalid_case_5, "invalid_assert": parallel_gw_id(1)}, + {"case": gateway_invalid_case_6, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_7, "invalid_assert": parallel_gw_id(2)}, + {"case": gateway_invalid_case_8, "invalid_assert": parallel_gw_id(1)}, +] diff --git a/runtime/bamboo-pipeline/pipeline/tests/validators/test_gateway.py b/runtime/bamboo-pipeline/pipeline/tests/validators/test_gateway.py new file mode 100644 index 00000000..2472c3e4 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/validators/test_gateway.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.test import TestCase + +from pipeline.tests.validators.cases import * # noqa + + +class TestStreamValidation(TestCase): + def test_distance_from_start(self): + tree, gateway_validation_assert, _ = flow_valid_case(self) + distances = {} + for gid, g in list(tree[PE.gateways].items()): + distance_from(origin=tree[PE.start_event], node=g, tree=tree, marked=distances) + + for gid, ga in list(gateway_validation_assert.items()): + actual = distances[gid] + expect = ga["distance"] + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=gid, a=actual, e=expect)) + + for gid, ga in list(gateway_validation_assert.items()): + actual = distance_from(origin=tree[PE.start_event], node=tree[PE.gateways][gid], tree=tree, marked={}) + expect = ga["distance"] + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=gid, a=actual, e=expect)) + + def test_match_converge(self): + for n, i in enumerate(gateway_valid_cases, start=1): + converge, gateway, stack, eid, start, distances, in_len = i["case"]() + block_nodes = {start: set()} + + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + if converge_id: + while converge[converge_id][PE.target][0] != eid: + start = converge[converge_id][PE.target][0] + block_nodes[start] = set() + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + if converge_id is None: + break + + for _, c in list(converge.items()): + actual = c["match"] + expect = c["match_assert"] + self.assertEqual( + actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=c[PE.id], a=actual, e=expect) + ) + + actual = c["converge_end"] + expect = c["converge_end_assert"] + self.assertEqual( + actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=c[PE.id], a=actual, e=expect) + ) + + for _, g in list(gateway.items()): + actual = g["match"] + expect = g["match_assert"] + self.assertEqual( + actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=g[PE.id], a=actual, e=expect) + ) + + actual = g["converge_end"] + expect = g["converge_end_assert"] + self.assertEqual( + actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=g[PE.id], a=actual, e=expect) + ) + + for n, i in enumerate(gateway_invalid_cases, start=1): + converge, gateway, stack, eid, start, distances, in_len = i["case"]() + invalid = False + block_nodes = {start: set()} + try: + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + while converge[converge_id][PE.target][0] != eid: + 
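+                    # walk forward block by block: the matched converge gateway's first
+                    # target becomes the next block start, until the end event is reached
+                    # or ConvergeMatchError fires (expected for these invalid fixtures)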
start = converge[converge_id][PE.target][0] + block_nodes[start] = set() + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + except exceptions.ConvergeMatchError as e: + invalid = True + actual = e.gateway_id + expect = i["invalid_assert"] + self.assertEqual( + actual, expect, msg="invalid assert{id} actual: {a}, expect: {e}".format(id=n, a=actual, e=expect) + ) + + self.assertTrue(invalid, msg="invalid case %s expect raise exception" % n) + + def test_validate_gateway(self): + tree, gateway_validation_assert, _ = flow_valid_case(self) + converged = validate_gateways(tree) + + for cid, converge_items in list(converged.items()): + actual = len(converge_items) + expect = gateway_validation_assert[cid]["converged_len"] + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=cid, a=actual, e=expect)) + + actual = set(converge_items) + expect = gateway_validation_assert[cid]["converged"] + + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=cid, a=actual, e=expect)) + + for gid, gateway in list(tree[PE.gateways].items()): + if gateway[PE.type] != PE.ConvergeGateway: + actual = gateway[PE.converge_gateway_id] + expect = gateway_validation_assert[gid]["match_assert"] + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=gid, a=actual, e=expect)) + + # edge cases + for i, c in enumerate(flow_valid_edge_cases): + tree = c["case"]() + print(f"test gateway valid edge case {i+1}") + converged = validate_gateways(tree) + + def test_validate_stream(self): + + tree, gateway_validation_assert, stream_assert = flow_valid_case(self) + validate_gateways(tree) + data = validate_stream(tree) + + for nid, expect in list(stream_assert.items()): + actual = data[nid][STREAM] + self.assertEqual(actual, expect, msg="{id} actual: {a}, expect: {e}".format(id=nid, a=actual, e=expect)) + + for n, c in enumerate(flow_valid_edge_cases): + tree = c["case"]() + validate_gateways(tree) + try: + validate_stream(tree) + except Exception as e: + self.assertTrue(False, msg="valid edge case {} raise exception: {}".format(n, e)) + + for n, item in enumerate(flow_invalid_cases, start=1): + tree = item["case"]() + invalid = False + validate_gateways(tree) + try: + validate_stream(tree) + except exceptions.StreamValidateError as e: + actual = e.node_id + expect = item["assert_invalid"] + self.assertEqual( + actual, expect, msg="invalid assert{id} actual: {a}, expect: {e}".format(id=n, a=actual, e=expect) + ) + invalid = True + + self.assertTrue(invalid, msg="invalid case %s expect raise exception" % n) diff --git a/runtime/bamboo-pipeline/pipeline/tests/validators/utils.py b/runtime/bamboo-pipeline/pipeline/tests/validators/utils.py new file mode 100644 index 00000000..d2d8cd6f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/tests/validators/utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +start_event_id = "start_event_id" +end_event_id = "end_event_id" + + +def exclusive_gw_id(num): + return "eg_%s" % num + + +def converge_gw_id(num): + return "cg_%s" % num + + +def parallel_gw_id(num): + return "pg_%s" % num + + +def act_id(num): + return "act_%s" % num diff --git a/runtime/bamboo-pipeline/pipeline/utils/__init__.py b/runtime/bamboo-pipeline/pipeline/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/utils/boolrule/__init__.py b/runtime/bamboo-pipeline/pipeline/utils/boolrule/__init__.py new file mode 100644 index 00000000..28fa00af --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/boolrule/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +__author__ = "Steve Webster" +__email__ = "spjwebster@gmail.com" +__version__ = "0.2.1" + +from .boolrule import BoolRule, MissingVariableException, UnknownOperatorException # noqa diff --git a/runtime/bamboo-pipeline/pipeline/utils/boolrule/boolrule.py b/runtime/bamboo-pipeline/pipeline/utils/boolrule/boolrule.py new file mode 100644 index 00000000..3708440e --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/boolrule/boolrule.py @@ -0,0 +1,287 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pyparsing import ( + CaselessLiteral, + Combine, + Forward, + Group, + Keyword, + Optional, + ParseException, + ParseResults, + QuotedString, + Suppress, + Word, + ZeroOrMore, + alphanums, + alphas, + delimitedList, + nums, + oneOf, +) + + +class SubstituteVal(object): + """ + Represents a token that will later be replaced by a context value. + """ + + def __init__(self, t): + self._path = t[0] + + def get_val(self, context): + if not context: + # raise MissingVariableException( + # 'context missing or empty' + # ) + return self._path + + val = context + + try: + for part in self._path.split(pathDelimiter): + val = getattr(val, part) if hasattr(val, part) else val[part] + + except KeyError: + raise MissingVariableException("no value supplied for {}".format(self._path)) + + return val + + def __repr__(self): + return "SubstituteVal(%s)" % self._path + + +# Grammar definition +pathDelimiter = "." +# match gcloud's variable +identifier = Combine(Optional("${") + Optional("_") + Word(alphas, alphanums + "_") + Optional("}")) +# identifier = Word(alphas, alphanums + "_") +propertyPath = delimitedList(identifier, pathDelimiter, combine=True) + +and_ = Keyword("and", caseless=True) +or_ = Keyword("or", caseless=True) +in_ = Keyword("in", caseless=True) + +lparen = Suppress("(") +rparen = Suppress(")") + +binaryOp = oneOf("== != < > >= <= in notin issuperset notissuperset", caseless=True)("operator") + +E = CaselessLiteral("E") +numberSign = Word("+-", exact=1) +realNumber = Combine( + Optional(numberSign) + + (Word(nums) + "." + Optional(Word(nums)) | ("." 
+ Word(nums)))
+    + Optional(E + Optional(numberSign) + Word(nums))
+)
+
+integer = Combine(Optional(numberSign) + Word(nums) + Optional(E + Optional("+") + Word(nums)))
+
+# str_ = quotedString.addParseAction(removeQuotes)
+str_ = QuotedString('"') | QuotedString("'")
+bool_ = oneOf("true false", caseless=True)
+
+simpleVals = (
+    realNumber.setParseAction(lambda toks: float(toks[0]))
+    | integer.setParseAction(lambda toks: int(toks[0]))
+    | str_
+    | bool_.setParseAction(lambda toks: toks[0] == "true")
+    | propertyPath.setParseAction(lambda toks: SubstituteVal(toks))
+)  # TODO: add support for algebraic expressions
+
+propertyVal = simpleVals | (lparen + Group(delimitedList(simpleVals)) + rparen)
+
+boolExpression = Forward()
+boolCondition = Group(
+    (Group(propertyVal)("lval") + binaryOp + Group(propertyVal)("rval")) | (lparen + boolExpression + rparen)
+)
+boolExpression << boolCondition + ZeroOrMore((and_ | or_) + boolExpression)
+
+
+def double_equals_trans(lval, rval, operator):
+    # "loose equality": coerce operand types so that mixed int/str operands
+    # still compare the way users expect
+    if operator in ["in", "notin"]:
+        if isinstance(rval, list) and len(rval):
+            transed_rval = []
+            if isinstance(lval, int):
+                for item in rval:
+                    try:
+                        transed_rval.append(int(item))
+                    except Exception:
+                        pass
+            elif isinstance(lval, str):
+                for item in rval:
+                    try:
+                        transed_rval.append(str(item))
+                    except Exception:
+                        pass
+            rval += transed_rval
+
+    elif operator in ["issuperset", "notissuperset"]:
+        # avoid converting set('abc') into {'a', 'b', 'c'}; keep it as {'abc'}
+        if isinstance(lval, str):
+            lval = [lval]
+        if isinstance(rval, str):
+            rval = [rval]
+
+    else:
+        try:
+            if isinstance(lval, int):
+                rval = int(rval)
+            elif isinstance(rval, int):
+                lval = int(lval)
+            if isinstance(lval, str):
+                rval = str(rval)
+            elif isinstance(rval, str):
+                lval = str(lval)
+        except Exception:
+            pass
+
+    return lval, rval
+
+
+class BoolRule(object):
+    """
+    Represents a boolean expression and provides a `test` method to evaluate
+    the expression and determine its truthiness.
+
+    :param query: A string containing the query to be evaluated
+    :param lazy: If ``True``, parse the query the first time it's tested rather
+                 than immediately. This can help with performance if you
+                 instantiate a lot of rules and only end up evaluating a
+                 small handful.
+    :param strict: If ``True``, the whole query string must parse
+                   (``parseAll=True``); trailing unparsed input raises
+                   ``ParseException``.
+    """
+
+    _compiled = False
+    _tokens = None
+    _query = None
+
+    def __init__(self, query, lazy=False, strict=True):
+        self._query = query
+        self.strict = strict
+        if not lazy:
+            self._compile()
+
+    def test(self, context=None):
+        """
+        Test the expression against the given context and return the result.
+
+        :param context: A dict context to evaluate the expression against.
+        :return: True if the expression successfully evaluated against the
+                 context, or False otherwise.
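+
+        A minimal illustrative sketch (note that context keys carry the ``${}``
+        wrapper, matching the variable grammar above):
+
+            BoolRule('${v1} == 1').test({"${v1}": 1})  # True
+            BoolRule('1 in (1, 2)').test()             # True
+            BoolRule('*').test()                       # match-all query, always True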
+ """ + if self._is_match_all(): + return True + + self._compile() + return self._test_tokens(self._tokens, context) + + def _is_match_all(self): + return True if self._query == "*" else False + + def _compile(self): + if not self._compiled: + + # special case match-all query + if self._is_match_all(): + return + + try: + self._tokens = boolExpression.parseString(self._query, parseAll=self.strict) + except ParseException: + raise + + self._compiled = True + + def _expand_val(self, val, context): + if type(val) == list: + val = [self._expand_val(v, context) for v in val] + + if isinstance(val, SubstituteVal): + ret = val.get_val(context) + return ret + + if isinstance(val, ParseResults): + return [self._expand_val(x, context) for x in val.asList()] + + return val + + def _test_tokens(self, tokens, context): + passed = False + + for token in tokens: + + if not isinstance(token, ParseResults): + if token == "or" and passed: + return True + elif token == "and" and not passed: + return False + continue + + if not token.getName(): + passed = self._test_tokens(token, context) + continue + + items = token.asDict() + + operator = items["operator"] + lval = self._expand_val(items["lval"][0], context) + rval = self._expand_val(items["rval"][0], context) + lval, rval = double_equals_trans(lval, rval, operator) + + if operator in ("=", "==", "eq"): + passed = lval == rval + elif operator in ("!=", "ne"): + passed = lval != rval + elif operator in (">", "gt"): + passed = lval > rval + elif operator in (">=", "ge"): + passed = lval >= rval + elif operator in ("<", "lt"): + passed = lval < rval + elif operator in ("<=", "le"): + passed = lval <= rval + elif operator == "in": + passed = lval in rval + elif operator == "notin": + passed = lval not in rval + elif operator == "issuperset": + passed = set(lval).issuperset(set(rval)) + elif operator == "notissuperset": + passed = not set(lval).issuperset(set(rval)) + else: + raise UnknownOperatorException("Unknown operator '{}'".format(operator)) + + return passed + + +class MissingVariableException(Exception): + """ + Raised when an expression contains a property path that's not supplied in + the context. + """ + + pass + + +class UnknownOperatorException(Exception): + """ + Raised when an expression uses an unknown operator. + + This should never be thrown since the operator won't be correctly parsed as + a token by pyparsing, but it's useful to have this hanging around for when + additional operators are being added. + """ + + pass diff --git a/runtime/bamboo-pipeline/pipeline/utils/collections.py b/runtime/bamboo-pipeline/pipeline/utils/collections.py new file mode 100644 index 00000000..e45c26df --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/collections.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + + +class FancyDict(dict): + def __getattr__(self, key): + try: + return self[key] + except KeyError as k: + raise AttributeError(k) + + def __setattr__(self, key, value): + # 内建属性不放入 key 中 + if key.startswith("__") and key.endswith("__"): + super().__setattr__(key, value) + else: + self[key] = value + + def __delattr__(self, key): + try: + del self[key] + except KeyError as k: + raise AttributeError(k) diff --git a/runtime/bamboo-pipeline/pipeline/utils/crypt.py b/runtime/bamboo-pipeline/pipeline/utils/crypt.py new file mode 100644 index 00000000..73229974 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/crypt.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import base64 +import rsa + + +def rsa_decrypt_password(encrypted_password, private_key): + return rsa.decrypt( + base64.decodestring(encrypted_password.encode("utf-8")), rsa.PrivateKey.load_pkcs1(private_key) + ).decode("utf-8") diff --git a/runtime/bamboo-pipeline/pipeline/utils/dj.py b/runtime/bamboo-pipeline/pipeline/utils/dj.py new file mode 100644 index 00000000..15f7951f --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/dj.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import sys + + +def in_test(): + return sys.argv[1:2] == ["test"] diff --git a/runtime/bamboo-pipeline/pipeline/utils/env.py b/runtime/bamboo-pipeline/pipeline/utils/env.py new file mode 100644 index 00000000..9d08c071 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/env.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +import sys +import traceback + +DJANGO_MANAGE_CMD = "manage.py" + + +def get_django_command(): + if sys.argv and sys.argv[0] == DJANGO_MANAGE_CMD: + try: + return sys.argv[1] + except Exception: + print( + "get django start up command error with argv: {argv}, traceback: {traceback}".format( + argv=sys.argv, traceback=traceback.format_exc() + ) + ) + + return None + + return None diff --git a/runtime/bamboo-pipeline/pipeline/utils/graph.py b/runtime/bamboo-pipeline/pipeline/utils/graph.py new file mode 100644 index 00000000..fad1c14a --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/graph.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class Graph(object): + def __init__(self, nodes, flows): + self.nodes = nodes + self.flows = flows + self.path = [] + self.last_visited_node = "" + self.graph = {node: [] for node in self.nodes} + for flow in self.flows: + self.graph[flow[0]].append(flow[1]) + + def has_cycle(self): + self.path = [] + visited = {node: False for node in self.nodes} + visit_stack = {node: False for node in self.nodes} + + for node in self.nodes: + if self._has_cycle(node, visited, visit_stack): + return True + return False + + def _has_cycle(self, node, visited, visit_stack): + self.last_visited_node = node + self.path.append(node) + visited[node] = True + visit_stack[node] = True + + for neighbor in self.graph[node]: + if not visited[neighbor]: + if self._has_cycle(neighbor, visited, visit_stack): + return True + elif visit_stack[neighbor]: + self.path.append(neighbor) + return True + + self.path.remove(node) + visit_stack[node] = False + return False + + def get_cycle(self): + if self.has_cycle(): + cross_node = self.path[-1] + if self.path.count(cross_node) > 1: + return self.path[self.path.index(cross_node) :] + else: + return self.path + return [] + + +if __name__ == "__main__": + graph1 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]]) + assert not graph1.has_cycle() + assert graph1.get_cycle() == [] + graph2 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 1]]) + assert graph2.has_cycle() + assert graph2.get_cycle() == [1, 2, 3, 4, 1] + graph3 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 2]]) + assert graph3.has_cycle() + assert graph3.get_cycle() == [2, 3, 4, 2] + graph4 = Graph( + [ + "n20c4a0601193f268bfa168f1192eacd", + "nef42d10350b3961b53df7af67e16d9b", + "n0ada7b4abe63771a43052eaf188dc4b", + "n0cd3b95c714388bacdf1a486ab432fc", + "n1430047af8537f88710c4bbf3cbfb0f", + "n383748fe27434d582f0ca17af9d968a", + "n51426abd4be3a4691c80a73c3f93b3c", + "n854753a77933562ae72ec87c365f23d", + "n89f083892a731d7b9d7edb0f372006d", + "n8d4568db0ad364692b0387e86a2f1e0", + "n8daedbb02273a0fbc94cc118c90649f", + "n90b7ef55fe839b181879e036b4f8ffe", + 
"n99817348b4a36a6931854c93eed8c5f", + "na02956eba6f3a36ab9b0af2f2350213", + "nc3d0d49adf530bbaffe53630c184c0a", + "nca50848d1aa340f8c2b4776ce81868d", + "ncab9a48e79d357195dcee68dad3a31f", + "ncb4e013a6a8348bab087cc8500a3876", + "ne1f86f902a23e7fa4a67192e8b38a05", + "ne26def77df1385caa206c64e7e3ea53", + "nf3ebee137c53da28091ad7d140ce00c", + "nfc1dcdd7476393b9a81a988c113e1cf", + "n0197f8f210b3a1b8a7fc2f90e94744e", + "n01fb40259ad3cf285bb11a8bbbe59f2", + "n03f39191e8a32629145ba6a677ed040", + "n03ffc3b9e12316d8be63261cb9dec71", + "n07982b8985139249bca3a046f3a4379", + "n0b9e36e6b633ddb906d2044f658f110", + "n136c4fedebe3eb0ba932495aff6a945", + "n17cdc62c5d43976a413bda8f35634eb", + "n1d48483d8023439ad98d61d156c85fb", + "n26725bdcc0931fab0bc73e7244545ca", + "n2890db24f6c3cd1bbcd6b7d8cf2c045", + "n2ad9caac5b737bd897d4c8844c85f12", + "n2c88d1c1d8b35aebf883cbf259fb6bc", + "n302d25dfc9c369ab13104d5208e7119", + "n31688b7ab44338e9e6cb8dcaf259eef", + "n374443fbdc1313d98ebbe19d535fec2", + "n38c3dd0344a3f86bc7511c454bcdf4c", + "n3934eef90463940a6a9cf4ba2e63b1c", + "n40d5f0ca4bc3dd99c0b264cb186f00f", + "n476ddcb6dd33e2abac43596b08c2bc1", + "n4790f8aa48e335aa712e2af757e180b", + "n48bbfdc912334fc89c4f48c05e8969e", + "n5bef4f4532a382eaf79a0af70b2396b", + "n5ced56bcc863060ac4977755f35a5f5", + "n66a0562670e37648a3e05c243335bff", + "n6dc118cd3f7341d9ef8c97c63e2e9d9", + "n6e9d52e1ea53958a93e5b34022e7037", + "n786694b5ed33295a885b5bcd8c7c1ce", + "n7dccd56c80233469a4609f684ebe457", + "n8492d92ab6a3da48c2b49d6fcb8a479", + "n86a8b1a56f9399f90c4c227594a9d03", + "n8a805c0cd02307bad9f7828880b53dc", + "n8c7e35b0457300d9d6a96a6b1d18329", + "n91fdaed36403d06a07f4afe85e2892c", + "n9335d0718a937f9a39ec5b36d5637fe", + "n9372fb07ad936cba31f3d4e440f395a", + "n9ab96f926d83a93a5d3ebe2888fd343", + "na2a8a54e68033d0a276eb88dbff91c3", + "na493a7b5d5b3cc29f4070a6c4589cb7", + "nadfa68cb2503a39aac6626d6c72484a", + "nae1218ddd2e3448b562bc79dc084401", + "nc012287be793377b975b0230b35d713", + "ncb2e01f0c5336fe82b0e0e496f2612b", + "ncb5843900903b4c8a0a8302474d8c51", + "ncbf4db2c48f3348b2c7081f9e3b363a", + "nd4ee6c3248935ce9239e4bb20a81ab8", + "ndb1cf7af0e2319c9868530d0df8fd93", + "ne36a6858a733430bffa4fec053dc1ab", + "ne7af4a7c3613b3d81fe9e6046425a36", + "ne8035dd8de732758c1cc623f80f2fc8", + "ned91fdb914c35f3a21f320f62d72ffd", + "nf5448b3c66430f4a299d08208d313a6", + "nfaa0756a06f300495fb2e2e45e05ed3", + ], + [ + ["n8d4568db0ad364692b0387e86a2f1e0", "n5bef4f4532a382eaf79a0af70b2396b"], + ["n8daedbb02273a0fbc94cc118c90649f", "nf5448b3c66430f4a299d08208d313a6"], + ["n01fb40259ad3cf285bb11a8bbbe59f2", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["ncab9a48e79d357195dcee68dad3a31f", "n0197f8f210b3a1b8a7fc2f90e94744e"], + ["na493a7b5d5b3cc29f4070a6c4589cb7", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n89f083892a731d7b9d7edb0f372006d", "n136c4fedebe3eb0ba932495aff6a945"], + ["n51426abd4be3a4691c80a73c3f93b3c", "n9ab96f926d83a93a5d3ebe2888fd343"], + ["n89f083892a731d7b9d7edb0f372006d", "n8492d92ab6a3da48c2b49d6fcb8a479"], + ["n17cdc62c5d43976a413bda8f35634eb", "n6e9d52e1ea53958a93e5b34022e7037"], + ["n476ddcb6dd33e2abac43596b08c2bc1", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n6dc118cd3f7341d9ef8c97c63e2e9d9", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n91fdaed36403d06a07f4afe85e2892c", "ncb4e013a6a8348bab087cc8500a3876"], + ["n8a805c0cd02307bad9f7828880b53dc", "n3934eef90463940a6a9cf4ba2e63b1c"], + ["n2890db24f6c3cd1bbcd6b7d8cf2c045", "n0ada7b4abe63771a43052eaf188dc4b"], + ["ned91fdb914c35f3a21f320f62d72ffd", "n383748fe27434d582f0ca17af9d968a"], + 
["n89f083892a731d7b9d7edb0f372006d", "n0b9e36e6b633ddb906d2044f658f110"], + ["nc3d0d49adf530bbaffe53630c184c0a", "na493a7b5d5b3cc29f4070a6c4589cb7"], + ["ncb2e01f0c5336fe82b0e0e496f2612b", "nc012287be793377b975b0230b35d713"], + ["n86a8b1a56f9399f90c4c227594a9d03", "nf3ebee137c53da28091ad7d140ce00c"], + ["nc3d0d49adf530bbaffe53630c184c0a", "nadfa68cb2503a39aac6626d6c72484a"], + ["na02956eba6f3a36ab9b0af2f2350213", "na2a8a54e68033d0a276eb88dbff91c3"], + ["n8daedbb02273a0fbc94cc118c90649f", "n07982b8985139249bca3a046f3a4379"], + ["n136c4fedebe3eb0ba932495aff6a945", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n9372fb07ad936cba31f3d4e440f395a", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n8d4568db0ad364692b0387e86a2f1e0", "n91fdaed36403d06a07f4afe85e2892c"], + ["n854753a77933562ae72ec87c365f23d", "n40d5f0ca4bc3dd99c0b264cb186f00f"], + ["n854753a77933562ae72ec87c365f23d", "n1d48483d8023439ad98d61d156c85fb"], + ["n9ab96f926d83a93a5d3ebe2888fd343", "n383748fe27434d582f0ca17af9d968a"], + ["ne36a6858a733430bffa4fec053dc1ab", "n0cd3b95c714388bacdf1a486ab432fc"], + ["n03ffc3b9e12316d8be63261cb9dec71", "nca50848d1aa340f8c2b4776ce81868d"], + ["ne8035dd8de732758c1cc623f80f2fc8", "n0ada7b4abe63771a43052eaf188dc4b"], + ["n51426abd4be3a4691c80a73c3f93b3c", "ned91fdb914c35f3a21f320f62d72ffd"], + ["nd4ee6c3248935ce9239e4bb20a81ab8", "nfaa0756a06f300495fb2e2e45e05ed3"], + ["n5bef4f4532a382eaf79a0af70b2396b", "ncb4e013a6a8348bab087cc8500a3876"], + ["ne26def77df1385caa206c64e7e3ea53", "n786694b5ed33295a885b5bcd8c7c1ce"], + ["n854753a77933562ae72ec87c365f23d", "ne8035dd8de732758c1cc623f80f2fc8"], + ["n374443fbdc1313d98ebbe19d535fec2", "ndb1cf7af0e2319c9868530d0df8fd93"], + ["nfaa0756a06f300495fb2e2e45e05ed3", "n8c7e35b0457300d9d6a96a6b1d18329"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n26725bdcc0931fab0bc73e7244545ca"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ncb2e01f0c5336fe82b0e0e496f2612b"], + ["ncb5843900903b4c8a0a8302474d8c51", "ncb4e013a6a8348bab087cc8500a3876"], + ["nf5448b3c66430f4a299d08208d313a6", "nf3ebee137c53da28091ad7d140ce00c"], + ["n20c4a0601193f268bfa168f1192eacd", "nd4ee6c3248935ce9239e4bb20a81ab8"], + ["nca50848d1aa340f8c2b4776ce81868d", "nc3d0d49adf530bbaffe53630c184c0a"], + ["na02956eba6f3a36ab9b0af2f2350213", "n03ffc3b9e12316d8be63261cb9dec71"], + ["n7dccd56c80233469a4609f684ebe457", "n8daedbb02273a0fbc94cc118c90649f"], + ["n0ada7b4abe63771a43052eaf188dc4b", "na02956eba6f3a36ab9b0af2f2350213"], + ["n9335d0718a937f9a39ec5b36d5637fe", "n99817348b4a36a6931854c93eed8c5f"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n5ced56bcc863060ac4977755f35a5f5"], + ["ncb4e013a6a8348bab087cc8500a3876", "ne26def77df1385caa206c64e7e3ea53"], + ["na02956eba6f3a36ab9b0af2f2350213", "n4790f8aa48e335aa712e2af757e180b"], + ["nc012287be793377b975b0230b35d713", "ncb4e013a6a8348bab087cc8500a3876"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ncb5843900903b4c8a0a8302474d8c51"], + ["n40d5f0ca4bc3dd99c0b264cb186f00f", "n0ada7b4abe63771a43052eaf188dc4b"], + ["n38c3dd0344a3f86bc7511c454bcdf4c", "n17cdc62c5d43976a413bda8f35634eb"], + ["n6e9d52e1ea53958a93e5b34022e7037", "n90b7ef55fe839b181879e036b4f8ffe"], + ["nf3ebee137c53da28091ad7d140ce00c", "n51426abd4be3a4691c80a73c3f93b3c"], + ["n99817348b4a36a6931854c93eed8c5f", "n89f083892a731d7b9d7edb0f372006d"], + ["n89f083892a731d7b9d7edb0f372006d", "n6dc118cd3f7341d9ef8c97c63e2e9d9"], + ["n8daedbb02273a0fbc94cc118c90649f", "n66a0562670e37648a3e05c243335bff"], + ["nadfa68cb2503a39aac6626d6c72484a", "ne1f86f902a23e7fa4a67192e8b38a05"], + ["n383748fe27434d582f0ca17af9d968a", 
"nef42d10350b3961b53df7af67e16d9b"], + ["na02956eba6f3a36ab9b0af2f2350213", "n03f39191e8a32629145ba6a677ed040"], + ["nae1218ddd2e3448b562bc79dc084401", "n383748fe27434d582f0ca17af9d968a"], + ["n26725bdcc0931fab0bc73e7244545ca", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n48bbfdc912334fc89c4f48c05e8969e", "n8a805c0cd02307bad9f7828880b53dc"], + ["ne7af4a7c3613b3d81fe9e6046425a36", "ncb4e013a6a8348bab087cc8500a3876"], + ["nfc1dcdd7476393b9a81a988c113e1cf", "n8d4568db0ad364692b0387e86a2f1e0"], + ["n0197f8f210b3a1b8a7fc2f90e94744e", "n99817348b4a36a6931854c93eed8c5f"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n302d25dfc9c369ab13104d5208e7119"], + ["n1d48483d8023439ad98d61d156c85fb", "n0ada7b4abe63771a43052eaf188dc4b"], + ["na2a8a54e68033d0a276eb88dbff91c3", "nca50848d1aa340f8c2b4776ce81868d"], + ["n90b7ef55fe839b181879e036b4f8ffe", "n9372fb07ad936cba31f3d4e440f395a"], + ["ndb1cf7af0e2319c9868530d0df8fd93", "n2ad9caac5b737bd897d4c8844c85f12"], + ["n8492d92ab6a3da48c2b49d6fcb8a479", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["n8d4568db0ad364692b0387e86a2f1e0", "ne7af4a7c3613b3d81fe9e6046425a36"], + ["n302d25dfc9c369ab13104d5208e7119", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n51426abd4be3a4691c80a73c3f93b3c", "n2c88d1c1d8b35aebf883cbf259fb6bc"], + ["n786694b5ed33295a885b5bcd8c7c1ce", "n0cd3b95c714388bacdf1a486ab432fc"], + ["n854753a77933562ae72ec87c365f23d", "n2890db24f6c3cd1bbcd6b7d8cf2c045"], + ["nc3d0d49adf530bbaffe53630c184c0a", "n476ddcb6dd33e2abac43596b08c2bc1"], + ["n2c88d1c1d8b35aebf883cbf259fb6bc", "n383748fe27434d582f0ca17af9d968a"], + ["n0cd3b95c714388bacdf1a486ab432fc", "n854753a77933562ae72ec87c365f23d"], + ["n51426abd4be3a4691c80a73c3f93b3c", "nae1218ddd2e3448b562bc79dc084401"], + ["nc3d0d49adf530bbaffe53630c184c0a", "n01fb40259ad3cf285bb11a8bbbe59f2"], + ["ne1f86f902a23e7fa4a67192e8b38a05", "n374443fbdc1313d98ebbe19d535fec2"], + ["n0b9e36e6b633ddb906d2044f658f110", "nfc1dcdd7476393b9a81a988c113e1cf"], + ["ncab9a48e79d357195dcee68dad3a31f", "ncbf4db2c48f3348b2c7081f9e3b363a"], + ["n8daedbb02273a0fbc94cc118c90649f", "n86a8b1a56f9399f90c4c227594a9d03"], + ["ncbf4db2c48f3348b2c7081f9e3b363a", "n99817348b4a36a6931854c93eed8c5f"], + ["n1430047af8537f88710c4bbf3cbfb0f", "ncab9a48e79d357195dcee68dad3a31f"], + ["n4790f8aa48e335aa712e2af757e180b", "nca50848d1aa340f8c2b4776ce81868d"], + ["ne26def77df1385caa206c64e7e3ea53", "ne36a6858a733430bffa4fec053dc1ab"], + ["ncab9a48e79d357195dcee68dad3a31f", "n31688b7ab44338e9e6cb8dcaf259eef"], + ["n07982b8985139249bca3a046f3a4379", "nf3ebee137c53da28091ad7d140ce00c"], + ["n66a0562670e37648a3e05c243335bff", "nf3ebee137c53da28091ad7d140ce00c"], + ["n03f39191e8a32629145ba6a677ed040", "nca50848d1aa340f8c2b4776ce81868d"], + ["n8c7e35b0457300d9d6a96a6b1d18329", "n38c3dd0344a3f86bc7511c454bcdf4c"], + ["n5ced56bcc863060ac4977755f35a5f5", "n1430047af8537f88710c4bbf3cbfb0f"], + ["n2ad9caac5b737bd897d4c8844c85f12", "n48bbfdc912334fc89c4f48c05e8969e"], + ["n31688b7ab44338e9e6cb8dcaf259eef", "n99817348b4a36a6931854c93eed8c5f"], + ["n3934eef90463940a6a9cf4ba2e63b1c", "n7dccd56c80233469a4609f684ebe457"], + ["ncab9a48e79d357195dcee68dad3a31f", "n9335d0718a937f9a39ec5b36d5637fe"], + ], + ) + assert not graph4.has_cycle() + assert graph4.get_cycle() == [] + graph5 = Graph([1, 2, 3, 4, 5], [[1, 2], [2, 3], [2, 4], [4, 5], [5, 2]]) + assert graph5.has_cycle() + assert graph5.get_cycle() == [2, 4, 5, 2] diff --git a/runtime/bamboo-pipeline/pipeline/utils/http.py b/runtime/bamboo-pipeline/pipeline/utils/http.py new file mode 100644 index 00000000..f175d8b4 --- /dev/null +++ 
b/runtime/bamboo-pipeline/pipeline/utils/http.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+
+import requests
+import ujson as module_json
+
+logger = logging.getLogger("root")
+
+
+def http_post_request(url, data=None, json=None, **kwargs):
+    response = requests.post(url, data=data, json=json, **kwargs)
+    if response.status_code == 200:
+        try:
+            content_dict = module_json.loads(response.content)
+            return content_dict
+        except Exception as e:
+            message = "the format of the HTTP response body is invalid: %s" % e
+            logger.exception(message)
+            return {"result": False, "code": 1, "message": message}
+    message = "HTTP request failed, HTTP status code is: %s" % response.status_code
+    logger.error(message)
+    return {"result": False, "code": response.status_code, "message": message}
+
+
+def http_get_request(url, params=None, **kwargs):
+    response = requests.get(url, params=params, **kwargs)
+    if response.status_code == 200:
+        try:
+            content_dict = module_json.loads(response.content)
+            return content_dict
+        except Exception as e:
+            message = "the format of the HTTP response body is invalid: %s" % e
+            logger.exception(message)
+            return {"result": False, "code": 1, "message": message}
+    message = "HTTP request failed, HTTP status code is: %s" % response.status_code
+    logger.error(message)
+    return {"result": False, "code": response.status_code, "message": message}
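Both helpers share one contract: non-200 responses and undecodable bodies collapse into a result dict instead of raising, so callers can branch on a single shape. A minimal usage sketch, with a hypothetical endpoint:

```python
from pipeline.utils.http import http_get_request

# the URL is a placeholder; both helpers always return a dict
resp = http_get_request("http://example.com/api/ping/", params={"echo": "1"})
if not resp.get("result", True):
    # non-200 responses and JSON decode errors land here with "code" and "message"
    print(resp["code"], resp["message"])
```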
diff --git a/runtime/bamboo-pipeline/pipeline/utils/imoports.py b/runtime/bamboo-pipeline/pipeline/utils/imoports.py
new file mode 100644
index 00000000..efd4da4d
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/utils/imoports.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+
+def qualname(obj):
+    """Return the qualified "module.name" for an object, falling back to its class."""
+    if not hasattr(obj, "__name__") and hasattr(obj, "__class__"):
+        obj = obj.__class__
+    q = getattr(obj, "__name__")
+    if "." not in q:
+        q = ".".join((obj.__module__, q))
+    return q
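A quick sketch of what `qualname` resolves to; instances are coerced to their class before the name is read:

```python
from pipeline.utils.imoports import qualname

print(qualname(ValueError()))  # "builtins.ValueError" (instance -> class)
print(qualname(qualname))      # "pipeline.utils.imoports.qualname"
```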
+""" + + +import ast +from typing import List + +from mako import parsetree +from mako.exceptions import MakoException +from mako.lexer import Lexer + +from .code_extract import MakoNodeCodeExtractor +from .exceptions import ForbiddenMakoTemplateException + + +def parse_template_nodes( + nodes: List[parsetree.Node], node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor, +): + """ + 解析mako模板节点,逐个节点解析抽象语法树并检查安全性 + :param nodes: mako模板节点列表 + :param node_visitor: 节点访问类,用于遍历AST节点 + :param code_extractor: Mako 词法节点处理器,用于提取 python 代码 + """ + for node in nodes: + code = code_extractor.extract(node) + if code is None: + continue + + ast_node = ast.parse(code, "", "exec") + node_visitor.visit(ast_node) + if hasattr(node, "nodes"): + parse_template_nodes(node.nodes, node_visitor) + + +def check_mako_template_safety(text: str, node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor) -> bool: + """ + 检查mako模板是否安全,若不安全直接抛出异常,安全则返回True + :param text: mako模板内容 + :param node_visitor: 节点访问器,用于遍历AST节点 + """ + try: + lexer_template = Lexer(text).parse() + except MakoException as mako_error: + raise ForbiddenMakoTemplateException("非mako模板,解析失败, {err_msg}".format(err_msg=mako_error.__class__.__name__)) + parse_template_nodes(lexer_template.nodes, node_visitor, code_extractor) + return True diff --git a/runtime/bamboo-pipeline/pipeline/utils/mako_utils/code_extract.py b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/code_extract.py new file mode 100644 index 00000000..9c1ea529 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/code_extract.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import abc + +from mako import parsetree +from mako.ast import PythonFragment + +from .exceptions import ForbiddenMakoTemplateException + + +class MakoNodeCodeExtractor(object): + @abc.abstractmethod + def extract(self, node): + """处理 Mako Lexer 分割出来的 code 对象,返回需要检测的 python 代码,返回 None 表示该节点不需要处理 + + :param node: mako parsetree node + :return: 需要处理的代码,或 None + """ + raise NotImplementedError() + + +class StrictMakoNodeCodeExtractor(MakoNodeCodeExtractor): + def extract(self, node): + if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): + return node.text + elif isinstance(node, parsetree.ControlLine): + if node.isend: + return None + return PythonFragment(node.text).code + elif isinstance(node, parsetree.Text): + return None + else: + raise ForbiddenMakoTemplateException("不支持[{}]节点".format(node.__class__.__name__)) diff --git a/runtime/bamboo-pipeline/pipeline/utils/mako_utils/exceptions.py b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/exceptions.py new file mode 100644 index 00000000..384bbfb6 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/exceptions.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +class ForbiddenMakoTemplateException(Exception): + pass diff --git a/runtime/bamboo-pipeline/pipeline/utils/mako_utils/visitors.py b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/visitors.py new file mode 100644 index 00000000..45f82ec5 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/mako_utils/visitors.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import _ast +import ast + +from django.utils.module_loading import import_string + +from .exceptions import ForbiddenMakoTemplateException + + +class StrictNodeVisitor(ast.NodeVisitor): + """ + 遍历语法树节点,遇到魔术方法使用或import时,抛出异常 + """ + + BLACK_LIST_MODULE_METHODS = { + "os": dir(__import__("os")), + "subprocess": dir(__import__("subprocess")), + "shutil": dir(__import__("shutil")), + "ctypes": dir(__import__("ctypes")), + "codecs": dir(__import__("codecs")), + "sys": dir(__import__("sys")), + "socket": dir(__import__("socket")), + "webbrowser": dir(__import__("webbrowser")), + "threading": dir(__import__("threading")), + "sqlite3": dir(__import__("threading")), + "signal": dir(__import__("signal")), + "imaplib": dir(__import__("imaplib")), + "fcntl": dir(__import__("fcntl")), + "pdb": dir(__import__("pdb")), + "pty": dir(__import__("pty")), + "glob": dir(__import__("glob")), + "tempfile": dir(__import__("tempfile")), + "types": dir(import_string("types.CodeType")) + dir(import_string("types.FrameType")), + "builtins": [ + "getattr", + "hasattr", + "breakpoint", + "compile", + "delattr", + "open", + "eval", + "exec", + "execfile", + "exit", + "dir", + "globals", + "locals", + "input", + "iter", + "next", + "quit", + "setattr", + "vars", + "memoryview", + "super", + "print", + ], + } + + BLACK_LIST_METHODS = [] + for module_name, methods in BLACK_LIST_MODULE_METHODS.items(): + BLACK_LIST_METHODS.append(module_name) + BLACK_LIST_METHODS.extend(methods) + BLACK_LIST_METHODS = set(BLACK_LIST_METHODS) + + WHITE_LIST_MODULES = ["datetime", "re", "random", "json", "math"] + + def __init__(self, black_list_methods=None, white_list_modules=None): + self.black_list_methods = black_list_methods or self.BLACK_LIST_METHODS + self.white_list_modules = white_list_modules or self.WHITE_LIST_MODULES + + @staticmethod + def is_white_list_ast_obj(ast_obj: _ast.AST) -> bool: + """ + 判断是否白名单对象,特殊豁免 + :param ast_obj: 抽象语法树节点 + :return: bool + """ + # re 正则表达式允许使用 compile + if isinstance(ast_obj, _ast.Attribute) and isinstance(ast_obj.value, _ast.Name): + if ast_obj.value.id == "re" and ast_obj.attr in ["compile"]: + return True + + return False + + def visit_Attribute(self, node): + if self.is_white_list_ast_obj(node): + return + + if node.attr in self.black_list_methods or node.attr.startswith("_"): + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_Name(self, node): + if node.id in self.black_list_methods or node.id.startswith("_"): + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_Import(self, node): + for name in node.names: + if name.name not in self.white_list_modules: + raise ForbiddenMakoTemplateException("Mako template forbidden.") + + def visit_ImportFrom(self, node): + self.visit_Import(node) diff --git a/runtime/bamboo-pipeline/pipeline/utils/register.py b/runtime/bamboo-pipeline/pipeline/utils/register.py new file mode 100644 index 00000000..9de35c93 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/register.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
diff --git a/runtime/bamboo-pipeline/pipeline/utils/register.py b/runtime/bamboo-pipeline/pipeline/utils/register.py
new file mode 100644
index 00000000..9de35c93
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/utils/register.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import logging
+import pkgutil
+import os
+import sys
+from importlib import import_module
+
+
+logger = logging.getLogger("root")
+
+
+def find_all_modules(module_dir, sub_dir=None):
+    modules = []
+    for _, name, is_pkg in pkgutil.iter_modules([module_dir]):
+        if name.startswith("_"):
+            continue
+        module = name if sub_dir is None else "{}.{}".format(sub_dir, name)
+        if is_pkg:
+            modules += find_all_modules(os.path.join(module_dir, name), module)
+        else:
+            modules.append(module)
+    return modules
+
+
+def autodiscover_items(module):
+    """
+    Given a path to discover, auto register all items
+    """
+    # Workaround for a Python 3.2 bug with pkgutil.iter_modules
+    module_dir = module.__path__[0]
+    sys.path_importer_cache.pop(module_dir, None)
+    modules = find_all_modules(module_dir)
+    for name in modules:
+        module_path = "{}.{}".format(module.__name__, name)
+        try:
+            __import__(module_path)
+        except Exception as e:
+            logger.error(f"[!] module({module_path}) import failed with err: {e}")
+
+
+def autodiscover_collections(path):
+    """
+    Auto-discover INSTALLED_APPS modules and fail silently when
+    not present. This forces an import on them to register any admin bits they
+    may want.
+    """
+    from django.apps import apps
+
+    for app_config in apps.get_app_configs():
+        # Attempt to import the app's module.
+        try:
+            _module = import_module("%s.%s" % (app_config.name, path))
+            autodiscover_items(_module)
+        except ImportError:
+            # the app simply does not provide this module; fail silently
+            pass
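A sketch of the discovery flow inside a configured Django process, assuming a hypothetical `collections` submodule name (pipeline itself feeds paths from settings, as `VARIABLE_AUTO_DISCOVER_PATH` does further down in this patch):

```python
from pipeline.utils.register import autodiscover_collections

# for every INSTALLED_APP, try to import "<app>.collections" and then import each
# of its submodules, so that import-time registration side effects run
autodiscover_collections("collections")
```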
+""" + +import uuid + +from pipeline.conf import settings + + +def uniqid(): + return uuid.uuid3(uuid.uuid1(), uuid.uuid4().hex).hex + + +def node_uniqid(): + uid = uniqid() + return "n%s" % uid[1:] if settings.UUID_DIGIT_STARTS_SENSITIVE else uid + + +def line_uniqid(): + uid = uniqid() + return "l%s" % uid[1:] if settings.UUID_DIGIT_STARTS_SENSITIVE else uid diff --git a/runtime/bamboo-pipeline/pipeline/utils/utils.py b/runtime/bamboo-pipeline/pipeline/utils/utils.py new file mode 100644 index 00000000..c39a55c8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/utils/utils.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +ITERATED = 1 +NEW = 0 +ITERATING = -1 + + +def has_circle(graph): + # init marks + marks = {} + for node in graph: + # marks as not iterated + marks[node] = NEW + + # dfs every node + for cur_node in graph: + trace = [cur_node] + for node in graph[cur_node]: + if marks[node] == ITERATED: + continue + trace.append(node) + # return immediately when circle be detected + if _has_circle(graph, node, marks, trace): + return True, trace + trace.pop() + # mark as iterated + marks[cur_node] = ITERATED + + return False, [] + + +def _has_circle(graph, cur_node, marks, trace): + # detect circle when iterate to a node which been marked as -1 + if marks[cur_node] == ITERATING: + return True + # mark as iterating + marks[cur_node] = ITERATING + # dfs + for node in graph[cur_node]: + # return immediately when circle be detected + trace.append(node) + if _has_circle(graph, node, marks, trace): + return True + trace.pop() + # mark as iterated + marks[cur_node] = ITERATED + + return False + + +def convert_bytes_to_str(obj): + + converted = set() + + def _convert(obj, converted): + if isinstance(obj, dict): + new_dict = obj.__class__() + + for attr, value in obj.items(): + + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + + value = _convert(value, converted) + + new_dict[attr] = value + + obj = new_dict + + if isinstance(obj, list): + new_list = obj.__class__() + + for item in obj: + new_list.append(_convert(item, converted)) + + obj = new_list + + elif isinstance(obj, bytes): + + try: + obj = obj.decode("utf-8") + except Exception: + pass + + elif hasattr(obj, "__dict__"): + + if id(obj) in converted: + return obj + else: + converted.add(id(obj)) + + new__dict__ = {} + + for attr, value in obj.__dict__.items(): + + if isinstance(attr, bytes): + attr = attr.decode("utf-8") + + new__dict__[attr] = _convert(value, converted) + + obj.__dict__ = new__dict__ + + return obj + + return _convert(obj, converted) diff --git a/runtime/bamboo-pipeline/pipeline/validators/__init__.py b/runtime/bamboo-pipeline/pipeline/validators/__init__.py new file mode 100644 index 00000000..c529e9d8 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/validators/__init__.py @@ -0,0 +1,14 @@ +# -*- 
diff --git a/runtime/bamboo-pipeline/pipeline/validators/__init__.py b/runtime/bamboo-pipeline/pipeline/validators/__init__.py
new file mode 100644
index 00000000..c529e9d8
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/validators/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from .base import validate_pipeline_tree  # noqa
diff --git a/runtime/bamboo-pipeline/pipeline/validators/base.py b/runtime/bamboo-pipeline/pipeline/validators/base.py
new file mode 100644
index 00000000..8399d27b
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/validators/base.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from pipeline import exceptions
+from pipeline.validators.connection import (
+    validate_graph_connection,
+    validate_graph_without_circle,
+)
+from pipeline.validators.gateway import validate_gateways, validate_stream
+from pipeline.validators.utils import format_pipeline_tree_io_to_list
+
+
+def validate_pipeline_tree(pipeline_tree, cycle_tolerate=False):
+    format_pipeline_tree_io_to_list(pipeline_tree)
+    # 1. connection validation
+    try:
+        validate_graph_connection(pipeline_tree)
+    except exceptions.ConnectionValidateError as e:
+        raise exceptions.ParserException(e.detail)
+
+    # do not tolerate circle in flow
+    if not cycle_tolerate:
+        no_cycle = validate_graph_without_circle(pipeline_tree)
+        if not no_cycle["result"]:
+            raise exceptions.ParserException(no_cycle["message"])
+
+    # 2. gateway validation
+    validate_gateways(pipeline_tree)
+
+    # 3. stream validation
+    validate_stream(pipeline_tree)
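`validate_pipeline_tree` is the single entry point that chains all four checks below. A minimal sketch, assuming `pipeline_tree` is a dict in the canonical tree format (start_event / end_event / activities / gateways / flows):

```python
from pipeline import exceptions
from pipeline.validators import validate_pipeline_tree

try:
    validate_pipeline_tree(pipeline_tree, cycle_tolerate=False)
except exceptions.ParserException as e:
    # connection or cycle violations surface here with per-node details
    print("invalid pipeline: %s" % e)
```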
diff --git a/runtime/bamboo-pipeline/pipeline/validators/connection.py b/runtime/bamboo-pipeline/pipeline/validators/connection.py
new file mode 100644
index 00000000..922eda79
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/validators/connection.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.utils.translation import ugettext_lazy as _
+
+from pipeline.exceptions import ConnectionValidateError
+from pipeline.utils.graph import Graph
+from pipeline.validators.rules import NODE_RULES
+from pipeline.validators.utils import get_nodes_dict
+from pipeline.core.constants import PE
+
+
+def validate_graph_connection(data):
+    """
+    validate that every node's connections are legal
+    """
+    nodes = get_nodes_dict(data)
+
+    result = {"result": True, "message": {}, "failed_nodes": []}
+
+    for i in nodes:
+        node_type = nodes[i][PE.type]
+        rule = NODE_RULES[node_type]
+        message = ""
+        for j in nodes[i][PE.target]:
+            if nodes[j][PE.type] not in rule["allowed_out"]:
+                message += _("cannot connect to a node of type %s\n") % nodes[j][PE.type]
+        if rule["min_in"] > len(nodes[i][PE.source]) or len(nodes[i][PE.source]) > rule["max_in"]:
+            message += _("node in-degree must be at most %s and at least %s\n") % (rule["max_in"], rule["min_in"])
+        if rule["min_out"] > len(nodes[i][PE.target]) or len(nodes[i][PE.target]) > rule["max_out"]:
+            message += _("node out-degree must be at most %s and at least %s\n") % (rule["max_out"], rule["min_out"])
+        if message:
+            result["failed_nodes"].append(i)
+            result["message"][i] = message
+
+    if result["failed_nodes"]:
+        raise ConnectionValidateError(failed_nodes=result["failed_nodes"], detail=result["message"])
+
+
+def validate_graph_without_circle(data):
+    """
+    validate that the graph has no cycle
+
+    return {
+        "result": False,
+        "message": "error message",
+        "error_data": ["node1_id", "node2_id", "node1_id"]
+    }
+    """
+
+    nodes = [data[PE.start_event][PE.id], data[PE.end_event][PE.id]]
+    nodes += list(data[PE.gateways].keys()) + list(data[PE.activities].keys())
+    flows = [[flow[PE.source], flow[PE.target]] for _, flow in list(data[PE.flows].items())]
+    cycle = Graph(nodes, flows).get_cycle()
+    if cycle:
+        return {"result": False, "message": "pipeline graph has a circle", "error_data": cycle}
+    return {"result": True, "data": []}
+""" + +import queue + +from django.utils.translation import ugettext_lazy as _ + +from pipeline import exceptions +from pipeline.core.constants import PE +from pipeline.engine.utils import Stack +from pipeline.validators.utils import get_node_for_sequence, get_nodes_dict + +STREAM = "stream" +P_STREAM = "p_stream" +P = "p" +MAIN_STREAM = "main" + +PARALLEL_GATEWAYS = {PE.ParallelGateway, PE.ConditionalParallelGateway} + + +def not_in_parallel_gateway(gateway_stack, start_from=None): + """ + check whether there is parallel gateway in stack from specific gateway + :param gateway_stack: + :param start_from: + :return: + """ + start = 0 + if start_from: + id_stack = [g[PE.id] for g in gateway_stack] + start = id_stack.index(start_from) + + for i in range(start, len(gateway_stack)): + gateway = gateway_stack[i] + if gateway[PE.type] in PARALLEL_GATEWAYS: + return False + return True + + +def matched_in_prev_blocks(gid, current_start, block_nodes): + """ + check whether gateway with gid is matched in previous block + :param gid: + :param current_start: + :param block_nodes: + :return: + """ + prev_nodes = set() + for prev_start, nodes in list(block_nodes.items()): + if prev_start == current_start: + continue + prev_nodes.update(nodes) + + return gid in prev_nodes + + +def match_converge( + converges, + gateways, + cur_index, + end_event_id, + block_start, + block_nodes, + converged, + dist_from_start, + converge_in_len, + stack=None, +): + """ + find converge for parallel and exclusive in blocks, and check sanity of gateway + :param converges: + :param gateways: + :param cur_index: + :param end_event_id: + :param block_start: + :param block_nodes: + :param converged: + :param dist_from_start: + :param stack: + :param converge_in_len: + :return: + """ + + if stack is None: + stack = Stack() + + if cur_index not in gateways: + return None, False + + # return if this node is already matched + if gateways[cur_index]["match"]: + return gateways[cur_index]["match"], gateways[cur_index]["share_converge"] + + current_gateway = gateways[cur_index] + target = gateways[cur_index][PE.target] + stack.push(gateways[cur_index]) + stack_id_set = {g[PE.id] for g in stack} + + # find closest converge recursively + for i in range(len(target)): + + # do not process prev blocks nodes + if matched_in_prev_blocks(target[i], block_start, block_nodes): + target[i] = None + continue + + block_nodes[block_start].add(target[i]) + + # do not find self's converge node again + while target[i] in gateways and target[i] != current_gateway[PE.id]: + + if target[i] in stack_id_set: + # return to previous gateway + + if not_in_parallel_gateway(stack, start_from=target[i]): + # do not trace back + target[i] = None + break + else: + raise exceptions.ConvergeMatchError(cur_index, _("并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关")) + + converge_id, shared = match_converge( + converges=converges, + gateways=gateways, + cur_index=target[i], + end_event_id=end_event_id, + block_start=block_start, + block_nodes=block_nodes, + stack=stack, + converged=converged, + dist_from_start=dist_from_start, + converge_in_len=converge_in_len, + ) + if converge_id: + target[i] = converge_id + + if not shared: + # try to get next node fo converge which is not shared + target[i] = converges[converge_id][PE.target][0] + + else: + # can't find corresponding converge gateway, which means this gateway will reach end event directly + target[i] = end_event_id + + if target[i] in converges and dist_from_start[target[i]] < dist_from_start[cur_index]: + # do not match previous 
converge
+            target[i] = None
+
+    stack.pop()
+
+    is_exg = current_gateway[PE.type] == PE.ExclusiveGateway
+    converge_id = None
+    shared = False
+    cur_to_converge = len(target)
+    converge_end = False
+
+    # gateway match validation
+    for i in range(len(target)):
+
+        # mark first converge
+        if target[i] in converges and not converge_id:
+            converge_id = target[i]
+
+        # same converge node
+        elif target[i] in converges and converge_id == target[i]:
+            pass
+
+        # exclusive gateway point to end
+        elif is_exg and target[i] == end_event_id:
+            if not_in_parallel_gateway(stack):
+                converge_end = True
+            else:
+                raise exceptions.ConvergeMatchError(
+                    cur_index, _("branch gateways inside a parallel gateway must converge all branches to a single converge gateway")
+                )
+
+        # exclusive gateway point back to self
+        elif is_exg and target[i] == current_gateway[PE.id]:
+            # not converge behavior
+            cur_to_converge -= 1
+
+        # exclusive gateway converge at different converge gateway
+        elif is_exg and target[i] in converges and converge_id != target[i]:
+            raise exceptions.ConvergeMatchError(
+                cur_index, _("all branches of an exclusive gateway must first reach the same converge gateway")
+            )
+
+        # meet previous node
+        elif is_exg and target[i] is None:
+            # not converge behavior
+            cur_to_converge -= 1
+
+        # invalid cases
+        else:
+            raise exceptions.ConvergeMatchError(cur_index, _("invalid gateway, please check whether its branches follow the rules"))
+
+    if is_exg:
+        if converge_id in converges:
+            # this converge is shared by multiple gateways
+            # only compare to the number of positive incoming
+            shared = converge_in_len[converge_id] > cur_to_converge or converge_id in converged
+    else:
+        # for parallel gateway
+        converge_incoming = len(converges[converge_id][PE.incoming])
+        gateway_outgoing = len(target)
+
+        if converge_incoming > gateway_outgoing:
+            for gateway_id in converged.get(converge_id, []):
+                # find another parallel gateway
+                if gateways[gateway_id][PE.type] in PARALLEL_GATEWAYS:
+                    raise exceptions.ConvergeMatchError(
+                        converge_id, _("a converge gateway can only converge branches from the same parallel gateway")
+                    )
+
+            shared = True
+
+        elif converge_incoming < gateway_outgoing:
+            raise exceptions.ConvergeMatchError(
+                converge_id, _("the converge gateway does not converge all branches of its parallel gateway")
+            )
+
+    current_gateway["match"] = converge_id
+    current_gateway["share_converge"] = shared
+    current_gateway["converge_end"] = converge_end
+
+    converged.setdefault(converge_id, []).append(current_gateway[PE.id])
+    block_nodes[block_start].add(current_gateway[PE.id])
+
+    return converge_id, shared
+
+
+def distance_from(origin, node, tree, marked, visited=None):
+    """
+    get the max distance from origin to node
+    :param origin: origin node
+    :param node: target node
+    :param tree: pipeline tree
+    :param marked: cache of already computed distances
+    :param visited: nodes on the current path, used to avoid tracing circles
+    :return: distance, or None when every path traces back to the current node
+    """
+    if visited is None:
+        visited = set()
+
+    if node[PE.id] in marked:
+        return marked[node[PE.id]]
+
+    if node[PE.id] == origin[PE.id]:
+        return 0
+
+    if node[PE.id] in visited:
+        # do not trace circle
+        return None
+
+    visited.add(node[PE.id])
+
+    incoming_dist = []
+    for incoming in node[PE.incoming]:
+        prev_node = get_node_for_sequence(incoming, tree, PE.source)
+
+        # get incoming node's distance recursively
+        dist = distance_from(origin=origin, node=prev_node, tree=tree, marked=marked, visited=visited)
+
+        # only count incomings that do not trace back to the current node
+        if dist is not None:
+            incoming_dist.append(dist + 1)
+
+    if not incoming_dist:
+        return None
+
+    # get max distance
+    res = max(incoming_dist)
+    marked[node[PE.id]] = res
+    return res
+
+
+def validate_gateways(tree):
+    """
+    check the sanity of gateways and find each gateway's converge gateway
+    :param tree: pipeline tree
+    :return: mapping from converge id to the gateways it converges
+    """
+    converges = {}
+    gateways = {}
+    all_nodes = {}
+    distances = {}
+    converge_positive_in = {}
+    process_order = []
+
+    # data preparation
+    for i, item in list(tree[PE.gateways].items()):
+        node = {
+            PE.incoming: item[PE.incoming] if isinstance(item[PE.incoming], list) else [item[PE.incoming]],
+            PE.outgoing: item[PE.outgoing] if isinstance(item[PE.outgoing], list) else [item[PE.outgoing]],
+            PE.type: item[PE.type],
+            PE.target: [],
+            PE.source: [],
+            PE.id: item[PE.id],
+            "match": None,
+        }
+
+        # find all first reach nodes(ConvergeGateway, ExclusiveGateway, ParallelGateway, EndEvent)
+        # which is not ServiceActivity for each gateway
+        for index in node[PE.outgoing]:
+            index = tree[PE.flows][index][PE.target]
+            while index in tree[PE.activities]:
+                index = tree[PE.flows][tree[PE.activities][index][PE.outgoing]][PE.target]
+
+            # append this node's id to current gateway's target list
+            node[PE.target].append(index)
+
+        # get current node's distance from start event
+        if not distance_from(node=node, origin=tree[PE.start_event], tree=tree, marked=distances):
+            raise exceptions.ConvergeMatchError(node[PE.id], _("cannot get this gateway's distance from the start event"))
+
+        if item[PE.type] == PE.ConvergeGateway:
+            converges[i] = node
+        else:
+            process_order.append(i)
+            gateways[i] = node
+
+        all_nodes[i] = node
+
+    # calculate positive incoming number for each converge
+    for nid, node in list(all_nodes.items()):
+        for t in node[PE.target]:
+            if t in converges and distances[t] > distances[nid]:
+                converge_positive_in[t] = converge_positive_in.setdefault(t, 0) + 1
+
+    process_order.sort(key=lambda gid: distances[gid])
+    end_event_id = tree[PE.end_event][PE.id]
+    converged = {}
+    block_nodes = {}
+    visited = set()
+
+    # process in distance order
+    for gw in process_order:
+        if gw in visited or gateways[gw]["match"]:
+            continue
+        visited.add(gw)
+
+        block_nodes[gw] = set()
+
+        match_converge(
+            converges=converges,
+            gateways=gateways,
+            cur_index=gw,
+            end_event_id=end_event_id,
+            converged=converged,
+            block_start=gw,
+            block_nodes=block_nodes,
+            dist_from_start=distances,
+            converge_in_len=converge_positive_in,
+        )
+
+    # set converge gateway
+    for i in gateways:
+        if gateways[i]["match"]:
+            tree[PE.gateways][i][PE.converge_gateway_id] = gateways[i]["match"]
+
+    return converged
+
+
+def blend(source, target, custom_stream=None):
+    """
+    blend the source node's streams into the target node
+    :param source: source node
+    :param target: target node
+    :param custom_stream: stream(s) to use instead of the source's streams
+    :return:
+    """
+
+    if custom_stream:
+        # use custom stream instead of source's stream
+        if isinstance(custom_stream, set):
+            for stream in custom_stream:
+                target[STREAM].add(stream)
+        else:
+            target[STREAM].add(custom_stream)
+
+        return
+
+    if len(source[STREAM]) == 0:
+        raise exceptions.InvalidOperationException("stream validation error, node(%s) stream is empty" % source[PE.id])
+
+    # blend
+    for s in source[STREAM]:
+        target[STREAM].add(s)
+
+
+def streams_for_parallel(p):
+    streams = set()
+    for i, target_id in enumerate(p[PE.target]):
+        streams.add("{}_{}".format(p[PE.id], i))
+
+    return streams
+
+
+def flowing(where, to, parallel_converges):
+    """
+    flow streams from the `where` node into each of its targets
+    :param where: the node whose streams are being propagated
+    :param to: dict of all nodes
+    :param parallel_converges: converge info for parallel gateways
+    :return:
+    """
+    is_parallel = where[PE.type] in PARALLEL_GATEWAYS
+
+    stream = None
+    if is_parallel:
+        # add parallel's stream to its converge
+        parallel_converge = to[where[PE.converge_gateway_id]]
+        blend(source=where, target=parallel_converge, custom_stream=stream)
+
+        if len(parallel_converge[STREAM]) > 1:
+            raise exceptions.StreamValidateError(node_id=where[PE.converge_gateway_id])
+
+    # flow to target
+    for i, target_id in enumerate(where[PE.target]):
+        target = to[target_id]
+        fake = False
+
+        # generate
different stream + if is_parallel: + stream = "{}_{}".format(where[PE.id], i) + + if target_id in parallel_converges: + + is_valid_branch = where[STREAM].issubset(parallel_converges[target_id][P_STREAM]) + is_direct_connect = where.get(PE.converge_gateway_id) == target_id + + if is_valid_branch or is_direct_connect: + # do not flow when branch of parallel converge to its converge gateway + fake = True + + if not fake: + blend(source=where, target=target, custom_stream=stream) + + # sanity check + if len(target[STREAM]) != 1: + raise exceptions.StreamValidateError(node_id=target_id) + + +def validate_stream(tree): + """ + validate flow stream + :param tree: pipeline tree + :return: + """ + # data preparation + start_event_id = tree[PE.start_event][PE.id] + end_event_id = tree[PE.end_event][PE.id] + nodes = get_nodes_dict(tree) + nodes[start_event_id][STREAM] = {MAIN_STREAM} + nodes[end_event_id][STREAM] = {MAIN_STREAM} + parallel_converges = {} + visited = set({}) + + for nid, node in list(nodes.items()): + node.setdefault(STREAM, set()) + + # set allow streams for parallel's converge + if node[PE.type] in PARALLEL_GATEWAYS: + parallel_converges[node[PE.converge_gateway_id]] = {P_STREAM: streams_for_parallel(node), P: nid} + + # build stream from start + node_queue = queue.Queue() + node_queue.put(nodes[start_event_id]) + while not node_queue.empty(): + + # get node + node = node_queue.get() + + if node[PE.id] in visited: + # flow again to validate stream, but do not add target to queue + flowing(where=node, to=nodes, parallel_converges=parallel_converges) + continue + + # add to queue + for target_id in node[PE.target]: + node_queue.put(nodes[target_id]) + + # mark as visited + visited.add(node[PE.id]) + + # flow + flowing(where=node, to=nodes, parallel_converges=parallel_converges) + + # data clean + for nid, n in list(nodes.items()): + if len(n[STREAM]) != 1: + raise exceptions.StreamValidateError(node_id=nid) + + # replace set to str + n[STREAM] = n[STREAM].pop() + + # isolate node check + for __, node in list(nodes.items()): + if not node[STREAM]: + raise exceptions.IsolateNodeError() + + return nodes diff --git a/runtime/bamboo-pipeline/pipeline/validators/handlers.py b/runtime/bamboo-pipeline/pipeline/validators/handlers.py new file mode 100644 index 00000000..52af526b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/validators/handlers.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.dispatch import receiver + +from pipeline.core.flow.event import EndEvent +from pipeline.core.flow.signals import post_new_end_event_register +from pipeline.validators import rules + + +@receiver(post_new_end_event_register, sender=EndEvent) +def post_new_end_event_register_handler(sender, node_type, node_cls, **kwargs): + rules.NODE_RULES[node_type] = rules.SINK_RULE + rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type) diff --git a/runtime/bamboo-pipeline/pipeline/validators/rules.py b/runtime/bamboo-pipeline/pipeline/validators/rules.py new file mode 100644 index 00000000..5b3b814d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/validators/rules.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.core.flow import FlowNodeClsFactory + +MAX_IN = 1000 +MAX_OUT = 1000 +FLOW_NODES_WITHOUT_STARTEVENT = FlowNodeClsFactory.node_types_without_start_event() + +FLOW_NODES_WITHOUT_START_AND_END = FlowNodeClsFactory.node_types_without_start_end_event() + +SOURCE_RULE = {"min_in": 0, "max_in": 0, "min_out": 1, "max_out": 1, "allowed_out": FLOW_NODES_WITHOUT_START_AND_END} + +SINK_RULE = {"min_in": 1, "max_in": MAX_IN, "min_out": 0, "max_out": 0, "allowed_out": []} + +ACTIVITY_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": 1, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +EMIT_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": MAX_OUT, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +CONVERGE_RULE = { + "min_in": 1, + "max_in": MAX_IN, + "min_out": 1, + "max_out": 1, + "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, +} + +# rules of activity graph +NODE_RULES = { + "EmptyStartEvent": SOURCE_RULE, + "EmptyEndEvent": SINK_RULE, + "ServiceActivity": ACTIVITY_RULE, + "ExclusiveGateway": EMIT_RULE, + "ParallelGateway": EMIT_RULE, + "ConditionalParallelGateway": EMIT_RULE, + "ConvergeGateway": CONVERGE_RULE, + "SubProcess": ACTIVITY_RULE, +} diff --git a/runtime/bamboo-pipeline/pipeline/validators/utils.py b/runtime/bamboo-pipeline/pipeline/validators/utils.py new file mode 100644 index 00000000..044d618d --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/validators/utils.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from copy import deepcopy + +from pipeline import exceptions +from pipeline.core.constants import PE + + +def format_to_list(notype): + """ + format a data to list + :return: + """ + if isinstance(notype, list): + return notype + if not notype: + return [] + return [notype] + + +def format_node_io_to_list(node, i=True, o=True): + if i: + node["incoming"] = format_to_list(node["incoming"]) + + if o: + node["outgoing"] = format_to_list(node["outgoing"]) + + +def format_pipeline_tree_io_to_list(pipeline_tree): + """ + :summary: format incoming and outgoing to list + :param pipeline_tree: + :return: + """ + for act in list(pipeline_tree[PE.activities].values()): + format_node_io_to_list(act, o=False) + + for gateway in list(pipeline_tree[PE.gateways].values()): + format_node_io_to_list(gateway, o=False) + + format_node_io_to_list(pipeline_tree[PE.end_event], o=False) + + +def get_node_for_sequence(sid, tree, node_type): + target_id = tree[PE.flows][sid][node_type] + + if target_id in tree[PE.activities]: + return tree[PE.activities][target_id] + elif target_id in tree[PE.gateways]: + return tree[PE.gateways][target_id] + elif target_id == tree[PE.end_event][PE.id]: + return tree[PE.end_event] + elif target_id == tree[PE.start_event][PE.id]: + return tree[PE.start_event] + + raise exceptions.InvalidOperationException("node(%s) not in data" % target_id) + + +def get_nodes_dict(data): + """ + get all FlowNodes of a pipeline + """ + data = deepcopy(data) + start = data[PE.start_event][PE.id] + end = data[PE.end_event][PE.id] + + nodes = {start: data[PE.start_event], end: data[PE.end_event]} + + nodes.update(data[PE.activities]) + nodes.update(data[PE.gateways]) + + for node in list(nodes.values()): + # format to list + node[PE.incoming] = format_to_list(node[PE.incoming]) + node[PE.outgoing] = format_to_list(node[PE.outgoing]) + + node[PE.source] = [data[PE.flows][incoming][PE.source] for incoming in node[PE.incoming]] + node[PE.target] = [data[PE.flows][outgoing][PE.target] for outgoing in node[PE.outgoing]] + + return nodes diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/__init__.py b/runtime/bamboo-pipeline/pipeline/variable_framework/__init__.py new file mode 100644 index 00000000..7ea9a1ba --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +default_app_config = "pipeline.variable_framework.apps.VariableFrameworkConfig" diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/admin.py b/runtime/bamboo-pipeline/pipeline/variable_framework/admin.py new file mode 100644 index 00000000..853cfe90 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/admin.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from django.contrib import admin + +from pipeline.variable_framework import models + + +@admin.register(models.VariableModel) +class VariableModelAdmin(admin.ModelAdmin): + list_display = ["id", "code", "status"] + search_fields = ["code"] + list_filter = ["status"] diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/apps.py b/runtime/bamboo-pipeline/pipeline/variable_framework/apps.py new file mode 100644 index 00000000..a60a5e67 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/apps.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import logging + +from django.apps import AppConfig +from django.db.utils import OperationalError, ProgrammingError + +from pipeline.conf import settings +from pipeline.utils.register import autodiscover_collections +from pipeline.variable_framework import context + +logger = logging.getLogger("root") + + +class VariableFrameworkConfig(AppConfig): + name = "pipeline.variable_framework" + verbose_name = "PipelineVariableFramework" + + def ready(self): + """ + @summary: 注册公共部分和RUN_VER下的变量到数据库 + @return: + """ + from pipeline.variable_framework.signals.handlers import pre_variable_register_handler # noqa + + for path in settings.VARIABLE_AUTO_DISCOVER_PATH: + autodiscover_collections(path) + + if context.skip_update_var_models(): + return + + from pipeline.variable_framework.models import VariableModel + from pipeline.core.data.library import VariableLibrary + + try: + print("update variable models") + VariableModel.objects.exclude(code__in=list(VariableLibrary.variables.keys())).update(status=False) + print("update variable models finish") + except (ProgrammingError, OperationalError) as e: + # first migrate + logger.exception(e) diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/context.py b/runtime/bamboo-pipeline/pipeline/variable_framework/context.py new file mode 100644 index 00000000..3f26946b --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/context.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from pipeline.conf import settings +from pipeline.utils import env + +UPDATE_TRIGGER = "update_variable_models" + + +def skip_update_var_models(): + if settings.AUTO_UPDATE_VARIABLE_MODELS: + return False + + django_command = env.get_django_command() + if django_command is None: + return True + + return django_command != UPDATE_TRIGGER diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/management/__init__.py b/runtime/bamboo-pipeline/pipeline/variable_framework/management/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/management/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/__init__.py b/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/__init__.py
new file mode 100644
index 00000000..40097292
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/__init__.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/update_variable_models.py b/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/update_variable_models.py
new file mode 100644
index 00000000..525d010e
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/variable_framework/management/commands/update_variable_models.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from django.core.management import BaseCommand
+
+
+class Command(BaseCommand):
+    def handle(self, *args, **options):
+        # nothing to do here: running this command boots the app, and ready() performs the model update
+        print("variable models update finished.")
diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/0001_initial.py b/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/0001_initial.py
new file mode 100644
index 00000000..bf00d16f
--- /dev/null
+++ b/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/0001_initial.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+"""
+Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
+Edition) available.
+Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the +specific language governing permissions and limitations under the License. +""" + + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="VariableModel", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("code", models.CharField(max_length=255, unique=True, verbose_name="\u53d8\u91cf\u7f16\u7801")), + ("status", models.BooleanField(default=True, verbose_name="\u53d8\u91cf\u662f\u5426\u53ef\u7528")), + ], + options={"verbose_name": "Variable\u53d8\u91cf", "verbose_name_plural": "Variable\u53d8\u91cf"}, + ), + ] diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/__init__.py b/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/migrations/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/models.py b/runtime/bamboo-pipeline/pipeline/variable_framework/models.py new file mode 100644 index 00000000..84b49a51 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/models.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from pipeline.core.data.library import VariableLibrary + + +class VariableModel(models.Model): + """ + 注册的变量 + """ + + code = models.CharField(_("变量编码"), max_length=255, unique=True) + status = models.BooleanField(_("变量是否可用"), default=True) + + class Meta: + verbose_name = _("Variable变量") + verbose_name_plural = _("Variable变量") + + def __unicode__(self): + return self.code + + def get_class(self): + return VariableLibrary.get_var_class(self.code) + + @property + def name(self): + return self.get_class().name + + @property + def form(self): + return self.get_class().form + + @property + def type(self): + return self.get_class().type + + @property + def tag(self): + return self.get_class().tag + + @property + def meta_tag(self): + return getattr(self.get_class(), "meta_tag") diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/signals/__init__.py b/runtime/bamboo-pipeline/pipeline/variable_framework/signals/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/signals/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/runtime/bamboo-pipeline/pipeline/variable_framework/signals/handlers.py b/runtime/bamboo-pipeline/pipeline/variable_framework/signals/handlers.py new file mode 100644 index 00000000..de683177 --- /dev/null +++ b/runtime/bamboo-pipeline/pipeline/variable_framework/signals/handlers.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from django.db.utils import ProgrammingError +from django.dispatch import receiver + +from pipeline.core.data.var import LazyVariable +from pipeline.core.signals import pre_variable_register +from pipeline.variable_framework.models import VariableModel +from pipeline.variable_framework import context + + +@receiver(pre_variable_register, sender=LazyVariable) +def pre_variable_register_handler(sender, variable_cls, **kwargs): + if context.skip_update_var_models(): + return + + try: + print("update {} variable model".format(variable_cls.code)) + obj, created = VariableModel.objects.get_or_create(code=variable_cls.code, defaults={"status": __debug__}) + if not created and not obj.status: + obj.status = True + obj.save() + except ProgrammingError: + # first migrate + pass diff --git a/runtime/bamboo-pipeline/pyproject.toml b/runtime/bamboo-pipeline/pyproject.toml new file mode 100644 index 00000000..446077b9 --- /dev/null +++ b/runtime/bamboo-pipeline/pyproject.toml @@ -0,0 +1,16 @@ +[tool.black] +line-length = 120 +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist +)/ +''' diff --git a/runtime/bamboo-pipeline/release.md b/runtime/bamboo-pipeline/release.md new file mode 100644 index 00000000..401926b9 --- /dev/null +++ b/runtime/bamboo-pipeline/release.md @@ -0,0 +1,332 @@ +# 3.6.2 + +- optimization: + - 对无效的 schedule 请求增加防御机制, 防止受到单个节点 schedule 请求风暴的影响 +- minor: + - bamboo-engine 升级至 1.3.2 + + +# 3.6.1 + +- minor: + - bamboo-engine 升级至 1.3.1 + +# 3.6.0 + +- feature: + - eri 3.0.0 支持 + +# 3.5.4 + +- optimization: + - engine.pipelineprocess parent_id 字段增加索引 +- bugfix: + - 修复子流程中存在间接引用子流程节点输出变量时无法渲染的问题 + - 修复 ERI 实现中子流程 lazy 参数渲染失败的问题 + +# 3.5.3 + +- bugfix: + - 修复老版本引擎启动 celery 任务时未传入 queue 参数的问题 + +# 3.5.2 + +- bugfix: + - 修复 bamboo-engine 周期任务启动时不支持有环任务及未传入额外流程上下文的问题 + +# 3.5.1 + +- bugfix: + - 修复pre_render_keys拼写问题 + +# 3.5.0 + +- features: + - 增加 pipeline 引擎历史数据清理功能 + +# 3.4.0 + +- features: + - eri 版本限制功能支持 + - engine runtime prepare_run_pipeline API 支持配置子流程预置上下文 + +# 3.3.0 + +- optimization: + - 兼容 django3 +- bugfix: + - 修复多层级process时父流程change_key丢失导致渲染失败问题 + +# 3.2.6rc2 +- feature: + - TemplateRelationship descendant_template_id 字段添加索引 + +# 3.2.6rc1 + +- feature: + - PipelineTemplate model 支持识别 always_use_latest 字段 +- bugfix: + - 修复条件并行网关内部 ID 替换失败的问题 + +# 3.2.5 + +- minor: + - pyparsing 依赖版本调整 + +# 3.2.4 +- feature: + - 开始节点支持进行变量预渲染 +- bugfix: + - 修复条件并行网关内部 ID 替换失败的问题 + +# 3.2.3 +- bugfix: + - 修复节点能否重试/跳过属性设置不生效的问题 +# 3.2.2 +- bugfix: + - 修复 pipeline.eri.codec 文件不存在的问题 + +# 3.2.1 +- feature: + - bamboo_engine runtime 支持设置 root_pipeline_context + +# 3.2.0 +- optimization: + - 引擎内部流转日志优化 + +# 3.1.9 +- feature: + - 数据统计支持从 bamboo_engine 启动的任务 + +# 3.1.8 + +- feature: + - 周期任务支持启动 bamboo_engine 任务 +# 3.1.7 + +- feature: + - bamboo_engine State model error_ignored 字段支持 +- bugfix + - 修复 bamboo_engine 强制失败后日志版本未刷新的问题 + +# 3.1.6 + +- minor + - setup optimization + +# 3.1.5 + +- bugfix + - 修复节点日志无法区分多个节点版本状态的问题 +# 3.1.4 + +- feature + - 添加对 bamboo-engine 的 prometheus metrics 支持 + +# 3.1.3 + +- feature + - get_process_info_with_root_pipeline 接口支持 + +# 3.1.2 + +- bugfix + - 修复 service wrapper 在 schedule 返回 None 时导致节点卡住的问题 + +# 3.1.1 + +- bugfix: + - 修复 pypi module 模式下 create_plugins_app 无法工作的问题 + +# 3.1.0 + +- features: + - 增加 bamboo-engine runtime 实现 + - s3远程插件加载支持配置源目录 + - 插件测试框架添加对插件input/output format的检查 & 同步修改service_activity result type类型 +- optimization + - engine admin 页面性能优化 + - worker 检测增加 
+- bugfix:
+  - 修复并行网关子进程解析了 splice 类型变量后 _value 没有同步到父进程的问题
+  - 修复分支网关表达式字符串未转义导致判断出错问题
+  - 修复某些场景下网关合法性校验死循环的问题
+
+# 3.0.0rc2
+
+- bugfix:
+  - 修复高并发情况下 sleep 可能会覆写 schedule 产生的数据的问题
+
+# 3.0.0rc1
+
+- features:
+  - 支持 Django2,支持 Celery 4
+
+# 2.4.4
+
+- feature:
+  - redis sentinel 模式支持配置 redis sentinel 独立密码
+  - mako 模板渲染上下文支持配置屏蔽关键字及导入模块
+  - 添加 engine context 模块,支持直接获取当前上下文中正在执行的节点信息
+  - 支持对 mako 模板进行安全检查
+- optimization:
+  - 结束节点写流程输出失败时不抛出错误,兼容输出字段可能未输出的情况
+  - 优化 pipeline set_{status} 系列接口
+  - 优化 component model 和 variable model 的加载逻辑
+  - 优化有向图环检测算法
+  - 为 pipeline.log LogEntry 的 logged_at 字段添加索引
+- bugfix:
+  - 修复子流程参数传递过程中内外层存在同名 key 时导致参数传递失败的问题
+  - 修复 lazy 变量 get_value 异常时没有捕获的 bug
+  - 修复 pipeline.log.handlers 没有捕获可能会出现的 AppRegistryNotReady 异常
+  - IOField 支持序列化节点数更多的流程
+  - 修复解析 mako 变量值时没有捕获所有可能异常的问题
+  - 修复分支网关提前解析了引用输出的变量后导致后续节点获取不到值的问题
+
+# 2.4.3
+
+- feature:
+  - 增加引擎内部消息流转检测功能,记录发送失败的消息,开发者能够对其进行重放
+  - databackend 添加自动过期功能
+- bugfix:
+  - 修复 pipeline worker 检查未使用最新 mq 链接的问题
+
+# 2.4.2
+
+- optimization:
+  - pipeline worker 检查添加重连机制
+- bugfix:
+  - 旧版本存留任务 pickle 数据兼容 _runtime_attrs 不存在的情况
+
+# 2.4.1
+
+- features:
+  - force_fail 支持传入 ex_data 参数
+  - 变量引擎渲染内置函数时返回原字符串
+  - 标准插件支持自动加载模块子路径下的所有插件
+- optimization:
+  - 在 schedule 时更新节点对应的 Data 对象
+  - 部分 DB 字段增加索引,解决数据增长后带来的慢查询问题
+- bugfix:
+  - 修复读取 python manage cmd 可能出现的 IndexError
+
+# 2.4.0
+
+- features:
+  - 插件统计信息支持记录插件在流程中的版本号
+  - 添加僵死任务检测功能
+  - 在节点 RuntimeAttrs 中添加 `root_pipeline_id`
+- optimization:
+  - 插件扫描功能支持忽略特定命令
+- bugfix:
+  - 修复对子流程中的节点进行强制失败时可能会失败的问题
+  - 修复并发多次回调数据混乱问题
+
+# 2.3.0rc1
+
+- features:
+  - 节点重入时 `_loop` 的开始值可以配置
+  - 增加用户自定义配置隔离队列功能
+  - 添加引擎状态(workers, queues)获取接口
+  - 调度节点支持多次回调
+- bugfix:
+  - 修复子流程中有环时无法执行任务的问题
+
+# 2.2.0rc1
+
+- features:
+  - 添加插件执行命令 `manage.py run_component`
+  - 输出变量支持配置多个 source_act
+  - redis replication 模式支持配置多 sentinels
+  - 支持配置备选 data_backend, 提升流程执行容错性
+  - PipelineInstance 新增 is_revoked 属性
+- improvements:
+  - 优化并行网关的执行效率
+  - 优化无法从 settings 中获取 redis 配置时的日志提示
+  - 插件模块导入错误时添加错误日志
+- bugfix:
+  - 修复 MySQLDatabackend 更新数据时产生死锁问题
+  - 修复带打回流程 tree 导致 parser 解析错误问题
+  - 修复 py3 下部分编码问题
+  - 修复多次对不存在的流程调用 revoke_pipeline 接口返回结果不一致的问题
+  - 修复 snapshot 为空时 in_subprocess 调用报错的问题
+  - 修复汇聚网关是否被共享判断逻辑有漏洞的问题
+  - 修复节点重入时记录的 history 中 started_time 不正确的问题
+  - 修复读取 python2 pickle dump 的数据可能会导致 DecodeError 的问题
+
+# 2.1.0rc1
+
+- features:
+  - 添加插件版本管理功能
+
+# 2.0.0rc2
+
+- bugfix:
+  - 插件单元测试执行出错时,测试命令不会返回 0
+
+# 2.0.0rc1
+
+- features:
+  - py3 支持
+
+# 1.0.0
+
+- features:
+  - 流程启动支持传入优先级
+
+# 0.9.8
+
+- features:
+  - builder 中 ServiceActivity 元素添加可忽略错误等配置项
+  - pipeline log 模块日志级别支持配置
+  - task_service 添加节点执行日志获取接口
+  - 添加能够自定义执行逻辑的结束节点
+
+# 0.9.7
+
+- features:
+  - builder 增加新的全局数据的传递方式
+  - 添加条件并行网关
+- bugfix:
+  - 修复子流程中结束节点执行错误时无法优雅退出进程的 bug
+
+# 0.9.6
+
+- features:
+  - 节点支持重新执行,以支持循环和打回
+  - 流程结构支持更加复杂的环状结构
+- bugfix:
+  - 修复同时发起同一个根流程下子进程的唤醒后部分子进程无法往下执行的问题
+  - 修复子进程完成后在调整子流程栈中的状态时未处理暂停状态的问题
+  - 修复批量重试时部分子进程因为根流程处于 BLOCKED 状态而无法继续执行的问题
+
+# 0.9.5
+
+- minors:
+  - 单元测试完善
+
+# 0.9.4
+
+- minors:
+  - 定时流程在激活时不允许修改流程的常量
+
+# 0.9.3
+
+- features:
+  - 流程模板在保存时设置是否含有子流程的信息
+
+# 0.9.2
+
+- improvements:
+  - 将 models 模块下与 web 层相关的代码移动到 pipeline_web 中
+
+# 0.9.1
+
+- features:
+  - 模板接口兼容 web 及 sdk 模式下的数据
+
+# 0.9.0
+
+- features:
+  - 当引擎冻结时不再启动周期任务,并将当前启动记入失败历史
+- bugfix:
+  - 修复节点超时强制失败操作执行失败时仍然发送节点执行失败的信号的 bug
+
+
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..b1a23b4c
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,65 @@
+"""A setuptools based setup module.
+ +See: +https://packaging.python.org/en/latest/distributing.html +https://github.com/pypa/sampleproject +""" + +# To use a consistent encoding +from codecs import open +from os import path + +# Always prefer setuptools over distutils +from setuptools import find_packages, setup + +here = path.abspath(path.dirname(__file__)) +about = {} +with open(path.join(here, "bamboo_engine", "__version__.py"), "r", encoding="utf-8") as f: + exec(f.read(), about) + +long_description = """ +Bamboo-engine is a general-purpose process engine which can parse, +execute and schedule process tasks created by users, and provides flexible control capabilities such as pause, revoke, skip, force failure, retry and re-execute, +and advanced features such as parallelism and sub-processes. +It can further improve the concurrent processing ability of tasks through horizontal expansion. +""" +version = about["__version__"] + +setup( + name="bamboo-engine", + # Versions should comply with PEP440. For a discussion on single-sourcing + # the version across setup.py and the project code, see + # https://packaging.python.org/en/latest/single_source_version.html + version=version, + description="bamboo-engine", + long_description=long_description, + # The project's main homepage. + url="https://github.com/Tencent/bk-sops/tree/sdk/sdk/bamboo-engine", + # Author details + author="Blueking", + author_email="howellliang@tencent.com", + include_package_data=True, + # You can just specify the packages manually here if your project is + # simple. Or you can use find_packages(). + packages=find_packages(), + # Alternatively, if you want to distribute just a my_module.py, uncomment + # this: + # py_modules=["my_module"], + # List run-time dependencies here. These will be installed by pip when + # your project is installed. For an analysis of "install_requires" vs pip's + # requirements files see: + # https://packaging.python.org/en/latest/requirements.html + install_requires=[ + "Werkzeug>=1.0.1,<2.0", + "pyparsing>=2.2.0,<3.0", + "mako>=1.1.4,<2.0", + "prometheus-client>=0.9.0,<1.0.0", + ], + zip_safe=False, + # To provide executable scripts, use entry points in preference to the + # "scripts" keyword. Entry points provide cross-platform support and allow + # pip to create the appropriate form of executable for the target platform. + # entry_points={ + # "console_scripts": ["bk-admin=blueapps.contrib.bk_commands:bk_admin"], + # }, +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/tests/engine/__init__.py b/tests/engine/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/engine/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/tests/engine/test_engine_api.py b/tests/engine/test_engine_api.py new file mode 100644 index 00000000..be68bdc5 --- /dev/null +++ b/tests/engine/test_engine_api.py @@ -0,0 +1,1181 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +import pytest +from mock import MagicMock, call, patch + +from bamboo_engine.api import preview_node_inputs +from bamboo_engine.eri import SuspendedProcessInfo, NodeType, Variable +from bamboo_engine import states, exceptions +from bamboo_engine.engine import Engine + + +def test_run_pipeline(): + process_id = 1 + start_event_id = "token" + options = {"priority": 100, "queue": "q"} + + runtime = MagicMock() + runtime.prepare_run_pipeline = MagicMock(return_value=process_id) + runtime.execute = MagicMock() + validator = MagicMock() + + pipeline = {"start_event": {"id": start_event_id}} + root_pipeline_data = {"k": "v"} + root_pipeline_context = {"k1": "v1"} + subprocess_context = {"k2": "v2"} + + engine = Engine(runtime=runtime) + + with patch("bamboo_engine.engine.validator", validator): + engine.run_pipeline( + pipeline=pipeline, + root_pipeline_data=root_pipeline_data, + root_pipeline_context=root_pipeline_context, + subprocess_context=subprocess_context, + **options + ) + + validator.validate_and_process_pipeline.assert_called_once_with(pipeline, False) + runtime.pre_prepare_run_pipeline.assert_called_once_with( + pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options + ) + runtime.prepare_run_pipeline.assert_called_once_with( + pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options + ) + runtime.post_prepare_run_pipeline.assert_called_once_with( + pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options + ) + runtime.execute.assert_called_once_with(process_id, start_event_id) + + +def test_pause_pipeline(): + pipeline_id = "pid" + + runtime = MagicMock() + runtime.has_state = MagicMock(return_value=True) + + engine = Engine(runtime=runtime) + engine.pause_pipeline(pipeline_id) + + runtime.has_state.assert_called_once_with(pipeline_id) + runtime.pre_pause_pipeline.assert_called_once_with(pipeline_id) + runtime.set_state.assert_called_once_with(node_id=pipeline_id, to_state=states.SUSPENDED) + runtime.post_pause_pipeline.assert_called_once_with(pipeline_id) + + +def test_pause_pipeline__pipeline_not_exist(): + pipeline_id = "pid" + + runtime = MagicMock() + runtime.has_state = MagicMock(return_value=False) + runtime.pre_pause_pipeline = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + + with pytest.raises(exceptions.NotFoundError): + engine.pause_pipeline(pipeline_id) + + +def test_revoke_pipeline(): + pipeline_id = "pid" + + runtime = MagicMock() + runtime.has_state = MagicMock(return_value=True) + + engine = Engine(runtime=runtime) + engine.revoke_pipeline(pipeline_id) + + runtime.has_state.assert_called_once_with(pipeline_id) + runtime.pre_revoke_pipeline.assert_called_once_with(pipeline_id) + runtime.set_state.assert_called_once_with(node_id=pipeline_id, to_state=states.REVOKED) + runtime.post_revoke_pipeline.assert_called_once_with(pipeline_id) + + +def test_revoke_pipeline__pipeline_not_exist(): + pipeline_id = "pid" + + runtime = MagicMock() + runtime.has_state = MagicMock(return_value=False) + runtime.pre_revoke_pipeline = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + + with pytest.raises(exceptions.NotFoundError): + engine.revoke_pipeline(pipeline_id) + + +def test_resume_pipeline(): + pipeline_id = "pid" + suspended_process_info = [ + SuspendedProcessInfo(process_id=1, current_node=2), + SuspendedProcessInfo(process_id=3, current_node=4), + ] + state = MagicMock() + state.name = "SUSPENDED" + + runtime = MagicMock() + runtime.get_state = 
MagicMock(return_value=state) + runtime.get_suspended_process_info = MagicMock(return_value=suspended_process_info) + + engine = Engine(runtime=runtime) + engine.resume_pipeline(pipeline_id) + + runtime.get_state.assert_called_once_with(pipeline_id) + runtime.get_suspended_process_info.assert_called_once_with(pipeline_id) + runtime.pre_resume_pipeline.assert_called_once_with(pipeline_id) + runtime.set_state.assert_called_once_with(node_id=pipeline_id, to_state=states.RUNNING) + runtime.batch_resume.assert_called_once_with( + process_id_list=[ + suspended_process_info[0].process_id, + suspended_process_info[1].process_id, + ] + ) + runtime.execute.assert_has_calls( + [ + call( + suspended_process_info[0].process_id, + suspended_process_info[0].current_node, + ), + call( + suspended_process_info[1].process_id, + suspended_process_info[1].current_node, + ), + ] + ) + runtime.post_resume_pipeline.assert_called_once_with(pipeline_id) + + +def test_resume_pipeline__state_not_match(): + pipeline_id = "pid" + suspended_process_info = [] + state = MagicMock() + state.name = "RUNNING" + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + + engine = Engine(runtime=runtime) + + with pytest.raises(exceptions.InvalidOperationError): + engine.resume_pipeline(pipeline_id) + + runtime.get_state.assert_called_once_with(pipeline_id) + runtime.set_state.assert_not_called() + + +def test_resume_pipeline__can_not_find_suspended_process(): + pipeline_id = "pid" + suspended_process_info = [] + state = MagicMock() + state.name = "SUSPENDED" + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.get_suspended_process_info = MagicMock(return_value=suspended_process_info) + + engine = Engine(runtime=runtime) + engine.resume_pipeline(pipeline_id) + + runtime.get_state.assert_called_once_with(pipeline_id) + runtime.get_suspended_process_info.assert_called_once_with(pipeline_id) + runtime.pre_resume_pipeline.assert_called_once_with(pipeline_id) + runtime.set_state.assert_called_once_with(node_id=pipeline_id, to_state=states.RUNNING) + runtime.batch_resume.assert_not_called() + runtime.execute.assert_not_called() + runtime.post_resume_pipeline.assert_called_once_with(pipeline_id) + + +def test_pause_node_appoint(): + node_id = "nid" + node_type = NodeType.ServiceActivity + + node = MagicMock() + node.type = node_type + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + + engine = Engine(runtime=runtime) + engine.pause_node_appoint(node_id) + + runtime.pre_pause_node.assert_called_once_with(node_id) + runtime.set_state.assert_called_once_with(node_id=node_id, to_state=states.SUSPENDED) + runtime.post_pause_node.assert_called_once_with(node_id) + + +def test_pause_node_appoint__node_type_is_subprocess(): + node_id = "nid" + node_type = NodeType.SubProcess + + node = MagicMock() + node.type = node_type + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.pre_pause_node = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + with pytest.raises(exceptions.InvalidOperationError): + engine.pause_node_appoint(node_id) + + +def test_resume_node_appoint(): + node_id = "nid" + node_type = NodeType.ServiceActivity + + node = MagicMock() + node.type = node_type + suspended_process_info_list = [ + SuspendedProcessInfo("1", "2"), + ] + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.get_suspended_process_info = MagicMock(return_value=suspended_process_info_list) + + 
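# note: per the assertions below, resume_node_appoint is expected to set the node
+    # back to READY, resume each process suspended on it, and re-execute every one
+    # of them from the node it was parked on.
+    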
engine = Engine(runtime=runtime) + engine.resume_node_appoint(node_id) + + runtime.get_node.assert_called_once_with(node_id) + runtime.pre_resume_node.assert_called_once_with(node_id) + runtime.set_state.assert_called_once_with(node_id=node_id, to_state=states.READY) + runtime.get_suspended_process_info.assert_called_once_with(node_id) + runtime.resume.assert_called_once_with(process_id=suspended_process_info_list[0].process_id) + runtime.execute.assert_called_once_with( + suspended_process_info_list[0].process_id, + suspended_process_info_list[0].current_node, + ) + runtime.post_resume_node.assert_called_once_with(node_id) + + +def test_resume_node_appoint__node_type_is_subprocess(): + node_id = "nid" + node_type = NodeType.SubProcess + + node = MagicMock() + node.type = node_type + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.pre_resume_node = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + with pytest.raises(exceptions.InvalidOperationError): + engine.resume_node_appoint(node_id) + + +def test_resume_node_appoint__without_suspended_process(): + node_id = "nid" + node_type = NodeType.ServiceActivity + + node = MagicMock() + node.type = node_type + suspended_process_info_list = [] + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.get_suspended_process_info = MagicMock(return_value=suspended_process_info_list) + runtime.resume = MagicMock(side_effect=Exception) + runtime.execute = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + engine.resume_node_appoint(node_id) + + runtime.get_node.assert_called_once_with(node_id) + runtime.pre_resume_node.assert_called_once_with(node_id) + runtime.set_state.assert_called_once_with(node_id=node_id, to_state=states.READY) + runtime.get_suspended_process_info.assert_called_once_with(node_id) + runtime.resume.assert_not_called() + runtime.execute.assert_not_called() + runtime.post_resume_node.assert_called_once_with(node_id) + + +def test_retry_node(): + node_id = "nid" + process_id = "pid" + data = {} + + state = MagicMock() + state.node_id = node_id + state.name = states.FAILED + state.started_time = "started_time" + state.archived_time = "archived_time" + state.loop = 1 + state.skip = True + state.retry = 0 + state.version = "version" + + execution_data = MagicMock() + execution_data.inputs = "inputs" + execution_data.outputs = "outputs" + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id) + runtime.get_execution_data = MagicMock(return_value=execution_data) + + engine = Engine(runtime=runtime) + engine.retry_node(node_id, data) + + runtime.pre_retry_node.assert_called_once_with(node_id, data) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id) + runtime.set_data_inputs.assert_called_once_with(node_id, data) + runtime.add_history.assert_called_once_with( + node_id=node_id, + started_time=state.started_time, + archived_time=state.archived_time, + loop=state.loop, + skip=state.skip, + retry=state.retry, + version=state.version, + inputs=execution_data.inputs, + outputs=execution_data.outputs, + ) + runtime.set_state.assert_called_once_with( + node_id=node_id, + to_state=states.READY, + is_retry=True, + refresh_version=True, + clear_started_time=True, + clear_archived_time=True, + ) + runtime.execute.assert_called_once_with(process_id, node_id) + 
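# note: retry archives the failed run into history and refreshes the node version
+    # before re-dispatching the sleeping process; the post-hook below fires last.
+    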
runtime.post_retry_node.assert_called_once_with(node_id, data) + + +def test_retry_node__state_is_not_failed(): + node_id = "nid" + data = {} + + state = MagicMock() + state.name = states.RUNNING + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.pre_retry_node = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + with pytest.raises(exceptions.InvalidOperationError): + engine.retry_node(node_id, data) + + +def test_retry_node__can_retry_is_false(): + node_id = "nid" + process_id = "pid" + data = {} + + state = MagicMock() + state.node_id = node_id + state.name = states.FAILED + + node = MagicMock() + node.can_retry = False + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.get_node = MagicMock(return_value=node) + + engine = Engine(runtime=runtime) + with pytest.raises(exceptions.InvalidOperationError): + engine.retry_node(node_id, data) + + +def test_retry_node__can_not_find_sleep_process(): + node_id = "nid" + data = {} + + state = MagicMock() + state.node_id = node_id + state.name = states.FAILED + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=None) + runtime.pre_retry_node = MagicMock(side_effect=Exception) + + engine = Engine(runtime=runtime) + with pytest.raises(exceptions.InvalidOperationError): + engine.retry_node(node_id, data) + + +def test_retry_node__with_none_data(): + node_id = "nid" + process_id = "pid" + + state = MagicMock() + state.node_id = node_id + state.name = states.FAILED + state.started_time = "started_time" + state.archived_time = "archived_time" + state.loop = 1 + state.skip = True + state.retry = 0 + state.version = "version" + + execution_data = MagicMock() + execution_data.inputs = "inputs" + execution_data.outputs = "outputs" + + runtime = MagicMock() + runtime.get_state = MagicMock(return_value=state) + runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id) + runtime.get_execution_data = MagicMock(return_value=execution_data) + + engine = Engine(runtime=runtime) + engine.retry_node(node_id) + + runtime.pre_retry_node.assert_called_once_with(node_id, None) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id) + runtime.set_data_inputs.assert_not_called() + runtime.add_history.assert_called_once_with( + node_id=node_id, + started_time=state.started_time, + archived_time=state.archived_time, + loop=state.loop, + skip=state.skip, + retry=state.retry, + version=state.version, + inputs=execution_data.inputs, + outputs=execution_data.outputs, + ) + runtime.set_state.assert_called_once_with( + node_id=node_id, + to_state=states.READY, + is_retry=True, + refresh_version=True, + clear_started_time=True, + clear_archived_time=True, + ) + runtime.execute.assert_called_once_with(process_id, node_id) + runtime.post_retry_node.assert_called_once_with(node_id, None) + + +def test_skip_node(): + node_id = "nid" + process_id = "pid" + + node = MagicMock() + node.type = NodeType.ServiceActivity + node.can_skip = True + node.target_nodes = ["target_node"] + + state = MagicMock() + state.node_id = node_id + state.name = states.FAILED + state.started_time = "started_time" + state.archived_time = "archived_time" + state.loop = 1 + state.skip = True + state.retry = 0 + state.version = "version" + + execution_data = MagicMock() + execution_data.inputs = "inputs" + 
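# execution data snapshot that skip_node is expected to archive into history
+    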
execution_data.outputs = "outputs"
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_execution_data = MagicMock(return_value=execution_data)
+
+    engine = Engine(runtime=runtime)
+    engine.skip_node(node_id)
+
+    runtime.get_node.assert_called_once_with(node_id)
+    runtime.pre_skip_node.assert_called_once_with(node_id)
+    runtime.get_state.assert_called_once_with(node_id)
+    runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id)
+    runtime.add_history.assert_called_once_with(
+        node_id=node_id,
+        started_time=state.started_time,
+        archived_time=state.archived_time,
+        loop=state.loop,
+        skip=state.skip,
+        retry=state.retry,
+        version=state.version,
+        inputs=execution_data.inputs,
+        outputs=execution_data.outputs,
+    )
+    runtime.set_state.assert_called_once_with(
+        node_id=node_id,
+        to_state=states.FINISHED,
+        is_skip=True,
+        refresh_version=True,
+        set_archive_time=True,
+    )
+    runtime.execute.assert_called_once_with(process_id, node.target_nodes[0])
+    runtime.post_skip_node.assert_called_once_with(node_id)
+
+
+def test_skip_node__node_can_not_skip():
+    node_id = "nid"
+    process_id = "pid"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+    node.can_skip = False
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.pre_skip_node = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_node(node_id)
+
+
+def test_skip_node__node_type_not_fit():
+    node_id = "nid"
+    process_id = "pid"
+
+    node = MagicMock()
+    node.type = NodeType.SubProcess
+    node.can_skip = True
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.pre_skip_node = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_node(node_id)
+
+
+def test_skip_node__state_is_not_failed():
+    node_id = "nid"
+    process_id = "pid"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+    node.can_skip = True
+    node.target_nodes = ["target_node"]
+
+    state = MagicMock()
+    state.node_id = node_id
+    state.name = states.RUNNING
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.pre_skip_node = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_node(node_id)
+
+    runtime.get_state.assert_called_once_with(node_id)
+
+
+def test_skip_node__can_not_find_sleep_process():
+    node_id = "nid"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+    node.can_skip = True
+    node.target_nodes = ["target_node"]
+
+    state = MagicMock()
+    state.node_id = node_id
+    state.name = states.FAILED
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=None)
+    runtime.pre_retry_node = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_node(node_id)
+
+    runtime.get_state.assert_called_once_with(node_id)
+
+
+def test_skip_exclusive_gateway():
+    node_id = "nid"
+    process_id = "pid"
+    flow_id = "flow_1"
+
+    node = MagicMock()
+    node.id = node_id
+    node.type = NodeType.ExclusiveGateway
+    node.targets = {flow_id: "target_1"}
+
+    state = MagicMock()
+    state.node_id = node_id
+    state.name = states.FAILED
+    state.started_time = "started_time"
+    state.archived_time = "archived_time"
+    state.loop = 1
+    state.skip = True
+    state.retry = 0
+    state.version = "version"
+
+    execution_data = MagicMock()
+    execution_data.inputs = "inputs"
+    execution_data.outputs = "outputs"
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.pre_skip_exclusive_gateway = MagicMock()
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_execution_data = MagicMock(return_value=execution_data)
+
+    engine = Engine(runtime=runtime)
+    engine.skip_exclusive_gateway(node_id, flow_id)
+
+    runtime.get_node.assert_called_once_with(node_id)
+    runtime.pre_skip_exclusive_gateway.assert_called_once_with(node_id, flow_id)
+    runtime.get_state.assert_called_once_with(node_id)
+    runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id)
+    runtime.add_history.assert_called_once_with(
+        node_id=node_id,
+        started_time=state.started_time,
+        archived_time=state.archived_time,
+        loop=state.loop,
+        skip=state.skip,
+        retry=state.retry,
+        version=state.version,
+        inputs=execution_data.inputs,
+        outputs=execution_data.outputs,
+    )
+    runtime.set_state.assert_called_once_with(
+        node_id=node_id,
+        to_state=states.FINISHED,
+        is_skip=True,
+        refresh_version=True,
+        set_archive_time=True,
+    )
+    runtime.execute.assert_called_once_with(process_id, node.targets[flow_id])
+    runtime.post_skip_exclusive_gateway.assert_called_once_with(node_id, flow_id)
+
+
+def test_skip_exclusive_gateway__node_type_not_fit():
+    node_id = "nid"
+    flow_id = "flow_1"
+
+    node = MagicMock()
+    node.type = NodeType.ParallelGateway
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_exclusive_gateway(node_id, flow_id)
+
+
+def test_skip_exclusive_gateway__node_is_not_failed():
+    node_id = "nid"
+    process_id = "pid"
+    flow_id = "flow_1"
+
+    node = MagicMock()
+    node.type = NodeType.ExclusiveGateway
+    node.can_skip = True
+    node.targets = {flow_id: "target1"}
+
+    state = MagicMock()
+    state.node_id = node_id
+    state.name = states.RUNNING
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.pre_skip_exclusive_gateway = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_exclusive_gateway(node_id, flow_id)
+
+    runtime.get_state.assert_called_once_with(node_id)
+
+
+def test_skip_exclusive_gateway__can_not_find_sleep_process():
+    node_id = "nid"
+    flow_id = "flow_1"
+
+    node = MagicMock()
+    node.type = NodeType.ExclusiveGateway
+    node.targets = {flow_id: "target1"}
+
+    state = MagicMock()
+    state.node_id = node_id
+    state.name = states.FAILED
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=None)
+    runtime.pre_skip_exclusive_gateway = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.skip_exclusive_gateway(node_id, flow_id)
+
+    runtime.get_state.assert_called_once_with(node_id)
+
+
+def test_forced_fail_activity():
+    node_id = "nid"
+    ex_data = "ex_msg"
+    process_id = "pid"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+
+    state = MagicMock()
+    state.name = states.RUNNING
+    state.version = "old_version"
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_process_id_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_execution_data_outputs = MagicMock(return_value={})
+    runtime.set_state = MagicMock(return_value="new_version")
+
+    engine = Engine(runtime=runtime)
+    engine.forced_fail_activity(node_id, ex_data)
+
+    runtime.get_node.assert_called_once_with(node_id)
+    runtime.get_state.assert_called_once_with(node_id)
+    runtime.get_process_id_with_current_node_id.assert_called_once_with(node_id)
+    runtime.pre_forced_fail_activity.assert_called_once_with(node_id, ex_data)
+    runtime.get_execution_data_outputs.assert_called_once_with(node_id)
+    runtime.set_state.assert_called_once_with(
+        node_id=node_id,
+        to_state=states.FAILED,
+        refresh_version=True,
+        set_archive_time=True,
+    )
+    runtime.set_execution_data_outputs.assert_called_once_with(node_id, {"ex_data": ex_data, "_forced_failed": True})
+    runtime.kill.assert_called_once_with(process_id)
+    runtime.post_forced_fail_activity.assert_called_once_with(node_id, ex_data, "old_version", "new_version")
+
+
+def test_forced_fail_activity__node_type_not_fit():
+    node_id = "nid"
+    ex_data = "ex_msg"
+
+    node = MagicMock()
+    node.type = NodeType.SubProcess
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.forced_fail_activity(node_id, ex_data)
+
+
+def test_forced_fail_activity__node_is_not_running():
+    node_id = "nid"
+    ex_data = "ex_msg"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+
+    state = MagicMock()
+    state.name = states.FINISHED
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_process_id_with_current_node_id = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.forced_fail_activity(node_id, ex_data)
+
+    runtime.get_state.assert_called_once_with(node_id)
+
+
+def test_forced_fail_activity__can_not_find_process_id():
+    node_id = "nid"
+    ex_data = "ex_msg"
+
+    node = MagicMock()
+    node.type = NodeType.ServiceActivity
+
+    state = MagicMock()
+    state.name = states.RUNNING
+
+    runtime = MagicMock()
+    runtime.get_node = MagicMock(return_value=node)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_process_id_with_current_node_id = MagicMock(return_value=None)
+    runtime.pre_forced_fail_activity = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.forced_fail_activity(node_id, ex_data)
+
+    runtime.get_process_id_with_current_node_id.assert_called_once_with(node_id)
+
+
+def test_callback():
+    node_id = "nid"
+    version = "v1"
+    process_id = "pid"
+    data = {"data": 1}
+    data_id = 1
+
+    state = MagicMock()
+    state.version = version
+
+    schedule = MagicMock()
+    schedule_id = 2
+    schedule.finished = False
+    schedule.expired = False
+
+    runtime = MagicMock()
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_schedule_with_node_and_version = MagicMock(return_value=schedule)
+    runtime.set_callback_data = MagicMock(return_value=data_id)
+
+    engine = Engine(runtime=runtime)
+    engine.callback(node_id, version, data)
+
+    runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id)
+    runtime.get_state.assert_called_once_with(node_id)
+    runtime.get_schedule_with_node_and_version.assert_called_once_with(node_id, version)
+    runtime.pre_callback.assert_called_once_with(node_id, version, data)
+    runtime.set_callback_data.assert_called_once_with(node_id, version, data)
+    runtime.schedule.assert_called_once_with(process_id, node_id, schedule.id, data_id)
+    runtime.post_callback.assert_called_once_with(node_id, version, data)
+
+
+def test_callback__can_not_find_process_id():
+    node_id = "nid"
+    version = "v1"
+    data = {"data": 1}
+
+    runtime = MagicMock()
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=None)
+    runtime.get_state = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.callback(node_id, version, data)
+
+
+def test_callback__version_not_match():
+    node_id = "nid"
+    version = "v1"
+    process_id = "pid"
+    data = {"data": 1}
+
+    state = MagicMock()
+    state.version = "v2"
+
+    schedule = MagicMock()
+    schedule_id = 2
+
+    runtime = MagicMock()
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_schedule_with_node_and_version = MagicMock(return_value=schedule)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.pre_callback = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.callback(node_id, version, data)
+
+    runtime.expire_schedule.assert_called_once_with(schedule.id)
+
+
+def test_callback__schedule_finished():
+    node_id = "nid"
+    version = "v1"
+    process_id = "pid"
+    data = {"data": 1}
+
+    state = MagicMock()
+    state.version = version
+
+    schedule = MagicMock()
+    schedule_id = 2
+    schedule.finished = True
+    schedule.expired = False
+
+    runtime = MagicMock()
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_schedule_with_node_and_version = MagicMock(return_value=schedule)
+    runtime.pre_callback = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.callback(node_id, version, data)
+
+    runtime.get_schedule_with_node_and_version.assert_called_once_with(node_id, version)
+    runtime.expire_schedule.assert_not_called()
+
+
+def test_callback__schedule_expired():
+    node_id = "nid"
+    version = "v1"
+    process_id = "pid"
+    data = {"data": 1}
+
+    state = MagicMock()
+    state.version = version
+
+    schedule = MagicMock()
+    schedule_id = 2
+    schedule.finished = False
+    schedule.expired = True
+
+    runtime = MagicMock()
+    runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id)
+    runtime.get_state = MagicMock(return_value=state)
+    runtime.get_schedule_with_node_and_version = MagicMock(return_value=schedule)
+    runtime.pre_callback = MagicMock(side_effect=Exception)
+
+    engine = Engine(runtime=runtime)
+    with pytest.raises(exceptions.InvalidOperationError):
+        engine.callback(node_id, version, data)
+
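+    # note: an expired schedule rejects the callback outright; it is not expired a second time.
+    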
runtime.get_schedule_with_node_and_version.assert_called_once_with(node_id, version) + runtime.expire_schedule.assert_not_called() + + +def test_preview_node_inputs__plain_variable(): + node_id = "nid" + pipeline = {"data": {}, "activities": {"nid": {"component": {"inputs": {"input": {"value": "test"}}}}}} + runtime = MagicMock() + + api_result = preview_node_inputs(runtime, pipeline, node_id) + assert api_result.result is True + assert api_result.data == {"input": "test"} + + +class MockCV(Variable): + def __init__(self, value): + self.value = value + + def get(self): + return "compute_result of {}".format(self.value) + + +def test_preview_node_inputs__ref_variable(): + node_id = "nid" + pipeline = { + "data": { + "inputs": { + "${test}": {"custom_type": "custom_type", "value": "test", "is_param": False, "type": "lazy"}, + "${input}": {"type": "splice", "is_param": False, "value": "${test} in input"}, + } + }, + "activities": { + "nid": { + "component": { + "inputs": { + "input": { + "type": "splice", + "is_param": False, + "value": "${input}", + } + } + } + } + }, + } + + compute_var = MockCV(pipeline["data"]["inputs"]["${test}"]["value"]) + runtime = MagicMock() + runtime.get_compute_variable = MagicMock(return_value=compute_var) + + api_result = preview_node_inputs(runtime, pipeline, node_id) + assert api_result.result is True + assert api_result.data == {"input": "compute_result of test in input"} + + +def test_preview_node_inputs__with_subprocess(): + node_id = "nid" + subprocess_id = "sid" + subprocess_pipeline = { + "data": {"inputs": {"${input}": {"value": "${test}", "type": "splice", "is_param": True}}}, + "activities": { + "nid": {"component": {"inputs": {"input": {"value": "${input}", "type": "splice", "is_param": False}}}} + }, + } + pipeline = { + "data": { + "inputs": { + "${test}": { + "value": "test_value", + "type": "plain", + "is_param": False, + }, + "${input}": {"value": "parent_input", "type": "plain", "is_param": False}, + } + }, + "activities": { + "sid": {"pipeline": subprocess_pipeline, "params": {"${input}": {"type": "splice", "value": "${test}"}}} + }, + } + runtime = MagicMock() + + api_result = preview_node_inputs(runtime, pipeline, node_id, subprocess_stack=[subprocess_id]) + assert api_result.result is True + assert api_result.data == {"input": "test_value"} + + +def test_retry_subprocess__type_not_match(): + node_id = "nid" + + node = MagicMock() + node.type = NodeType.ServiceActivity + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + + engine = Engine(runtime) + + try: + engine.retry_subprocess(node_id) + except exceptions.InvalidOperationError: + pass + else: + assert False, "InvalidOperationError not raise" + + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state.assert_not_called() + + +def test_retry_subprocess__state_is_not_fail(): + node_id = "nid" + + node = MagicMock() + node.type = NodeType.SubProcess + + state = MagicMock() + state.name = states.RUNNING + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.get_state = MagicMock(return_value=state) + + engine = Engine(runtime) + + try: + engine.retry_subprocess(node_id) + except exceptions.InvalidOperationError: + pass + else: + assert False, "InvalidOperationError not raise" + + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.pre_retry_subprocess.assert_not_called() + + +def test_retry_subprocess__success_and_need_reset_pipeline_stack(): + node_id = "nid" + 
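# the subprocess id sits on top of pipeline_stack below; a successful retry is
+    # expected to pop it first (see the set_pipeline_stack assertion).
+    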
process_id = "process_id" + + node = MagicMock() + node.type = NodeType.SubProcess + + state = MagicMock() + state.name = states.FAILED + state.node_id = node_id + + process_info = MagicMock() + process_info.pipeline_stack = ["p", "nid"] + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.get_state = MagicMock(return_value=state) + runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id) + runtime.get_process_info = MagicMock(return_value=process_info) + + engine = Engine(runtime) + engine._add_history = MagicMock() + + engine.retry_subprocess(node_id) + + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id) + runtime.pre_retry_subprocess.assert_called_once_with(node_id) + runtime.set_pipeline_stack.assert_called_once_with(process_id, process_info.pipeline_stack[:-1]) + engine._add_history.assert_called_once_with(node_id, state) + runtime.set_state.assert_called_once_with( + node_id=node_id, + to_state=states.READY, + is_retry=True, + refresh_version=True, + clear_started_time=True, + clear_archived_time=True, + ) + runtime.execute.assert_called_once_with(process_id, node_id) + runtime.post_retry_subprocess.assert_called_once_with(node_id) + + +def test_retry_subprocess__success(): + node_id = "nid" + process_id = "process_id" + + node = MagicMock() + node.type = NodeType.SubProcess + + state = MagicMock() + state.name = states.FAILED + state.node_id = node_id + + process_info = MagicMock() + process_info.pipeline_stack = ["p"] + + runtime = MagicMock() + runtime.get_node = MagicMock(return_value=node) + runtime.get_state = MagicMock(return_value=state) + runtime.get_sleep_process_with_current_node_id = MagicMock(return_value=process_id) + runtime.get_process_info = MagicMock(return_value=process_info) + + engine = Engine(runtime) + engine._add_history = MagicMock() + + engine.retry_subprocess(node_id) + + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_sleep_process_with_current_node_id.assert_called_once_with(node_id) + runtime.pre_retry_subprocess.assert_called_once_with(node_id) + runtime.set_pipeline_stack.assert_not_called() + engine._add_history.assert_called_once_with(node_id, state) + runtime.set_state.assert_called_once_with( + node_id=node_id, + to_state=states.READY, + is_retry=True, + refresh_version=True, + clear_started_time=True, + clear_archived_time=True, + ) + runtime.execute.assert_called_once_with(process_id, node_id) + runtime.post_retry_subprocess.assert_called_once_with(node_id) diff --git a/tests/engine/test_engine_execute.py b/tests/engine/test_engine_execute.py new file mode 100644 index 00000000..5a152bd2 --- /dev/null +++ b/tests/engine/test_engine_execute.py @@ -0,0 +1,1159 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" + +import pytest +import mock +from mock import MagicMock, call + +from bamboo_engine.eri import ( + ProcessInfo, + ServiceActivity, + State, + NodeType, + ExecutionData, + ScheduleType, + Schedule, + DispatchProcess, +) +from bamboo_engine import states +from bamboo_engine.engine import Engine +from bamboo_engine.exceptions import StateVersionNotMatchError +from bamboo_engine.handler import HandlerFactory, ExecuteResult + + +def test_execute__reach_destination_and_wake_up_failed(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="nid", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.child_process_finish = MagicMock(return_value=False) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.wake_up.assert_called_once_with(pi.process_id) + runtime.beat.assert_called_once_with(pi.process_id) + runtime.child_process_finish.assert_called_once_with(pi.parent_id, pi.process_id) + runtime.execute.assert_not_called() + + +def test_execute__reach_destination_and_wake_up_success(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="nid", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.child_process_finish = MagicMock(return_value=True) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.wake_up.assert_called_once_with(pi.process_id) + runtime.beat.assert_called_once_with(pi.process_id) + runtime.child_process_finish.assert_called_once_with(pi.parent_id, pi.process_id) + runtime.execute.assert_called_once_with(pi.parent_id, pi.destination_id) + + +def test_execute__engine_frozen(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=True) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.set_current_node.assert_called_once_with(pi.process_id, node_id) + runtime.freeze.assert_called_once_with(pi.process_id) + + +def test_execute__root_pipeline_revoked(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.REVOKED}) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.die.assert_called_once_with(pi.process_id) + + +def test_execute__root_pipeline_suspended(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = 
MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.SUSPENDED}) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.suspend.assert_called_once_with(pi.process_id, pi.root_pipeline_id) + + +def test_execute__suspended_in_pipeline_stack(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root", "s1", "s2"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock( + return_value={ + "root": states.RUNNING, + "s1": states.SUSPENDED, + "s2": states.SUSPENDED, + } + ) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.suspend.assert_called_once_with(pi.process_id, pi.pipeline_stack[1]) + + +def test_execute__exceed_rerun_limit(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=state) + runtime.node_rerun_limit = MagicMock(return_value=10) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_called_once_with(pi.root_pipeline_id, node_id) + runtime.set_execution_data_outputs.assert_called_once_with( + node_id, {"ex_data": "node execution exceed rerun limit 10"} + ) + runtime.set_state.assert_called_once_with( + node_id=node_id, to_state=states.FAILED, set_archive_time=True + ) + runtime.sleep.assert_called_once_with(pi.process_id) + + +def test_execute__node_has_suspended_appoint(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.SUSPENDED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + 
error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=state) + runtime.node_rerun_limit = MagicMock(return_value=10) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_called_once_with(pi.root_pipeline_id, node_id) + runtime.set_state_root_and_parent.assert_called_once_with( + node_id=node_id, root_id=pi.root_pipeline_id, parent_id=pi.top_pipeline_id + ) + runtime.suspend.assert_called_once_with(pi.process_id, node_id) + + +def test_execute__node_can_not_transit_to_running(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=state) + runtime.node_rerun_limit = MagicMock(return_value=10) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_called_once_with(pi.root_pipeline_id, node_id) + runtime.set_state.assert_not_called() + runtime.sleep.assert_called_once_with(pi.process_id) + + +def test_execute__rerun_and_have_to_sleep(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = 
MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=state) + runtime.node_rerun_limit = MagicMock(return_value=10) + runtime.get_execution_data = MagicMock(return_value=execution_data) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + handler.execute = MagicMock( + return_value=ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=None, + should_die=False, + ) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_called_once_with(pi.root_pipeline_id, node_id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.add_history.assert_called_once_with( + node_id=node.id, + started_time=state.started_time, + archived_time=state.archived_time, + loop=state.loop, + skip=state.skip, + retry=state.retry, + version=state.version, + inputs=execution_data.inputs, + outputs=execution_data.outputs, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=state.loop + 1, + inner_loop=state.inner_loop + 1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=True, + reset_retry=True, + reset_error_ignored=True, + refresh_version=True, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_not_called() + runtime.schedule.assert_not_called() + runtime.execute.assert_not_called() + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop + 1, state.inner_loop + 1, state.version) + + +def test_execute__have_to_sleep(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + handler.execute = MagicMock( + return_value=ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + 
next_node_id=None, + should_die=False, + ) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_not_called() + runtime.schedule.assert_not_called() + runtime.execute.assert_not_called() + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, state.inner_loop, state.version) + + +def test_execute__poll_schedule_ready(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + schedule = Schedule( + id=2, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version=state.version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_schedule = MagicMock(return_value=schedule) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + execute_result = ExecuteResult( + should_sleep=True, + schedule_ready=True, + schedule_type=ScheduleType.POLL, + schedule_after=5, + dispatch_processes=[], + next_node_id=None, + should_die=False, + ) + handler.execute = MagicMock(return_value=execute_result) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + 
reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version=state.version, + schedule_type=execute_result.schedule_type, + ) + runtime.schedule.assert_called_once_with(pi.process_id, node.id, schedule.id) + runtime.execute.assert_not_called() + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, state.inner_loop, state.version) + + +def test_execute__callback_schedule_ready(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + schedule = Schedule( + id=2, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version=state.version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_schedule = MagicMock(return_value=schedule) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + execute_result = ExecuteResult( + should_sleep=True, + schedule_ready=True, + schedule_type=ScheduleType.CALLBACK, + schedule_after=5, + dispatch_processes=[], + next_node_id=None, + should_die=False, + ) + handler.execute = MagicMock(return_value=execute_result) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version=state.version, + schedule_type=execute_result.schedule_type, + ) + runtime.schedule.assert_not_called() + runtime.execute.assert_not_called() + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, 
state.inner_loop, state.version) + + +def test_execute__multi_callback_schedule_ready(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + schedule = Schedule( + id=2, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version=state.version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_schedule = MagicMock(return_value=schedule) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + execute_result = ExecuteResult( + should_sleep=True, + schedule_ready=True, + schedule_type=ScheduleType.MULTIPLE_CALLBACK, + schedule_after=5, + dispatch_processes=[], + next_node_id=None, + should_die=False, + ) + handler.execute = MagicMock(return_value=execute_result) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version=state.version, + schedule_type=execute_result.schedule_type, + ) + runtime.schedule.assert_not_called() + runtime.execute.assert_not_called() + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, state.inner_loop, state.version) + + +def test_execute__has_dispatch_processes(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + 
node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + dispatch_processes = [ + DispatchProcess(process_id=3, node_id="n3"), + DispatchProcess(process_id=4, node_id="n4"), + ] + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + handler.execute = MagicMock( + return_value=ExecuteResult( + should_sleep=True, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=dispatch_processes, + next_node_id=None, + should_die=False, + ) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_called_once_with(pi.process_id) + runtime.set_schedule.assert_not_called() + runtime.schedule.assert_not_called() + runtime.join.assert_called_once_with( + pi.process_id, [d.process_id for d in dispatch_processes] + ) + runtime.execute.assert_has_calls( + [ + call(dispatch_processes[0].process_id, dispatch_processes[0].node_id), + call(dispatch_processes[1].process_id, dispatch_processes[1].node_id), + ] + ) + runtime.die.assert_not_called() + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, state.inner_loop, state.version) + + +def test_execute__have_to_die(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id="d1", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FINISHED, + version="v", + loop=1, + inner_loop=1, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + execution_data = ExecutionData(inputs={"1": "1"}, outputs={"2": "2"}) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.is_frozen = MagicMock(return_value=False) + runtime.batch_get_state_name = MagicMock(return_value={"root": states.RUNNING}) + runtime.get_node = MagicMock(return_value=node) + 
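# first execution of this node: no existing state, so the rerun limit check is skipped +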
runtime.get_state_or_none = MagicMock(return_value=None) + runtime.get_state = MagicMock(return_value=state) + runtime.set_state = MagicMock(return_value=state.version) + + handler = MagicMock() + handler.execute = MagicMock( + return_value=ExecuteResult( + should_sleep=False, + schedule_ready=False, + schedule_type=None, + schedule_after=-1, + dispatch_processes=[], + next_node_id=None, + should_die=True, + ) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", + get_handler, + ): + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_state_or_none.assert_called_once_with(node_id) + runtime.node_rerun_limit.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.RUNNING, + loop=1, + inner_loop=1, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + reset_skip=False, + reset_retry=False, + reset_error_ignored=False, + refresh_version=False, + ) + runtime.sleep.assert_not_called() + runtime.set_schedule.assert_not_called() + runtime.schedule.assert_not_called() + runtime.execute.assert_not_called() + runtime.die.assert_called_once_with(pi.process_id) + + get_handler.assert_called_once_with(node, runtime) + handler.execute.assert_called_once_with(pi, state.loop, state.inner_loop, state.version) + + +def test_execute__unexpected_raise(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id=None, + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.set_current_node = MagicMock(side_effect=Exception) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_execution_data_outputs.assert_called_once_with(node_id) + runtime.set_state.assert_called_once_with( + node_id=node_id, + to_state=states.FAILED, + root_id=pi.root_pipeline_id, + parent_id=pi.top_pipeline_id, + set_started_time=True, + set_archive_time=True, + ) + runtime.set_execution_data_outputs.assert_called_once() + runtime.sleep.assert_called_once_with(pi.process_id) + + +def test_execute__raise_state_version_not_match(): + node_id = "nid" + pi = ProcessInfo( + process_id="pid", + destination_id=None, + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.set_current_node = MagicMock(side_effect=StateVersionNotMatchError) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + + engine = Engine(runtime=runtime) + engine.execute(pi.process_id, node_id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_execution_data_outputs.assert_not_called() + runtime.set_state.assert_not_called() + runtime.set_execution_data_outputs.assert_not_called() + runtime.sleep.assert_not_called() diff --git a/tests/engine/test_engine_schedule.py b/tests/engine/test_engine_schedule.py new file mode 100644 index 00000000..ba8adbea --- /dev/null +++ b/tests/engine/test_engine_schedule.py @@ -0,0 +1,678 
@@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import mock +from mock import MagicMock + +from bamboo_engine.eri import ( + ProcessInfo, + ServiceActivity, + State, + NodeType, + ScheduleType, + Schedule, + CallbackData, +) +from bamboo_engine import states +from bamboo_engine.engine import Engine +from bamboo_engine.exceptions import StateVersionNotMatchError +from bamboo_engine.handler import ScheduleResult + + +def test_schedule__lock_get_failed(): + node_id = "nid" + schedule_id = 1 + version = "v" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + state = MagicMock() + state.name = states.RUNNING + state.version = version + schedule = Schedule( + id=schedule_id, + type=ScheduleType.MULTIPLE_CALLBACK, + process_id=1, + node_id="nid", + finished=False, + expired=False, + version=version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=False) + runtime.get_state = MagicMock(return_value=state) + runtime.get_schedule = MagicMock(return_value=schedule) + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_schedule.assert_called_once_with(schedule_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule_id) + assert runtime.set_next_schedule.call_args.kwargs["process_id"] == pi.process_id + assert runtime.set_next_schedule.call_args.kwargs["node_id"] == node_id + assert runtime.set_next_schedule.call_args.kwargs["schedule_id"] == schedule_id + assert runtime.set_next_schedule.call_args.kwargs["callback_data_id"] is None + assert runtime.set_next_schedule.call_args.kwargs["schedule_after"] <= 5 + runtime.beat.assert_not_called() + + +def test_schedule__lock_get_failed_but_not_retry(): + node_id = "nid" + schedule_id = 1 + version = "v" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + state = MagicMock() + state.name = states.RUNNING + state.version = version + schedule = Schedule( + id=schedule_id, + type=ScheduleType.CALLBACK, + process_id=1, + node_id="nid", + finished=False, + expired=False, + version=version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=False) + runtime.get_state = MagicMock(return_value=state) + runtime.get_schedule = MagicMock(return_value=schedule) + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + 
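# CALLBACK schedules are one-shot, so a failed lock acquisition must not queue another attempt +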
runtime.get_state.assert_called_once_with(node_id) + runtime.get_schedule.assert_called_once_with(schedule_id) + runtime.set_next_schedule.assert_not_called() + runtime.beat.assert_not_called() + + +def test_schedule__schedule_is_finished(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=True, + expired=False, + version="v1", + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock() + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule.id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.apply_schedule_lock.assert_not_called() + runtime.beat.assert_not_called() + + +def test_schedule__schedule_version_not_match(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v2", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule.id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.expire_schedule.assert_called_once_with(schedule.id) + runtime.beat.assert_not_called() + runtime.apply_schedule_lock.assert_not_called() + + +def test_schedule__schedule_node_state_is_not_running(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.FAILED, + version="v1", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule.id) + + 
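# an already finished schedule is dropped before the lock is even requested +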
runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.expire_schedule.assert_called_once_with(schedule.id) + runtime.get_node.assert_not_called() + runtime.beat.assert_not_called() + runtime.apply_schedule_lock.assert_not_called() + + +def test_schedule__unexpected_raise(): + node_id = "nid" + schedule_id = 1 + version = "v" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + state = MagicMock() + state.name = states.RUNNING + state.version = version + schedule = Schedule( + id=schedule_id, + type=ScheduleType.MULTIPLE_CALLBACK, + process_id=1, + node_id="nid", + finished=False, + expired=False, + version=version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(side_effect=Exception) + runtime.get_state = MagicMock(return_value=state) + runtime.get_schedule = MagicMock(return_value=schedule) + + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_schedule.assert_called_once_with(schedule_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule_id) + runtime.set_state.assert_called_once_with(node_id=node_id, to_state=states.FAILED, set_archive_time=True) + runtime.get_execution_data_outputs.assert_called_once_with(node_id) + runtime.set_execution_data_outputs.assert_called_once() + runtime.release_schedule_lock.assert_called_once_with(schedule_id) + runtime.beat.assert_not_called() + + +def test_schedule__process_info_exception(): + node_id = "nid" + schedule_id = 1 + + runtime = MagicMock() + runtime.get_process_info = MagicMock(side_effect=Exception) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + + engine = Engine(runtime=runtime) + engine.schedule("pid", node_id, schedule_id) + + runtime.apply_schedule_lock.assert_not_called() + runtime.set_state.assert_called_once_with(node_id=node_id, to_state=states.FAILED, set_archive_time=True) + runtime.get_execution_data_outputs.assert_called_once_with(node_id) + runtime.set_execution_data_outputs.assert_called_once() + runtime.release_schedule_lock.assert_called_once_with(schedule_id) + runtime.get_schedule.assert_not_called() + + +def test_schedule__raise_state_not_match(): + node_id = "nid" + schedule_id = 1 + version = "v" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + state = MagicMock() + state.name = states.RUNNING + state.version = version + schedule = Schedule( + id=schedule_id, + type=ScheduleType.MULTIPLE_CALLBACK, + process_id=1, + node_id="nid", + finished=False, + expired=False, + version=version, + times=0, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(side_effect=StateVersionNotMatchError) + runtime.get_state = MagicMock(return_value=state) + runtime.get_schedule = MagicMock(return_value=schedule) + engine = Engine(runtime=runtime) + engine.schedule(pi.process_id, node_id, schedule_id) + + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.get_state.assert_called_once_with(node_id) + 
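# StateVersionNotMatchError is swallowed: the node is not failed and the schedule lock is not released +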
runtime.get_schedule.assert_called_once_with(schedule_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule_id) + runtime.set_state.assert_not_called() + runtime.get_execution_data_outputs.assert_not_called() + runtime.set_execution_data_outputs.assert_not_called() + runtime.release_schedule_lock.assert_not_called() + runtime.beat.assert_not_called() + + +def test_schedule__has_callback_data(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v1", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + callback_data = CallbackData(id=1, node_id=node_id, version="v1", data={}) + + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + runtime.get_callback_data = MagicMock(return_value=callback_data) + runtime.get_node = MagicMock(return_value=node) + + handler = MagicMock() + handler.schedule = MagicMock( + return_value=ScheduleResult(has_next_schedule=False, schedule_after=-1, schedule_done=False, next_node_id=None,) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", get_handler, + ): + engine.schedule(pi.process_id, node_id, schedule.id, callback_data.id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule.id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_callback_data.assert_called_once_with(callback_data.id) + handler.schedule.assert_called_once_with(pi, state.loop, state.inner_loop, schedule, callback_data) + + +def test_schedule__without_callback_data(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v1", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + 
version="", + timeout=None, + error_ignorable=False, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + runtime.get_node = MagicMock(return_value=node) + + handler = MagicMock() + handler.schedule = MagicMock( + return_value=ScheduleResult(has_next_schedule=False, schedule_after=-1, schedule_done=False, next_node_id=None,) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", get_handler, + ): + engine.schedule(pi.process_id, node_id, schedule.id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule.id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_callback_data.assert_not_called() + handler.schedule.assert_called_once_with(pi, state.loop, state.inner_loop, schedule, None) + + +def test_schedule__has_next_schedule(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v1", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + runtime.get_node = MagicMock(return_value=node) + + handler = MagicMock() + handler.schedule = MagicMock( + return_value=ScheduleResult(has_next_schedule=True, schedule_after=60, schedule_done=False, next_node_id=None,) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", get_handler, + ): + engine.schedule(pi.process_id, node_id, schedule.id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule.id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_callback_data.assert_not_called() + handler.schedule.assert_called_once_with(pi, state.loop, state.inner_loop, schedule, None) + runtime.set_next_schedule.assert_called_once_with(pi.process_id, node_id, schedule.id, 60) + 
runtime.finish_schedule.assert_not_called() + runtime.execute.assert_not_called() + + +def test_schedule__schedule_done(): + node_id = "nid" + + pi = ProcessInfo( + process_id="pid", destination_id="", root_pipeline_id="root", pipeline_stack=["root"], parent_id="parent", + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node_id, + finished=False, + expired=False, + version="v1", + times=0, + ) + + state = State( + node_id=node_id, + root_id="root", + parent_id="root", + name=states.RUNNING, + version="v1", + loop=11, + inner_loop=11, + retry=0, + skip=False, + error_ignored=False, + created_time=None, + started_time=None, + archived_time=None, + ) + + node = ServiceActivity( + id=node_id, + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + code="", + version="", + timeout=None, + error_ignorable=False, + ) + + runtime = MagicMock() + runtime.get_process_info = MagicMock(return_value=pi) + runtime.apply_schedule_lock = MagicMock(return_value=True) + runtime.get_schedule = MagicMock(return_value=schedule) + runtime.get_state = MagicMock(return_value=state) + runtime.get_node = MagicMock(return_value=node) + + handler = MagicMock() + handler.schedule = MagicMock( + return_value=ScheduleResult( + has_next_schedule=False, schedule_after=-1, schedule_done=True, next_node_id="nid2", + ) + ) + get_handler = MagicMock(return_value=handler) + + engine = Engine(runtime=runtime) + + with mock.patch( + "bamboo_engine.engine.HandlerFactory.get_handler", get_handler, + ): + engine.schedule(pi.process_id, node_id, schedule.id) + + runtime.beat.assert_called_once_with(pi.process_id) + runtime.get_process_info.assert_called_once_with(pi.process_id) + runtime.apply_schedule_lock.assert_called_once_with(schedule.id) + runtime.schedule.assert_not_called() + runtime.get_schedule.assert_called_once_with(schedule.id) + runtime.get_state.assert_called_once_with(node_id) + runtime.get_node.assert_called_once_with(node_id) + runtime.get_callback_data.assert_not_called() + handler.schedule.assert_called_once_with(pi, state.loop, state.inner_loop, schedule, None) + runtime.set_next_schedule.assert_not_called() + runtime.finish_schedule.assert_called_once_with(schedule.id) + runtime.execute.assert_called_once_with(pi.process_id, "nid2") diff --git a/tests/handlers/__init__.py b/tests/handlers/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/handlers/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/tests/handlers/test_conditional_parallel_gateway.py b/tests/handlers/test_conditional_parallel_gateway.py new file mode 100644 index 00000000..36c886e5 --- /dev/null +++ b/tests/handlers/test_conditional_parallel_gateway.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from mock import MagicMock, patch + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + ConditionalParallelGateway, + Condition, +) +from bamboo_engine.handlers.conditional_parallel_gateway import ( + ConditionalParallelGatewayHandler, +) + + +def test_exclusive_gateway__context_hydrate_raise(): + conditions = [ + Condition(name="c1", evaluation="${k} == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + ] + node = ConditionalParallelGateway( + conditions=conditions, + id="nid", + type=NodeType.ConditionalParallelGateway, + target_flows=["f1", "f2"], + target_nodes=["t1", "t2"], + targets={"f1": "t1", "f2": "t2"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + converge_gateway_id="cg", + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + runtime.get_data_inputs = MagicMock(return_value={}) + + raise_context = MagicMock() + raise_context.hydrate = MagicMock(side_effect=Exception) + + handler = ConditionalParallelGatewayHandler(node, runtime) + with patch("bamboo_engine.handlers.conditional_parallel_gateway.Context", MagicMock(return_value=raise_context)): + with patch("bamboo_engine.handlers.conditional_parallel_gateway.BoolRule", MagicMock(side_effect=Exception)): + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_execution_data_outputs.assert_called_once_with(node.id) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True) + runtime.set_execution_data_outputs.assert_called_once() + + +def 
test_conditional_parallel_gateway__execute_bool_rule_test_raise(): + conditions = [ + Condition(name="c1", evaluation="${k} == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + ] + node = ConditionalParallelGateway( + conditions=conditions, + id="nid", + type=NodeType.ConditionalParallelGateway, + target_flows=["f1", "f2"], + target_nodes=["t1", "t2"], + targets={"f1": "t1", "f2": "t2"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + converge_gateway_id="cg", + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + context_values = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + runtime.get_data_inputs = MagicMock(return_value={}) + + handler = ConditionalParallelGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_execution_data_outputs.assert_called_once_with(node.id) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True) + runtime.set_execution_data_outputs.assert_called_once() + + +def test_conditional_parallel_gateway__execute_not_fork_targets(): + conditions = [ + Condition(name="c1", evaluation="0 == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + ] + node = ConditionalParallelGateway( + conditions=conditions, + id="nid", + type=NodeType.ConditionalParallelGateway, + target_flows=["f1", "f2"], + target_nodes=["t1", "t2"], + targets={"f1": "t1", "f2": "t2"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + converge_gateway_id="cg", + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + context_values = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + runtime.get_data_inputs = MagicMock(return_value={}) + + handler = ConditionalParallelGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + 
runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_execution_data_outputs.assert_called_once_with(node.id) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True) + runtime.set_execution_data_outputs.assert_called_once_with( + node.id, {"ex_data": "all conditions of branches are not meet"} + ) + + +def test_conditional_parallel_gateway__execute_success(): + conditions = [ + Condition(name="c1", evaluation="0 == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="1 == 1", target_id="t2", flow_id="f2"), + Condition(name="c3", evaluation="1 == 1", target_id="t3", flow_id="f3"), + ] + node = ConditionalParallelGateway( + conditions=conditions, + id="nid", + type=NodeType.ConditionalParallelGateway, + target_flows=["f1", "f2", "f3"], + target_nodes=["t1", "t2", "t3"], + targets={"f1": "t1", "f2": "t2", "f3": "t3"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + converge_gateway_id="cg", + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + context_values = [] + dispatch_processes = ["p1", "p2", "p3"] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.fork = MagicMock(return_value=dispatch_processes) + runtime.get_data_inputs = MagicMock(return_value={}) + + handler = ConditionalParallelGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == dispatch_processes + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.fork.assert_called_once_with( + parent_id=pi.process_id, + root_pipeline_id=pi.root_pipeline_id, + pipeline_stack=pi.pipeline_stack, + from_to={"t2": "cg", "t3": "cg"}, + ) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FINISHED, set_archive_time=True) diff --git a/tests/handlers/test_converge_gateway.py b/tests/handlers/test_converge_gateway.py new file mode 100644 index 00000000..9085d217 --- /dev/null +++ b/tests/handlers/test_converge_gateway.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from mock import MagicMock + +from bamboo_engine import states +from bamboo_engine.eri import ProcessInfo, NodeType, ConvergeGateway +from bamboo_engine.handlers.converge_gateway import ConvergeGatewayHandler + + +def test_empty_start_event_handler__execute_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + node = ConvergeGateway( + id="nid", + type=NodeType.ConvergeGateway, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + + runtime = MagicMock() + + handler = ConvergeGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == node.target_nodes[0] + assert result.should_die == False + + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.FINISHED, + set_archive_time=True, + ) diff --git a/tests/handlers/test_empty_end_event.py b/tests/handlers/test_empty_end_event.py new file mode 100644 index 00000000..4c8741ae --- /dev/null +++ b/tests/handlers/test_empty_end_event.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from mock import MagicMock, call + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + EmptyEndEvent, + ContextValue, + ContextValueType, + SubProcess, +) +from bamboo_engine.handlers.empty_end_event import EmptyEndEventHandler + + +def test_empty_end_event_handler__root_pipeline_execute_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + node = EmptyEndEvent( + id="nid", + type=NodeType.EmptyEndEvent, + target_flows=[], + target_nodes=[], + targets={}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + + context_outputs = ["${a}", "${b}", "${c}", "${d}"] + context_values = [ + ContextValue(key="${a}", value="1", type=ContextValueType.PLAIN), + ContextValue(key="${b}", value="2", type=ContextValueType.PLAIN), + ContextValue(key="${c}", value="3", type=ContextValueType.PLAIN), + ] + + runtime = MagicMock() + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_outputs = MagicMock(return_value=context_outputs) + runtime.get_context_values = MagicMock(return_value=context_values) + + handler = EmptyEndEventHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == True + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_outputs.assert_called_once_with("root") + runtime.get_context_values.assert_called_once_with( + pipeline_id="root", keys=context_outputs + ) + runtime.set_execution_data_outputs.assert_called_once_with( + node_id="root", + outputs={ + "${a}": "1", + "${b}": "2", + "${c}": "3", + "${d}": "${d}", + }, + ) + runtime.set_state.assert_has_calls( + [ + call( + node_id=node.id, + to_state=states.FINISHED, + set_archive_time=True, + ), + call( + node_id="root", + to_state=states.FINISHED, + set_archive_time=True, + ), + ] + ) + assert pi.pipeline_stack == [] + + +def test_empty_end_event_handler__subprocess_execute_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root", "sub1"], + parent_id="parent", + ) + + node = EmptyEndEvent( + id="nid", + type=NodeType.EmptyEndEvent, + target_flows=[], + target_nodes=[], + targets={}, + root_pipeline_id="root", + parent_pipeline_id="sub1", + can_skip=True, + ) + + subprocess_node = SubProcess( + id="nid", + type=NodeType.SubProcess, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="sub1", + can_skip=True, + start_event_id="start_nid", + ) + + subprocess_outputs = {} + + state = MagicMock() + state.loop = 1 + state.inner_loop = 1 + + context_outputs = ["${a}", "${b}", "${c}", "${d}"] + context_values = [ + ContextValue(key="${a}", value="1", type=ContextValueType.PLAIN), + ContextValue(key="${b}", value="2", type=ContextValueType.PLAIN), + ContextValue(key="${c}", value="3", type=ContextValueType.PLAIN), + ] + + runtime = MagicMock() + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_outputs = MagicMock(return_value=context_outputs) + runtime.get_context_values = MagicMock(return_value=context_values) + runtime.get_node = MagicMock(return_value=subprocess_node) + runtime.get_data_outputs = 
MagicMock(return_value=subprocess_outputs) + runtime.get_state = MagicMock(return_value=state) + + handler = EmptyEndEventHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == subprocess_node.target_nodes[0] + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_outputs.assert_called_once_with("sub1") + runtime.get_context_values.assert_called_once_with( + pipeline_id="sub1", keys=context_outputs + ) + runtime.set_execution_data_outputs.assert_called_once_with( + node_id="sub1", + outputs={ + "${a}": "1", + "${b}": "2", + "${c}": "3", + "${d}": "${d}", + "_loop": 1, + "_inner_loop": 1 + }, + ) + runtime.get_node.assert_called_once_with("sub1") + runtime.set_pipeline_stack.assert_called_once_with(pi.process_id, ["root"]) + runtime.get_data_outputs.assert_called_once_with("sub1") + runtime.set_state.assert_has_calls( + [ + call( + node_id=node.id, + to_state=states.FINISHED, + set_archive_time=True, + ), + call( + node_id="sub1", + to_state=states.FINISHED, + set_archive_time=True, + ), + ] + ) + runtime.get_state.assert_called_once_with("sub1") + assert pi.pipeline_stack == ["root"] \ No newline at end of file diff --git a/tests/handlers/test_empty_start_event.py b/tests/handlers/test_empty_start_event.py new file mode 100644 index 00000000..e292880a --- /dev/null +++ b/tests/handlers/test_empty_start_event.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from mock import MagicMock + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + EmptyStartEvent, + Data, + ContextValue, + ContextValueType, DataInput, +) +from bamboo_engine.handlers.empty_start_event import EmptyStartEventHandler + + +def test_empty_start_event_handler__execute_success(): + # ContextValue 各个属性值相等即判断为相等,用于assert生成的函数入参 + def mock_eq_func(self, other): + return ( + self.key == other.key + and self.value == other.value + and self.type == other.type + and self.code == other.code + ) + + setattr(ContextValue, "__eq__", mock_eq_func) + + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + node = EmptyStartEvent( + id="nid", + type=NodeType.EmptyStartEvent, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + + context_values = [ + ContextValue( + key="${a}", + value="${c}", + type=ContextValueType.SPLICE, + ), + ContextValue( + key="${b}", + value="b: ${a}", + type=ContextValueType.SPLICE, + ), + ContextValue( + key="${c}", + value="1", + type=ContextValueType.PLAIN + ) + ] + + upsert_context_dict = { + "${a}": ContextValue( + key="${a}", + value="1", + type=ContextValueType.PLAIN, + ), + "${b}": ContextValue( + key="${b}", + value="b: 1", + type=ContextValueType.PLAIN, + ), + } + + data = Data( + inputs={"pre_render_keys": DataInput(need_render=True, value=["${a}", "${b}"])}, + outputs={}, + ) + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value={"${c}"}) + runtime.get_context_values = MagicMock(return_value=context_values) + runtime.get_data = MagicMock(return_value=data) + + handler = EmptyStartEventHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == node.target_nodes[0] + assert result.should_die == False + + runtime.set_state.assert_called_once_with( + node_id=node.id, + to_state=states.FINISHED, + set_archive_time=True, + ) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${a}", "${b}", "${c}"}) + runtime.upsert_plain_context_values.assert_called_once_with( + pi.top_pipeline_id, upsert_context_dict + ) diff --git a/tests/handlers/test_exclusive_gateway.py b/tests/handlers/test_exclusive_gateway.py new file mode 100644 index 00000000..682fb24d --- /dev/null +++ b/tests/handlers/test_exclusive_gateway.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from mock import MagicMock, patch + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + ExclusiveGateway, + Condition, +) +from bamboo_engine.handlers.exclusive_gateway import ( + ExclusiveGatewayHandler, +) + + +def test_exclusive_gateway__context_hydrate_raise(): + conditions = [ + Condition(name="c1", evaluation="${k} == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + ] + node = ExclusiveGateway( + conditions=conditions, + id="nid", + type=NodeType.ExclusiveGateway, + target_flows=["f1", "f2"], + target_nodes=["t1", "t2"], + targets={"f1": "t1", "f2": "t2"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + runtime.get_data_inputs = MagicMock(return_value={}) + + raise_context = MagicMock() + raise_context.hydrate = MagicMock(side_effect=Exception) + + handler = ExclusiveGatewayHandler(node, runtime) + with patch("bamboo_engine.handlers.exclusive_gateway.Context", MagicMock(return_value=raise_context)): + with patch("bamboo_engine.handlers.exclusive_gateway.BoolRule", MagicMock(side_effect=Exception)): + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"}) + runtime.get_execution_data_outputs.assert_called_once_with(node.id) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True) + runtime.set_execution_data_outputs.assert_called_once() + + +def test_exclusive_gateway__execute_bool_rule_test_raise(): + conditions = [ + Condition(name="c1", evaluation="${k} == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + ] + node = ExclusiveGateway( + conditions=conditions, + id="nid", + type=NodeType.ExclusiveGateway, + target_flows=["f1", "f2"], + target_nodes=["t1", "t2"], + targets={"f1": "t1", "f2": "t2"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_execution_data_outputs = MagicMock(return_value={}) + runtime.get_data_inputs = MagicMock(return_value={}) + + handler = ExclusiveGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert 
result.schedule_after == -1
+    assert result.dispatch_processes == []
+    assert result.next_node_id == None
+    assert result.should_die == False
+
+    runtime.get_data_inputs.assert_called_once_with("root")
+    runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"})
+    runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys={"${k}"})
+    runtime.get_execution_data_outputs.assert_called_once_with(node.id)
+    runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True)
+    runtime.set_execution_data_outputs.assert_called_once()
+
+
+def test_exclusive_gateway__execute_not_meet_targets():
+    conditions = [
+        Condition(name="c1", evaluation="0 == 1", target_id="t1", flow_id="f1"),
+        Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"),
+    ]
+    node = ExclusiveGateway(
+        conditions=conditions,
+        id="nid",
+        type=NodeType.ExclusiveGateway,
+        target_flows=["f1", "f2"],
+        target_nodes=["t1", "t2"],
+        targets={"f1": "t1", "f2": "t2"},
+        root_pipeline_id="root",
+        parent_pipeline_id="root",
+        can_skip=True,
+    )
+    pi = ProcessInfo(
+        process_id="pid",
+        destination_id="",
+        root_pipeline_id="root",
+        pipeline_stack=["root"],
+        parent_id="parent",
+    )
+    additional_refs = []
+
+    runtime = MagicMock()
+    runtime.get_context_key_references = MagicMock(return_value=additional_refs)
+    runtime.get_context_values = MagicMock(return_value=[])
+    runtime.get_execution_data_outputs = MagicMock(return_value={})
+    runtime.get_data_inputs = MagicMock(return_value={})
+
+    handler = ExclusiveGatewayHandler(node, runtime)
+    result = handler.execute(pi, 1, 1, "v1")
+
+    assert result.should_sleep == True
+    assert result.schedule_ready == False
+    assert result.schedule_type == None
+    assert result.schedule_after == -1
+    assert result.dispatch_processes == []
+    assert result.next_node_id == None
+    assert result.should_die == False
+
+    runtime.get_data_inputs.assert_called_once_with("root")
+    runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set())
+    runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set())
+    runtime.get_execution_data_outputs.assert_called_once_with(node.id)
+    runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True)
+    runtime.set_execution_data_outputs.assert_called_once_with(
+        node.id, {"ex_data": "all conditions of branches are not meet"}
+    )
+
+
+def test_exclusive_gateway__execute_multiple_meet_targets():
+    conditions = [
+        Condition(name="c1", evaluation="1 == 1", target_id="t1", flow_id="f1"),
+        Condition(name="c2", evaluation="1 == 1", target_id="t2", flow_id="f2"),
+    ]
+    node = ExclusiveGateway(
+        conditions=conditions,
+        id="nid",
+        type=NodeType.ExclusiveGateway,
+        target_flows=["f1", "f2"],
+        target_nodes=["t1", "t2"],
+        targets={"f1": "t1", "f2": "t2"},
+        root_pipeline_id="root",
+        parent_pipeline_id="root",
+        can_skip=True,
+    )
+    pi = ProcessInfo(
+        process_id="pid",
+        destination_id="",
+        root_pipeline_id="root",
+        pipeline_stack=["root"],
+        parent_id="parent",
+    )
+    additional_refs = []
+
+    runtime = MagicMock()
+    runtime.get_context_key_references = MagicMock(return_value=additional_refs)
+    runtime.get_context_values = MagicMock(return_value=[])
+    runtime.get_execution_data_outputs = MagicMock(return_value={})
+    runtime.get_data_inputs = MagicMock(return_value={})
+
+    handler = ExclusiveGatewayHandler(node, runtime)
+    result = 
handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_execution_data_outputs.assert_called_once_with(node.id) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FAILED, set_archive_time=True) + runtime.set_execution_data_outputs.assert_called_once_with( + node.id, {"ex_data": "multiple conditions meet: ['c1', 'c2']"} + ) + + +def test_exclusive_gateway__execute_success(): + conditions = [ + Condition(name="c1", evaluation="0 == 1", target_id="t1", flow_id="f1"), + Condition(name="c2", evaluation="0 == 1", target_id="t2", flow_id="f2"), + Condition(name="c3", evaluation="1 == 1", target_id="t3", flow_id="f3"), + ] + node = ExclusiveGateway( + conditions=conditions, + id="nid", + type=NodeType.ExclusiveGateway, + target_flows=["f1", "f2", "f3"], + target_nodes=["t1", "t2", "t3"], + targets={"f1": "t1", "f2": "t2", "f3": "t3"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + ) + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + additional_refs = [] + + runtime = MagicMock() + runtime.get_context_key_references = MagicMock(return_value=additional_refs) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_data_inputs = MagicMock(return_value={}) + + handler = ExclusiveGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == "t3" + assert result.should_die == False + + runtime.get_data_inputs.assert_called_once_with("root") + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.set_state.assert_called_once_with(node_id=node.id, to_state=states.FINISHED, set_archive_time=True) diff --git a/tests/handlers/test_executable_end_event.py b/tests/handlers/test_executable_end_event.py new file mode 100644 index 00000000..1985786f --- /dev/null +++ b/tests/handlers/test_executable_end_event.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from mock import MagicMock, call
+
+from bamboo_engine import states
+from bamboo_engine.eri import (
+    ProcessInfo,
+    NodeType,
+    ExecutableEndEvent,
+    ContextValue,
+    ContextValueType,
+)
+from bamboo_engine.handlers.executable_end_event import ExecutableEndEventHandler
+
+
+def test_executable_end_event_handler__event_execute_error():
+    pi = ProcessInfo(
+        process_id="pid",
+        destination_id="",
+        root_pipeline_id="root",
+        pipeline_stack=["root"],
+        parent_id="parent",
+    )
+
+    node = ExecutableEndEvent(
+        id="nid",
+        type=NodeType.ExecutableEndEvent,
+        target_flows=[],
+        target_nodes=[],
+        targets={},
+        root_pipeline_id="root",
+        parent_pipeline_id="root",
+        can_skip=True,
+        code="eee",
+    )
+
+    event = MagicMock()
+    event.execute = MagicMock(side_effect=Exception)
+
+    runtime = MagicMock()
+    runtime.get_executable_end_event = MagicMock(return_value=event)
+
+    handler = ExecutableEndEventHandler(node, runtime)
+    result = handler.execute(pi, 1, 1, "v1")
+
+    assert result.should_sleep == True
+    assert result.schedule_ready == False
+    assert result.schedule_type == None
+    assert result.schedule_after == -1
+    assert result.dispatch_processes == []
+    assert result.next_node_id == None
+    assert result.should_die == False
+
+    runtime.get_executable_end_event.assert_called_once_with(code=node.code)
+    event.execute.assert_called_once_with(
+        pipeline_stack=["root"], root_pipeline_id="root"
+    )
+    runtime.set_execution_data_outputs.assert_called_once()
+    runtime.set_state.assert_called_once_with(
+        node_id=node.id, to_state=states.FAILED, set_archive_time=True
+    )
+
+
+def test_executable_end_event_handler__event_execute_success():
+    pi = ProcessInfo(
+        process_id="pid",
+        destination_id="",
+        root_pipeline_id="root",
+        pipeline_stack=["root"],
+        parent_id="parent",
+    )
+
+    node = ExecutableEndEvent(
+        id="nid",
+        type=NodeType.ExecutableEndEvent,
+        target_flows=[],
+        target_nodes=[],
+        targets={},
+        root_pipeline_id="root",
+        parent_pipeline_id="root",
+        can_skip=True,
+        code="eee",
+    )
+
+    event = MagicMock()
+    event.execute = MagicMock()
+
+    context_outputs = ["${a}", "${b}", "${c}", "${d}"]
+    context_values = [
+        ContextValue(key="${a}", value="1", type=ContextValueType.PLAIN),
+        ContextValue(key="${b}", value="2", type=ContextValueType.PLAIN),
+        ContextValue(key="${c}", value="3", type=ContextValueType.PLAIN),
+    ]
+
+    runtime = MagicMock()
+    runtime.get_executable_end_event = MagicMock(return_value=event)
+    runtime.get_data_inputs = MagicMock(return_value={})
+    runtime.get_context_outputs = MagicMock(return_value=context_outputs)
+    runtime.get_context_values = MagicMock(return_value=context_values)
+
+    handler = ExecutableEndEventHandler(node, runtime)
+    result = handler.execute(pi, 1, 1, "v1")
+
+    assert result.should_sleep == False
+    assert result.schedule_ready == False
+    assert result.schedule_type == None
+    assert result.schedule_after == -1
+    assert result.dispatch_processes == []
+    assert result.next_node_id == None
+    assert result.should_die == True
+
+    runtime.get_executable_end_event.assert_called_once_with(code=node.code)
+    event.execute.assert_called_once_with(
+        pipeline_stack=["root"], root_pipeline_id="root"
+    )
+    runtime.get_data_inputs.assert_called_once_with("root")
+    runtime.get_context_outputs.assert_called_once_with("root")
+    runtime.get_context_values.assert_called_once_with(
+        pipeline_id="root", keys=context_outputs
+    )
+    
runtime.set_execution_data_outputs.assert_called_once_with( + node_id="root", + outputs={ + "${a}": "1", + "${b}": "2", + "${c}": "3", + "${d}": "${d}", + }, + ) + runtime.set_state.assert_has_calls( + [ + call( + node_id=node.id, + to_state=states.FINISHED, + set_archive_time=True, + ), + call( + node_id="root", + to_state=states.FINISHED, + set_archive_time=True, + ), + ] + ) + assert pi.pipeline_stack == [] diff --git a/tests/handlers/test_parallel_gateway.py b/tests/handlers/test_parallel_gateway.py new file mode 100644 index 00000000..edffe80f --- /dev/null +++ b/tests/handlers/test_parallel_gateway.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from mock import MagicMock + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + ParallelGateway, +) +from bamboo_engine.handlers.parallel_gateway import ( + ParallelGatewayHandler, +) + + +def test_parallel_gateway_handler__execute_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + node = ParallelGateway( + id="nid", + type=NodeType.ParallelGateway, + target_flows=["f1", "f2", "f3"], + target_nodes=["t1", "t2", "t3"], + targets={"f1": "t1", "f2": "t2", "f3": "t3"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + converge_gateway_id="cg", + ) + + dispatch_processes = ["p1", "p2", "p3"] + + runtime = MagicMock() + runtime.fork = MagicMock(return_value=dispatch_processes) + + handler = ParallelGatewayHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == dispatch_processes + assert result.next_node_id == None + assert result.should_die == False + + runtime.fork.assert_called_once_with( + parent_id=pi.process_id, + root_pipeline_id=pi.root_pipeline_id, + pipeline_stack=pi.pipeline_stack, + from_to={ + "t1": "cg", + "t2": "cg", + "t3": "cg", + }, + ) + + runtime.set_state.assert_called_once_with( + node_id=node.id, to_state=states.FINISHED, set_archive_time=True + ) diff --git a/tests/handlers/test_service_activity.py b/tests/handlers/test_service_activity.py new file mode 100644 index 00000000..bce6f480 --- /dev/null +++ b/tests/handlers/test_service_activity.py @@ -0,0 +1,1334 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+http://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+from mock import MagicMock, call, patch
+
+from bamboo_engine import states
+from bamboo_engine.eri import (
+    ProcessInfo,
+    NodeType,
+    ServiceActivity,
+    ContextValue,
+    ContextValueType,
+    Data,
+    DataInput,
+    ScheduleType,
+    Schedule,
+    ExecutionData,
+)
+from bamboo_engine.handlers.service_activity import ServiceActivityHandler
+
+
+def test_execute__raise_not_ignore():
+    pi = ProcessInfo(
+        process_id="pid",
+        destination_id="",
+        root_pipeline_id="root",
+        pipeline_stack=["root"],
+        parent_id="",
+    )
+
+    node = ServiceActivity(
+        id="nid",
+        type=NodeType.ServiceActivity,
+        target_flows=["f1"],
+        target_nodes=["t1"],
+        targets={"f1": "t1"},
+        root_pipeline_id="root",
+        parent_pipeline_id="root",
+        can_skip=True,
+        code="test_service",
+        version="legacy",
+        error_ignorable=False,
+        timeout=10,
+    )
+
+    data = Data({}, {})
+
+    service = MagicMock()
+    service.execute = MagicMock(side_effect=Exception)
+    service.need_schedule = MagicMock(return_value=False)
+
+    runtime = MagicMock()
+    runtime.get_data = MagicMock(return_value=data)
+    runtime.get_context_key_references = MagicMock(return_value=set())
+    runtime.get_context_values = MagicMock(return_value=[])
+    runtime.get_service = MagicMock(return_value=service)
+
+    handler = ServiceActivityHandler(node, runtime)
+    result = handler.execute(pi, 1, 1, "v1")
+
+    assert result.should_sleep == True
+    assert result.schedule_ready == False
+    assert result.schedule_type == None
+    assert result.schedule_after == -1
+    assert result.dispatch_processes == []
+    assert result.next_node_id == None
+    assert result.should_die == False
+
+    runtime.get_data.assert_called_once_with(node.id)
+    runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id)
+    runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set())
+    runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set())
+    runtime.get_service.assert_called_once_with(code=node.code, version=node.version)
+    runtime.start_timeout_monitor.assert_called_once_with(
+        process_id=pi.process_id,
+        node_id=node.id,
+        version="v1",
+        timeout=node.timeout,
+    )
+    runtime.set_state.assert_called_once_with(
+        node_id=node.id, version="v1", to_state=states.FAILED, set_archive_time=True
+    )
+    runtime.stop_timeout_monitor.assert_called_once_with(
+        process_id=pi.process_id,
+        node_id=node.id,
+        version="v1",
+        timeout=node.timeout,
+    )
+    runtime.set_execution_data.assert_called_once()
+    assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id
+    assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1}
+    assert "ex_data" in runtime.set_execution_data.call_args.kwargs["data"].outputs
+
+    service.setup_runtime_attributes.assert_called_once_with(
+        id=node.id,
+        version="v1",
+        root_pipeline_id=pi.root_pipeline_id,
+        top_pipeline_id=pi.top_pipeline_id,
+        loop=1,
+        inner_loop=1,
+    )
+    assert service.pre_execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1}
+    assert "ex_data" in service.pre_execute.call_args.kwargs["data"].outputs
+    assert 
service.pre_execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert "ex_data" in service.execute.call_args.kwargs["data"].outputs + assert service.execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + +def test_execute__raise_ignore(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=True, + timeout=None, + ) + + data = Data({}, {}) + + service = MagicMock() + service.execute = MagicMock(side_effect=Exception) + service.need_schedule = MagicMock(return_value=True) + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set()) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == node.target_nodes[0] + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.start_timeout_monitor.assert_not_called() + runtime.set_state.assert_called_once_with( + node_id=node.id, + version="v1", + to_state=states.FINISHED, + set_archive_time=True, + error_ignored=True, + ) + runtime.stop_timeout_monitor.assert_not_called() + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert "ex_data" in runtime.set_execution_data.call_args.kwargs["data"].outputs + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + assert service.pre_execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert "ex_data" in service.pre_execute.call_args.kwargs["data"].outputs + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert "ex_data" in service.execute.call_args.kwargs["data"].outputs + assert service.execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert 
service.execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + +def test_context_hydrate__raise(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=10, + ) + + data = Data({}, {}) + + service = MagicMock() + service.execute = MagicMock(side_effect=Exception) + service.need_schedule = MagicMock(return_value=False) + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set()) + runtime.get_context_values = MagicMock(return_value=[]) + + raise_context = MagicMock() + raise_context.hydrate = MagicMock(side_effect=Exception) + + handler = ServiceActivityHandler(node, runtime) + with patch("bamboo_engine.handlers.service_activity.Context", MagicMock(return_value=raise_context)): + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.set_state.assert_called_once_with( + node_id=node.id, version="v1", to_state=states.FAILED, set_archive_time=True + ) + runtime.get_service.assert_not_called() + + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + assert "ex_data" in runtime.set_execution_data.call_args.kwargs["data"].outputs + + +def test_execute__success_and_schedule(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=10, + ) + + data = Data({}, {}) + + service = MagicMock() + service.need_schedule = MagicMock(return_value=True) + service.schedule_type = MagicMock(return_value=ScheduleType.POLL) + service.schedule_after = MagicMock(return_value=5) + service.execute = MagicMock(return_value=True) + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set()) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert result.schedule_ready == True + assert result.schedule_type == ScheduleType.POLL + assert result.schedule_after == 5 + assert 
result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.start_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_state.assert_not_called() + runtime.stop_timeout_monitor.assert_not_called() + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert runtime.set_execution_data.call_args.kwargs["data"].outputs == { + "_result": True, + "_loop": 1, + "_inner_loop": 1, + } + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + assert service.pre_execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert service.pre_execute.call_args.kwargs["data"].outputs == {"_result": True, "_loop": 1, "_inner_loop": 1} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + assert service.execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert service.execute.call_args.kwargs["data"].outputs == {"_result": True, "_loop": 1, "_inner_loop": 1} + assert service.execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + +def test_execute__success_and_no_schedule(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=10, + ) + + data = Data( + { + "k1": DataInput(need_render=True, value="${k4}"), + "k2": DataInput(need_render=True, value="2"), + "k3": DataInput(need_render=False, value="${k5}"), + }, + {}, + ) + + service = MagicMock() + service.need_schedule = MagicMock(return_value=False) + service.schedule_type = MagicMock(return_value=None) + service.schedule_after = MagicMock(return_value=-1) + service.execute = MagicMock(return_value=True) + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set(["${k6}"])) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == 
node.target_nodes[0] + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set(["${k4}"])) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set(["${k4}", "${k6}"])) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.start_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, + version="v1", + to_state=states.FINISHED, + set_archive_time=True, + ) + runtime.stop_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == { + "k1": "${k4}", + "k2": "2", + "k3": "${k5}", + "_loop": 1, + "_inner_loop": 1, + } + assert runtime.set_execution_data.call_args.kwargs["data"].outputs == { + "_result": True, + "_loop": 1, + "_inner_loop": 1, + } + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + assert service.pre_execute.call_args.kwargs["data"].inputs == { + "k1": "${k4}", + "k2": "2", + "k3": "${k5}", + "_loop": 1, + "_inner_loop": 1, + } + assert service.pre_execute.call_args.kwargs["data"].outputs == {"_result": True, "_loop": 1, "_inner_loop": 1} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + assert service.execute.call_args.kwargs["data"].inputs == { + "k1": "${k4}", + "k2": "2", + "k3": "${k5}", + "_loop": 1, + "_inner_loop": 1, + } + assert service.execute.call_args.kwargs["data"].outputs == {"_result": True, "_loop": 1, "_inner_loop": 1} + assert service.execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + +def test_execute__fail_and_schedule(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=10, + ) + + data = Data({}, {}) + + service = MagicMock() + service.execute = MagicMock(return_value=False) + service.need_schedule = MagicMock(return_value=True) + service.schedule_type = MagicMock(return_value=ScheduleType.POLL) + service.schedule_after = MagicMock(return_value=5) + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set()) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == True + assert 
result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == None + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_context_values.assert_called_once_with(pipeline_id=pi.top_pipeline_id, keys=set()) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.start_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, version="v1", to_state=states.FAILED, set_archive_time=True + ) + runtime.stop_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert runtime.set_execution_data.call_args.kwargs["data"].outputs == { + "_result": False, + "_loop": 1, + "_inner_loop": 1, + } + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + assert service.pre_execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert service.pre_execute.call_args.kwargs["data"].outputs == {"_result": False, "_loop": 1, "_inner_loop": 1} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.pre_execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + assert service.execute.call_args.kwargs["data"].inputs == {"_loop": 1, "_inner_loop": 1} + assert service.execute.call_args.kwargs["data"].outputs == {"_result": False, "_loop": 1, "_inner_loop": 1} + assert service.execute.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.execute.call_args.kwargs["root_pipeline_data"].outputs == {} + + +def test_schedule__raise_not_ignore(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=10, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = MagicMock(side_effect=Exception) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 
1, schedule, None) + + assert result.has_next_schedule == False + assert result.schedule_after == -1 + assert result.schedule_done == False + assert result.next_node_id == None + + runtime.get_data_outputs.assert_called_once_with(node.id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.add_schedule_times.assert_called_once_with(schedule.id) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + assert "ex_data" in runtime.set_execution_data.call_args.kwargs["data"].outputs + runtime.stop_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version=schedule.version, + timeout=node.timeout, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, version="v1", to_state=states.FAILED, set_archive_time=True + ) + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + assert service.schedule.call_args.kwargs["schedule"] == schedule + assert service.schedule.call_args.kwargs["data"] == service_data + assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.schedule.call_args.kwargs["callback_data"] == None + + +def test_schedule__raise_ignore(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=True, + timeout=None, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = MagicMock(side_effect=Exception) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 1, schedule, None) + + assert result.has_next_schedule == False + assert result.schedule_after == -1 + assert result.schedule_done == True + assert result.next_node_id == node.target_nodes[0] + + runtime.get_data_outputs.assert_called_once_with(node.id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.add_schedule_times.assert_called_once_with(schedule.id) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert 
runtime.set_execution_data.call_args.kwargs["data"].inputs == {}
+ assert "ex_data" in runtime.set_execution_data.call_args.kwargs["data"].outputs
+ runtime.stop_timeout_monitor.assert_not_called()
+ runtime.set_state.assert_called_once_with(
+ node_id=node.id,
+ version=schedule.version,
+ to_state=states.FINISHED,
+ set_archive_time=True,
+ error_ignored=True,
+ )
+
+ service.setup_runtime_attributes.assert_called_once_with(
+ id=node.id,
+ version="v1",
+ root_pipeline_id=pi.root_pipeline_id,
+ top_pipeline_id=pi.top_pipeline_id,
+ loop=1,
+ inner_loop=1,
+ )
+ assert service.schedule.call_args.kwargs["schedule"] == schedule
+ assert service.schedule.call_args.kwargs["data"] == service_data
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {}
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {}
+ assert service.schedule.call_args.kwargs["callback_data"] == None
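A detail worth calling out in these mock-heavy tests: `call_args` expectations only bite when they sit behind an `assert`. A bare comparison is an expression whose result is silently discarded, so a broken expectation can never fail the test. A minimal, self-contained sketch (plain `unittest.mock`, independent of bamboo-engine) of the pitfall:

```python
from unittest.mock import MagicMock

service = MagicMock()
service.schedule_after(schedule="s1", data={"k": "v"})

# A bare comparison evaluates to False and is thrown away: this line is a
# no-op, and a test containing it would still pass.
service.schedule_after.call_args.kwargs["schedule"] == "wrong"

# Behind an assert, the same mismatch fails loudly, which is what these
# tests intend.
assert service.schedule_after.call_args.kwargs["schedule"] == "s1"
assert service.schedule_after.call_args.kwargs["data"] == {"k": "v"}
```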
+
+
+def test_schedule__poll_success_and_not_done():
+ pi = ProcessInfo(
+ process_id="pid",
+ destination_id="",
+ root_pipeline_id="root",
+ pipeline_stack=["root"],
+ parent_id="",
+ )
+
+ node = ServiceActivity(
+ id="nid",
+ type=NodeType.ServiceActivity,
+ target_flows=["f1"],
+ target_nodes=["t1"],
+ targets={"f1": "t1"},
+ root_pipeline_id="root",
+ parent_pipeline_id="root",
+ can_skip=True,
+ code="test_service",
+ version="legacy",
+ error_ignorable=True,
+ timeout=10,
+ )
+
+ schedule = Schedule(
+ id=1,
+ type=ScheduleType.POLL,
+ process_id=pi.process_id,
+ node_id=node.id,
+ finished=False,
+ expired=False,
+ version="v1",
+ times=1,
+ )
+
+ service_data = ExecutionData({}, {})
+ data_outputs = {}
+
+ service = MagicMock()
+ service.schedule = MagicMock(return_value=True)
+ service.schedule_after = MagicMock(return_value=5)
+ service.is_schedule_done = MagicMock(return_value=False)
+
+ runtime = MagicMock()
+ runtime.get_data_outputs = MagicMock(return_value=data_outputs)
+ runtime.get_execution_data = MagicMock(return_value=service_data)
+ runtime.get_data_inputs = MagicMock(return_value={})
+ runtime.get_context_values = MagicMock(return_value=[])
+ runtime.get_service = MagicMock(return_value=service)
+
+ handler = ServiceActivityHandler(node, runtime)
+ result = handler.schedule(pi, 1, 1, schedule, None)
+
+ assert result.has_next_schedule == False
+ assert result.schedule_after == 5
+ assert result.schedule_done == False
+ assert result.next_node_id == None
+
+ runtime.get_data_outputs.assert_called_once_with(node.id)
+ runtime.get_execution_data.assert_called_once_with(node.id)
+ runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id)
+ runtime.get_service.assert_called_once_with(code=node.code, version=node.version)
+ runtime.add_schedule_times.assert_called_once_with(schedule.id)
+ runtime.set_execution_data.assert_called_once()
+ assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id
+ assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {}
+ runtime.stop_timeout_monitor.assert_not_called()
+ runtime.set_state.assert_not_called()
+
+ service.setup_runtime_attributes.assert_called_once_with(
+ id=node.id,
+ version="v1",
+ root_pipeline_id=pi.root_pipeline_id,
+ top_pipeline_id=pi.top_pipeline_id,
+ loop=1,
+ inner_loop=1,
+ )
+ service.is_schedule_done.assert_called_once()
+ assert service.schedule_after.call_args.kwargs["schedule"] == schedule
+ assert service.schedule_after.call_args.kwargs["data"] == service_data
+ assert service.schedule_after.call_args.kwargs["root_pipeline_data"].inputs == {}
+ assert service.schedule_after.call_args.kwargs["root_pipeline_data"].outputs == {}
+ assert service.schedule.call_args.kwargs["schedule"] == schedule
+ assert service.schedule.call_args.kwargs["data"] == service_data
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {}
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {}
+ assert service.schedule.call_args.kwargs["callback_data"] == None
+
+
+def test_schedule__poll_success_and_done():
+ pi = ProcessInfo(
+ process_id="pid",
+ destination_id="",
+ root_pipeline_id="root",
+ pipeline_stack=["root"],
+ parent_id="",
+ )
+
+ node = ServiceActivity(
+ id="nid",
+ type=NodeType.ServiceActivity,
+ target_flows=["f1"],
+ target_nodes=["t1"],
+ targets={"f1": "t1"},
+ root_pipeline_id="root",
+ parent_pipeline_id="root",
+ can_skip=True,
+ code="test_service",
+ version="legacy",
+ error_ignorable=True,
+ timeout=10,
+ )
+
+ schedule = Schedule(
+ id=1,
+ type=ScheduleType.POLL,
+ process_id=pi.process_id,
+ node_id=node.id,
+ finished=False,
+ expired=False,
+ version="v1",
+ times=1,
+ )
+
+ service_data = ExecutionData({}, {})
+ data_outputs = {}
+
+ service = MagicMock()
+ service.schedule = MagicMock(return_value=True)
+ service.is_schedule_done = MagicMock(return_value=True)
+
+ runtime = MagicMock()
+ runtime.get_data_outputs = MagicMock(return_value=data_outputs)
+ runtime.get_execution_data = MagicMock(return_value=service_data)
+ runtime.get_data_inputs = MagicMock(return_value={})
+ runtime.get_context_values = MagicMock(return_value=[])
+ runtime.get_service = MagicMock(return_value=service)
+
+ handler = ServiceActivityHandler(node, runtime)
+ result = handler.schedule(pi, 1, 1, schedule, None)
+
+ assert result.has_next_schedule == False
+ assert result.schedule_after == -1
+ assert result.schedule_done == True
+ assert result.next_node_id == node.target_nodes[0]
+
+ runtime.get_data_outputs.assert_called_once_with(node.id)
+ runtime.get_execution_data.assert_called_once_with(node.id)
+ runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id)
+ runtime.get_service.assert_called_once_with(code=node.code, version=node.version)
+ runtime.add_schedule_times.assert_called_once_with(schedule.id)
+ runtime.set_execution_data.assert_called_once()
+ assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id
+ assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {}
+ assert runtime.set_execution_data.call_args.kwargs["data"].outputs == {
+ "_result": True,
+ "_loop": 1,
+ "_inner_loop": 1,
+ }
+ runtime.stop_timeout_monitor.assert_called_once_with(
+ process_id=pi.process_id,
+ node_id=node.id,
+ version="v1",
+ timeout=node.timeout,
+ )
+ runtime.set_state.assert_called_once_with(
+ node_id=node.id,
+ version=schedule.version,
+ to_state=states.FINISHED,
+ set_archive_time=True,
+ error_ignored=False,
+ )
+
+ service.setup_runtime_attributes.assert_called_once_with(
+ id=node.id,
+ version="v1",
+ root_pipeline_id=pi.root_pipeline_id,
+ top_pipeline_id=pi.top_pipeline_id,
+ loop=1,
+ inner_loop=1,
+ )
+ service.is_schedule_done.assert_called_once()
+ service.schedule_after.assert_not_called()
+ assert service.schedule.call_args.kwargs["schedule"] == schedule
+ assert service.schedule.call_args.kwargs["data"] == service_data
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {}
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {}
+
assert service.schedule.call_args.kwargs["callback_data"] == None + + +def test_schedule__callback_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=True, + timeout=10, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.CALLBACK, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = MagicMock(return_value=True) + service.is_schedule_done = MagicMock(return_value=True) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 1, schedule, None) + + assert result.has_next_schedule == False + assert result.schedule_after == -1 + assert result.schedule_done == True + assert result.next_node_id == node.target_nodes[0] + + runtime.get_data_outputs.assert_called_once_with(node.id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.add_schedule_times.assert_called_once_with(schedule.id) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + assert runtime.set_execution_data.call_args.kwargs["data"].outputs == { + "_result": True, + "_loop": 1, + "_inner_loop": 1, + } + runtime.stop_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, + version=schedule.version, + to_state=states.FINISHED, + set_archive_time=True, + error_ignored=False, + ) + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + service.is_schedule_done.assert_called_once() + service.schedule_after.assert_not_called() + assert service.schedule.call_args.kwargs["schedule"] == schedule + assert service.schedule.call_args.kwargs["data"] == service_data + assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.schedule.call_args.kwargs["callback_data"] == None + + +def test_schedule__multi_callback_success_and_not_done(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + 
parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=True, + timeout=10, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.MULTIPLE_CALLBACK, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = MagicMock(return_value=True) + service.schedule_after = MagicMock(return_value=5) + service.is_schedule_done = MagicMock(return_value=False) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 1, schedule, None) + + assert result.has_next_schedule == False + assert result.schedule_after == 5 + assert result.schedule_done == False + assert result.next_node_id == None + + runtime.get_data_outputs.assert_called_once_with(node.id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.add_schedule_times.assert_called_once_with(schedule.id) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + runtime.stop_timeout_monitor.assert_not_called() + runtime.set_state.assert_not_called() + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + service.is_schedule_done.assert_called_once() + service.schedule_after.call_args.kwargs["schedule"] == schedule + service.schedule_after.call_args.kwargs["data"] == service_data + service.schedule_after.call_args.kwargs["root_pipeline_data"].inputs == {} + service.schedule_after.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.schedule.call_args.kwargs["schedule"] == schedule + assert service.schedule.call_args.kwargs["data"] == service_data + assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.schedule.call_args.kwargs["callback_data"] == None + + +def test_schedule__multi_callback_success_and_done(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=True, + timeout=10, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = 
MagicMock(return_value=True) + service.is_schedule_done = MagicMock(return_value=True) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 1, schedule, None) + + assert result.has_next_schedule == False + assert result.schedule_after == -1 + assert result.schedule_done == True + assert result.next_node_id == node.target_nodes[0] + + runtime.get_data_outputs.assert_called_once_with(node.id) + runtime.get_execution_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_service.assert_called_once_with(code=node.code, version=node.version) + runtime.add_schedule_times.assert_called_once_with(schedule.id) + runtime.set_execution_data.assert_called_once() + assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id + assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {} + assert runtime.set_execution_data.call_args.kwargs["data"].outputs == { + "_result": True, + "_loop": 1, + "_inner_loop": 1, + } + runtime.stop_timeout_monitor.assert_called_once_with( + process_id=pi.process_id, + node_id=node.id, + version="v1", + timeout=node.timeout, + ) + runtime.set_state.assert_called_once_with( + node_id=node.id, + version=schedule.version, + to_state=states.FINISHED, + set_archive_time=True, + error_ignored=False, + ) + + service.setup_runtime_attributes.assert_called_once_with( + id=node.id, + version="v1", + root_pipeline_id=pi.root_pipeline_id, + top_pipeline_id=pi.top_pipeline_id, + loop=1, + inner_loop=1, + ) + service.is_schedule_done.assert_called_once() + service.schedule_after.assert_not_called() + assert service.schedule.call_args.kwargs["schedule"] == schedule + assert service.schedule.call_args.kwargs["data"] == service_data + assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {} + assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {} + assert service.schedule.call_args.kwargs["callback_data"] == None + + +def test_schedule__fail(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="", + ) + + node = ServiceActivity( + id="nid", + type=NodeType.ServiceActivity, + target_flows=["f1"], + target_nodes=["t1"], + targets={"f1": "t1"}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + code="test_service", + version="legacy", + error_ignorable=False, + timeout=None, + ) + + schedule = Schedule( + id=1, + type=ScheduleType.POLL, + process_id=pi.process_id, + node_id=node.id, + finished=False, + expired=False, + version="v1", + times=1, + ) + + service_data = ExecutionData({}, {}) + data_outputs = {} + + service = MagicMock() + service.schedule = MagicMock(return_value=False) + + runtime = MagicMock() + runtime.get_data_outputs = MagicMock(return_value=data_outputs) + runtime.get_execution_data = MagicMock(return_value=service_data) + runtime.get_data_inputs = MagicMock(return_value={}) + runtime.get_context_values = MagicMock(return_value=[]) + runtime.get_service = MagicMock(return_value=service) + + handler = ServiceActivityHandler(node, runtime) + result = handler.schedule(pi, 1, 1, schedule, 
None)
+
+ assert result.has_next_schedule == False
+ assert result.schedule_after == -1
+ assert result.schedule_done == False
+ assert result.next_node_id == None
+
+ runtime.get_data_outputs.assert_called_once_with(node.id)
+ runtime.get_execution_data.assert_called_once_with(node.id)
+ runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id)
+ runtime.get_service.assert_called_once_with(code=node.code, version=node.version)
+ runtime.add_schedule_times.assert_called_once_with(schedule.id)
+ runtime.set_execution_data.assert_called_once()
+ assert runtime.set_execution_data.call_args.kwargs["node_id"] == node.id
+ assert runtime.set_execution_data.call_args.kwargs["data"].inputs == {}
+ assert runtime.set_execution_data.call_args.kwargs["data"].outputs == {
+ "_result": False,
+ "_loop": 1,
+ "_inner_loop": 1,
+ }
+ runtime.stop_timeout_monitor.assert_not_called()
+ runtime.set_state.assert_called_once_with(
+ node_id=node.id,
+ version=schedule.version,
+ to_state=states.FAILED,
+ set_archive_time=True,
+ )
+
+ service.setup_runtime_attributes.assert_called_once_with(
+ id=node.id,
+ version="v1",
+ root_pipeline_id=pi.root_pipeline_id,
+ top_pipeline_id=pi.top_pipeline_id,
+ loop=1,
+ inner_loop=1,
+ )
+ assert service.schedule.call_args.kwargs["schedule"] == schedule
+ assert service.schedule.call_args.kwargs["data"] == service_data
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].inputs == {}
+ assert service.schedule.call_args.kwargs["root_pipeline_data"].outputs == {}
+ assert service.schedule.call_args.kwargs["callback_data"] == None
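Read together, the schedule tests above pin down a small outcome matrix for `ServiceActivityHandler.schedule`. The sketch below restates that matrix as plain Python so it can be read in one place; it is distilled only from the assertions (the function and its signature are invented here, and the exception paths are left out), not the engine's actual implementation:

```python
from enum import Enum


class ScheduleType(Enum):
    CALLBACK = 1
    MULTIPLE_CALLBACK = 2
    POLL = 3


def schedule_outcome(schedule_type, schedule_ok, is_schedule_done, schedule_after=-1):
    """Return (schedule_done, schedule_after) the way the tests above assert them."""
    if not schedule_ok:
        # service.schedule returned False: the node goes to FAILED and no
        # further schedule is planned
        return (False, -1)
    if schedule_type == ScheduleType.CALLBACK or is_schedule_done:
        # a successful single callback, or a finished poll / multi-callback,
        # completes the node and execution moves on to its target node
        return (True, -1)
    # succeeded but not done yet: poll again after the service-provided delay
    return (False, schedule_after)


assert schedule_outcome(ScheduleType.POLL, True, False, 5) == (False, 5)
assert schedule_outcome(ScheduleType.POLL, True, True) == (True, -1)
assert schedule_outcome(ScheduleType.CALLBACK, True, True) == (True, -1)
assert schedule_outcome(ScheduleType.MULTIPLE_CALLBACK, True, False, 5) == (False, 5)
assert schedule_outcome(ScheduleType.POLL, False, False) == (False, -1)
```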
+""" + +from mock import MagicMock, call + +from bamboo_engine import states +from bamboo_engine.eri import ( + ProcessInfo, + NodeType, + SubProcess, + ContextValue, + ContextValueType, + Data, + DataInput, +) +from bamboo_engine.handlers.subprocess import SubProcessHandler + + +def test_subprocess_handler__execute_success(): + pi = ProcessInfo( + process_id="pid", + destination_id="", + root_pipeline_id="root", + pipeline_stack=["root"], + parent_id="parent", + ) + + node = SubProcess( + id="nid", + type=NodeType.ExecutableEndEvent, + target_flows=[], + target_nodes=[], + targets={}, + root_pipeline_id="root", + parent_pipeline_id="root", + can_skip=True, + start_event_id="sid", + ) + + data = Data( + inputs={ + "${k1}": DataInput(need_render=True, value="${v1}"), + "${k2}": DataInput(need_render=True, value="${sub_loop}"), + }, + outputs={"_loop": "${sub_loop}"}, + ) + + context_values = [ + ContextValue(key="${v1}", value="var", type=ContextValueType.PLAIN) + ] + + runtime = MagicMock() + runtime.get_data = MagicMock(return_value=data) + runtime.get_context_key_references = MagicMock(return_value=set()) + runtime.get_context_values = MagicMock(return_value=context_values) + + handler = SubProcessHandler(node, runtime) + result = handler.execute(pi, 1, 1, "v1") + + assert result.should_sleep == False + assert result.schedule_ready == False + assert result.schedule_type == None + assert result.schedule_after == -1 + assert result.dispatch_processes == [] + assert result.next_node_id == node.start_event_id + assert result.should_die == False + + runtime.get_data.assert_called_once_with(node.id) + runtime.get_data_inputs.assert_called_once_with(pi.root_pipeline_id) + runtime.get_context_key_references.assert_called_once_with( + pipeline_id="root", keys={"${v1}", "${sub_loop}"} + ) + runtime.get_context_values.assert_called_once_with( + pipeline_id="root", keys={"${v1}", "${sub_loop}"} + ) + runtime.reset_children_state_inner_loop.assert_called_once_with(node.id) + upsert_call_args = runtime.upsert_plain_context_values.call_args.args + assert upsert_call_args[0] == node.id + assert upsert_call_args[1]["${k1}"].key == "${k1}" + assert upsert_call_args[1]["${k1}"].type == ContextValueType.PLAIN + assert upsert_call_args[1]["${k1}"].value == "var" + assert upsert_call_args[1]["${k2}"].key == "${k2}" + assert upsert_call_args[1]["${k2}"].type == ContextValueType.PLAIN + assert upsert_call_args[1]["${k2}"].value == 1 + runtime.set_pipeline_stack.assert_called_once_with(pi.process_id, ["root", "nid"]) + assert pi.pipeline_stack == ["root", "nid"] diff --git a/tests/template/__init__.py b/tests/template/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/template/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" diff --git a/tests/template/test_template.py b/tests/template/test_template.py new file mode 100644 index 00000000..584c430f --- /dev/null +++ b/tests/template/test_template.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +import datetime + +from bamboo_engine.config import Settings +from bamboo_engine.template import Template + + +def test_get_reference(): + t = Template(["${a}", ["${a}", "${a+int(b)}"]]) + assert t.get_reference() == {"${a}", "${b}", "${int}"} + + t = Template(['${a["c"]}', ['${"%s" % a}', "${a+int(b)}"]]) + assert t.get_reference() == {"${a}", "${b}", "${int}"} + + t = Template("a-${1 if t else 2}-${a}") + assert t.render({"t": False, "a": "c"}) == "a-2-c" + t = Template("${'a-%s-c' % 1 if t else 2}") + assert t.render({"t": True}) == "a-1-c" + + +def test_get_templates(): + t = Template(["${a}", ["${a}", "${a+int(b)}"]]) + assert set(t.get_templates()) == {"${a+int(b)}", "${a}"} + + +def test_render(): + list_template = Template(["${a}", ["${a}", "${a+int(b)}"]]) + assert list_template.render({"a": 2, "b": "3"}), [2, [2, "5"]] + + tuple_template = Template(("${a}", ("${a}", "${a+int(b)}"))) + assert tuple_template.render({"a": 2, "b": "3"}), (2, (2, "5")) + + dict_template = Template({"aaaa": {"a": "${a}", "b": "${a+int(b)}"}}) + assert dict_template.render({"a": 2, "b": "3"}), {"aaaa": {"a": 2, "b": "5"}} + + simple_template = Template("${a}") + assert simple_template.render({"a": "1"}) == "1" + + calculate_template = Template("${a+int(b)}") + assert calculate_template.render({"a": 2, "b": "3"}) == "5" + + split_template = Template("${a[0]}") + assert split_template.render({"a": [1, 2]}) == "1" + + dict_item_template = Template('${a["b"]}') + assert dict_item_template.render({"a": {"b": 1}}) == "1" + + not_exists_template = Template("${a}") + assert not_exists_template.render({}) == "${a}" + + syntax_error_template = Template("${a.b}") + assert syntax_error_template.render({}) == "${a.b}" + + syntax_error_template = Template("${a:b}") + assert syntax_error_template.render({}) == "${a:b}" + + +def test_render__with_sandbox(): + + r1 = Template("""${exec(print(''))}""").render({}) + assert r1 == """${exec(print(''))}""" + + r2 = Template("""${datetime.datetime.now().strftime("%Y")}""").render({}) + assert r2 == """${datetime.datetime.now().strftime("%Y")}""" + + Settings.MAKO_SANDBOX_IMPORT_MODULES = {"datetime": "datetime"} + + r2 = Template("""${datetime.datetime.now().strftime("%Y")}""").render({}) + year = datetime.datetime.now().strftime("%Y") + assert r2 == year + + Settings.MAKO_SANDBOX_IMPORT_MODULES = {} + + r3 = Template("""${exec(print(''))}""").render({}) + assert r1 == """${exec(print(''))}""" + + +def test_render__built_in_functions__with_args(): + int_template = Template("${int(111)}") + assert int_template.render({}) == "111" + + int_template = 
Template("${str('aaa')}") + assert int_template.render({}) == "aaa" + + +def test_redner__built_in_functions__cover(): + int_template = Template("${int}") + assert int_template.render({"int": "cover"}) == "cover" + + +def test_mako_attack(): + attack_templates = [ + '${"".__class__.__mro__[-1].__subclasses__()[127].__init__.__globals__["system"]("whoami")}', # noqa + '${getattr("", dir(0)[0][0] + dir(0)[0][0] + "class" + dir(0)[0][0]+ dir(0)[0][0])}', # noqa + 'a-${__import__("os").system("whoami")}', + "${while True: pass}", + """<% import json %> ${json.codecs.builtins.exec('import os; os.system("whoami")')}""", # noqa + ] + for at in attack_templates: + assert Template(at).render({}) == at diff --git a/tests/test_context.py b/tests/test_context.py new file mode 100644 index 00000000..f68c94d8 --- /dev/null +++ b/tests/test_context.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from mock import MagicMock + +from bamboo_engine.eri import ContextValue, ContextValueType, Variable +from bamboo_engine.context import Context, PlainVariable, SpliceVariable + + +def test_hydrate(): + class CV(Variable): + def __init__(self, value): + self.value = value + + def get(self): + return "compute_result" + + compute_var = CV("compute_value") + + runtime = MagicMock() + runtime.get_compute_variable = MagicMock(return_value=compute_var) + + values = [ + ContextValue("${a}", type=ContextValueType.PLAIN, value="1"), + ContextValue("${b}", type=ContextValueType.PLAIN, value="2"), + ContextValue("${c}", type=ContextValueType.SPLICE, value="${a}-${b}"), + ContextValue("${d}", type=ContextValueType.SPLICE, value="${int(a) + int(b)}"), + ContextValue( + "${e}", + type=ContextValueType.COMPUTE, + value="compute_value", + code="compute_var", + ), + ContextValue( + "${f}", type=ContextValueType.SPLICE, value="${a}-${b}-${c}-${d}-${e}" + ), + ] + + context = Context(runtime, values, {"id": 1}) + hydrated = context.hydrate() + assert hydrated == { + "${a}": "1", + "${b}": "2", + "${c}": "1-2", + "${d}": "3", + "${e}": "compute_result", + "${f}": "1-2-1-2-3-compute_result", + } + runtime.get_compute_variable.assert_called_once() + + context = Context(runtime, values, {"id": 1}) + hydrated = context.hydrate(deformat=True) + assert hydrated == { + "a": "1", + "b": "2", + "c": "1-2", + "d": "3", + "e": "compute_result", + "f": "1-2-1-2-3-compute_result", + } + + +def test_extract_outputs(): + pipeline_id = "pipeline" + data_outputs = {"a": "b", "c": "d", "e": "f"} + execution_data_outputs = {"a": 1, "e": 2} + + runtime = MagicMock() + + context = Context(runtime, [], {}) + context.extract_outputs(pipeline_id, data_outputs, execution_data_outputs) + + upsert_call_args = runtime.upsert_plain_context_values.call_args.kwargs + assert upsert_call_args["pipeline_id"] == pipeline_id + assert 
len(upsert_call_args["update"]) == 2 + assert upsert_call_args["update"]["b"].key == "b" + assert upsert_call_args["update"]["b"].type == ContextValueType.PLAIN + assert upsert_call_args["update"]["b"].value == 1 + assert upsert_call_args["update"]["f"].key == "f" + assert upsert_call_args["update"]["f"].type == ContextValueType.PLAIN + assert upsert_call_args["update"]["f"].value == 2 diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/utils/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/tests/utils/boolrule/__init__.py b/tests/utils/boolrule/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/utils/boolrule/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/tests/utils/boolrule/test_boolrule.py b/tests/utils/boolrule/test_boolrule.py new file mode 100644 index 00000000..56b9fe9c --- /dev/null +++ b/tests/utils/boolrule/test_boolrule.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine.utils.boolrule.boolrule import BoolRule + + +def test_eq(): + assert BoolRule("1 == 1").test() == True + assert BoolRule('"1" == 1').test() == True + + assert BoolRule("True == true").test() == True + assert BoolRule("False == false").test() == True + + assert BoolRule("1 == True").test() == True + assert BoolRule("0 == False").test() == True + assert BoolRule('"1" == True').test() == True + assert BoolRule('"0" == False').test() == True + assert BoolRule('"3.14" == 3.14').test() == True + + assert BoolRule('"abc" == "abc"').test() == True + + assert BoolRule("1 == 2").test() == False + assert BoolRule('123 == "123a"').test() == False + assert BoolRule('1 == "2"').test() == False + + assert BoolRule('True == "true"').test() == False + assert BoolRule('False == "false"').test() == False + + +def test_ne(): + assert BoolRule("1 != 2").test() == True + assert BoolRule('"1" != 2').test() == True + + assert BoolRule('True != "true"').test() == True + + assert BoolRule('"abc" != "cba"').test() == True + + assert BoolRule("1 != 1").test() == False + + +def test_gt(): + assert BoolRule("2 > 1").test() == True + assert BoolRule('"2" > 1').test() == True + + assert BoolRule("1 > 2").test() == False + assert BoolRule('"1" > 2').test() == False + + +def test_lt(): + assert BoolRule("1 < 2").test() == True + assert BoolRule('"1" < 2').test() == True + + assert BoolRule("2 < 1").test() == False + assert BoolRule("2 < 2").test() == False + + +def test_in(): + assert BoolRule("1 in (1, 2)").test() == True + assert BoolRule('1 in ("1", "2")').test() == True + assert BoolRule('"1" in (1, 2)').test() == True + assert BoolRule('"1" in ("1", "2")').test() == True + + assert BoolRule("1 in (0, 2)").test() == False + assert BoolRule('1 in ("11", 2)').test() == False + + +def test_notin(): + assert BoolRule("1 notin (0, 2)").test() == True + assert BoolRule('1 notin ("0", "2")').test() == True + assert BoolRule('"abc" notin (0, 2)').test() == True + + +def test_and(): + assert BoolRule("1 < 2 and 2 < 3").test() == True + assert BoolRule('"a" < "s" and 2 < 3').test() == True + + assert BoolRule("1 > 2 and 2 > 1").test() == False + assert BoolRule("2 > 1 and 1 > 2").test() == False + assert BoolRule("2 > 1 and 1 > 2").test() == False + assert BoolRule('"s" > "s" and 2 < 3').test() == False + assert BoolRule('"s" < "s" and 2 < 3').test() == False + + +def test_or(): + assert BoolRule("1 < 2 or 2 < 3").test() == True + assert BoolRule("1 < 2 or 2 < 1").test() == True + assert BoolRule("1 > 2 or 2 > 1").test() == True + assert BoolRule('"s" > "s" or "su" > "st"').test() == True + + assert BoolRule("1 > 2 or 2 > 3").test() == False + assert BoolRule('"a" > "s" or "s" > "st"').test() == False + + +def test_context(): + context = {"${v1}": 1, "${v2}": "1"} + assert BoolRule("${v1} == ${v2}").test(context) == True + assert BoolRule("${v1} == 1").test(context) == True + assert BoolRule('${v1} == "1"').test(context) == True + assert BoolRule('${v2} == "1"').test(context) == True + assert BoolRule('${v2} == "1"').test(context) == True + + assert BoolRule('${v1} in ("1")').test(context) == True + + +def test_gt_or_equal(): + context = {"${v1}": 1, "${v2}": "1"} + assert BoolRule("${v1} >= ${v2}").test(context) == True + assert BoolRule("${v1} >= 1").test(context) == True + assert BoolRule('${v1} >= "1"').test(context) == True + assert BoolRule("${v1} >= 0").test(context) == True + assert BoolRule('${v1} >= "0"').test(context) == True + + # assert BoolRule('${v1} >= 2').test(context) == True + assert 
BoolRule('${v2} >= "2"').test(context) == False + + +def test_lt_or_equal(): + context = {"${v1}": 1, "${v2}": "1"} + assert BoolRule("${v1} <= ${v2}").test(context) == True + assert BoolRule("${v1} <= 1").test(context) == True + assert BoolRule('${v1} <= "2"').test(context) == True + assert BoolRule('${v1} <= "123456789111"').test(context) == True + assert BoolRule("${v1} <= 123456789111").test(context) == True + assert BoolRule("${v1} <= 0").test(context) == False + assert BoolRule('${v1} <= "0"').test(context) == False + assert BoolRule('"a" <= "b"').test(context) == True + assert BoolRule('"a" <= "49"').test(context) == False + + +def test_true_equal(): + context = {"${v1}": True, "${v2}": "True"} + # 下面的表达式测试不符合预期 + # assert BoolRule('${v1} == ${v2}').test(context) == True + assert BoolRule("${v1} == True").test(context) == True + assert BoolRule("${v1} == true").test(context) == True + assert BoolRule("${v1} == ${v1}").test(context) == True + assert BoolRule("${v1} == 1").test(context) == True + assert BoolRule('${v1} == "1"').test(context) == True + + assert BoolRule('${v1} == "s"').test(context) == False + assert BoolRule("${v1} == 0").test(context) == False + assert BoolRule('${v1} == "0"').test(context) == False + assert BoolRule("${v1} == false").test(context) == False + assert BoolRule("${v1} == False").test(context) == False + assert BoolRule('${v1} == "false"').test(context) == False + assert BoolRule('${v1} == "False"').test(context) == False + + +def test_false_equal(): + context = {"${v1}": False, "${v2}": "False"} + # 下面的表达式测试不符合预期 + # assert BoolRule('${v1} == "False"').test(context) == True + assert BoolRule("${v1} == ${v1}").test(context) == True + assert BoolRule("${v1} == false").test(context) == True + assert BoolRule("${v1} == False").test(context) == True + assert BoolRule('${v1} == "0"').test(context) == True + assert BoolRule("${v1} == 0").test(context) == True + assert BoolRule('${v1} == "0"').test(context) == True + + assert BoolRule('${v1} == "1"').test(context) == False + assert BoolRule("${v1} == true").test(context) == False + assert BoolRule('${v1} == "true"').test(context) == False + assert BoolRule("${v1} == True").test(context) == False + assert BoolRule('${v1} == "True"').test(context) == False + assert BoolRule('${v1} == "s"').test(context) == False + + +def test_multi_or(): + assert BoolRule('("s" > "s" or "su" > "st") or (1 > 3 and 2 < 3)').test() == True + assert BoolRule('(1 > 3 and 2 < 3) or ("s" > "s" or "su" > "st")').test() == True + assert BoolRule('(1 < 3 and 2 < 3) or ("s" > "s" or "su" > "st")').test() == True + assert ( + BoolRule( + '(1 > 2 or 2 > 3) or ("s" > "s" or "su" > "st") or (4 > 5 and 5 < 6)' + ).test() + == True + ) + + assert BoolRule('(1 > 2 or 2 > 3) or ("s" > "s" or "su" < "st")').test() == False + assert ( + BoolRule( + '(1 > 2 or 2 > 3) or ("s" > "s" or "su" < "st") or (4 > 5 and 5 < 6)' + ).test() + == False + ) + + +def test_multi_and(): + assert BoolRule('("s" > "s" or "su" > "st") and (1 < 3 and 2 < 3)').test() == True + + assert BoolRule('(1 < 2 or 2 > 3) and ("s" > "s" or "su" < "st")').test() == False + assert BoolRule('(1 > 2 or 2 > 3) and ("s" > "s" or "su" > "st")').test() == False + assert BoolRule('(1 > 2 or 2 > 3) and ("s" > "s" or "su" < "st")').test() == False + assert ( + BoolRule( + '(1 < 3 and 2 < 3) and ("s" > "s" or "su" > "st") and (4 > 5 and 5 < 6)' + ).test() + == False + ) diff --git a/tests/utils/test_string.py b/tests/utils/test_string.py new file mode 100644 index 00000000..06e43153 --- 
/dev/null +++ b/tests/utils/test_string.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from bamboo_engine.utils import string + + +def test_format_var_key(): + assert string.format_var_key("k") == "${k}" + + +def test_deformat_var_key(): + assert string.deformat_var_key("${k}") == "k" + + +def test_transform_escape_char(): + assert string.transform_escape_char({}) == {} + assert string.transform_escape_char("k") == "k" + assert string.transform_escape_char("\nk") == "\\nk" + assert string.transform_escape_char("\\nk") == "\\nk" \ No newline at end of file diff --git a/tests/validator/__init__.py b/tests/validator/__init__.py new file mode 100644 index 00000000..40097292 --- /dev/null +++ b/tests/validator/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/tests/validator/cases.py b/tests/validator/cases.py new file mode 100644 index 00000000..58dcf792 --- /dev/null +++ b/tests/validator/cases.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" + +from bamboo_engine.builder import * # noqa +from bamboo_engine.validator.gateway import * # noqa + +from .utils import * # noqa + + +def flow_valid_case(): + def _(num): + return num - 1 + + def out_assert_case(length, out_set): + return {"len": length, "outgoing": out_set} + + outgoing_assert = { + start_event_id: out_assert_case(1, {act_id(1)}), + act_id(1): out_assert_case(1, {parallel_gw_id(1)}), + parallel_gw_id(1): out_assert_case( + 3, {parallel_gw_id(2), act_id(5), act_id(6)} + ), + parallel_gw_id(2): out_assert_case(3, {act_id(2), act_id(3), act_id(4)}), + act_id(2): out_assert_case(1, {converge_gw_id(1)}), + act_id(3): out_assert_case(1, {converge_gw_id(1)}), + act_id(4): out_assert_case(1, {converge_gw_id(1)}), + converge_gw_id(1): out_assert_case(1, {act_id(7)}), + act_id(7): out_assert_case(1, {exclusive_gw_id(1)}), + exclusive_gw_id(1): out_assert_case(2, {parallel_gw_id(2), converge_gw_id(3)}), + act_id(5): out_assert_case(1, {exclusive_gw_id(7)}), + exclusive_gw_id(7): out_assert_case(2, {act_id(8), converge_gw_id(3)}), + act_id(8): out_assert_case(1, {exclusive_gw_id(8)}), + exclusive_gw_id(8): out_assert_case(2, {act_id(8), act_id(11)}), + act_id(11): out_assert_case(1, {converge_gw_id(3)}), + act_id(6): out_assert_case(1, {exclusive_gw_id(2)}), + exclusive_gw_id(2): out_assert_case(3, {act_id(6), act_id(9), act_id(10)}), + act_id(9): out_assert_case(1, {converge_gw_id(2)}), + act_id(10): out_assert_case(1, {converge_gw_id(2)}), + converge_gw_id(2): out_assert_case(1, {act_id(12)}), + act_id(12): out_assert_case(1, {exclusive_gw_id(6)}), + exclusive_gw_id(6): out_assert_case( + 3, {act_id(6), converge_gw_id(3), converge_gw_id(2)} + ), + converge_gw_id(3): out_assert_case(1, {act_id(13)}), + act_id(13): out_assert_case(1, {exclusive_gw_id(3)}), + exclusive_gw_id(3): out_assert_case( + 4, {end_event_id, act_id(14), parallel_gw_id(3), act_id(1)} + ), + act_id(14): out_assert_case(1, {exclusive_gw_id(4)}), + exclusive_gw_id(4): out_assert_case(2, {act_id(13), converge_gw_id(4)}), + parallel_gw_id(3): out_assert_case(3, {act_id(15), act_id(16), act_id(17)}), + act_id(15): out_assert_case(1, {act_id(18)}), + act_id(18): out_assert_case(1, {converge_gw_id(4)}), + act_id(16): out_assert_case(1, {converge_gw_id(4)}), + act_id(17): out_assert_case(1, {exclusive_gw_id(5)}), + exclusive_gw_id(5): out_assert_case(2, {act_id(19), act_id(20)}), + act_id(19): out_assert_case(1, {converge_gw_id(4)}), + act_id(20): out_assert_case(1, {converge_gw_id(4)}), + converge_gw_id(4): out_assert_case(1, {end_event_id}), + end_event_id: out_assert_case(0, set()), + } + + stream_assert = { + start_event_id: MAIN_STREAM, + act_id(1): MAIN_STREAM, + parallel_gw_id(1): MAIN_STREAM, + parallel_gw_id(2): "pg_1_0", + act_id(2): "pg_2_0", + act_id(3): "pg_2_1", + act_id(4): "pg_2_2", + converge_gw_id(1): "pg_1_0", + act_id(7): "pg_1_0", + exclusive_gw_id(1): "pg_1_0", + act_id(5): "pg_1_1", + exclusive_gw_id(7): "pg_1_1", + act_id(8): "pg_1_1", + exclusive_gw_id(8): "pg_1_1", + act_id(11): "pg_1_1", + act_id(6): "pg_1_2", + exclusive_gw_id(2): "pg_1_2", + act_id(9): "pg_1_2", + act_id(10): "pg_1_2", + converge_gw_id(2): "pg_1_2", + act_id(12): "pg_1_2", + exclusive_gw_id(6): "pg_1_2", + converge_gw_id(3): MAIN_STREAM, + act_id(13): MAIN_STREAM, + exclusive_gw_id(3): MAIN_STREAM, + act_id(14): MAIN_STREAM, + exclusive_gw_id(4): MAIN_STREAM, + parallel_gw_id(3): MAIN_STREAM, + act_id(15): "pg_3_0", + act_id(18): "pg_3_0", + act_id(16): "pg_3_1", + act_id(17): "pg_3_2", + exclusive_gw_id(5): "pg_3_2", 
+ act_id(19): "pg_3_2", + act_id(20): "pg_3_2", + converge_gw_id(4): MAIN_STREAM, + end_event_id: MAIN_STREAM, + } + + gateway_validation_assert = { + converge_gw_id(1): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 1, + "converged": {parallel_gw_id(2)}, + "distance": 5, + }, + converge_gw_id(2): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 1, + "converged": {exclusive_gw_id(2)}, + "distance": 6, + }, + converge_gw_id(3): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 5, + "converged": { + parallel_gw_id(1), + exclusive_gw_id(1), + exclusive_gw_id(7), + exclusive_gw_id(8), + exclusive_gw_id(6), + }, + "distance": 9, + }, + converge_gw_id(4): { + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "converged_len": 4, + "converged": { + parallel_gw_id(3), + exclusive_gw_id(3), + exclusive_gw_id(4), + exclusive_gw_id(5), + }, + "distance": 16, + }, + exclusive_gw_id(1): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 7, + }, + exclusive_gw_id(2): { + "match": None, + "match_assert": converge_gw_id(2), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(3): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": True, + "distance": 11, + }, + exclusive_gw_id(4): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 13, + }, + exclusive_gw_id(5): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 14, + }, + exclusive_gw_id(6): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 8, + }, + exclusive_gw_id(7): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(8): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + parallel_gw_id(1): { + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 2, + }, + parallel_gw_id(2): { + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 3, + }, + parallel_gw_id(3): { + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 12, + }, + } + + start = EmptyStartEvent(id=start_event_id) + acts = [ServiceActivity(id=act_id(i)) for i in range(1, 21)] + pgs = [ParallelGateway(id=parallel_gw_id(i)) for i in range(1, 3)] + pgs.append( + ConditionalParallelGateway( + id=parallel_gw_id(3), conditions={0: "123", 1: "456", 2: "789"} + ) + ) + egs = [ + ExclusiveGateway( + id=exclusive_gw_id(i), + conditions={0: "123", 1: "456", 2: "789", 3: "101112"}, + ) + for i in range(1, 9) + ] + cgs = [ConvergeGateway(id=converge_gw_id(i)) for i in range(1, 5)] + end = EmptyEndEvent(id=end_event_id) + + nodes = [start, end] + nodes.extend(acts) + nodes.extend(pgs) + nodes.extend(egs) + nodes.extend(cgs) + + start.extend(acts[_(1)]).extend(pgs[_(1)]).connect( + 
pgs[_(2)], acts[_(5)], acts[_(6)] + ) + + pgs[_(2)].connect(acts[_(2)], acts[_(3)], acts[_(4)]).converge(cgs[_(1)]).extend( + acts[_(7)] + ).extend(egs[_(1)]).connect(pgs[_(2)], cgs[_(3)]) + acts[_(5)].extend(egs[_(7)]).connect(cgs[_(3)], acts[_(8)]).to(acts[_(8)]).extend( + egs[_(8)] + ).connect(acts[_(8)], acts[_(11)]).to(acts[_(11)]).extend(cgs[_(3)]) + acts[_(6)].extend(egs[_(2)]).connect(acts[_(9)], acts[_(10)],).converge( + cgs[_(2)] + ).extend(acts[_(12)]).extend(egs[_(6)]).connect( + acts[_(6)], cgs[_(3)], cgs[_(2)] + ).to( + egs[_(2)] + ).connect( + acts[_(6)] + ) + + cgs[_(3)].extend(acts[_(13)]).extend(egs[_(3)]).connect( + end, acts[_(14)], pgs[_(3)], acts[_(1)] + ) + + acts[_(14)].extend(egs[_(4)]).connect(acts[_(13)], cgs[_(4)]) + pgs[_(3)].connect(acts[_(15)], acts[_(16)], acts[_(17)]).to(acts[_(15)]).extend( + acts[_(18)] + ).extend(cgs[_(4)]).to(acts[_(17)]).extend(egs[_(5)]).connect( + acts[_(19)], acts[_(20)] + ).to( + acts[_(19)] + ).extend( + cgs[_(4)] + ).to( + acts[_(20)] + ).extend( + cgs[_(4)] + ).to( + acts[_(16)] + ).extend( + cgs[_(4)] + ).extend( + end + ) + + for node in nodes: + a = outgoing_assert[node.id] + out = {out.id for out in node.outgoing} + assert a["len"] == len(node.outgoing), "{id} actual: {a}, expect: {e}".format( + id=node.id, a=len(node.outgoing), e=a["len"] + ) + assert a["outgoing"] == out, "{id} actual: {a}, expect: {e}".format( + id=node.id, a=out, e=a["outgoing"] + ) + + return build_tree(start), gateway_validation_assert, stream_assert + + +def flow_valid_edge_case_1(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + act_2 = ServiceActivity(id=act_id(2)) + eg = ExclusiveGateway( + id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"} + ) + act_3 = ServiceActivity(id=act_id(3)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(act_2).extend(eg).connect(act_1, act_2, act_3).to( + act_3 + ).extend(end) + + return build_tree(start) + + +def flow_valid_edge_case_2(): + return { + "activities": { + "act_1": { + "component": {"inputs": {}, "code": None}, + "outgoing": "82b12b6aae533e55bdcc5bccfb014c2d", + "incoming": ["3fc89273786a36b8a6e7beac8301274d"], + "name": None, + "error_ignorable": False, + "type": "ServiceActivity", + "id": "act_1", + "optional": False, + }, + "act_2": { + "component": {"inputs": {}, "code": None}, + "outgoing": "3368add44347310eaef1f26f25909026", + "incoming": ["76caeed0e6053fea9db84a89f56a74a8"], + "name": None, + "error_ignorable": False, + "type": "ServiceActivity", + "id": "act_2", + "optional": False, + }, + }, + "end_event": { + "type": "EmptyEndEvent", + "outgoing": "", + "incoming": ["05f91b45a15b37d7b0c96d3ff94bff80"], + "id": "end_event_id", + "name": None, + }, + "flows": { + "27a9cdeaef623d37834ac6917d05eac5": { + "is_default": False, + "source": "start_event_id", + "target": "pg_1", + "id": "27a9cdeaef623d37834ac6917d05eac5", + }, + "82b12b6aae533e55bdcc5bccfb014c2d": { + "is_default": False, + "source": "act_1", + "target": "cg_1", + "id": "82b12b6aae533e55bdcc5bccfb014c2d", + }, + "3368add44347310eaef1f26f25909026": { + "is_default": False, + "source": "act_2", + "target": "cg_1", + "id": "3368add44347310eaef1f26f25909026", + }, + "05f91b45a15b37d7b0c96d3ff94bff80": { + "is_default": False, + "source": "cg_1", + "target": "end_event_id", + "id": "05f91b45a15b37d7b0c96d3ff94bff80", + }, + "3fc89273786a36b8a6e7beac8301274d": { + "is_default": False, + "source": "pg_1", + "target": "act_1", + "id": "3fc89273786a36b8a6e7beac8301274d", + 
}, + "76caeed0e6053fea9db84a89f56a74a8": { + "is_default": False, + "source": "pg_1", + "target": "act_2", + "id": "76caeed0e6053fea9db84a89f56a74a8", + }, + "76casdgd0e6053ea9db84a89f56a1234": { + "is_default": False, + "source": "pg_1", + "target": "cg_1", + "id": "76caeed0e6053fea9db84a89f56a74a8", + }, + }, + "gateways": { + "cg_1": { + "type": "ConvergeGateway", + "outgoing": "05f91b45a15b37d7b0c96d3ff94bff80", + "incoming": [ + "82b12b6aae533e55bdcc5bccfb014c2d", + "3368add44347310eaef1f26f25909026", + "76casdgd0e6053ea9db84a89f56a1234", + ], + "id": "cg_1", + "name": None, + }, + "pg_1": { + "outgoing": [ + "3fc89273786a36b8a6e7beac8301274d", + "76caeed0e6053fea9db84a89f56a74a8", + "76casdgd0e6053ea9db84a89f56a1234", + ], + "incoming": ["27a9cdeaef623d37834ac6917d05eac5"], + "name": None, + "converge_gateway_id": "cg_1", + "type": "ParallelGateway", + "id": "pg_1", + }, + }, + "start_event": { + "type": "EmptyStartEvent", + "outgoing": "27a9cdeaef623d37834ac6917d05eac5", + "incoming": "", + "id": "start_event_id", + "name": None, + }, + "data": {"inputs": {}, "outputs": {}}, + "id": "c986802cd1e23a5f920c85b005f16dc3", + } + + +def flow_valid_edge_case_3(): + + start = EmptyStartEvent() + end = EmptyEndEvent() + eg_1 = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + eg_2 = ExclusiveGateway(id=exclusive_gw_id(2), conditions={0: "123", 1: "456"}) + eg_3 = ExclusiveGateway(id=exclusive_gw_id(3), conditions={0: "123", 1: "456"}) + eg_4 = ExclusiveGateway(id=exclusive_gw_id(4), conditions={0: "123", 1: "456"}) + pg_1 = ParallelGateway(id=parallel_gw_id(1)) + cg = ConvergeGateway(id=converge_gw_id(1)) + + start.connect(eg_1) + eg_1.connect(pg_1, end) + pg_1.connect(eg_2, eg_3) + eg_2.connect(eg_2, cg) + eg_3.connect(eg_4, eg_4) + eg_4.connect(eg_4, cg) + cg.connect(end) + + return build_tree(start) + + +def flow_valid_edge_case_4(): + start = EmptyStartEvent(id=start_event_id) + pg = ParallelGateway(id=parallel_gw_id(1)) + eg = ExclusiveGateway( + id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"} + ) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(pg).connect(cg, eg) + eg.connect(eg, cg) + cg.connect(end) + + return build_tree(start) + + +def flow_valid_edge_case_5(): + start = EmptyStartEvent(id=start_event_id) + eg = ExclusiveGateway( + id=exclusive_gw_id(1), conditions={0: "123", 1: "456", 2: "789"} + ) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(eg).connect(cg, cg, end) + cg.connect(eg) + + return build_tree(start) + + +def flow_invalid_case_1(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + pg = ParallelGateway(id=parallel_gw_id(1)) + act_2 = ServiceActivity(id=act_id(2)) + act_3 = ServiceActivity(id=act_id(3)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + act_4 = ServiceActivity(id=act_id(4)) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(pg).connect(act_2, act_3, eg).to(eg).connect( + act_3, act_4 + ) + + act_2.connect(cg) + act_3.connect(cg) + act_4.connect(cg) + cg.extend(end) + + return build_tree(start) + + +def flow_invalid_case_2(): + start = EmptyStartEvent(id=start_event_id) + act_1 = ServiceActivity(id=act_id(1)) + eg = ExclusiveGateway(id=exclusive_gw_id(1), conditions={0: "123", 1: "456"}) + act_2 = ServiceActivity(id=act_id(2)) + pg = ParallelGateway(id=parallel_gw_id(1)) + act_3 
= ServiceActivity(id=act_id(3)) + act_4 = ServiceActivity(id=act_id(4)) + cg = ConvergeGateway(id=converge_gw_id(1)) + end = EmptyEndEvent(id=end_event_id) + + start.extend(act_1).extend(eg).connect(act_3, act_2).to(act_2).extend(pg).connect( + act_3, act_4 + ).converge(cg).extend(end) + + return build_tree(start) + + +flow_valid_edge_cases = [ + {"case": flow_valid_edge_case_1}, + {"case": flow_valid_edge_case_2}, + {"case": flow_valid_edge_case_3}, + {"case": flow_valid_edge_case_4}, + {"case": flow_valid_edge_case_5}, +] + +flow_invalid_cases = [ + {"case": flow_invalid_case_1, "assert_invalid": act_id(3)}, + {"case": flow_invalid_case_2, "assert_invalid": act_id(3)}, +] + + +def gateway_valid_case(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [exclusive_gw_id(1)], + "id": converge_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 5, + "in_len": 3, + }, + converge_gw_id(2): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [exclusive_gw_id(6)], + "id": converge_gw_id(2), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 5, + "in_len": 1, + }, + converge_gw_id(3): { + "incoming": [1, 2, 3, 4], + "outgoing": [], + "type": "ConvergeGateway", + "target": [exclusive_gw_id(3)], + "id": converge_gw_id(3), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 7, + "in_len": 4, + }, + converge_gw_id(4): { + "incoming": [1, 2, 3, 4, 5], + "outgoing": [], + "type": "ConvergeGateway", + "target": [end_event_id], + "id": converge_gw_id(4), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 9, + "in_len": 5, + }, + converge_gw_id(5): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [parallel_gw_id(1)], + "id": converge_gw_id(5), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 2, + "in_len": 3, + }, + } + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [parallel_gw_id(2), converge_gw_id(3)], + "id": exclusive_gw_id(1), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + exclusive_gw_id(2): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [exclusive_gw_id(2), converge_gw_id(2), converge_gw_id(2)], + "id": exclusive_gw_id(2), + "match": None, + "match_assert": converge_gw_id(2), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(3): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [ + parallel_gw_id(4), + end_event_id, + exclusive_gw_id(4), + parallel_gw_id(3), + parallel_gw_id(1), + ], + "id": exclusive_gw_id(3), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": True, + "distance": 8, + }, + exclusive_gw_id(4): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [exclusive_gw_id(3), converge_gw_id(4)], + "id": exclusive_gw_id(4), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 8, + }, + exclusive_gw_id(5): { + "incoming": [], + "outgoing": [], + 
"type": "ExclusiveGateway", + "target": [converge_gw_id(4), converge_gw_id(4)], + "id": exclusive_gw_id(5), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 9, + }, + exclusive_gw_id(6): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [converge_gw_id(2), converge_gw_id(3)], + "id": exclusive_gw_id(6), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 6, + }, + exclusive_gw_id(7): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [converge_gw_id(3), exclusive_gw_id(8)], + "id": exclusive_gw_id(7), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + exclusive_gw_id(8): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [exclusive_gw_id(7), converge_gw_id(3)], + "id": exclusive_gw_id(8), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 5, + }, + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [parallel_gw_id(2), exclusive_gw_id(7), exclusive_gw_id(2)], + "id": parallel_gw_id(1), + "match": None, + "match_assert": converge_gw_id(3), + "converge_end": None, + "converge_end_assert": False, + "distance": 3, + }, + parallel_gw_id(2): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), converge_gw_id(1)], + "id": parallel_gw_id(2), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 4, + }, + parallel_gw_id(3): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [converge_gw_id(4), converge_gw_id(4), exclusive_gw_id(5)], + "id": parallel_gw_id(3), + "match": None, + "match_assert": converge_gw_id(4), + "converge_end": None, + "converge_end_assert": False, + "distance": 9, + }, + parallel_gw_id(4): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(5), converge_gw_id(5), converge_gw_id(5)], + "id": parallel_gw_id(4), + "match": None, + "match_assert": converge_gw_id(5), + "converge_end": None, + "converge_end_assert": False, + "distance": 1, + }, + } + stack = [] + converge_in = {} + distances = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(4), + distances, + converge_in, + ) + + +def gateway_valid_edge_case_1(): + converge = {} + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [exclusive_gw_id(1), exclusive_gw_id(1), end_event_id], + "id": exclusive_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": True, + "distance": 2, + } + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + exclusive_gw_id(1), + distances, + converge_in, + ) + + +def gateway_valid_edge_case_2(): + 
converge = { + converge_gw_id(1): { + "incoming": [1, 2], + "outgoing": [], + "type": "ConvergeGateway", + "target": [end_event_id], + "id": converge_gw_id(1), + "match": None, + "match_assert": None, + "converge_end": None, + "converge_end_assert": None, + "distance": 3, + "in_len": 2, + }, + } + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [exclusive_gw_id(1), converge_gw_id(1)], + "id": exclusive_gw_id(1), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 2, + }, + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), exclusive_gw_id(1)], + "id": parallel_gw_id(1), + "match": None, + "match_assert": converge_gw_id(1), + "converge_end": None, + "converge_end_assert": False, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_1(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 3, + }, + } + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [converge_gw_id(1), end_event_id], + "id": exclusive_gw_id(1), + "match": None, + "distance": 2, + }, + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), exclusive_gw_id(1)], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_2(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2, 3, 4], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 4, + }, + } + gateway = { + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(2)], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + parallel_gw_id(2): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1)], + "id": parallel_gw_id(2), + "match": None, + "distance": 2, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_3(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), +
"match": None, + "distance": 3, + "in_len": 4, + } + } + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [parallel_gw_id(1), converge_gw_id(1)], + "id": exclusive_gw_id(1), + "match": None, + "distance": 2, + }, + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), exclusive_gw_id(1)], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_4(): + converge = {} + gateway = { + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [end_event_id, end_event_id, end_event_id], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_5(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 2, + }, + converge_gw_id(2): { + "incoming": [3, 4], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(2), + "match": None, + "distance": 2, + "in_len": 2, + }, + } + gateway = { + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ConditionalParallelGateway", + "target": [ + converge_gw_id(1), + converge_gw_id(1), + converge_gw_id(2), + converge_gw_id(2), + ], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_6(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 2, + "in_len": 2, + }, + converge_gw_id(2): { + "incoming": [3, 4], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(2), + "match": None, + "distance": 2, + "in_len": 2, + }, + } + gateway = { + exclusive_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ExclusiveGateway", + "target": [ + converge_gw_id(1), + converge_gw_id(1), + converge_gw_id(2), + converge_gw_id(2), + ], + "id": exclusive_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + 
exclusive_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_7(): + converge = { + converge_gw_id(1): { + "incoming": [1, 2], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 2, + }, + converge_gw_id(2): { + "incoming": [1, 2, 3], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(2), + "match": None, + "distance": 4, + "in_len": 3, + }, + } + gateway = { + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(2), converge_gw_id(2), parallel_gw_id(2)], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + parallel_gw_id(2): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(1)], + "id": parallel_gw_id(2), + "match": None, + "distance": 2, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +def gateway_invalid_case_8(): + converge = { + converge_gw_id(1): { + "incoming": [1], + "outgoing": [], + "type": "ConvergeGateway", + "target": [], + "id": converge_gw_id(1), + "match": None, + "distance": 3, + "in_len": 1, + }, + } + gateway = { + parallel_gw_id(1): { + "incoming": [], + "outgoing": [], + "type": "ParallelGateway", + "target": [converge_gw_id(1), converge_gw_id(1), parallel_gw_id(1)], + "id": parallel_gw_id(1), + "match": None, + "distance": 1, + }, + } + + stack = [] + distances = {} + converge_in = {} + for gid, g in list(gateway.items()): + distances[gid] = g["distance"] + for cid, c in list(converge.items()): + distances[cid] = c["distance"] + converge_in[cid] = c["in_len"] + + return ( + converge, + gateway, + stack, + end_event_id, + parallel_gw_id(1), + distances, + converge_in, + ) + + +gateway_valid_cases = [ + {"case": gateway_valid_case}, + {"case": gateway_valid_edge_case_1}, + {"case": gateway_valid_edge_case_2}, +] + +gateway_invalid_cases = [ + {"case": gateway_invalid_case_1, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_2, "invalid_assert": converge_gw_id(1)}, + {"case": gateway_invalid_case_3, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_4, "invalid_assert": parallel_gw_id(1)}, + {"case": gateway_invalid_case_5, "invalid_assert": parallel_gw_id(1)}, + {"case": gateway_invalid_case_6, "invalid_assert": exclusive_gw_id(1)}, + {"case": gateway_invalid_case_7, "invalid_assert": parallel_gw_id(2)}, + {"case": gateway_invalid_case_8, "invalid_assert": parallel_gw_id(1)}, +] diff --git a/tests/validator/test_gateway.py b/tests/validator/test_gateway.py new file mode 100644 index 00000000..9a342336 --- /dev/null +++ b/tests/validator/test_gateway.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + + +from .cases import * # noqa + + +def test_distance_from_start(): + tree, gateway_validation_assert, _ = flow_valid_case() + distances = {} + for gid, g in list(tree["gateways"].items()): + distance_from(origin=tree["start_event"], node=g, tree=tree, marked=distances) + + for gid, ga in list(gateway_validation_assert.items()): + actual = distances[gid] + expect = ga["distance"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=gid, a=actual, e=expect + ) + + for gid, ga in list(gateway_validation_assert.items()): + actual = distance_from( + origin=tree["start_event"], + node=tree["gateways"][gid], + tree=tree, + marked={}, + ) + expect = ga["distance"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=gid, a=actual, e=expect + ) + + +def test_match_converge(): + for n, i in enumerate(gateway_valid_cases, start=1): + converge, gateway, stack, eid, start, distances, in_len = i["case"]() + block_nodes = {start: set()} + + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + if converge_id: + while converge[converge_id]["target"][0] != eid: + start = converge[converge_id]["target"][0] + block_nodes[start] = set() + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + if converge_id is None: + break + + for _, c in list(converge.items()): + actual = c["match"] + expect = c["match_assert"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=c["id"], a=actual, e=expect + ) + + actual = c["converge_end"] + expect = c["converge_end_assert"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=c["id"], a=actual, e=expect + ) + + for _, g in list(gateway.items()): + actual = g["match"] + expect = g["match_assert"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=g["id"], a=actual, e=expect + ) + + actual = g["converge_end"] + expect = g["converge_end_assert"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=g["id"], a=actual, e=expect + ) + + for n, i in enumerate(gateway_invalid_cases, start=1): + converge, gateway, stack, eid, start, distances, in_len = i["case"]() + invalid = False + block_nodes = {start: set()} + try: + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + converge_in_len=in_len, + ) + while converge[converge_id]["target"][0] != eid: + start = converge[converge_id]["target"][0] + block_nodes[start] = set() + converge_id, _ = match_converge( + converges=converge, + gateways=gateway, + cur_index=start, + end_event_id=end_event_id, + converged={}, + block_start=start, + block_nodes=block_nodes, + dist_from_start=distances, + 
converge_in_len=in_len, + ) + except exceptions.ConvergeMatchError as e: + invalid = True + actual = e.gateway_id + expect = i["invalid_assert"] + assert ( + actual == expect + ), "invalid assert{id} actual: {a}, expect: {e}".format( + id=n, a=actual, e=expect + ) + + assert invalid == True, "invalid case %s expect raise exception" % n + + +def test_validate_gateway(): + tree, gateway_validation_assert, _ = flow_valid_case() + converged = validate_gateways(tree) + + for cid, converge_items in list(converged.items()): + actual = len(converge_items) + expect = gateway_validation_assert[cid]["converged_len"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=cid, a=actual, e=expect + ) + + actual = set(converge_items) + expect = gateway_validation_assert[cid]["converged"] + + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=cid, a=actual, e=expect + ) + + for gid, gateway in list(tree["gateways"].items()): + if gateway["type"] != "ConvergeGateway": + actual = gateway["converge_gateway_id"] + expect = gateway_validation_assert[gid]["match_assert"] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=gid, a=actual, e=expect + ) + + # edge cases + for i, c in enumerate(flow_valid_edge_cases): + tree = c["case"]() + print(f"test gateway valid edge case {i+1}") + converged = validate_gateways(tree) + + +def test_validate_stream(): + + tree, gateway_validation_assert, stream_assert = flow_valid_case() + validate_gateways(tree) + data = validate_stream(tree) + + for nid, expect in list(stream_assert.items()): + actual = data[nid][STREAM] + assert actual == expect, "{id} actual: {a}, expect: {e}".format( + id=nid, a=actual, e=expect + ) + + for n, c in enumerate(flow_valid_edge_cases): + tree = c["case"]() + validate_gateways(tree) + try: + validate_stream(tree) + except Exception as e: + assert True == False, "valid edge case {} raise exception: {}".format(n, e) + + for n, item in enumerate(flow_invalid_cases, start=1): + tree = item["case"]() + invalid = False + validate_gateways(tree) + try: + validate_stream(tree) + except exceptions.StreamValidateError as e: + actual = e.node_id + expect = item["assert_invalid"] + assert ( + actual == expect + ), "invalid assert{id} actual: {a}, expect: {e}".format( + id=n, a=actual, e=expect + ) + invalid = True + + assert invalid == True, "invalid case %s expect raise exception" % n diff --git a/tests/validator/utils.py b/tests/validator/utils.py new file mode 100644 index 00000000..d2d8cd6f --- /dev/null +++ b/tests/validator/utils.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +""" +Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community +Edition) available. +Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +start_event_id = "start_event_id" +end_event_id = "end_event_id" + + +def exclusive_gw_id(num): + return "eg_%s" % num + + +def converge_gw_id(num): + return "cg_%s" % num + + +def parallel_gw_id(num): + return "pg_%s" % num + + +def act_id(num): + return "act_%s" % num diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..d1491acb --- /dev/null +++ b/tox.ini @@ -0,0 +1,18 @@ +# content of: tox.ini , put in same dir as setup.py +[tox] +envlist = py{36,37} + +[testenv] +deps = + Werkzeug>=1.0.1,<2.0 + pyparsing>=2.2.0,<3.0 + mako>=1.1.4,<2.0 + coverage==5.5 + mock==4.0.3 + pytest==6.2.2 + black==20.8b1 + prometheus-client>=0.9.0,<1.0 + pytest==6.2.2 +commands = + coverage run -m pytest -vv --disable-pytest-warnings + coverage report -m \ No newline at end of file