-
Notifications
You must be signed in to change notification settings - Fork 373
/
preview_base.py
342 lines (281 loc) · 13.8 KB
/
preview_base.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
import traceback
from copy import deepcopy
import ujson as json
from django.core.exceptions import ValidationError
from pipeline.models import TemplateScheme
from pipeline.core.constants import PE
from pipeline.component_framework.constant import ConstantPool
from pipeline.validators.gateway import validate_gateways
from pipeline.validators.utils import format_node_io_to_list
logger = logging.getLogger("root")
class PipelineTemplateWebPreviewer(object):
    """Helpers for previewing a pipeline template with some task nodes excluded.

    All methods are static. Most of them mutate the given ``pipeline_tree``
    dict in place, using the pipeline engine's ``PE`` key constants
    (``activities``, ``gateways``, ``flows``, ``constants``, ...) plus the
    web-only ``"line"``/``"location"`` drawing data.
    """

    @staticmethod
    def get_template_exclude_task_nodes_with_schemes(pipeline_tree, scheme_id_list, check_schemes_exist=False):
        """Compute the template node ids to exclude, based on execution schemes.

        A scheme stores (as JSON) the set of node ids it keeps; nodes kept by
        ANY of the given schemes are preserved, everything else is a candidate
        for exclusion. Non-optional nodes are never excluded.

        @param pipeline_tree: pipeline tree dict; only its activities are read
        @param scheme_id_list: ids of ``TemplateScheme`` rows to apply
        @param check_schemes_exist: if True, raise ``ValidationError`` when any
            given scheme id has no matching row
        @return: list of activity ids to exclude
        """
        template_nodes_set = set(pipeline_tree[PE.activities].keys())
        exclude_task_nodes_id_set = set()
        if scheme_id_list:
            scheme_dict = TemplateScheme.objects.in_bulk(scheme_id_list)
            if check_schemes_exist and len(scheme_dict) != len(scheme_id_list):
                # NOTE(review): "exit" looks like a typo for "exist" in this message
                raise ValidationError(f"not all input scheme id exit: {set(scheme_id_list)-set(scheme_dict.keys())}")
            # union of the node ids kept by every selected scheme
            scheme_data_set = set()
            for scheme in scheme_dict.values():
                scheme_data = json.loads(scheme.data)
                scheme_data_set.update(scheme_data)
            exclude_task_nodes_id_set = template_nodes_set - scheme_data_set
        # non-optional nodes must always execute, so never exclude them
        for node_id, node in pipeline_tree[PE.activities].items():
            if not node["optional"]:
                exclude_task_nodes_id_set.discard(node_id)
        return list(exclude_task_nodes_id_set)

    @staticmethod
    def get_template_exclude_task_nodes_with_appoint_nodes(pipeline_tree, appoint_nodes_id):
        """Compute the template node ids to exclude, based on appointed nodes.

        Every optional activity NOT in ``appoint_nodes_id`` is excluded;
        non-optional activities are always kept.

        @param pipeline_tree: pipeline tree dict; only its activities are read
        @param appoint_nodes_id: iterable of activity ids the caller wants to keep
        @return: list of activity ids to exclude
        """
        template_nodes_set = set(pipeline_tree[PE.activities].keys())
        not_optional_nodes_set = set(
            [node_id for node_id, node in pipeline_tree[PE.activities].items() if not node["optional"]]
        )
        appoint_nodes_id_set = set(appoint_nodes_id)
        exclude_task_nodes_id_set = template_nodes_set - appoint_nodes_id_set - not_optional_nodes_set
        return list(exclude_task_nodes_id_set)

    @staticmethod
    def preview_pipeline_tree_exclude_task_nodes(
        pipeline_tree, exclude_task_nodes_id=None, remove_outputs_without_refs=True
    ):
        """Remove the given task nodes from ``pipeline_tree`` in place.

        For each excluded node the surrounding flows are re-wired, then
        parallel gateways left with nothing to do are trimmed and constants
        no longer referenced are dropped. The web drawing data
        (``"line"``/``"location"``) is kept consistent with the new topology.

        @param pipeline_tree: pipeline tree dict, mutated in place
        @param exclude_task_nodes_id: activity ids to remove (default: none)
        @param remove_outputs_without_refs: whether to drop custom variables
            that are marked as outputs of this flow but are not referenced
        @return: True on success
        @raise Exception: if an id is not in the tree or the node is not optional
        """
        if exclude_task_nodes_id is None:
            exclude_task_nodes_id = []
        # index the web drawing data by node/line id for O(1) updates below
        locations = {item["id"]: item for item in pipeline_tree.get("location", [])}
        lines = {item["id"]: item for item in pipeline_tree.get("line", [])}
        for act_id in exclude_task_nodes_id:
            if act_id not in pipeline_tree[PE.activities]:
                error = "task node[id=%s] is not in template pipeline tree" % act_id
                # NOTE(review): a more specific exception type would let callers
                # distinguish this from unexpected failures
                raise Exception(error)
            act = pipeline_tree[PE.activities].pop(act_id)
            if not act["optional"]:
                error = "task node[id=%s] is not optional" % act_id
                raise Exception(error)
            PipelineTemplateWebPreviewer._ignore_act(
                act=act, locations=locations, lines=lines, pipeline_tree=pipeline_tree
            )
        PipelineTemplateWebPreviewer._remove_useless_parallel(pipeline_tree, lines, locations)
        # write the (possibly shrunken) drawing data back as lists
        pipeline_tree["line"] = list(lines.values())
        pipeline_tree["location"] = list(locations.values())
        PipelineTemplateWebPreviewer._remove_useless_constants(
            exclude_task_nodes_id=exclude_task_nodes_id,
            pipeline_tree=pipeline_tree,
            remove_outputs_without_refs=remove_outputs_without_refs,
        )
        return True

    @staticmethod
    def _try_to_ignore_parallel(parallel, converge_id, lines, locations, pipeline_tree):
        """Trim a parallel gateway whose branches lead straight to its converge.

        First removes every parallel→converge flow that carries no node. If
        ANY branch still contains nodes, stops there; otherwise the whole
        parallel/converge pair is removed and all incomings are redirected to
        the node right after the converge. Mutates ``pipeline_tree``,
        ``lines`` and ``locations`` in place.
        """
        ignore_whole_parallel = True
        converge = pipeline_tree[PE.gateways][converge_id]
        # copy because the loop mutates parallel[PE.outgoing]
        parallel_outgoing = deepcopy(parallel[PE.outgoing])
        for outgoing_id in parallel_outgoing:
            # meet not converge node: this branch still has content, keep the pair
            if pipeline_tree[PE.flows][outgoing_id][PE.target] != converge_id:
                ignore_whole_parallel = False
                continue
            # remove boring sequence (an empty parallel→converge flow)
            converge[PE.incoming].remove(outgoing_id)
            parallel[PE.outgoing].remove(outgoing_id)
            pipeline_tree[PE.flows].pop(outgoing_id)
            lines.pop(outgoing_id)
        if not ignore_whole_parallel:
            return
        # every branch was empty: splice the pair out entirely
        target_of_converge = pipeline_tree[PE.flows][converge[PE.outgoing]][PE.target]
        next_node_of_converge = (
            pipeline_tree[PE.activities].get(target_of_converge)
            or pipeline_tree[PE.gateways].get(target_of_converge)
            or pipeline_tree[PE.end_event]
        )
        # remove converge outgoing
        lines.pop(converge[PE.outgoing])
        pipeline_tree[PE.flows].pop(converge[PE.outgoing])
        # sequences not come from parallel to be removed
        new_incoming_list = []
        # redirect converge rerun incoming
        for incoming in converge[PE.incoming]:
            pipeline_tree[PE.flows][incoming][PE.target] = target_of_converge
            lines[incoming][PE.target]["id"] = target_of_converge
            new_incoming_list.append(incoming)
        # redirect parallel rerun incoming (may be a single id or a list)
        gateway_incoming = parallel[PE.incoming]
        gateway_incoming = gateway_incoming if isinstance(gateway_incoming, list) else [gateway_incoming]
        for incoming in gateway_incoming:
            pipeline_tree[PE.flows][incoming][PE.target] = target_of_converge
            lines[incoming][PE.target]["id"] = target_of_converge
            new_incoming_list.append(incoming)
        # process next node's incoming
        PipelineTemplateWebPreviewer._replace_node_incoming(
            next_node=next_node_of_converge, replaced_incoming=converge[PE.outgoing], new_incoming=new_incoming_list
        )
        # remove parallel and converge
        pipeline_tree[PE.gateways].pop(parallel["id"])
        pipeline_tree[PE.gateways].pop(converge["id"])
        locations.pop(parallel["id"])
        locations.pop(converge["id"])

    @staticmethod
    def _replace_node_incoming(next_node, replaced_incoming, new_incoming):
        """Swap ``replaced_incoming`` for ``new_incoming`` on a node.

        Handles both representations of ``incoming``: a list (replace the one
        entry, extend with the new ones) or a single id (replace it, unwrapping
        a one-element list back to a scalar).
        """
        if isinstance(next_node[PE.incoming], list):
            next_node[PE.incoming].pop(next_node[PE.incoming].index(replaced_incoming))
            next_node[PE.incoming].extend(new_incoming)
        else:
            is_boring_list = isinstance(new_incoming, list) and len(new_incoming) == 1
            next_node[PE.incoming] = new_incoming[0] if is_boring_list else new_incoming

    @staticmethod
    def _ignore_act(act, locations, lines, pipeline_tree):
        """Splice a single activity out of the tree, reconnecting its neighbors.

        Mutates ``pipeline_tree``, ``locations`` and ``lines`` in place; ``act``
        must already have been popped from the activities dict by the caller.
        """
        # change next_node's incoming: task node、control node is different
        # change incoming_flow's target to next node
        # delete outgoing_flow
        incoming_id_list, outgoing_id = act[PE.incoming], act[PE.outgoing]
        # normalize: incoming may be a single id or a list of ids
        incoming_id_list = incoming_id_list if isinstance(incoming_id_list, list) else [incoming_id_list]
        outgoing_flow = pipeline_tree[PE.flows][outgoing_id]
        target_id = outgoing_flow[PE.target]
        # the successor may be an activity, a gateway, or the end event
        next_node = (
            pipeline_tree[PE.activities].get(target_id)
            or pipeline_tree[PE.gateways].get(target_id)
            or pipeline_tree[PE.end_event]
        )
        PipelineTemplateWebPreviewer._replace_node_incoming(
            next_node=next_node, replaced_incoming=outgoing_id, new_incoming=incoming_id_list
        )
        # point every former incoming flow straight at the successor
        for incoming_id in incoming_id_list:
            incoming_flow = pipeline_tree[PE.flows][incoming_id]
            incoming_flow[PE.target] = next_node["id"]
        pipeline_tree[PE.flows].pop(outgoing_id)
        # web location data; failures here only break the drawing, so they are
        # logged instead of aborting the preview
        try:
            locations.pop(act["id"])
            lines.pop(outgoing_id)
            for incoming_id in incoming_id_list:
                lines[incoming_id][PE.target]["id"] = next_node["id"]
        except Exception:
            logger.exception(
                "create_pipeline_instance_exclude_task_nodes adjust web data error: %s" % traceback.format_exc()
            )

    @staticmethod
    def _remove_useless_constants(exclude_task_nodes_id, pipeline_tree, remove_outputs_without_refs=True):
        """Drop constants that are no longer referenced anywhere in the tree.

        Collects every place a constant can be referenced (activity inputs,
        subprocess constants, gateway conditions), transitively resolves
        constant-to-constant references, keeps component-output constants, and
        rebuilds ``pipeline_tree[PE.constants]`` with compacted indexes.
        Mutates ``pipeline_tree`` in place.

        @param exclude_task_nodes_id: removed node ids, stripped from each
            surviving constant's ``source_info``
        @param pipeline_tree: pipeline tree dict, mutated in place
        @param remove_outputs_without_refs: whether to drop custom variables
            that are marked as outputs of this flow but are not referenced
        @return: None
        """
        # pop unreferenced constant
        data = {}
        for act_id, act in list(pipeline_tree[PE.activities].items()):
            if act["type"] == PE.ServiceActivity:
                node_data = {("%s_%s" % (act_id, key)): value for key, value in list(act["component"]["data"].items())}
            # PE.SubProcess
            else:
                node_data = {
                    ("%s_%s" % (act_id, key)): value
                    for key, value in list(act.get("constants", {}).items())
                    if value["show_type"] == "show"
                }
            data.update(node_data)
        for gw_id, gw in list(pipeline_tree[PE.gateways].items()):
            if gw["type"] in [PE.ExclusiveGateway, PE.ConditionalParallelGateway]:
                gw_data = {
                    ("%s_%s" % (gw_id, key)): {"value": value["evaluate"]}
                    for key, value in list(gw["conditions"].items())
                }
                data.update(gw_data)
        # get all referenced constants in flow
        constants = pipeline_tree[PE.constants]
        referenced_keys = []
        # fixed-point loop: newly referenced constants may themselves reference
        # further constants, so keep scanning until no new key is found
        while True:
            last_count = len(referenced_keys)
            cons_pool = ConstantPool(data, lazy=True)
            refs = cons_pool.get_reference_info(strict=False)
            for keys in list(refs.values()):
                for key in keys:
                    # add outputs keys later
                    if key in constants and key not in referenced_keys:
                        referenced_keys.append(key)
                        data.update({key: constants[key]})
            if len(referenced_keys) == last_count:
                break
        # keep outputs constants
        outputs_keys = [key for key, value in list(constants.items()) if value["source_type"] == "component_outputs"]
        referenced_keys = list(set(referenced_keys + outputs_keys))
        init_outputs = pipeline_tree[PE.outputs]
        pipeline_tree[PE.outputs] = [key for key in init_outputs if key in referenced_keys]
        # rebuild constants index
        referenced_keys.sort(key=lambda x: constants[x]["index"])
        new_constants = {}
        for index, key in enumerate(referenced_keys):
            value = constants[key]
            value["index"] = index
            # delete constant reference info to task node
            for act_id in exclude_task_nodes_id:
                if act_id in value["source_info"]:
                    value["source_info"].pop(act_id)
            new_constants[key] = value
        if not remove_outputs_without_refs:
            # re-add custom output variables that lost all references
            for key, value in constants.items():
                if value["source_type"] == "custom" and key in init_outputs and key not in pipeline_tree[PE.outputs]:
                    new_constants[key] = value
                    pipeline_tree[PE.outputs].append(key)
        pipeline_tree[PE.constants] = new_constants

    @staticmethod
    def _remove_useless_parallel(pipeline_tree, lines, locations):
        """Repeatedly trim parallel gateways emptied by node exclusion.

        Validates a normalized copy of the tree to obtain the parallel→converge
        pairing, then trims each parallel gateway via
        ``_try_to_ignore_parallel`` until a pass removes no gateway (removing
        one pair can make an enclosing pair trimmable). Mutates
        ``pipeline_tree``, ``lines`` and ``locations`` in place.
        """
        # validate_gateways mutates its input, so run it on a deep copy with
        # incoming fields normalized to lists
        copy_tree = deepcopy(pipeline_tree)
        for act in list(copy_tree["activities"].values()):
            format_node_io_to_list(act, o=False)
        for gateway in list(copy_tree["gateways"].values()):
            format_node_io_to_list(gateway, o=False)
        format_node_io_to_list(copy_tree["end_event"], o=False)
        converges = validate_gateways(copy_tree)
        while True:
            gateway_count = len(pipeline_tree[PE.gateways])
            for converge_id, converged_list in list(converges.items()):
                for converged in converged_list:
                    gateway = pipeline_tree[PE.gateways].get(converged)
                    if not gateway:  # had been removed
                        continue
                    # conditional parallel gateway do not need to trim
                    is_parallel = gateway[PE.type] == PE.ParallelGateway
                    # only process parallel gateway
                    if not is_parallel:
                        continue
                    PipelineTemplateWebPreviewer._try_to_ignore_parallel(
                        parallel=gateway,
                        converge_id=converge_id,
                        lines=lines,
                        locations=locations,
                        pipeline_tree=pipeline_tree,
                    )
            if gateway_count == len(pipeline_tree[PE.gateways]):
                break