Mirror of https://github.com/veops/cmdb.git, synced 2025-09-06 05:17:03 +08:00
Compare commits
584 Commits
.github/ISSUE_TEMPLATE/1bug.yaml (vendored, new file, 60 lines)
@@ -0,0 +1,60 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["☢️ bug"]
assignees:
  - Selina316
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: aspects
    attributes:
      label: This bug is related to UI or API?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
      value: "A bug happened!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
  - type: dropdown
    id: browsers
    attributes:
      label: What browsers are you seeing the problem on?
      multiple: true
      options:
        - Firefox
        - Chrome
        - Safari
        - Microsoft Edge
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
.github/ISSUE_TEMPLATE/2feature.yaml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["✏️ feature"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thank you for your feature suggestion; we will evaluate it carefully!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: aspects
    attributes:
      label: feature is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: feature
    attributes:
      label: What is your advice?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you want!
      value: "everyone wants this feature!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
.github/ISSUE_TEMPLATE/3consultation.yaml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: Help wanted
description: I have a question
title: "[help wanted]: "
labels: ["help wanted"]
assignees:
  - ivonGwy
body:
  - type: markdown
    attributes:
      value: |
        Please tell us what's you need!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: textarea
    id: question
    attributes:
      label: What is your question?
      description: Also tell us, how can we help?
      placeholder: Tell us what you need!
      value: "i have a question!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
.github/ISSUE_TEMPLATE/bug.yaml (vendored, new file, 60 lines)
@@ -0,0 +1,60 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["bug"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: type
    attributes:
      label: bug is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: what-happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
      value: "A bug happened!"
    validations:
      required: true
  - type: textarea
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      default: 2.3.5
    validations:
      required: true
  - type: dropdown
    id: browsers
    attributes:
      label: What browsers are you seeing the problem on?
      multiple: true
      options:
        - Firefox
        - Chrome
        - Safari
        - Microsoft Edge
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
blank_issues_enabled: false
contact_links:
  - name: veops official website
    url: https://veops.cn/#hero
    about: you can contact us here.
.github/ISSUE_TEMPLATE/consultation.yaml (vendored, new empty file, 0 lines)
.github/ISSUE_TEMPLATE/feature.yaml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["feature"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thank you for your feature suggestion; we will evaluate it carefully!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: type
    attributes:
      label: feature is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: describe the feature
    attributes:
      label: What is your advice?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you want!
      value: "everyone wants this feature!"
    validations:
      required: true
  - type: textarea
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      default: 2.3.5
    validations:
      required: true
.github/config.yml (vendored, new empty file, 0 lines)
.gitignore (vendored, 2 lines changed)
@@ -40,6 +40,7 @@ nosetests.xml
.pytest_cache
cmdb-api/test-output
cmdb-api/api/uploaded_files
cmdb-api/migrations/versions

# Translations
*.mo
@@ -69,6 +70,7 @@ settings.py
# UI
cmdb-ui/node_modules
cmdb-ui/dist
cmdb-ui/yarn.lock

# Log files
cmdb-ui/npm-debug.log*
Makefile (4 lines changed)
@@ -9,7 +9,7 @@ help: ## display this help

env: ## create a development environment using pipenv
	sudo easy_install pip && \
	pip install pipenv -i https://pypi.douban.com/simple && \
	pip install pipenv -i https://repo.huaweicloud.com/repository/pypi/simple && \
	npm install yarn && \
	make deps
.PHONY: env
@@ -36,7 +36,7 @@ api: ## start api server
.PHONY: api

worker: ## start async tasks worker
	cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --concurrency=1 -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --concurrency=1 -D
	cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --autoscale=5,2 --logfile=one_cmdb_async.log -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --autoscale=2,1 --logfile=one_acl_async.log -D
.PHONY: worker

ui: ## start ui server
README.md (72 lines changed)
@@ -1,12 +1,20 @@

[](https://github.com/veops/cmdb/blob/master/LICENSE)
[](https://github.com/sendya/ant-design-pro-vue)
[](https://github.com/pallets/flask)
<p align="center">
  <a href="https://veops.cn"><img src="docs/images/logo.png" alt="维易CMDB" width="300"/></a>
</p>
<h3 align="center">简单、轻量、通用的运维配置管理数据库</h3>
<p align="center">
  <a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
  <a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen" alt="UI"></a>
  <a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-brightgreen" alt="API"></a>
</p>

------------------------------

[English](docs/README_en.md) / [中文](README.md)
- 产品文档:https://veops.cn/docs/
- 在线体验: <a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
- 在线体验:<a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
- username: demo 或者 admin
- password: 123456

@@ -15,45 +23,43 @@

## 系统介绍

### 整体架构
### 系统概览

<img src=docs/images/view.jpg />
<img src=docs/images/dashboard.png />

### 相关文档
[查看更多展示](docs/screenshot.md)

### 相关文章

- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">概要设计</a>
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">API 文档</a>
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">自动发现</a>
- 更多文章可以在公众号 **维易科技OneOps** 里查看

### 特点

- 灵活性
  1. 规范并统一纳管复杂数据资产
  2. 自动发现、入库 IT 资产
  1. 配置灵活,不设定任何运维场景,有内置模板
  2. 自动发现、入库 IT 资产
- 安全性
  1. 细粒度访问控制
  1. 细粒度权限控制
  2. 完备操作日志
- 多应用
  1. 丰富视图展示维度
  2. 提供 Restful API
  2. API简单强大
  3. 支持定义属性触发器、计算属性

### 主要功能

- 模型属性支持索引、多值、默认排序、字体颜色,支持计算属性
- 支持自动发现、定时巡检、文件导入
- 支持资源、树形、关系视图展示
- 支持资源、层级、关系视图展示
- 支持模型间关系配置和展示
- 细粒度访问控制,完备的操作日志
- 支持跨模型搜索

### 系统概览

- 服务树



[查看更多展示](docs/screenshot.md)

### 更多功能
@@ -67,22 +73,36 @@
## 安装

### Docker 一键快速构建
- 进入主目录(先安装 docker 环境)

> 方法一
- 第一步: 先安装 docker 环境, 以及docker-compose
- 第二步: 拷贝项目
```shell
git clone https://github.com/veops/cmdb.git
```
- 第三步:进入主目录,执行:
```
docker-compose up -d
```

- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- username: demo 或者 admin
- password: 123456
> 方法二, 该方法适用于linux系统
- 第一步: 先安装 docker 环境, 以及docker-compose
- 第二步: 直接使用项目根目录下的install.sh 文件进行 `安装`、`启动`、`暂停`、`查状态`、`删除`、`卸载`
```shell
curl -so install.sh https://raw.githubusercontent.com/veops/cmdb/master/install.sh
sh install.sh install
```

### [本地开发环境搭建](docs/local.md)

### [Makefile 安装](docs/makefile.md)

## 验证
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- username: demo 或者 admin
- password: 123456

---

_**欢迎关注我们的公众号,点击联系我们,加入微信、QQ群(336164978),获得更多产品、行业相关资讯**_
_**欢迎关注公众号(维易科技OneOps),关注后可加入微信群,进行产品和技术交流。**_


|
@@ -6,7 +6,7 @@ name = "pypi"
|
||||
[packages]
|
||||
# Flask
|
||||
Flask = "==2.3.2"
|
||||
Werkzeug = "==2.3.6"
|
||||
Werkzeug = ">=2.3.6"
|
||||
click = ">=5.0"
|
||||
# Api
|
||||
Flask-RESTful = "==0.3.10"
|
||||
@@ -21,28 +21,29 @@ Flask-Migrate = "==2.5.2"
|
||||
gunicorn = "==21.0.1"
|
||||
supervisor = "==4.0.3"
|
||||
# Auth
|
||||
Flask-Login = "==0.6.2"
|
||||
Flask-Login = ">=0.6.2"
|
||||
Flask-Bcrypt = "==1.0.1"
|
||||
Flask-Cors = ">=3.0.8"
|
||||
python-ldap = "==3.4.0"
|
||||
ldap3 = "==2.9.1"
|
||||
pycryptodome = "==3.12.0"
|
||||
cryptography = ">=41.0.2"
|
||||
# Caching
|
||||
Flask-Caching = ">=1.0.0"
|
||||
# Environment variable parsing
|
||||
environs = "==4.2.0"
|
||||
marshmallow = "==2.20.2"
|
||||
# async tasks
|
||||
celery = "==5.3.1"
|
||||
celery = ">=5.3.1"
|
||||
celery_once = "==3.0.1"
|
||||
more-itertools = "==5.0.0"
|
||||
kombu = "==5.3.1"
|
||||
kombu = ">=5.3.1"
|
||||
# common setting
|
||||
timeout-decorator = "==0.5.0"
|
||||
WTForms = "==3.0.0"
|
||||
email-validator = "==1.3.1"
|
||||
treelib = "==1.6.1"
|
||||
flasgger = "==0.9.5"
|
||||
Pillow = "==9.3.0"
|
||||
Pillow = ">=10.0.1"
|
||||
# other
|
||||
six = "==1.16.0"
|
||||
bs4 = ">=0.0.1"
|
||||
@@ -58,6 +59,10 @@ Jinja2 = "==3.1.2"
|
||||
jinja2schema = "==0.1.4"
|
||||
msgpack-python = "==0.5.6"
|
||||
alembic = "==1.7.7"
|
||||
hvac = "==2.0.0"
|
||||
colorama = ">=0.4.6"
|
||||
pycryptodomex = ">=3.19.0"
|
||||
lz4 = ">=4.3.2"
|
||||
|
||||
[dev-packages]
|
||||
# Testing
|
||||
@@ -74,4 +79,3 @@ flake8-isort = "==2.7.0"
|
||||
isort = "==4.3.21"
|
||||
pep8-naming = "==0.8.2"
|
||||
pydocstyle = "==3.0.0"
|
||||
|
||||
|
@@ -7,6 +7,7 @@ import os
import sys
from inspect import getmembers
from logging.handlers import RotatingFileHandler
from pathlib import Path

from flask import Flask
from flask import jsonify
@@ -17,11 +18,15 @@ from flask.json.provider import DefaultJSONProvider

import api.views.entry
from api.extensions import (bcrypt, cache, celery, cors, db, es, login_manager, migrate, rd)
from api.flask_cas import CAS
from api.extensions import inner_secrets
from api.lib.perm.authentication.cas import CAS
from api.lib.perm.authentication.oauth2 import OAuth2
from api.lib.secrets.secrets import InnerKVManger
from api.models.acl import User

HERE = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.join(HERE, os.pardir)
BASE_DIR = Path(__file__).resolve().parent.parent


@login_manager.user_loader
@@ -76,15 +81,6 @@ class MyJSONEncoder(DefaultJSONProvider):
        return o


def create_acl_app(config_object="settings"):
    app = Flask(__name__.split(".")[0])
    app.config.from_object(config_object)

    register_extensions(app)

    return app


def create_app(config_object="settings"):
    """Create application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.

@@ -101,6 +97,7 @@ def create_app(config_object="settings"):
    register_shell_context(app)
    register_commands(app)
    CAS(app)
    OAuth2(app)
    app.wsgi_app = ReverseProxy(app.wsgi_app)
    configure_upload_dir(app)

@@ -125,7 +122,7 @@ def register_extensions(app):
    db.init_app(app)
    cors.init_app(app)
    login_manager.init_app(app)
    migrate.init_app(app, db)
    migrate.init_app(app, db, directory=f"{BASE_DIR}/migrations")
    rd.init_app(app)
    if app.config.get('USE_ES'):
        es.init_app(app)
@@ -133,6 +130,10 @@ def register_extensions(app):
    app.config.update(app.config.get("CELERY"))
    celery.conf.update(app.config)

    if app.config.get('SECRETS_ENGINE') == 'inner':
        with app.app_context():
            inner_secrets.init_app(app, InnerKVManger())


def register_blueprints(app):
    for item in getmembers(api.views.entry):
@@ -193,10 +194,11 @@ def configure_logger(app):
        app.logger.addHandler(handler)

    log_file = app.config['LOG_PATH']
    file_handler = RotatingFileHandler(log_file,
                                       maxBytes=2 ** 30,
                                       backupCount=7)
    file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
    file_handler.setFormatter(formatter)
    app.logger.addHandler(file_handler)
    if log_file and log_file != "/dev/stdout":
        file_handler = RotatingFileHandler(log_file,
                                           maxBytes=2 ** 30,
                                           backupCount=7)
        file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
        file_handler.setFormatter(formatter)
        app.logger.addHandler(file_handler)
    app.logger.setLevel(getattr(logging, app.config['LOG_LEVEL']))
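The configure_logger hunk above now attaches the rotating file handler only when LOG_PATH points at a real file rather than /dev/stdout. Below is a minimal, self-contained sketch of that pattern; the function and argument names are illustrative only and not the project's API.

```python
import logging
from logging.handlers import RotatingFileHandler


def configure_logger_sketch(logger: logging.Logger, log_path: str, log_level: str = "INFO") -> None:
    """Attach a rotating file handler only when log_path is a real file.

    Mirrors the idea in the hunk above: when the configured log path is
    "/dev/stdout" (an assumption about typical container setups, not verified
    here), skip the file handler so output is not duplicated.
    """
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)

    if log_path and log_path != "/dev/stdout":
        file_handler = RotatingFileHandler(log_path, maxBytes=2 ** 30, backupCount=7)
        file_handler.setLevel(getattr(logging, log_level))
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    logger.setLevel(getattr(logging, log_level))


if __name__ == "__main__":
    log = logging.getLogger("demo")
    configure_logger_sketch(log, "/dev/stdout")  # no file handler is attached
    log.info("logging to stdout only")
```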
@@ -1,6 +1,8 @@
import click
from flask.cli import with_appcontext

from api.lib.perm.acl.user import UserCRUD


@click.command()
@with_appcontext
@@ -23,50 +25,18 @@ def init_acl():
        role_rebuild.apply_async(args=(role.id, app.id), queue=ACL_QUEUE)


# @click.command()
# @with_appcontext
# def acl_clean():
#     from api.models.acl import Resource
#     from api.models.acl import Permission
#     from api.models.acl import RolePermission
#
#     perms = RolePermission.get_by(to_dict=False)
#
#     for r in perms:
#         perm = Permission.get_by_id(r.perm_id)
#         if perm and perm.app_id != r.app_id:
#             resource_id = r.resource_id
#             resource = Resource.get_by_id(resource_id)
#             perm_name = perm.name
#             existed = Permission.get_by(resource_type_id=resource.resource_type_id, name=perm_name, first=True,
#                                         to_dict=False)
#             if existed is not None:
#                 other = RolePermission.get_by(rid=r.rid, perm_id=existed.id, resource_id=resource_id)
#                 if not other:
#                     r.update(perm_id=existed.id)
#                 else:
#                     r.soft_delete()
#             else:
#                 r.soft_delete()
#
#
# @click.command()
# @with_appcontext
# def acl_has_resource_role():
#     from api.models.acl import Role
#     from api.models.acl import App
#     from api.lib.perm.acl.cache import HasResourceRoleCache
#     from api.lib.perm.acl.role import RoleCRUD
#
#     roles = Role.get_by(to_dict=False)
#     apps = App.get_by(to_dict=False)
#     for role in roles:
#         if role.app_id:
#             res = RoleCRUD.recursive_resources(role.id, role.app_id)
#             if res.get('resources') or res.get('groups'):
#                 HasResourceRoleCache.add(role.id, role.app_id)
#         else:
#             for app in apps:
#                 res = RoleCRUD.recursive_resources(role.id, app.id)
#                 if res.get('resources') or res.get('groups'):
#                     HasResourceRoleCache.add(role.id, app.id)
@click.command()
@with_appcontext
def add_user():
    """
    create a user

    is_admin: default is False

    """

    username = click.prompt('Enter username', confirmation_prompt=False)
    password = click.prompt('Enter password', hide_input=True, confirmation_prompt=True)
    email = click.prompt('Enter email ', confirmation_prompt=False)

    UserCRUD.add(username=username, password=password, email=email)
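For reference, an interactive click command like the add_user above can be exercised through Flask's CLI test runner. A minimal sketch follows; the `api.app.create_app` import path and the registered command name `add-user` are assumptions based on the code above, not verified here.

```python
# Minimal sketch: drive the prompting add-user command programmatically,
# assuming create_app() builds the Flask app and registers the command.
from api.app import create_app


def create_demo_user():
    app = create_app()
    runner = app.test_cli_runner()

    # click.prompt reads from stdin: username, password, password confirmation,
    # then email, one answer per line.
    result = runner.invoke(args=["add-user"], input="demo\nsecret\nsecret\ndemo@example.com\n")
    print(result.output)


if __name__ == "__main__":
    create_demo_user()
```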
@@ -7,6 +7,7 @@ import json
import time

import click
import requests
from flask import current_app
from flask.cli import with_appcontext
from flask_login import login_user
@@ -18,6 +19,7 @@ from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import ValueTypeEnum
@@ -28,7 +30,9 @@ from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.resource import ResourceCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD
from api.lib.perm.acl.role import RoleCRUD
from api.lib.perm.acl.user import UserCRUD
from api.lib.secrets.inner import KeyManage
from api.lib.secrets.inner import global_key_threshold
from api.lib.secrets.secrets import InnerKVManger
from api.models.acl import App
from api.models.acl import ResourceType
from api.models.cmdb import Attribute
@@ -46,13 +50,19 @@ def cmdb_init_cache():

    ci_relations = CIRelation.get_by(to_dict=False)
    relations = dict()
    relations2 = dict()
    for cr in ci_relations:
        relations.setdefault(cr.first_ci_id, {}).update({cr.second_ci_id: cr.second_ci.type_id})
        if cr.ancestor_ids:
            relations2.setdefault(cr.ancestor_ids, {}).update({cr.second_ci_id: cr.second_ci.type_id})
    for i in relations:
        relations[i] = json.dumps(relations[i])
    if relations:
        rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
    if relations2:
        rd.create_or_update(relations2, REDIS_PREFIX_CI_RELATION2)

    es = None
    if current_app.config.get("USE_ES"):
        from api.extensions import es
        from api.models.cmdb import Attribute
@@ -107,7 +117,15 @@ def cmdb_init_acl():
    # 1. add resource type
    for resource_type in ResourceTypeEnum.all():
        try:
            ResourceTypeCRUD.add(app_id, resource_type, '', PermEnum.all())
            perms = PermEnum.all()
            if resource_type in (ResourceTypeEnum.CI_FILTER, ResourceTypeEnum.PAGE):
                perms = [PermEnum.READ]
            elif resource_type == ResourceTypeEnum.CI_TYPE_RELATION:
                perms = [PermEnum.ADD, PermEnum.DELETE, PermEnum.GRANT]
            elif resource_type == ResourceTypeEnum.RELATION_VIEW:
                perms = [PermEnum.READ, PermEnum.UPDATE, PermEnum.DELETE, PermEnum.GRANT]

            ResourceTypeCRUD.add(app_id, resource_type, '', perms)
        except AbortException:
            pass

@@ -123,10 +141,10 @@ def cmdb_init_acl():

    # 3. add resource and grant
    ci_types = CIType.get_by(to_dict=False)
    type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
    resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
    for ci_type in ci_types:
        try:
            ResourceCRUD.add(ci_type.name, type_id, app_id)
            ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
        except AbortException:
            pass

@@ -136,10 +154,10 @@ def cmdb_init_acl():
                                            [PermEnum.READ])

    relation_views = PreferenceRelationView.get_by(to_dict=False)
    type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
    resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
    for view in relation_views:
        try:
            ResourceCRUD.add(view.name, type_id, app_id)
            ResourceCRUD.add(view.name, resource_type_id, app_id)
        except AbortException:
            pass
@@ -149,57 +167,6 @@ def cmdb_init_acl():
                                            [PermEnum.READ])


@click.command()
@click.option(
    '-u',
    '--user',
    help='username'
)
@click.option(
    '-p',
    '--password',
    help='password'
)
@click.option(
    '-m',
    '--mail',
    help='mail'
)
@with_appcontext
def add_user(user, password, mail):
    """
    create a user

    is_admin: default is False

    Example: flask add-user -u <username> -p <password> -m <mail>
    """
    assert user is not None
    assert password is not None
    assert mail is not None
    UserCRUD.add(username=user, password=password, email=mail)


@click.command()
@click.option(
    '-u',
    '--user',
    help='username'
)
@with_appcontext
def del_user(user):
    """
    delete a user

    Example: flask del-user -u <username>
    """
    assert user is not None
    from api.models.acl import User

    u = User.get_by(username=user, first=True, to_dict=False)
    u and UserCRUD.delete(u.uid)


@click.command()
@with_appcontext
def cmdb_counter():
@@ -311,3 +278,197 @@ def cmdb_index_table_upgrade():
            CIIndexValueDateTime.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
            i.delete(commit=False)
    db.session.commit()


def valid_address(address):
    if not address:
        return False

    if not address.startswith(("http://127.0.0.1", "https://127.0.0.1")):
        response = {
            "message": "Address should start with http://127.0.0.1 or https://127.0.0.1",
            "status": "failed"
        }
        KeyManage.print_response(response)
        return False
    return True


@click.command()
@click.option(
    '-a',
    '--address',
    help='inner cmdb api, http://127.0.0.1:8000',
)
@with_appcontext
def cmdb_inner_secrets_init(address):
    """
    init inner secrets for password feature
    """
    res, ok = KeyManage(backend=InnerKVManger).init()
    if not ok:
        if res.get("status") == "failed":
            KeyManage.print_response(res)
        return

    token = res.get("details", {}).get("root_token", "")
    if valid_address(address):
        token = current_app.config.get("INNER_TRIGGER_TOKEN", "") if not token else token
        if not token:
            token = click.prompt(f'Enter root token', hide_input=True, confirmation_prompt=False)
        assert token is not None
        resp = requests.post("{}/api/v0.1/secrets/auto_seal".format(address.strip("/")),
                             headers={"Inner-Token": token})
        if resp.status_code == 200:
            KeyManage.print_response(resp.json())
        else:
            KeyManage.print_response({"message": resp.text or resp.status_code, "status": "failed"})
    else:
        KeyManage.print_response(res)


@click.command()
@click.option(
    '-a',
    '--address',
    help='inner cmdb api, http://127.0.0.1:8000',
    required=True,
)
@with_appcontext
def cmdb_inner_secrets_unseal(address):
    """
    unseal the secrets feature
    """
    if not valid_address(address):
        return
    address = "{}/api/v0.1/secrets/unseal".format(address.strip("/"))
    for i in range(global_key_threshold):
        token = click.prompt(f'Enter unseal token {i + 1}', hide_input=True, confirmation_prompt=False)
        assert token is not None
        resp = requests.post(address, headers={"Unseal-Token": token})
        if resp.status_code == 200:
            KeyManage.print_response(resp.json())
            if resp.json().get("status") in ["success", "skip"]:
                return
        else:
            KeyManage.print_response({"message": resp.status_code, "status": "failed"})
            return


@click.command()
@click.option(
    '-a',
    '--address',
    help='inner cmdb api, http://127.0.0.1:8000',
    required=True,
)
@click.option(
    '-k',
    '--token',
    help='root token',
    prompt=True,
    hide_input=True,
)
@with_appcontext
def cmdb_inner_secrets_seal(address, token):
    """
    seal the secrets feature
    """
    assert address is not None
    assert token is not None
    if not valid_address(address):
        return
    address = "{}/api/v0.1/secrets/seal".format(address.strip("/"))
    resp = requests.post(address, headers={
        "Inner-Token": token,
    })
    if resp.status_code == 200:
        KeyManage.print_response(resp.json())
    else:
        KeyManage.print_response({"message": resp.status_code, "status": "failed"})


@click.command()
@with_appcontext
def cmdb_password_data_migrate():
    """
    Migrate CI password data, version >= v2.3.6
    """
    from api.models.cmdb import CIIndexValueText
    from api.models.cmdb import CIValueText
    from api.lib.secrets.inner import InnerCrypt
    from api.lib.secrets.vault import VaultClient

    attrs = Attribute.get_by(to_dict=False)
    for attr in attrs:
        if attr.is_password:

            value_table = CIIndexValueText if attr.is_index else CIValueText

            failed = False
            for i in value_table.get_by(attr_id=attr.id, to_dict=False):
                if current_app.config.get("SECRETS_ENGINE", 'inner') == 'inner':
                    _, status = InnerCrypt().decrypt(i.value)
                    if status:
                        continue

                    encrypt_value, status = InnerCrypt().encrypt(i.value)
                    if status:
                        CIValueText.create(ci_id=i.ci_id, attr_id=attr.id, value=encrypt_value)
                    else:
                        failed = True
                        continue
                elif current_app.config.get("SECRETS_ENGINE") == 'vault':
                    if i.value == '******':
                        continue

                    vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
                    try:
                        vault.update("/{}/{}".format(i.ci_id, i.attr_id), dict(v=i.value))
                    except Exception as e:
                        print('save password to vault failed: {}'.format(e))
                        failed = True
                        continue
                else:
                    continue

                i.delete()

            if not failed and attr.is_index:
                attr.update(is_index=False)


@click.command()
@with_appcontext
def cmdb_agent_init():
    """
    Initialize the agent's permissions and obtain the key and secret
    """

    from api.models.acl import User

    user = User.get_by(username="cmdb_agent", first=True, to_dict=False)
    if user is None:
        click.echo(
            click.style('user cmdb_agent does not exist, please use flask add-user to create it first', fg='red'))
        return

    # grant
    _app = AppCache.get('cmdb') or App.create(name='cmdb')
    app_id = _app.id

    ci_types = CIType.get_by(to_dict=False)
    resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
    for ci_type in ci_types:
        try:
            ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
        except AbortException:
            pass

        ACLManager().grant_resource_to_role(ci_type.name,
                                            "cmdb_agent",
                                            ResourceTypeEnum.CI,
                                            [PermEnum.READ, PermEnum.UPDATE, PermEnum.ADD, PermEnum.DELETE])

    click.echo("Key : {}".format(click.style(user.key, bg='red')))
    click.echo("Secret: {}".format(click.style(user.secret, bg='red')))
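The cmdb_password_data_migrate command above decides whether a stored value still needs encrypting by trying to decrypt it first, which makes re-runs safe. The sketch below shows just that idempotency pattern in isolation; ToyCrypt is a hypothetical stand-in for the project's InnerCrypt and shares only the (value, status) return shape seen above, and base64 is used purely so the round trip is observable, not as real encryption.

```python
import base64
from typing import Tuple


class ToyCrypt:
    """Hypothetical stand-in for InnerCrypt; base64 is NOT encryption."""

    def encrypt(self, plaintext: str) -> Tuple[str, bool]:
        return base64.b64encode(plaintext.encode()).decode(), True

    def decrypt(self, ciphertext: str) -> Tuple[str, bool]:
        try:
            return base64.b64decode(ciphertext.encode(), validate=True).decode(), True
        except Exception:
            return "", False


def migrate_value(value: str, crypt: ToyCrypt) -> str:
    """Encrypt a stored value only if it is not already encrypted,
    mirroring the decrypt-first check in the command above."""
    _, already_encrypted = crypt.decrypt(value)
    if already_encrypted:
        return value  # nothing to do; the migration stays idempotent
    encrypted, ok = crypt.encrypt(value)
    return encrypted if ok else value


if __name__ == "__main__":
    crypt = ToyCrypt()
    once = migrate_value("s3cret", crypt)
    twice = migrate_value(once, crypt)
    assert once == twice  # running the migration again changes nothing
```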
@@ -10,9 +10,6 @@ from api.models.common_setting import Employee, Department


class InitEmployee(object):
    """
    初始化员工
    """

    def __init__(self):
        self.log = current_app.logger
@@ -58,7 +55,8 @@ class InitEmployee(object):
                self.log.error(ErrFormat.acl_import_user_failed.format(user['username'], str(e)))
                self.log.error(e)

    def get_rid_by_uid(self, uid):
    @staticmethod
    def get_rid_by_uid(uid):
        from api.models.acl import Role
        role = Role.get_by(first=True, uid=uid)
        return role['id'] if role is not None else 0
@@ -71,7 +69,8 @@ class InitDepartment(object):
    def init(self):
        self.init_wide_company()

    def hard_delete(self, department_id, department_name):
    @staticmethod
    def hard_delete(department_id, department_name):
        existed_deleted_list = Department.query.filter(
            Department.department_name == department_name,
            Department.department_id == department_id,
@@ -80,11 +79,12 @@ class InitDepartment(object):
        for existed in existed_deleted_list:
            existed.delete()

    def get_department(self, department_name):
    @staticmethod
    def get_department(department_name):
        return Department.query.filter(
            Department.department_name == department_name,
            Department.deleted == 0,
        ).order_by(Department.created_at.asc()).first()
        ).first()

    def run(self, department_id, department_name, department_parent_id):
        self.hard_delete(department_id, department_name)
@@ -94,7 +94,7 @@ class InitDepartment(object):
            if res.department_id == department_id:
                return
            else:
                new_d = res.update(
                res.update(
                    department_id=department_id,
                    department_parent_id=department_parent_id,
                )
@@ -108,11 +108,11 @@ class InitDepartment(object):
        new_d = self.get_department(department_name)

        if new_d.department_id != department_id:
            new_d = new_d.update(
            new_d.update(
                department_id=department_id,
                department_parent_id=department_parent_id,
            )
        self.log.info(f"初始化 {department_name} 部门成功.")
        self.log.info(f"init {department_name} success.")

    def run_common(self, department_id, department_name, department_parent_id):
        try:
@@ -123,19 +123,14 @@ class InitDepartment(object):
            raise Exception(e)

    def init_wide_company(self):
        """
        创建 id 0, name 全公司 的部门
        """
        department_id = 0
        department_name = '全公司'
        department_parent_id = -1

        self.run_common(department_id, department_name, department_parent_id)

    def create_acl_role_with_department(self):
        """
        当前所有部门,在ACL创建 role
        """
    @staticmethod
    def create_acl_role_with_department():
        acl = ACLManager('acl')
        role_name_map = {role['name']: role for role in acl.get_all_roles()}

@@ -146,7 +141,7 @@ class InitDepartment(object):
                continue

            role = role_name_map.get(department.department_name)
            if role is None:
            if not role:
                payload = {
                    'app_id': 'acl',
                    'name': department.department_name,
@@ -165,50 +160,65 @@ class InitDepartment(object):
        acl = self.check_app('backend')
        resources_types = acl.get_all_resources_types()

        perms = ['read', 'grant', 'delete', 'update']

        acl_rid = self.get_admin_user_rid()

        results = list(filter(lambda t: t['name'] == '操作权限', resources_types['groups']))
        if len(results) == 0:
            payload = dict(
                app_id=acl.app_name,
                name='操作权限',
                description='',
                perms=['read', 'grant', 'delete', 'update']
                perms=perms
            )
            resource_type = acl.create_resources_type(payload)
        else:
            resource_type = results[0]
            resource_type_id = resource_type['id']
            existed_perms = resources_types.get('id2perms', {}).get(resource_type_id, [])
            existed_perms = [p['name'] for p in existed_perms]
            new_perms = []
            for perm in perms:
                if perm not in existed_perms:
                    new_perms.append(perm)
            if len(new_perms) > 0:
                resource_type['perms'] = existed_perms + new_perms
                acl.update_resources_type(resource_type_id, resource_type)

        resource_list = acl.get_resource_by_type(None, None, resource_type['id'])

        for name in ['公司信息', '公司架构', '通知设置']:
            payload = dict(
                type_id=resource_type['id'],
                app_id=acl.app_name,
                name=name,
            )
            try:
                acl.create_resource(payload)
            except Exception as e:
                if '已经存在' in str(e):
                    pass
                else:
                    raise Exception(e)
            target = list(filter(lambda r: r['name'] == name, resource_list))
            if len(target) == 0:
                payload = dict(
                    type_id=resource_type['id'],
                    app_id=acl.app_name,
                    name=name,
                )
                resource = acl.create_resource(payload)
            else:
                resource = target[0]

    def check_app(self, app_name):
            if acl_rid > 0:
                acl.grant_resource(acl_rid, resource['id'], perms)

    @staticmethod
    def check_app(app_name):
        acl = ACLManager(app_name)
        payload = dict(
            name=app_name,
            description=app_name
        )
        try:
            app = acl.validate_app()
            if app:
                return acl

            app = acl.validate_app()
            if not app:
                acl.create_app(payload)
        except Exception as e:
            current_app.logger.error(e)
            if '不存在' in str(e):
                acl.create_app(payload)
                return acl
            raise Exception(e)
        return acl

    @staticmethod
    def get_admin_user_rid():
        admin = Employee.get_by(first=True, username='admin', to_dict=False)
        return admin.acl_rid if admin else 0


@click.command()
@@ -241,17 +251,19 @@ def common_check_new_columns():
    from api.extensions import db
    from sqlalchemy import inspect, text

    def get_model_by_table_name(table_name):
        for model in db.Model.registry._class_registry.values():
            if hasattr(model, '__tablename__') and model.__tablename__ == table_name:
                return model
    def get_model_by_table_name(_table_name):
        registry = getattr(db.Model, 'registry', None)
        class_registry = getattr(registry, '_class_registry', None)
        for _model in class_registry.values():
            if hasattr(_model, '__tablename__') and _model.__tablename__ == _table_name:
                return _model
        return None

    def add_new_column(table_name, new_column):
    def add_new_column(target_table_name, new_column):
        column_type = new_column.type.compile(engine.dialect)
        default_value = new_column.default.arg if new_column.default else None

        sql = f"ALTER TABLE {table_name} ADD COLUMN {new_column.name} {column_type} "
        sql = "ALTER TABLE " + target_table_name + " ADD COLUMN " + new_column.name + " " + column_type
        if new_column.comment:
            sql += f" comment '{new_column.comment}'"

@@ -277,7 +289,8 @@ def common_check_new_columns():
        model = get_model_by_table_name(table_name)
        if model is None:
            continue
        model_columns = model.__table__.columns._all_columns

        model_columns = getattr(getattr(getattr(model, '__table__'), 'columns'), '_all_columns')
        for column in model_columns:
            if column.name not in existed_column_name_list:
                try:
@@ -286,3 +299,20 @@ def common_check_new_columns():
                except Exception as e:
                    current_app.logger.error(f"add new column [{column.name}] in table [{table_name}] err:")
                    current_app.logger.error(e)


@click.command()
@with_appcontext
def common_sync_file_to_db():
    from api.lib.common_setting.upload_file import CommonFileCRUD
    CommonFileCRUD.sync_file_to_db()


@click.command()
@with_appcontext
@click.option('--value', type=click.INT, default=-1)
def set_auth_auto_redirect_enable(value):
    if value < 0:
        return
    from api.lib.common_setting.common_data import CommonDataCRUD
    CommonDataCRUD.set_auth_auto_redirect_enable(value)
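common_check_new_columns above compares each model's declared columns with the columns that actually exist in the database and issues ALTER TABLE for anything missing. A self-contained sketch of that idea against an in-memory SQLite database follows; the User model and the raw DDL are illustrative only, and production schema changes are normally left to Alembic migrations.

```python
# Sketch: detect and add columns that the model declares but the table lacks.
from sqlalchemy import Column, Integer, String, create_engine, inspect, text
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(64))
    nickname = Column(String(64))  # added to the model after the table was created


if __name__ == "__main__":
    engine = create_engine("sqlite:///:memory:")
    # The live table predates the model's "nickname" column, so it is missing.
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(64))"))

    existing = {c["name"] for c in inspect(engine).get_columns(User.__tablename__)}

    for column in User.__table__.columns:
        if column.name not in existing:
            ddl = "ALTER TABLE {} ADD COLUMN {} {}".format(
                User.__tablename__, column.name, column.type.compile(engine.dialect))
            with engine.begin() as conn:
                conn.execute(text(ddl))

    print(sorted(c["name"] for c in inspect(engine).get_columns("users")))  # ['id', 'name', 'nickname']
```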
@@ -84,66 +84,6 @@ def clean():
            os.remove(full_pathname)


@click.command()
@click.option("--url", default=None, help="Url to test (ex. /static/image.png)")
@click.option(
    "--order", default="rule", help="Property on Rule to order by (default: rule)"
)
@with_appcontext
def urls(url, order):
    """Display all of the url matching routes for the project.

    Borrowed from Flask-Script, converted to use Click.
    """
    rows = []
    column_headers = ("Rule", "Endpoint", "Arguments")

    if url:
        try:
            rule, arguments = current_app.url_map.bind("localhost").match(
                url, return_rule=True
            )
            rows.append((rule.rule, rule.endpoint, arguments))
            column_length = 3
        except (NotFound, MethodNotAllowed) as e:
            rows.append(("<{}>".format(e), None, None))
            column_length = 1
    else:
        rules = sorted(
            current_app.url_map.iter_rules(), key=lambda rule: getattr(rule, order)
        )
        for rule in rules:
            rows.append((rule.rule, rule.endpoint, None))
        column_length = 2

    str_template = ""
    table_width = 0

    if column_length >= 1:
        max_rule_length = max(len(r[0]) for r in rows)
        max_rule_length = max_rule_length if max_rule_length > 4 else 4
        str_template += "{:" + str(max_rule_length) + "}"
        table_width += max_rule_length

    if column_length >= 2:
        max_endpoint_length = max(len(str(r[1])) for r in rows)
        max_endpoint_length = max_endpoint_length if max_endpoint_length > 8 else 8
        str_template += " {:" + str(max_endpoint_length) + "}"
        table_width += 2 + max_endpoint_length

    if column_length >= 3:
        max_arguments_length = max(len(str(r[2])) for r in rows)
        max_arguments_length = max_arguments_length if max_arguments_length > 9 else 9
        str_template += " {:" + str(max_arguments_length) + "}"
        table_width += 2 + max_arguments_length

    click.echo(str_template.format(*column_headers[:column_length]))
    click.echo("-" * table_width)

    for row in rows:
        click.echo(str_template.format(*row[:column_length]))


@click.command()
@with_appcontext
def db_setup():
@@ -9,6 +9,7 @@ from flask_login import LoginManager
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

from api.lib.secrets.inner import KeyManage
from api.lib.utils import ESHandler
from api.lib.utils import RedisHandler

@@ -21,3 +22,4 @@ celery = Celery()
cors = CORS(supports_credentials=True)
rd = RedisHandler()
es = ESHandler()
inner_secrets = KeyManage()
@@ -60,22 +60,34 @@ class AttributeManager(object):
            return []

    @staticmethod
    def _get_choice_values_from_other_ci(choice_other):
    def _get_choice_values_from_other(choice_other):
        from api.lib.cmdb.search import SearchError
        from api.lib.cmdb.search.ci import search

        type_ids = choice_other.get('type_ids')
        attr_id = choice_other.get('attr_id')
        other_filter = choice_other.get('filter') or ''
        if choice_other.get('type_ids'):
            type_ids = choice_other.get('type_ids')
            attr_id = choice_other.get('attr_id')
            other_filter = choice_other.get('filter') or ''

        query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
        s = search(query, fl=[str(attr_id)], facet=[str(attr_id)], count=1)
        try:
            _, _, _, _, _, facet = s.search()
            return [[i[0], {}] for i in (list(facet.values()) or [[]])[0]]
        except SearchError as e:
            current_app.logger.error("get choice values from other ci failed: {}".format(e))
            return []
            query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
            s = search(query, fl=[str(attr_id)], facet=[str(attr_id)], count=1)
            try:
                _, _, _, _, _, facet = s.search()
                return [[i[0], {}] for i in (list(facet.values()) or [[]])[0]]
            except SearchError as e:
                current_app.logger.error("get choice values from other ci failed: {}".format(e))
                return []

        elif choice_other.get('script'):
            try:
                x = compile(choice_other['script'], '', "exec")
                local_ns = {}
                exec(x, {}, local_ns)
                res = local_ns['ChoiceValue']().values() or []
                return [[i, {}] for i in res]
            except Exception as e:
                current_app.logger.error("get choice values from script: {}".format(e))
                return []

    @classmethod
    def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_other,
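The new script branch above exec()s a user-supplied snippet and then calls `local_ns['ChoiceValue']().values()`, so the only contract visible in the code is a class named ChoiceValue with a values() method that returns the option list. A hedged example of such a snippet (anything beyond that contract is an assumption):

```python
# Example payload for choice_other = {"script": "..."}: the CMDB executes this
# source and calls ChoiceValue().values(); each returned value becomes one
# choice, paired with an empty option dict ([[i, {}] for i in res] above).
class ChoiceValue(object):
    def values(self):
        # Any Python is allowed here; a static list keeps the example obvious.
        return ["dev", "test", "staging", "prod"]
```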
@@ -87,7 +99,7 @@ class AttributeManager(object):
            return []
        elif choice_other:
            if choice_other_parse and isinstance(choice_other, dict):
                return cls._get_choice_values_from_other_ci(choice_other)
                return cls._get_choice_values_from_other(choice_other)
        else:
            return []

@@ -96,7 +108,8 @@ class AttributeManager(object):
            return []
        choice_values = choice_table.get_by(fl=["value", "option"], attr_id=attr_id)

        return [[choice_value['value'], choice_value['option']] for choice_value in choice_values]
        return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option']]
                for choice_value in choice_values]

    @staticmethod
    def add_choice_values(_id, value_type, choice_values):
@@ -176,7 +189,8 @@ class AttributeManager(object):
        return attr

    def get_attribute(self, key, choice_web_hook_parse=True, choice_other_parse=True):
        attr = AttributeCache.get(key).to_dict()
        attr = AttributeCache.get(key) or dict()
        attr = attr and attr.to_dict()
        if attr.get("is_choice"):
            attr["choice_value"] = self.get_choice_values(
                attr["id"],
@@ -218,10 +232,15 @@ class AttributeManager(object):
        if name in BUILTIN_KEYWORDS:
            return abort(400, ErrFormat.attribute_name_cannot_be_builtin)

        if kwargs.get('choice_other'):
            if (not isinstance(kwargs['choice_other'], dict) or not kwargs['choice_other'].get('type_ids') or
                    not kwargs['choice_other'].get('attr_id')):
                return abort(400, ErrFormat.attribute_choice_other_invalid)
        while kwargs.get('choice_other'):
            if isinstance(kwargs['choice_other'], dict):
                if kwargs['choice_other'].get('script'):
                    break

                if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
                    break

            return abort(400, ErrFormat.attribute_choice_other_invalid)

        alias = kwargs.pop("alias", "")
        alias = name if not alias else alias
@@ -232,6 +251,8 @@ class AttributeManager(object):

        kwargs.get('is_computed') and cls.can_create_computed_attribute()

        kwargs.get('choice_other') and kwargs['choice_other'].get('script') and cls.can_create_computed_attribute()

        attr = Attribute.create(flush=True,
                                name=name,
                                alias=alias,
@@ -317,9 +338,6 @@ class AttributeManager(object):
    def update(self, _id, **kwargs):
        attr = Attribute.get_by_id(_id) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_id)))

        if not self._can_edit_attribute(attr):
            return abort(403, ErrFormat.cannot_edit_attribute)

        if kwargs.get("name"):
            other = Attribute.get_by(name=kwargs['name'], first=True, to_dict=False)
            if other and other.id != attr.id:
@@ -337,10 +355,15 @@ class AttributeManager(object):

            self._change_index(attr, attr.is_index, kwargs['is_index'])

        if kwargs.get('choice_other'):
            if (not isinstance(kwargs['choice_other'], dict) or not kwargs['choice_other'].get('type_ids') or
                    not kwargs['choice_other'].get('attr_id')):
                return abort(400, ErrFormat.attribute_choice_other_invalid)
        while kwargs.get('choice_other'):
            if isinstance(kwargs['choice_other'], dict):
                if kwargs['choice_other'].get('script'):
                    break

                if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
                    break

            return abort(400, ErrFormat.attribute_choice_other_invalid)

        existed2 = attr.to_dict()
        if not existed2['choice_web_hook'] and not existed2.get('choice_other') and existed2['is_choice']:
@@ -355,6 +378,14 @@ class AttributeManager(object):

        kwargs.get('is_computed') and self.can_create_computed_attribute()

        is_changed = False
        for k in kwargs:
            if kwargs[k] != getattr(attr, k, None):
                is_changed = True

        if is_changed and not self._can_edit_attribute(attr):
|
||||
return abort(403, ErrFormat.cannot_edit_attribute)
|
||||
|
||||
attr.update(flush=True, filter_none=False, **kwargs)
|
||||
|
||||
if is_choice and choice_value:
|
||||
|
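For context, and not part of the diff: the new script branch of _get_choice_values_from_other() exec's the stored script in an isolated namespace and then reads local_ns['ChoiceValue']().values(). A minimal sketch of a script payload that would satisfy it (the returned values are made up):

# illustrative sketch only -- executed via exec(compile(...), {}, local_ns) as in the hunk above
class ChoiceValue(object):
    def values(self):
        # any dynamic lookup could go here; a static list keeps the sketch runnable
        return ["dev", "staging", "prod"]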
@@ -3,11 +3,6 @@ import datetime
import json
import os

from flask import abort
from flask import current_app
from flask_login import current_user
from sqlalchemy import func

from api.extensions import db
from api.lib.cmdb.auto_discovery.const import ClOUD_MAP
from api.lib.cmdb.cache import CITypeAttributeCache
@@ -28,6 +23,10 @@ from api.lib.utils import AESCrypto
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryRule
from flask import abort
from flask import current_app
from flask_login import current_user
from sqlalchemy import func

PWD = os.path.abspath(os.path.dirname(__file__))

@@ -36,9 +35,10 @@ def parse_plugin_script(script):
    attributes = []
    try:
        x = compile(script, '', "exec")
        exec(x)
        unique_key = locals()['AutoDiscovery']().unique_key
        attrs = locals()['AutoDiscovery']().attributes() or []
        local_ns = {}
        exec(x, {}, local_ns)
        unique_key = local_ns['AutoDiscovery']().unique_key
        attrs = local_ns['AutoDiscovery']().attributes() or []
    except Exception as e:
        return abort(400, str(e))

@@ -250,20 +250,17 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
            current_app.logger.warning(e)
            return abort(400, str(e))

    def _can_add(self, **kwargs):
        self.cls.get_by(type_id=kwargs['type_id'], adr_id=kwargs.get('adr_id') or None) and abort(
            400, ErrFormat.ad_duplicate)

        # self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
    @staticmethod
    def _can_add(**kwargs):

        if kwargs.get('adr_id'):
            adr = AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
            AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
                404, ErrFormat.adr_not_found.format("id={}".format(kwargs['adr_id'])))
            if not adr.is_plugin:
                other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
                if other:
                    ci_type = CITypeCache.get(other.type_id)
                    return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))
            # if not adr.is_plugin:
            #     other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
            #     if other:
            #         ci_type = CITypeCache.get(other.type_id)
            #         return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))

        if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
            kwargs = check_plugin_script(**kwargs)
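The same isolated-namespace pattern now applies to auto-discovery plugin scripts: parse_plugin_script() only reads back unique_key and attributes(). A minimal sketch of a plugin body that would parse; the attribute tuple layout is an assumption, not something the hunk defines:

# illustrative sketch only -- shape inferred from parse_plugin_script() above
class AutoDiscovery(object):
    unique_key = "sn"

    def attributes(self):
        # the hunk only requires attributes() to return a list; this layout is assumed
        return [("sn", "text", "serial number"), ("hostname", "text", "host name")]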
@@ -29,6 +29,7 @@ from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.cmdb.history import CIRelationHistoryManager
from api.lib.cmdb.history import CITriggerHistoryManager
@@ -42,6 +43,8 @@ from api.lib.notify import notify_send
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import validate_permission
from api.lib.secrets.inner import InnerCrypt
from api.lib.secrets.vault import VaultClient
from api.lib.utils import Lock
from api.lib.utils import handle_arg_list
from api.lib.webhook import webhook_request
@@ -60,6 +63,7 @@ from api.tasks.cmdb import ci_relation_cache
from api.tasks.cmdb import ci_relation_delete

PRIVILEGED_USERS = {"worker", "cmdb_agent", "agent"}
PASSWORD_DEFAULT_SHOW = "******"


class CIManager(object):
@@ -178,6 +182,9 @@ class CIManager(object):
        need_children and res.update(CIRelationManager.get_children(ci_id, ret_key=ret_key))  # one floor

        ci_type = CITypeCache.get(ci.type_id)
        if not ci_type:
            return res

        res["ci_type"] = ci_type.name

        fields = CITypeAttributeManager.get_attr_names_by_type_id(ci.type_id) if not fields else fields
@@ -323,6 +330,8 @@ class CIManager(object):
        ci_attr2type_attr = {type_attr.attr_id: type_attr for type_attr, _ in attrs}

        ci = None
        record_id = None
        password_dict = {}
        need_lock = current_user.username not in current_app.config.get('PRIVILEGED_USERS', PRIVILEGED_USERS)
        with Lock(ci_type_name, need_lock=need_lock):
            existed = cls.ci_is_exist(unique_key, unique_value, ci_type.id)
@@ -351,14 +360,23 @@ class CIManager(object):
                            ci_dict.get(attr.name) is None and ci_dict.get(attr.alias) is None)):
                        ci_dict[attr.name] = attr.default.get('default')

                    if type_attr.is_required and (attr.name not in ci_dict and attr.alias not in ci_dict):
                    if (type_attr.is_required and not attr.is_computed and
                            (attr.name not in ci_dict and attr.alias not in ci_dict)):
                        return abort(400, ErrFormat.attribute_value_required.format(attr.name))
            else:
                for type_attr, attr in attrs:
                    if attr.default and attr.default.get('default') == AttributeDefaultValueEnum.UPDATED_AT:
                        ci_dict[attr.name] = now

            computed_attrs = [attr.to_dict() for _, attr in attrs if attr.is_computed] or None
            computed_attrs = []
            for _, attr in attrs:
                if attr.is_computed:
                    computed_attrs.append(attr.to_dict())
                elif attr.is_password:
                    if attr.name in ci_dict:
                        password_dict[attr.id] = ci_dict.pop(attr.name)
                    elif attr.alias in ci_dict:
                        password_dict[attr.id] = ci_dict.pop(attr.alias)

            value_manager = AttributeValueManager()

@@ -377,8 +395,9 @@ class CIManager(object):
                        k not in ci_type_attrs_alias and _no_attribute_policy == ExistPolicy.REJECT):
                    return abort(400, ErrFormat.attribute_not_found.format(k))

                if limit_attrs and ci_type_attrs_name.get(k) not in limit_attrs and (
                        ci_type_attrs_alias.get(k) not in limit_attrs):
                _attr_name = ((ci_type_attrs_name.get(k) and ci_type_attrs_name[k].name) or
                              (ci_type_attrs_alias.get(k) and ci_type_attrs_alias[k].name))
                if limit_attrs and _attr_name not in limit_attrs:
                    return abort(403, ErrFormat.ci_filter_perm_attr_no_permission.format(k))

            ci_dict = {k: v for k, v in ci_dict.items() if k in ci_type_attrs_name or k in ci_type_attrs_alias}
@@ -395,6 +414,10 @@ class CIManager(object):
                cls.delete(ci.id)
                raise e

            if password_dict:
                for attr_id in password_dict:
                    record_id = cls.save_password(ci.id, attr_id, password_dict[attr_id], record_id, ci_type.id)

            if record_id:  # has change
                ci_cache.apply_async(args=(ci.id, operate_type, record_id), queue=CMDB_QUEUE)

@@ -414,7 +437,16 @@ class CIManager(object):
            if attr.default and attr.default.get('default') == AttributeDefaultValueEnum.UPDATED_AT:
                ci_dict[attr.name] = now

        computed_attrs = [attr.to_dict() for _, attr in attrs if attr.is_computed] or None
        password_dict = dict()
        computed_attrs = list()
        for _, attr in attrs:
            if attr.is_computed:
                computed_attrs.append(attr.to_dict())
            elif attr.is_password:
                if attr.name in ci_dict:
                    password_dict[attr.id] = ci_dict.pop(attr.name)
                elif attr.alias in ci_dict:
                    password_dict[attr.id] = ci_dict.pop(attr.alias)

        value_manager = AttributeValueManager()

@@ -423,6 +455,7 @@ class CIManager(object):

        limit_attrs = self._valid_ci_for_no_read(ci) if not _is_admin else {}

        record_id = None
        need_lock = current_user.username not in current_app.config.get('PRIVILEGED_USERS', PRIVILEGED_USERS)
        with Lock(ci.ci_type.name, need_lock=need_lock):
            self._valid_unique_constraint(ci.type_id, ci_dict, ci_id)
@@ -440,6 +473,10 @@ class CIManager(object):
            except BadRequest as e:
                raise e

            if password_dict:
                for attr_id in password_dict:
                    record_id = self.save_password(ci.id, attr_id, password_dict[attr_id], record_id, ci.type_id)

            if record_id:  # has change
                ci_cache.apply_async(args=(ci_id, OperateType.UPDATE, record_id), queue=CMDB_QUEUE)
@@ -465,30 +502,33 @@ class CIManager(object):
        ci_dict = cls.get_cis_by_ids([ci_id])
        ci_dict = ci_dict and ci_dict[0]

        triggers = CITriggerManager.get(ci_dict['_type'])
        for trigger in triggers:
            option = trigger['option']
            if not option.get('enable') or option.get('action') != OperateType.DELETE:
                continue
        if ci_dict:
            triggers = CITriggerManager.get(ci_dict['_type'])
            for trigger in triggers:
                option = trigger['option']
                if not option.get('enable') or option.get('action') != OperateType.DELETE:
                    continue

            if option.get('filter') and not CITriggerManager.ci_filter(ci_dict.get('_id'), option['filter']):
                continue
                if option.get('filter') and not CITriggerManager.ci_filter(ci_dict.get('_id'), option['filter']):
                    continue

            ci_delete_trigger.apply_async(args=(trigger, OperateType.DELETE, ci_dict), queue=CMDB_QUEUE)
                ci_delete_trigger.apply_async(args=(trigger, OperateType.DELETE, ci_dict), queue=CMDB_QUEUE)

        attrs = CITypeAttribute.get_by(type_id=ci.type_id, to_dict=False)
        attr_names = set([AttributeCache.get(attr.attr_id).name for attr in attrs])
        for attr_name in attr_names:
            value_table = TableMap(attr_name=attr_name).table
        attrs = [AttributeCache.get(attr.attr_id) for attr in attrs]
        for attr in attrs:
            value_table = TableMap(attr=attr).table
            for item in value_table.get_by(ci_id=ci_id, to_dict=False):
                item.delete(commit=False)

        for item in CIRelation.get_by(first_ci_id=ci_id, to_dict=False):
            ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
            ci_relation_delete.apply_async(
                args=(item.first_ci_id, item.second_ci_id, item.ancestor_ids), queue=CMDB_QUEUE)
            item.delete(commit=False)

        for item in CIRelation.get_by(second_ci_id=ci_id, to_dict=False):
            ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
            ci_relation_delete.apply_async(
                args=(item.first_ci_id, item.second_ci_id, item.ancestor_ids), queue=CMDB_QUEUE)
            item.delete(commit=False)

        ad_ci = AutoDiscoveryCI.get_by(ci_id=ci_id, to_dict=False, first=True)
@@ -498,7 +538,8 @@ class CIManager(object):

        db.session.commit()

        AttributeHistoryManger.add(None, ci_id, [(None, OperateType.DELETE, ci_dict, None)], ci.type_id)
        if ci_dict:
            AttributeHistoryManger.add(None, ci_id, [(None, OperateType.DELETE, ci_dict, None)], ci.type_id)

        ci_delete.apply_async(args=(ci_id,), queue=CMDB_QUEUE)

@@ -600,10 +641,13 @@ class CIManager(object):
            _fields = list()
            for field in fields:
                attr = AttributeCache.get(field)
                if attr is not None:
                if attr is not None and not attr.is_password:
                    _fields.append(str(attr.id))
            filter_fields_sql = "WHERE A.attr_id in ({0})".format(",".join(_fields))

        ci2pos = {int(_id): _pos for _pos, _id in enumerate(ci_ids)}
        res = [None] * len(ci_ids)

        ci_ids = ",".join(map(str, ci_ids))
        if value_tables is None:
            value_tables = ValueTypeMap.table_name.values()
@@ -614,11 +658,10 @@ class CIManager(object):
        # current_app.logger.debug(query_sql)
        cis = db.session.execute(query_sql).fetchall()
        ci_set = set()
        res = list()
        ci_dict = dict()
        unique_id2obj = dict()
        excludes = excludes and set(excludes)
        for ci_id, type_id, attr_id, attr_name, attr_alias, value, value_type, is_list in cis:
        for ci_id, type_id, attr_id, attr_name, attr_alias, value, value_type, is_list, is_password in cis:
            if not fields and excludes and (attr_name in excludes or attr_alias in excludes):
                continue

@@ -634,7 +677,7 @@ class CIManager(object):
                ci_dict["unique"] = unique_id2obj[ci_type.unique_id] and unique_id2obj[ci_type.unique_id].name
                ci_dict["unique_alias"] = unique_id2obj[ci_type.unique_id] and unique_id2obj[ci_type.unique_id].alias
                ci_set.add(ci_id)
                res.append(ci_dict)
                res[ci2pos[ci_id]] = ci_dict

            if ret_key == RetKey.NAME:
                attr_key = attr_name
@@ -645,11 +688,14 @@ class CIManager(object):
            else:
                return abort(400, ErrFormat.argument_invalid.format("ret_key"))

            value = ValueTypeMap.serialize2[value_type](value)
            if is_list:
                ci_dict.setdefault(attr_key, []).append(value)
            if is_password and value:
                ci_dict[attr_key] = PASSWORD_DEFAULT_SHOW
            else:
                ci_dict[attr_key] = value
                value = ValueTypeMap.serialize2[value_type](value)
                if is_list:
                    ci_dict.setdefault(attr_key, []).append(value)
                else:
                    ci_dict[attr_key] = value

        return res
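A small aside on the reworked _get_cis_from_db(): pre-allocating one slot per requested id keeps the response in the caller's order even though the SQL rows arrive grouped by value table. A self-contained sketch of that positioning trick (the ids are hypothetical):

# illustrative sketch only -- the ci2pos / res[...] pattern used in the hunk above
ci_ids = [103, 101, 102]                                   # order requested by the caller
ci2pos = {int(_id): _pos for _pos, _id in enumerate(ci_ids)}
res = [None] * len(ci_ids)
for ci_id in (101, 102, 103):                              # rows can arrive in any order
    res[ci2pos[ci_id]] = {"_id": ci_id}
assert [r["_id"] for r in res] == ci_ids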
@@ -681,6 +727,84 @@ class CIManager(object):

        return cls._get_cis_from_db(ci_ids, ret_key, fields, value_tables, excludes=excludes)

    @classmethod
    def save_password(cls, ci_id, attr_id, value, record_id, type_id):
        changed = None
        encrypt_value = None
        value_table = ValueTypeMap.table[ValueTypeEnum.PASSWORD]
        if current_app.config.get('SECRETS_ENGINE') == 'inner':
            if value:
                encrypt_value, status = InnerCrypt().encrypt(value)
                if not status:
                    current_app.logger.error('save password failed: {}'.format(encrypt_value))
                    return abort(400, ErrFormat.password_save_failed.format(encrypt_value))
            else:
                encrypt_value = PASSWORD_DEFAULT_SHOW

            existed = value_table.get_by(ci_id=ci_id, attr_id=attr_id, first=True, to_dict=False)
            if existed is None:
                if value:
                    value_table.create(ci_id=ci_id, attr_id=attr_id, value=encrypt_value)
                    changed = [(ci_id, attr_id, OperateType.ADD, '', PASSWORD_DEFAULT_SHOW, type_id)]
            elif existed.value != encrypt_value:
                if value:
                    existed.update(ci_id=ci_id, attr_id=attr_id, value=encrypt_value)
                    changed = [(ci_id, attr_id, OperateType.UPDATE, PASSWORD_DEFAULT_SHOW, PASSWORD_DEFAULT_SHOW, type_id)]
                else:
                    existed.delete()
                    changed = [(ci_id, attr_id, OperateType.DELETE, PASSWORD_DEFAULT_SHOW, '', type_id)]

        if current_app.config.get('SECRETS_ENGINE') == 'vault':
            vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
            if value:
                try:
                    vault.update("/{}/{}".format(ci_id, attr_id), dict(v=value))
                except Exception as e:
                    current_app.logger.error('save password to vault failed: {}'.format(e))
                    return abort(400, ErrFormat.password_save_failed.format('write vault failed'))
            else:
                try:
                    vault.delete("/{}/{}".format(ci_id, attr_id))
                except Exception as e:
                    current_app.logger.warning('delete password to vault failed: {}'.format(e))

        if changed is not None:
            return AttributeValueManager.write_change2(changed, record_id)

    @classmethod
    def load_password(cls, ci_id, attr_id):
        ci = CI.get_by_id(ci_id) or abort(404, ErrFormat.ci_not_found.format(ci_id))

        limit_attrs = cls._valid_ci_for_no_read(ci, ci.ci_type)
        if limit_attrs:
            attr = AttributeCache.get(attr_id)
            if attr and attr.name not in limit_attrs:
                return abort(403, ErrFormat.no_permission2)

        if current_app.config.get('SECRETS_ENGINE', 'inner') == 'inner':
            value_table = ValueTypeMap.table[ValueTypeEnum.PASSWORD]
            v = value_table.get_by(ci_id=ci_id, attr_id=attr_id, first=True, to_dict=False)

            v = v and v.value
            if not v:
                return

            decrypt_value, status = InnerCrypt().decrypt(v)
            if not status:
                current_app.logger.error('load password failed: {}'.format(decrypt_value))
                return abort(400, ErrFormat.password_load_failed.format(decrypt_value))

            return decrypt_value

        elif current_app.config.get('SECRETS_ENGINE') == 'vault':
            vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
            data, status = vault.read("/{}/{}".format(ci_id, attr_id))
            if not status:
                current_app.logger.error('read password from vault failed: {}'.format(data))
                return abort(400, ErrFormat.password_load_failed.format(data))

            return data.get('v')
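A rough usage sketch of the new password helpers, assuming an application context and only the config keys shown in the hunk (the ids and the secret below are hypothetical):

# illustrative sketch only -- not part of the diff
from api.lib.cmdb.ci import CIManager

# SECRETS_ENGINE selects the backend: 'inner' uses InnerCrypt, 'vault' uses VaultClient
# (the latter also needs VAULT_URL and VAULT_TOKEN in the app config)
record_id = CIManager.save_password(ci_id=1, attr_id=7, value='s3cret', record_id=None, type_id=3)
plain = CIManager.load_password(ci_id=1, attr_id=7)   # decrypted value, or vault's 'v' field
# read APIs never return the stored secret; they show PASSWORD_DEFAULT_SHOW ("******") instead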
class CIRelationManager(object):
    """
@@ -768,12 +892,14 @@ class CIRelationManager(object):

    @classmethod
    def get_ancestor_ids(cls, ci_ids, level=1):
        for _ in range(level):
            cis = db.session.query(CIRelation.first_ci_id).filter(
        level2ids = dict()
        for _level in range(1, level + 1):
            cis = db.session.query(CIRelation.first_ci_id, CIRelation.ancestor_ids).filter(
                CIRelation.second_ci_id.in_(ci_ids)).filter(CIRelation.deleted.is_(False))
            ci_ids = [i.first_ci_id for i in cis]
            level2ids[_level + 1] = {int(i.ancestor_ids.split(',')[-1]) for i in cis if i.ancestor_ids}

        return ci_ids
        return ci_ids, level2ids

    @staticmethod
    def _check_constraint(first_ci_id, first_type_id, second_ci_id, second_type_id, type_relation):
@@ -800,13 +926,14 @@ class CIRelationManager(object):
            return abort(400, ErrFormat.relation_constraint.format("1-N"))

    @classmethod
    def add(cls, first_ci_id, second_ci_id, more=None, relation_type_id=None):
    def add(cls, first_ci_id, second_ci_id, more=None, relation_type_id=None, ancestor_ids=None):

        first_ci = CIManager.confirm_ci_existed(first_ci_id)
        second_ci = CIManager.confirm_ci_existed(second_ci_id)

        existed = CIRelation.get_by(first_ci_id=first_ci_id,
                                    second_ci_id=second_ci_id,
                                    ancestor_ids=ancestor_ids,
                                    to_dict=False,
                                    first=True)
        if existed is not None:
@@ -842,11 +969,12 @@ class CIRelationManager(object):

            existed = CIRelation.create(first_ci_id=first_ci_id,
                                        second_ci_id=second_ci_id,
                                        relation_type_id=relation_type_id)
                                        relation_type_id=relation_type_id,
                                        ancestor_ids=ancestor_ids)

            CIRelationHistoryManager().add(existed, OperateType.ADD)

            ci_relation_cache.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
            ci_relation_cache.apply_async(args=(first_ci_id, second_ci_id, ancestor_ids), queue=CMDB_QUEUE)

        if more is not None:
            existed.upadte(more=more)
@@ -870,53 +998,56 @@ class CIRelationManager(object):
        his_manager = CIRelationHistoryManager()
        his_manager.add(cr, operate_type=OperateType.DELETE)

        ci_relation_delete.apply_async(args=(cr.first_ci_id, cr.second_ci_id), queue=CMDB_QUEUE)
        ci_relation_delete.apply_async(args=(cr.first_ci_id, cr.second_ci_id, cr.ancestor_ids), queue=CMDB_QUEUE)

        return cr_id

    @classmethod
    def delete_2(cls, first_ci_id, second_ci_id):
    def delete_2(cls, first_ci_id, second_ci_id, ancestor_ids=None):
        cr = CIRelation.get_by(first_ci_id=first_ci_id,
                               second_ci_id=second_ci_id,
                               ancestor_ids=ancestor_ids,
                               to_dict=False,
                               first=True)

        ci_relation_delete.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
        ci_relation_delete.apply_async(args=(first_ci_id, second_ci_id, ancestor_ids), queue=CMDB_QUEUE)

        return cls.delete(cr.id)
        return cr and cls.delete(cr.id)

    @classmethod
    def batch_update(cls, ci_ids, parents, children):
    def batch_update(cls, ci_ids, parents, children, ancestor_ids=None):
        """
        only for many to one
        :param ci_ids:
        :param parents:
        :param children:
        :param ancestor_ids:
        :return:
        """
        if isinstance(parents, list):
            for parent_id in parents:
                for ci_id in ci_ids:
                    cls.add(parent_id, ci_id)
                    cls.add(parent_id, ci_id, ancestor_ids=ancestor_ids)

        if isinstance(children, list):
            for child_id in children:
                for ci_id in ci_ids:
                    cls.add(ci_id, child_id)
                    cls.add(ci_id, child_id, ancestor_ids=ancestor_ids)

    @classmethod
    def batch_delete(cls, ci_ids, parents):
    def batch_delete(cls, ci_ids, parents, ancestor_ids=None):
        """
        only for many to one
        :param ci_ids:
        :param parents:
        :param ancestor_ids:
        :return:
        """

        if isinstance(parents, list):
            for parent_id in parents:
                for ci_id in ci_ids:
                    cls.delete_2(parent_id, ci_id)
                    cls.delete_2(parent_id, ci_id, ancestor_ids=ancestor_ids)


class CITriggerManager(object):
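Taken together, the relation hunks thread ancestor_ids (a comma-joined path of ancestor CI ids) through add, delete and the async cache tasks. A short sketch of the intended call shape (the ids are hypothetical):

# illustrative sketch only -- not part of the diff
from api.lib.cmdb.ci import CIRelationManager

CIRelationManager.add(10, 20)                                   # plain edge, ancestor_ids defaults to None
CIRelationManager.add(20, 30, ancestor_ids='10,20')             # edge recorded with its ancestor path
ci_ids, level2ids = CIRelationManager.get_ancestor_ids([30], level=2)   # now also returns ids per level
CIRelationManager.delete_2(20, 30, ancestor_ids='10,20')        # tolerates a missing relation (cr and ...)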
@@ -5,8 +5,10 @@ import copy
import toposort
from flask import abort
from flask import current_app
from flask import session
from flask_login import current_user
from toposort import toposort_flatten
from werkzeug.exceptions import BadRequest

from api.extensions import db
from api.lib.cmdb.attribute import AttributeManager
@@ -22,6 +24,7 @@ from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.history import CITypeHistoryManager
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.relation_type import RelationTypeManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.value import AttributeValueManager
@@ -75,12 +78,13 @@ class CITypeManager(object):
    def get_ci_types(type_name=None):
        resources = None
        if current_app.config.get('USE_ACL') and not is_app_admin('cmdb'):
            resources = set([i.get('name') for i in ACLManager().get_resources("CIType")])
            resources = set([i.get('name') for i in ACLManager().get_resources(ResourceTypeEnum.CI_TYPE)])

        ci_types = CIType.get_by() if type_name is None else CIType.get_by_like(name=type_name)
        res = list()
        for type_dict in ci_types:
            type_dict["unique_key"] = AttributeCache.get(type_dict["unique_id"]).name
            attr = AttributeCache.get(type_dict["unique_id"])
            type_dict["unique_key"] = attr and attr.name
            if resources is None or type_dict['name'] in resources:
                res.append(type_dict)

@@ -113,6 +117,9 @@ class CITypeManager(object):
    @classmethod
    @kwargs_required("name")
    def add(cls, **kwargs):
        if current_app.config.get('USE_ACL') and not is_app_admin('cmdb'):
            if ErrFormat.ci_type_config not in {i['name'] for i in ACLManager().get_resources(ResourceTypeEnum.PAGE)}:
                return abort(403, ErrFormat.no_permission2)

        unique_key = kwargs.pop("unique_key", None) or kwargs.pop("unique_id", None)
        unique_key = AttributeCache.get(unique_key) or abort(404, ErrFormat.unique_key_not_define)
@@ -131,7 +138,11 @@ class CITypeManager(object):
        CITypeCache.clean(ci_type.name)

        if current_app.config.get("USE_ACL"):
            ACLManager().add_resource(ci_type.name, ResourceTypeEnum.CI)
            try:
                ACLManager().add_resource(ci_type.name, ResourceTypeEnum.CI)
            except BadRequest:
                pass

            ACLManager().grant_resource_to_role(ci_type.name,
                                                RoleEnum.CMDB_READ_ALL,
                                                ResourceTypeEnum.CI,
@@ -243,7 +254,6 @@ class CITypeGroupManager(object):
        else:
            resources = {i['name']: i['permissions'] for i in resources if PermEnum.READ in i.get("permissions")}

        current_app.logger.info(resources)
        groups = sorted(CITypeGroup.get_by(), key=lambda x: x['order'] or 0)
        group_types = set()
        for group in groups:
@@ -283,7 +293,10 @@ class CITypeGroupManager(object):
        """
        existed = CITypeGroup.get_by_id(gid) or abort(
            404, ErrFormat.ci_type_group_not_found.format("id={}".format(gid)))
        if name is not None:
        if name is not None and name != existed.name:
            if RoleEnum.CONFIG not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin("cmdb"):
                return abort(403, ErrFormat.role_required.format(RoleEnum.CONFIG))

            existed.update(name=name)

        max_order = max([i.order or 0 for i in CITypeGroupItem.get_by(group_id=gid, to_dict=False)] or [0])
@@ -576,6 +589,11 @@ class CITypeRelationManager(object):
        ci_type_dict = CITypeCache.get(type_id).to_dict()
        ci_type_dict["ctr_id"] = relation_inst.id
        ci_type_dict["attributes"] = CITypeAttributeManager.get_attributes_by_type_id(ci_type_dict["id"])
        attr_filter = CIFilterPermsCRUD.get_attr_filter(type_id)
        if attr_filter:
            ci_type_dict["attributes"] = [attr for attr in (ci_type_dict["attributes"] or [])
                                          if attr['name'] in attr_filter]

        ci_type_dict["relation_type"] = relation_inst.relation_type.name
        ci_type_dict["constraint"] = relation_inst.constraint

@@ -637,6 +655,16 @@ class CITypeRelationManager(object):
            current_app.logger.warning(str(e))
            return abort(400, ErrFormat.circular_dependency_error)

        if constraint == ConstraintEnum.Many2Many:
            other_c = CITypeRelation.get_by(parent_id=p.id, constraint=ConstraintEnum.Many2Many,
                                            to_dict=False, first=True)
            other_p = CITypeRelation.get_by(child_id=c.id, constraint=ConstraintEnum.Many2Many,
                                            to_dict=False, first=True)
            if other_c and other_c.child_id != c.id:
                return abort(400, ErrFormat.m2m_relation_constraint.format(p.name, other_c.child.name))
            if other_p and other_p.parent_id != p.id:
                return abort(400, ErrFormat.m2m_relation_constraint.format(other_p.parent.name, c.name))

        existed = cls._get(p.id, c.id)
        if existed is not None:
            existed.update(relation_type_id=relation_type_id,
@@ -686,6 +714,24 @@ class CITypeRelationManager(object):

        cls.delete(ctr.id)

    @staticmethod
    def get_level2constraint(root_id, level):
        level = level + 1 if level == 1 else level
        ci = CI.get_by_id(root_id)
        if ci is None:
            return dict()

        root_id = ci.type_id
        level2constraint = dict()
        for lv in range(1, int(level) + 1):
            for i in CITypeRelation.get_by(parent_id=root_id, to_dict=False):
                if i.constraint == ConstraintEnum.Many2Many:
                    root_id = i.child_id
                    level2constraint[lv] = ConstraintEnum.Many2Many
                    break

        return level2constraint


class CITypeAttributeGroupManager(object):
    cls = CITypeAttributeGroup
@@ -697,7 +743,7 @@ class CITypeAttributeGroupManager(object):
        grouped = list()

        attributes = CITypeAttributeManager.get_attributes_by_type_id(type_id)
        id2attr = {i['id']: i for i in attributes}
        id2attr = {i.get('id'): i for i in attributes}

        for group in groups:
            items = CITypeAttributeGroupItem.get_by(group_id=group["id"], to_dict=False)
@@ -863,97 +909,58 @@ class CITypeAttributeGroupManager(object):

class CITypeTemplateManager(object):
    @staticmethod
    def __import(cls, data):
        id2obj_dicts = {i['id']: i for i in data}
        existed = cls.get_by(deleted=None, to_dict=False)
        id2existed = {i.id: i for i in existed}
        existed_ids = [i.id for i in existed]
        existed_no_delete_ids = [i.id for i in existed if not i.deleted]
    def __import(cls, data, unique_key='name'):
        id2obj_dicts = {i[unique_key]: i for i in data}
        existed = cls.get_by(to_dict=False)
        id2existed = {getattr(i, unique_key): i for i in existed}
        existed_ids = [getattr(i, unique_key) for i in existed]

        id_map = dict()
        # add
        for added_id in set(id2obj_dicts.keys()) - set(existed_ids):
            _id = id2obj_dicts[added_id].pop('id', None)
            id2obj_dicts[added_id].pop('created_at', None)
            id2obj_dicts[added_id].pop('updated_at', None)
            id2obj_dicts[added_id].pop('uid', None)

            if cls == CIType:
                CITypeManager.add(**id2obj_dicts[added_id])
                __id = CITypeManager.add(**id2obj_dicts[added_id])
                CITypeCache.clean(__id)
            elif cls == CITypeRelation:
                CITypeRelationManager.add(id2obj_dicts[added_id].get('parent_id'),
                                          id2obj_dicts[added_id].get('child_id'),
                                          id2obj_dicts[added_id].get('relation_type_id'),
                                          id2obj_dicts[added_id].get('constraint'),
                                          )
                __id = CITypeRelationManager.add(id2obj_dicts[added_id].get('parent_id'),
                                                 id2obj_dicts[added_id].get('child_id'),
                                                 id2obj_dicts[added_id].get('relation_type_id'),
                                                 id2obj_dicts[added_id].get('constraint'),
                                                 )
            else:
                cls.create(flush=True, **id2obj_dicts[added_id])
                obj = cls.create(flush=True, **id2obj_dicts[added_id])
                if cls == Attribute:
                    AttributeCache.clean(obj)
                __id = obj.id

            id_map[_id] = __id

        # update
        for updated_id in set(id2obj_dicts.keys()) & set(existed_ids):
            _id = id2obj_dicts[updated_id].pop('id', None)

            id2existed[updated_id].update(flush=True, **id2obj_dicts[updated_id])

            id_map[_id] = id2existed[updated_id].id

            if cls == Attribute:
                AttributeCache.clean(id2existed[updated_id])

            if cls == CIType:
                deleted = id2existed[updated_id].deleted
                CITypeManager.update(updated_id, **id2obj_dicts[updated_id])
                if deleted and current_app.config.get("USE_ACL"):
                    type_name = id2obj_dicts[updated_id]['name']
                    ACLManager().add_resource(type_name, ResourceTypeEnum.CI)
                    ACLManager().grant_resource_to_role(type_name,
                                                        RoleEnum.CMDB_READ_ALL,
                                                        ResourceTypeEnum.CI,
                                                        permissions=[PermEnum.READ])
                    ACLManager().grant_resource_to_role(type_name,
                                                        current_user.username,
                                                        ResourceTypeEnum.CI)
                CITypeCache.clean(id2existed[updated_id].id)

            else:
                id2existed[updated_id].update(flush=True, **id2obj_dicts[updated_id])

        # delete
        for deleted_id in set(existed_no_delete_ids) - set(id2obj_dicts.keys()):
            if cls == CIType:
                id2existed[deleted_id].soft_delete(flush=True)

                CITypeCache.clean(deleted_id)

                CITypeHistoryManager.add(CITypeOperateType.DELETE, deleted_id, change=id2existed[deleted_id].to_dict())

                if current_app.config.get("USE_ACL"):
                    ACLManager().del_resource(id2existed[deleted_id].name, ResourceTypeEnum.CI)
            else:
                id2existed[deleted_id].soft_delete(flush=True)
        try:
            db.session.commit()
        except Exception as e:
            db.session.rollback()
            raise Exception(str(e))

    def _import_ci_types(self, ci_types):
        for i in ci_types:
            i.pop("unique_key", None)

        self.__import(CIType, ci_types)

    def _import_ci_type_groups(self, ci_type_groups):
        _ci_type_groups = copy.deepcopy(ci_type_groups)
        for i in _ci_type_groups:
            i.pop('ci_types', None)

        self.__import(CITypeGroup, _ci_type_groups)

        # import group type items
        for group in ci_type_groups:
            existed = CITypeGroupItem.get_by(group_id=group['id'], to_dict=False)
            for i in existed:
                i.soft_delete()

            for order, ci_type in enumerate(group.get('ci_types') or []):
                payload = dict(group_id=group['id'], type_id=ci_type['id'], order=order)
                CITypeGroupItem.create(**payload)

    def _import_relation_types(self, relation_types):
        self.__import(RelationType, relation_types)

    def _import_ci_type_relations(self, ci_type_relations):
        for i in ci_type_relations:
            i.pop('parent', None)
            i.pop('child', None)
            i.pop('relation_type', None)

        self.__import(CITypeRelation, ci_type_relations)
        return id_map

    def _import_attributes(self, type2attributes):
        attributes = [attr for type_id in type2attributes for attr in type2attributes[type_id]]
@@ -962,122 +969,262 @@ class CITypeTemplateManager(object):
            i.pop('default_show', None)
            i.pop('is_required', None)
            i.pop('order', None)
            i.pop('choice_web_hook', None)
            i.pop('choice_other', None)
            i.pop('order', None)
            choice_value = i.pop('choice_value', None)
            if not choice_value:
                i['is_choice'] = False

            attrs.append((i, choice_value))

        self.__import(Attribute, [i[0] for i in attrs])
        attr_id_map = self.__import(Attribute, [i[0] for i in copy.deepcopy(attrs)])

        for i, choice_value in attrs:
            if choice_value:
                AttributeManager.add_choice_values(i['id'], i['value_type'], choice_value)
            if choice_value and not i.get('choice_web_hook') and not i.get('choice_other'):
                AttributeManager.add_choice_values(attr_id_map.get(i['id'], i['id']), i['value_type'], choice_value)

        return attr_id_map

    def _import_ci_types(self, ci_types, attr_id_map):
        for i in ci_types:
            i.pop("unique_key", None)
            i['unique_id'] = attr_id_map.get(i['unique_id'], i['unique_id'])
            i['uid'] = current_user.uid

        return self.__import(CIType, ci_types)

    def _import_ci_type_groups(self, ci_type_groups, type_id_map):
        _ci_type_groups = copy.deepcopy(ci_type_groups)
        for i in _ci_type_groups:
            i.pop('ci_types', None)

        group_id_map = self.__import(CITypeGroup, _ci_type_groups)

        # import group type items
        for group in ci_type_groups:
            for order, ci_type in enumerate(group.get('ci_types') or []):
                payload = dict(group_id=group_id_map.get(group['id'], group['id']),
                               type_id=type_id_map.get(ci_type['id'], ci_type['id']),
                               order=order)
                existed = CITypeGroupItem.get_by(group_id=payload['group_id'], type_id=payload['type_id'],
                                                 first=True, to_dict=False)
                if existed is None:
                    CITypeGroupItem.create(flush=True, **payload)
                else:
                    existed.update(flush=True, **payload)

        try:
            db.session.commit()
        except Exception as e:
            db.session.rollback()
            raise Exception(str(e))

    def _import_relation_types(self, relation_types):
        return self.__import(RelationType, relation_types)

    @staticmethod
    def _import_type_attributes(type2attributes):
        # add type attribute
    def _import_ci_type_relations(ci_type_relations, type_id_map, relation_type_id_map):
        for i in ci_type_relations:
            i.pop('parent', None)
            i.pop('child', None)
            i.pop('relation_type', None)

            i['parent_id'] = type_id_map.get(i['parent_id'], i['parent_id'])
            i['child_id'] = type_id_map.get(i['child_id'], i['child_id'])
            i['relation_type_id'] = relation_type_id_map.get(i['relation_type_id'], i['relation_type_id'])

            try:
                CITypeRelationManager.add(i.get('parent_id'),
                                          i.get('child_id'),
                                          i.get('relation_type_id'),
                                          i.get('constraint'),
                                          )
            except BadRequest:
                pass

    @staticmethod
    def _import_type_attributes(type2attributes, type_id_map, attr_id_map):
        for type_id in type2attributes:
            CITypeAttributesCache.clean(type_id_map.get(int(type_id), type_id))

        for type_id in type2attributes:
            existed = CITypeAttribute.get_by(type_id=type_id, to_dict=False)
            existed_attr_ids = {i.attr_id: i for i in existed}
            new_attr_ids = {i['id']: i for i in type2attributes[type_id]}
            existed = CITypeAttributesCache.get2(type_id_map.get(int(type_id), type_id))
            existed_attr_names = {attr.name: ta for ta, attr in existed}

            handled = set()
            for attr in type2attributes[type_id]:
                payload = dict(type_id=type_id,
                               attr_id=attr['id'],
                payload = dict(type_id=type_id_map.get(int(type_id), type_id),
                               attr_id=attr_id_map.get(attr['id'], attr['id']),
                               default_show=attr['default_show'],
                               is_required=attr['is_required'],
                               order=attr['order'])
                if attr['id'] not in existed_attr_ids:  # new
                    CITypeAttribute.create(flush=True, **payload)
                else:  # update
                    existed_attr_ids[attr['id']].update(**payload)
                if attr['name'] not in handled:
                    if attr['name'] not in existed_attr_names:  # new
                        CITypeAttribute.create(flush=True, **payload)
                    else:  # update
                        existed_attr_names[attr['name']].update(flush=True, **payload)

            # delete
            for i in existed:
                if i.attr_id not in new_attr_ids:
                    i.soft_delete()
                handled.add(attr['name'])

        try:
            db.session.commit()
        except Exception as e:
            db.session.rollback()
            raise Exception(str(e))

        for type_id in type2attributes:
            CITypeAttributesCache.clean(type_id_map.get(int(type_id), type_id))

    @staticmethod
    def _import_attribute_group(type2attribute_group):
    def _import_attribute_group(type2attribute_group, type_id_map, attr_id_map):
        for type_id in type2attribute_group:
            existed = CITypeAttributeGroup.get_by(type_id=type_id, to_dict=False)
            for i in existed:
                i.soft_delete()

            for group in type2attribute_group[type_id] or []:
                _group = copy.deepcopy(group)
                _group.pop('attributes', None)
                _group.pop('id', None)
                new = CITypeAttributeGroup.create(**_group)
                existed = CITypeAttributeGroup.get_by(name=_group['name'],
                                                      type_id=type_id_map.get(_group['type_id'], _group['type_id']),
                                                      first=True, to_dict=False)
                if existed is None:
                    _group['type_id'] = type_id_map.get(_group['type_id'], _group['type_id'])

                existed = CITypeAttributeGroupItem.get_by(group_id=new.id, to_dict=False)
                for i in existed:
                    i.soft_delete()
                    existed = CITypeAttributeGroup.create(flush=True, **_group)

                for order, attr in enumerate(group['attributes'] or []):
                    CITypeAttributeGroupItem.create(group_id=new.id, attr_id=attr['id'], order=order)
                    item_existed = CITypeAttributeGroupItem.get_by(group_id=existed.id,
                                                                   attr_id=attr_id_map.get(attr['id'], attr['id']),
                                                                   first=True, to_dict=False)
                    if item_existed is None:
                        CITypeAttributeGroupItem.create(group_id=existed.id,
                                                        attr_id=attr_id_map.get(attr['id'], attr['id']),
                                                        order=order)
                    else:
                        item_existed.update(flush=True, order=order)

        try:
            db.session.commit()
        except Exception as e:
            db.session.rollback()
            raise Exception(str(e))

    @staticmethod
    def _import_auto_discovery_rules(rules):
        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleCRUD
        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCITypeCRUD

        for rule in rules:
            ci_type = CITypeCache.get(rule.pop('type_name', None))
            adr = rule.pop('adr', {}) or {}

            if ci_type:
                rule['type_id'] = ci_type.id
                if rule.get('adr_name'):
                    ad_rule = AutoDiscoveryRuleCRUD.get_by_name(rule.pop("adr_name"))
                    adr.pop('created_at', None)
                    adr.pop('updated_at', None)
                    adr.pop('id', None)

                    if ad_rule:
                        rule['adr_id'] = ad_rule.id
                        ad_rule.update(**adr)

                    elif adr:
                        ad_rule = AutoDiscoveryRuleCRUD().add(**adr)
                        rule['adr_id'] = ad_rule.id
                else:
                    continue

                rule.pop("id", None)
                rule.pop("created_at", None)
                rule.pop("updated_at", None)

                rule['uid'] = current_user.uid
                try:
                    AutoDiscoveryCITypeCRUD.add(**rule)
                except Exception as e:
                    current_app.logger.warning("import auto discovery rules failed: {}".format(e))

                existed = False
                for i in AutoDiscoveryCIType.get_by(type_id=ci_type.id, adr_id=rule['adr_id'], to_dict=False):
                    if ((i.extra_option or {}).get('alias') or None) == (
                            (rule.get('extra_option') or {}).get('alias') or None):
                        existed = True
                        AutoDiscoveryCITypeCRUD().update(i.id, **rule)
                        break

                if not existed:
                    try:
                        AutoDiscoveryCITypeCRUD().add(**rule)
                    except Exception as e:
                        current_app.logger.warning("import auto discovery rules failed: {}".format(e))

    @staticmethod
    def _import_icons(icons):
        from api.lib.common_setting.upload_file import CommonFileCRUD
        for icon_name in icons:
            if icons[icon_name]:
                try:
                    CommonFileCRUD().save_str_to_file(icon_name, icons[icon_name])
                except Exception as e:
                    current_app.logger.warning("save icon failed: {}".format(e))

    def import_template(self, tpt):
        import time
        s = time.time()
        self._import_attributes(tpt.get('type2attributes') or {})
        attr_id_map = self._import_attributes(tpt.get('type2attributes') or {})
        current_app.logger.info('import attributes cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_ci_types(tpt.get('ci_types') or [])
        ci_type_id_map = self._import_ci_types(tpt.get('ci_types') or [], attr_id_map)
        current_app.logger.info('import ci_types cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_ci_type_groups(tpt.get('ci_type_groups') or [])
        self._import_ci_type_groups(tpt.get('ci_type_groups') or [], ci_type_id_map)
        current_app.logger.info('import ci_type_groups cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_relation_types(tpt.get('relation_types') or [])
        relation_type_id_map = self._import_relation_types(tpt.get('relation_types') or [])
        current_app.logger.info('import relation_types cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_ci_type_relations(tpt.get('ci_type_relations') or [])
        self._import_ci_type_relations(tpt.get('ci_type_relations') or [], ci_type_id_map, relation_type_id_map)
        current_app.logger.info('import ci_type_relations cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_type_attributes(tpt.get('type2attributes') or {})
        self._import_type_attributes(tpt.get('type2attributes') or {}, ci_type_id_map, attr_id_map)
        current_app.logger.info('import type2attributes cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_attribute_group(tpt.get('type2attribute_group') or {})
        self._import_attribute_group(tpt.get('type2attribute_group') or {}, ci_type_id_map, attr_id_map)
        current_app.logger.info('import type2attribute_group cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_auto_discovery_rules(tpt.get('ci_type_auto_discovery_rules') or [])
        current_app.logger.info('import ci_type_auto_discovery_rules cost: {}'.format(time.time() - s))

        s = time.time()
        self._import_icons(tpt.get('icons') or {})
        current_app.logger.info('import icons cost: {}'.format(time.time() - s))

    @staticmethod
    def export_template():
        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCITypeCRUD
        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleCRUD
        from api.lib.common_setting.upload_file import CommonFileCRUD

        tpt = dict(
            ci_types=CITypeManager.get_ci_types(),
            ci_type_groups=CITypeGroupManager.get(),
            relation_types=[i.to_dict() for i in RelationTypeManager.get_all()],
            ci_type_relations=CITypeRelationManager.get(),
            ci_type_auto_discovery_rules=list(),
            type2attributes=dict(),
            type2attribute_group=dict(),
            icons=dict()
        )

        def get_icon_value(icon):
            try:
                return CommonFileCRUD().get_file_binary_str(icon)
            except:
                return ""

        ad_rules = AutoDiscoveryCITypeCRUD.get_all()
        rules = []
@@ -1088,23 +1235,91 @@ class CITypeTemplateManager(object):
            if r.get('adr_id'):
                adr = AutoDiscoveryRuleCRUD.get_by_id(r.pop('adr_id'))
                r['adr_name'] = adr and adr.name
                r['adr'] = adr and adr.to_dict() or {}

                icon_url = r['adr'].get('option', {}).get('icon', {}).get('url')
                if icon_url and icon_url not in tpt['icons']:
                    tpt['icons'][icon_url] = get_icon_value(icon_url)

            rules.append(r)

        tpt = dict(
            ci_types=CITypeManager.get_ci_types(),
            ci_type_groups=CITypeGroupManager.get(),
            relation_types=[i.to_dict() for i in RelationTypeManager.get_all()],
            ci_type_relations=CITypeRelationManager.get(),
            ci_type_auto_discovery_rules=rules,
            type2attributes=dict(),
            type2attribute_group=dict()
        )
        tpt['ci_type_auto_discovery_rules'] = rules

        for ci_type in tpt['ci_types']:
            if ci_type['icon'] and len(ci_type['icon'].split('$$')) > 3:
                icon_url = ci_type['icon'].split('$$')[3]
                if icon_url not in tpt['icons']:
                    tpt['icons'][icon_url] = get_icon_value(icon_url)

            tpt['type2attributes'][ci_type['id']] = CITypeAttributeManager.get_attributes_by_type_id(
                ci_type['id'], choice_web_hook_parse=False, choice_other_parse=False)

            for attr in tpt['type2attributes'][ci_type['id']]:
                for i in (attr.get('choice_value') or []):
                    if (i[1] or {}).get('icon', {}).get('url') and len(i[1]['icon']['url'].split('$$')) > 3:
                        icon_url = i[1]['icon']['url'].split('$$')[3]
                        if icon_url not in tpt['icons']:
                            tpt['icons'][icon_url] = get_icon_value(icon_url)

            tpt['type2attribute_group'][ci_type['id']] = CITypeAttributeGroupManager.get_by_type_id(ci_type['id'])

        return tpt

    @staticmethod
    def export_template_by_type(type_id):
        ci_type = CITypeCache.get(type_id) or abort(404, ErrFormat.ci_type_not_found2.format("id={}".format(type_id)))

        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCITypeCRUD
        from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleCRUD
        from api.lib.common_setting.upload_file import CommonFileCRUD

        tpt = dict(
            ci_types=CITypeManager.get_ci_types(type_name=ci_type.name),
            ci_type_auto_discovery_rules=list(),
            type2attributes=dict(),
            type2attribute_group=dict(),
            icons=dict()
        )

        def get_icon_value(icon):
            try:
                return CommonFileCRUD().get_file_binary_str(icon)
            except:
                return ""

        ad_rules = AutoDiscoveryCITypeCRUD.get_by_type_id(ci_type.id)
        rules = []
        for r in ad_rules:
            r = r.to_dict()
            r['type_name'] = ci_type and ci_type.name
            if r.get('adr_id'):
                adr = AutoDiscoveryRuleCRUD.get_by_id(r.pop('adr_id'))
                r['adr_name'] = adr and adr.name
                r['adr'] = adr and adr.to_dict() or {}

                icon_url = r['adr'].get('option', {}).get('icon', {}).get('url')
                if icon_url and icon_url not in tpt['icons']:
                    tpt['icons'][icon_url] = get_icon_value(icon_url)

            rules.append(r)
        tpt['ci_type_auto_discovery_rules'] = rules

        for ci_type in tpt['ci_types']:
            if ci_type['icon'] and len(ci_type['icon'].split('$$')) > 3:
                icon_url = ci_type['icon'].split('$$')[3]
                if icon_url not in tpt['icons']:
                    tpt['icons'][icon_url] = get_icon_value(icon_url)

            tpt['type2attributes'][ci_type['id']] = CITypeAttributeManager.get_attributes_by_type_id(
                ci_type['id'], choice_web_hook_parse=False, choice_other_parse=False)

            for attr in tpt['type2attributes'][ci_type['id']]:
                for i in (attr.get('choice_value') or []):
                    if (i[1] or {}).get('icon', {}).get('url') and len(i[1]['icon']['url'].split('$$')) > 3:
                        icon_url = i[1]['icon']['url'].split('$$')[3]
                        if icon_url not in tpt['icons']:
                            tpt['icons'][icon_url] = get_icon_value(icon_url)

            tpt['type2attribute_group'][ci_type['id']] = CITypeAttributeGroupManager.get_by_type_id(ci_type['id'])

        return tpt
@@ -12,6 +12,8 @@ class ValueTypeEnum(BaseEnum):
    DATE = "4"
    TIME = "5"
    JSON = "6"
    PASSWORD = TEXT
    LINK = TEXT


class ConstraintEnum(BaseEnum):
@@ -67,6 +69,7 @@ class ResourceTypeEnum(BaseEnum):
    CI_TYPE_RELATION = "CITypeRelation"  # create/delete/grant
    RELATION_VIEW = "RelationView"  # read/update/delete/grant
    CI_FILTER = "CIFilter"  # read
    PAGE = "page"  # read


class PermEnum(BaseEnum):
@@ -98,6 +101,7 @@ class AttributeDefaultValueEnum(BaseEnum):
CMDB_QUEUE = "one_cmdb_async"
REDIS_PREFIX_CI = "ONE_CMDB"
REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"
REDIS_PREFIX_CI_RELATION2 = "CMDB_CI_RELATION2"

BUILTIN_KEYWORDS = {'id', '_id', 'ci_id', 'type', '_type', 'ci_type'}
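One consequence of the const.py change worth noting: PASSWORD (and LINK) alias TEXT, so password values reuse the text value table, which is exactly what ci.py resolves via ValueTypeMap.table[ValueTypeEnum.PASSWORD] in the hunks above. A tiny sketch:

# illustrative sketch only -- not part of the diff
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.utils import ValueTypeMap

assert ValueTypeEnum.PASSWORD == ValueTypeEnum.TEXT
password_table = ValueTypeMap.table[ValueTypeEnum.PASSWORD]    # same model as plain text values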
@@ -135,7 +135,7 @@ class AttributeHistoryManger(object):
        from api.lib.cmdb.ci import CIManager
        cis = CIManager().get_cis_by_ids(list(ci_ids),
                                         unique_required=True)
        cis = {i['_id']: i for i in cis}
        cis = {i['_id']: i for i in cis if i}

        return total, res, cis


@@ -143,11 +143,14 @@ class CIFilterPermsCRUD(DBMixin):
                          first=True, to_dict=False)

        if obj is not None:
            resource = None
            if current_app.config.get('USE_ACL'):
                ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
                resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)

            obj.soft_delete()

            return resource


def has_perm_for_ci(arg_name, resource_type, perm, callback=None, app=None):
    def decorator_has_perm(func):
@@ -14,7 +14,10 @@ from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributesCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.const import PermEnum, ResourceTypeEnum, RoleEnum
from api.lib.cmdb.const import ConstraintEnum
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.exception import AbortException
@@ -229,14 +232,28 @@ class PreferenceManager(object):
                if not parents:
                    return

        for l in leaf:
            _find_parent(l)
        for _l in leaf:
            _find_parent(_l)

        for node_id in node2show_types:
            node2show_types[node_id] = [CITypeCache.get(i).to_dict() for i in set(node2show_types[node_id])]

        topo_flatten = list(toposort.toposort_flatten(topo))
        level2constraint = {}
        for i, _ in enumerate(topo_flatten[1:]):
            ctr = CITypeRelation.get_by(
                parent_id=topo_flatten[i], child_id=topo_flatten[i + 1], first=True, to_dict=False)
            level2constraint[i + 1] = ctr and ctr.constraint

        if leaf2show_types.get(topo_flatten[-1]):
            ctr = CITypeRelation.get_by(
                parent_id=topo_flatten[-1],
                child_id=leaf2show_types[topo_flatten[-1]][0], first=True, to_dict=False)
            level2constraint[len(topo_flatten)] = ctr and ctr.constraint

        result[view_name] = dict(topo=list(map(list, toposort.toposort(topo))),
                                 topo_flatten=list(toposort.toposort_flatten(topo)),
                                 topo_flatten=topo_flatten,
                                 level2constraint=level2constraint,
                                 leaf=leaf,
                                 leaf2show_types=leaf2show_types,
                                 node2show_types=node2show_types,
@@ -338,3 +355,29 @@ class PreferenceManager(object):

        for i in PreferenceTreeView.get_by(type_id=type_id, uid=uid, to_dict=False):
            i.soft_delete()

    @staticmethod
    def can_edit_relation(parent_id, child_id):
        views = PreferenceRelationView.get_by(to_dict=False)
        for view in views:
            has_m2m = False
            last_node_id = None
            for cr in view.cr_ids:
                _rel = CITypeRelation.get_by(parent_id=cr['parent_id'], child_id=cr['child_id'],
                                             first=True, to_dict=False)
                if _rel and _rel.constraint == ConstraintEnum.Many2Many:
                    has_m2m = True

                    if parent_id == _rel.parent_id and child_id == _rel.child_id:
                        return False

                if _rel:
                    last_node_id = _rel.child_id

            if parent_id == last_node_id:
                rels = CITypeRelation.get_by(parent_id=last_node_id, to_dict=False)
                for rel in rels:
                    if rel.child_id == child_id and has_m2m:
                        return False

        return True
@@ -4,6 +4,8 @@ from api.lib.resp_format import CommonErrFormat


class ErrFormat(CommonErrFormat):
ci_type_config = "模型配置"

invalid_relation_type = "无效的关系类型: {}"
ci_type_not_found = "模型不存在!"
argument_attributes_must_be_list = "参数 attributes 类型必须是列表"
@@ -31,6 +33,7 @@ class ErrFormat(CommonErrFormat):
unique_key_required = "主键字段 {} 缺失"
ci_is_already_existed = "CI 已经存在!"
relation_constraint = "关系约束: {}, 校验失败 "
m2m_relation_constraint = "多对多关系 限制: 模型 {} <-> {} 已经存在多对多关系!"
relation_not_found = "CI关系: {} 不存在"
ci_search_Parentheses_invalid = "搜索表达式里小括号前不支持: 或、非"

@@ -95,3 +98,6 @@ class ErrFormat(CommonErrFormat):
ci_filter_perm_cannot_or_query = "CI过滤授权 暂时不支持 或 查询"
ci_filter_perm_attr_no_permission = "您没有属性 {} 的操作权限!"
ci_filter_perm_ci_no_permission = "您没有该CI的操作权限!"

password_save_failed = "保存密码失败: {}"
password_load_failed = "获取密码失败: {}"
@@ -7,6 +7,7 @@ QUERY_CIS_BY_VALUE_TABLE = """
attr.alias AS attr_alias,
attr.value_type,
attr.is_list,
attr.is_password,
c_cis.type_id,
{0}.ci_id,
{0}.attr_id,
@@ -26,7 +27,8 @@ QUERY_CIS_BY_IDS = """
A.attr_alias,
A.value,
A.value_type,
A.is_list
A.is_list,
A.is_password
FROM
({1}) AS A {0}
ORDER BY A.ci_id;
@@ -43,7 +45,7 @@ FACET_QUERY1 = """

FACET_QUERY = """
SELECT {0}.value,
count({0}.ci_id)
count(distinct {0}.ci_id)
FROM {0}
INNER JOIN ({1}) AS F ON F.ci_id={0}.ci_id
WHERE {0}.attr_id={2:d}
@@ -9,6 +9,7 @@ import time
from flask import current_app
from flask_login import current_user
from jinja2 import Template
from sqlalchemy import text

from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
@@ -28,6 +29,7 @@ from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_NO_ATTR
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_TYPE
from api.lib.cmdb.search.ci.db.query_sql import QUERY_UNION_CI_ATTRIBUTE_IS_NULL
from api.lib.cmdb.utils import TableMap
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.lib.utils import handle_arg_list
@@ -311,7 +313,7 @@ class Search(object):
start = time.time()
execute = db.session.execute
# current_app.logger.debug(v_query_sql)
res = execute(v_query_sql).fetchall()
res = execute(text(v_query_sql)).fetchall()
end_time = time.time()
current_app.logger.debug("query ci ids time is: {0}".format(end_time - start))

@@ -524,15 +526,15 @@ class Search(object):
if k:
table_name = TableMap(attr=attr).table_name
query_sql = FACET_QUERY.format(table_name, self.query_sql, attr.id)
# current_app.logger.warning(query_sql)
result = db.session.execute(query_sql).fetchall()
result = db.session.execute(text(query_sql)).fetchall()
facet[k] = result

facet_result = dict()
for k, v in facet.items():
if not k.startswith('_'):
a = getattr(AttributeCache.get(k), self.ret_key)
facet_result[a] = [(f[0], f[1], a) for f in v]
attr = AttributeCache.get(k)
a = getattr(attr, self.ret_key)
facet_result[a] = [(ValueTypeMap.serialize[attr.value_type](f[0]), f[1], a) for f in v]

return facet_result
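The switch from execute(query_sql) to execute(text(query_sql)) matches SQLAlchemy's requirement (enforced in 1.4/2.0) that raw SQL strings be wrapped in text(). A minimal sketch of the pattern with a placeholder in-memory engine and a made-up query:

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///:memory:")  # placeholder engine for illustration
    with engine.connect() as conn:
        # Plain strings raise an error in SQLAlchemy 2.x; text() marks them as SQL.
        rows = conn.execute(text("SELECT 1 AS ci_id")).fetchall()
        print(rows)  # [(1,)]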
@@ -1,6 +1,4 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
import json
|
||||
from collections import Counter
|
||||
|
||||
@@ -10,11 +8,14 @@ from flask import current_app
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeRelationManager
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
|
||||
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import CIRelation
|
||||
|
||||
|
||||
class Search(object):
|
||||
@@ -26,7 +27,9 @@ class Search(object):
|
||||
page=1,
|
||||
count=None,
|
||||
sort=None,
|
||||
reverse=False):
|
||||
reverse=False,
|
||||
ancestor_ids=None,
|
||||
has_m2m=None):
|
||||
self.orig_query = query
|
||||
self.fl = fl
|
||||
self.facet_field = facet_field
|
||||
@@ -35,28 +38,85 @@ class Search(object):
|
||||
self.sort = sort or ("ci_id" if current_app.config.get("USE_ES") else None)
|
||||
|
||||
self.root_id = root_id
|
||||
self.level = level
|
||||
self.level = level or 0
|
||||
self.reverse = reverse
|
||||
|
||||
def _get_ids(self):
|
||||
self.level2constraint = CITypeRelationManager.get_level2constraint(
|
||||
root_id[0] if root_id and isinstance(root_id, list) else root_id,
|
||||
level[0] if isinstance(level, list) and level else level)
|
||||
|
||||
self.ancestor_ids = ancestor_ids
|
||||
self.has_m2m = has_m2m or False
|
||||
if not self.has_m2m:
|
||||
if self.ancestor_ids:
|
||||
self.has_m2m = True
|
||||
else:
|
||||
level = level[0] if isinstance(level, list) and level else level
|
||||
for _l, c in self.level2constraint.items():
|
||||
if _l < int(level) and c == ConstraintEnum.Many2Many:
|
||||
self.has_m2m = True
|
||||
|
||||
def _get_ids(self, ids):
|
||||
if self.level[-1] == 1 and len(ids) == 1:
|
||||
if self.ancestor_ids is None:
|
||||
return [i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0], to_dict=False)]
|
||||
|
||||
else:
|
||||
seconds = {i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0],
|
||||
ancestor_ids=self.ancestor_ids,
|
||||
to_dict=False)}
|
||||
|
||||
return list(seconds)
|
||||
|
||||
merge_ids = []
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
key = []
|
||||
_tmp = []
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
_tmp = list(map(lambda x: list(json.loads(x).keys()),
|
||||
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or [])))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
if not self.has_m2m:
|
||||
_tmp = map(lambda x: json.loads(x).keys(),
|
||||
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or []))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
|
||||
else:
|
||||
if not self.ancestor_ids:
|
||||
if level == 1:
|
||||
key, prefix = list(map(str, ids)), REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
if level == 1:
|
||||
key, prefix = ["{},{}".format(self.ancestor_ids, i) for i in ids], REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
_tmp = list(map(lambda x: json.loads(x).keys() if x else [], rd.get(key, prefix) or []))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
|
||||
if not key:
|
||||
return []
|
||||
|
||||
if level in self.level:
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
def _get_reverse_ids(self):
|
||||
def _get_reverse_ids(self, ids):
|
||||
merge_ids = []
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
level2ids = {}
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
ids, _level2ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
|
||||
if _level2ids.get(2):
|
||||
level2ids[level + 1] = _level2ids[2]
|
||||
|
||||
if level in self.level:
|
||||
merge_ids.extend(ids)
|
||||
if level in level2ids and level2ids[level]:
|
||||
merge_ids.extend(set(ids) & set(level2ids[level]))
|
||||
else:
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
@@ -64,7 +124,7 @@ class Search(object):
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
cis = [CI.get_by_id(_id) or abort(404, ErrFormat.ci_not_found.format("id={}".format(_id))) for _id in ids]
|
||||
|
||||
merge_ids = self._get_ids() if not self.reverse else self._get_reverse_ids()
|
||||
merge_ids = self._get_ids(ids) if not self.reverse else self._get_reverse_ids(ids)
|
||||
|
||||
if not self.orig_query or ("_type:" not in self.orig_query
|
||||
and "type_id:" not in self.orig_query
|
||||
@@ -76,11 +136,11 @@ class Search(object):
|
||||
type_ids.extend(CITypeRelationManager.get_child_type_ids(ci.type_id, level))
|
||||
else:
|
||||
type_ids.extend(CITypeRelationManager.get_parent_type_ids(ci.type_id, level))
|
||||
type_ids = list(set(type_ids))
|
||||
type_ids = set(type_ids)
|
||||
if self.orig_query:
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(list(map(str, type_ids))), self.orig_query)
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(map(str, type_ids)), self.orig_query)
|
||||
else:
|
||||
self.orig_query = "_type:({0})".format(";".join(list(map(str, type_ids))))
|
||||
self.orig_query = "_type:({0})".format(";".join(map(str, type_ids)))
|
||||
|
||||
if not merge_ids:
|
||||
# cis, counter, total, self.page, numfound, facet_
|
||||
@@ -104,30 +164,66 @@ class Search(object):
|
||||
ci_ids=merge_ids).search()
|
||||
|
||||
def statistics(self, type_ids):
|
||||
_tmp = []
|
||||
self.level = int(self.level)
|
||||
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for l in range(0, int(self.level)):
|
||||
if not l:
|
||||
_tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(ids, REDIS_PREFIX_CI_RELATION) or []]))
|
||||
_tmp = []
|
||||
level2ids = {}
|
||||
for lv in range(1, self.level + 1):
|
||||
level2ids[lv] = []
|
||||
|
||||
if lv == 1:
|
||||
if not self.has_m2m:
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
if not self.ancestor_ids:
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = ["{},{}".format(self.ancestor_ids, _id) for _id in ids]
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
level2ids[lv] = [[i] for i in key]
|
||||
|
||||
if not key:
|
||||
_tmp = []
|
||||
continue
|
||||
|
||||
if type_ids and lv == self.level:
|
||||
_tmp = list(map(lambda x: [i for i in x if i[1] in type_ids],
|
||||
(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(key, prefix) or []]))))
|
||||
else:
|
||||
_tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(key, prefix) or []]))
|
||||
|
||||
else:
|
||||
for idx, item in enumerate(_tmp):
|
||||
if item:
|
||||
if type_ids and l == self.level - 1:
|
||||
__tmp = list(
|
||||
map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
|
||||
if type_id in type_ids],
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
if not self.has_m2m:
|
||||
key, prefix = [i[0] for i in item], REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = list(set(['{},{}'.format(j, i[0]) for i in item for j in level2ids[lv - 1][idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
__tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
level2ids[lv].append(key)
|
||||
|
||||
if key:
|
||||
if type_ids and lv == self.level:
|
||||
__tmp = map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
|
||||
if type_id in type_ids],
|
||||
filter(lambda x: x is not None,
|
||||
rd.get(key, prefix) or []))
|
||||
else:
|
||||
__tmp = map(lambda x: list(json.loads(x).items()),
|
||||
filter(lambda x: x is not None,
|
||||
rd.get(key, prefix) or []))
|
||||
else:
|
||||
__tmp = []
|
||||
|
||||
_tmp[idx] = [j for i in __tmp for j in i]
|
||||
else:
|
||||
_tmp[idx] = []
|
||||
level2ids[lv].append([])
|
||||
|
||||
result = {str(_id): len(_tmp[idx]) for idx, _id in enumerate(ids)}
|
||||
|
||||
|
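The relation-search hunks above read cached CI relations from Redis under two prefixes: REDIS_PREFIX_CI_RELATION keyed by a single ci_id, and REDIS_PREFIX_CI_RELATION2 keyed by an "ancestor_ids,ci_id" path when many-to-many relations are involved. A sketch of the layout that the diff implies; the keys, ids, and the fake in-memory cache below are illustrative only, not taken from the real implementation:

    import json

    REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"    # key: "<ci_id>"
    REDIS_PREFIX_CI_RELATION2 = "CMDB_CI_RELATION2"  # key: "<ancestor_ids>,<ci_id>"

    fake_cache = {
        ("CMDB_CI_RELATION", "1"): json.dumps({"2": 5, "3": 5}),   # CI 1 -> children 2, 3 (type 5)
        ("CMDB_CI_RELATION2", "1,2"): json.dumps({"4": 7}),        # path 1,2 -> child 4 (type 7)
    }

    def get(keys, prefix):
        # stand-in for rd.get(keys, prefix): one JSON blob per key, child ci_id -> type_id
        return [fake_cache.get((prefix, k)) for k in keys]

    children = [j for blob in get(["1"], REDIS_PREFIX_CI_RELATION) if blob
                for j in json.loads(blob).keys()]
    print(children)  # ['2', '3']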
@@ -12,7 +12,7 @@ import api.models.cmdb as model
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ValueTypeEnum

TIME_RE = re.compile(r"^(20|21|22|23|[0-1]\d):[0-5]\d:[0-5]\d$")
TIME_RE = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')


def string2int(x):
@@ -21,7 +21,7 @@ def string2int(x):

def str2datetime(x):
try:
return datetime.datetime.strptime(x, "%Y-%m-%d")
return datetime.datetime.strptime(x, "%Y-%m-%d").date()
except ValueError:
pass
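The new TIME_RE drops the anchors, so a time can be found inside a longer value, while still limiting hours to 00-23; str2datetime now returns a date object for a bare "%Y-%m-%d" string. A quick check of both behaviours:

    import datetime
    import re

    TIME_RE = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')

    print(bool(TIME_RE.fullmatch("09:30:00")))           # True
    print(bool(TIME_RE.search("2024-01-02 23:59:59")))   # True: unanchored, matches inside longer strings
    print(bool(TIME_RE.fullmatch("24:00:00")))           # False: hours limited to 00-23

    print(datetime.datetime.strptime("2024-01-02", "%Y-%m-%d").date())  # datetime.date(2024, 1, 2)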
@@ -44,8 +44,8 @@ class ValueTypeMap(object):
ValueTypeEnum.FLOAT: float,
ValueTypeEnum.TEXT: lambda x: x if isinstance(x, six.string_types) else str(x),
ValueTypeEnum.TIME: lambda x: x if isinstance(x, six.string_types) else str(x),
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d"),
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S"),
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
}

@@ -64,6 +64,8 @@ class ValueTypeMap(object):
ValueTypeEnum.FLOAT: model.FloatChoice,
ValueTypeEnum.TEXT: model.TextChoice,
ValueTypeEnum.TIME: model.TextChoice,
ValueTypeEnum.DATE: model.TextChoice,
ValueTypeEnum.DATETIME: model.TextChoice,
}

table = {
@@ -97,7 +99,7 @@ class ValueTypeMap(object):
ValueTypeEnum.DATE: 'text',
ValueTypeEnum.TIME: 'text',
ValueTypeEnum.FLOAT: 'float',
ValueTypeEnum.JSON: 'object'
ValueTypeEnum.JSON: 'object',
}


@@ -110,7 +112,9 @@ class TableMap(object):
@property
def table(self):
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
if attr.value_type != ValueTypeEnum.TEXT and attr.value_type != ValueTypeEnum.JSON:
if attr.is_password or attr.is_link:
self.is_index = False
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
self.is_index = True
elif self.is_index is None:
self.is_index = attr.is_index
@@ -122,7 +126,9 @@ class TableMap(object):
@property
def table_name(self):
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
if attr.value_type != ValueTypeEnum.TEXT and attr.value_type != ValueTypeEnum.JSON:
if attr.is_password or attr.is_link:
self.is_index = False
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
self.is_index = True
elif self.is_index is None:
self.is_index = attr.is_index
@@ -66,9 +66,10 @@ class AttributeValueManager(object):
use_master=use_master,
to_dict=False)
field_name = getattr(attr, ret_key)

if attr.is_list:
res[field_name] = [ValueTypeMap.serialize[attr.value_type](i.value) for i in rs]
elif attr.is_password and rs:
res[field_name] = '******' if rs[0].value else ''
else:
res[field_name] = ValueTypeMap.serialize[attr.value_type](rs[0].value) if rs else None

@@ -131,8 +132,7 @@ class AttributeValueManager(object):
return AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id)

@staticmethod
def _write_change2(changed):
record_id = None
def write_change2(changed, record_id=None):
for ci_id, attr_id, operate_type, old, new, type_id in changed:
record_id = AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id,
commit=False, flush=False)
@@ -284,9 +284,9 @@ class AttributeValueManager(object):
except Exception as e:
db.session.rollback()
current_app.logger.warning(str(e))
return abort(400, ErrFormat.attribute_value_unknown_error.format(str(e)))
return abort(400, ErrFormat.attribute_value_unknown_error.format(e.args[0]))

return self._write_change2(changed)
return self.write_change2(changed)

@staticmethod
def delete_attr_value(attr_id, ci_id):
@@ -1,12 +1,13 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app

from api.lib.common_setting.resp_format import ErrFormat
from api.lib.perm.acl.app import AppCRUD
from api.lib.perm.acl.cache import RoleCache, AppCache
from api.lib.perm.acl.permission import PermissionCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD, ResourceCRUD
from api.lib.perm.acl.role import RoleCRUD, RoleRelationCRUD
from api.lib.perm.acl.user import UserCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD, ResourceCRUD


class ACLManager(object):
@@ -79,20 +80,22 @@ class ACLManager(object):
return role.to_dict()

@staticmethod
def delete_role(_id, payload):
def delete_role(_id):
RoleCRUD.delete_role(_id)
return dict(rid=_id)

def get_user_info(self, username):
from api.lib.perm.acl.acl import ACLManager as ACL
user_info = ACL().get_user_info(username, self.app_name)
result = dict(name=user_info.get('nickname') or username,
username=user_info.get('username') or username,
email=user_info.get('email'),
uid=user_info.get('uid'),
rid=user_info.get('rid'),
role=dict(permissions=user_info.get('parents')),
avatar=user_info.get('avatar'))
result = dict(
name=user_info.get('nickname') or username,
username=user_info.get('username') or username,
email=user_info.get('email'),
uid=user_info.get('uid'),
rid=user_info.get('rid'),
role=dict(permissions=user_info.get('parents')),
avatar=user_info.get('avatar')
)

return result

@@ -109,8 +112,32 @@ class ACLManager(object):
id2perms=id2perms
)

def create_resources_type(self, payload):
payload['app_id'] = self.validate_app().id
rt = ResourceTypeCRUD.add(**payload)

return rt.to_dict()

def update_resources_type(self, _id, payload):
rt = ResourceTypeCRUD.update(_id, **payload)

return rt.to_dict()

def create_resource(self, payload):
payload['app_id'] = self.validate_app().id
resource = ResourceCRUD.add(**payload)

return resource.to_dict()

def get_resource_by_type(self, q, u, rt_id, page=1, page_size=999999):
numfound, res = ResourceCRUD.search(q, u, self.validate_app().id, rt_id, page, page_size)
return res

def grant_resource(self, rid, resource_id, perms):
PermissionCRUD.grant(rid, perms, resource_id=resource_id, group_id=None)

@staticmethod
def create_app(payload):
rt = AppCRUD.add(**payload)

return rt.to_dict()
@@ -1,14 +1,24 @@
|
||||
from flask import abort
|
||||
import copy
|
||||
import json
|
||||
|
||||
from flask import abort, current_app
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError, LDAPSocketOpenError
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import CommonData
|
||||
from api.lib.utils import AESCrypto
|
||||
from api.lib.common_setting.const import AuthCommonConfig, AuthenticateType, AuthCommonConfigAutoRedirect, TestType
|
||||
|
||||
|
||||
class CommonDataCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_data_by_type(data_type):
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
return CommonData.get_by(data_type=data_type)
|
||||
|
||||
@staticmethod
|
||||
@@ -18,6 +28,8 @@ class CommonDataCRUD(object):
|
||||
@staticmethod
|
||||
def create_new_data(data_type, **kwargs):
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
|
||||
return CommonData.create(data_type=data_type, **kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
@@ -29,6 +41,7 @@ class CommonDataCRUD(object):
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
@@ -40,7 +53,230 @@ class CommonDataCRUD(object):
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def check_auth_type(data_type):
|
||||
if data_type in list(AuthenticateType.all()) + [AuthCommonConfig]:
|
||||
abort(400, ErrFormat.common_data_not_support_auth_type.format(data_type))
|
||||
|
||||
@staticmethod
|
||||
def set_auth_auto_redirect_enable(_value: int):
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig, to_dict=False)
|
||||
if not existed:
|
||||
CommonDataCRUD.create_new_data(AuthCommonConfig, data={AuthCommonConfigAutoRedirect: _value})
|
||||
else:
|
||||
data = existed.data
|
||||
data = copy.deepcopy(existed.data) if data else {}
|
||||
data[AuthCommonConfigAutoRedirect] = _value
|
||||
CommonDataCRUD.update_data(existed.id, data=data)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def get_auth_auto_redirect_enable():
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig)
|
||||
if not existed:
|
||||
return 0
|
||||
data = existed.get('data', {})
|
||||
if not data:
|
||||
return 0
|
||||
return data.get(AuthCommonConfigAutoRedirect, 0)
|
||||
|
||||
|
||||
class AuthenticateDataCRUD(object):
|
||||
common_type_list = [AuthCommonConfig]
|
||||
|
||||
def __init__(self, _type):
|
||||
self._type = _type
|
||||
self.record = None
|
||||
self.decrypt_data = {}
|
||||
|
||||
def get_support_type_list(self):
|
||||
return list(AuthenticateType.all()) + self.common_type_list
|
||||
|
||||
def get(self):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data
|
||||
|
||||
def get_by_key(self, _key):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data.get(_key, None)
|
||||
|
||||
def get_record(self, to_dict=False) -> CommonData:
|
||||
return CommonData.get_by(first=True, data_type=self._type, to_dict=to_dict)
|
||||
|
||||
def get_record_with_decrypt(self) -> dict:
|
||||
record = CommonData.get_by(first=True, data_type=self._type, to_dict=True)
|
||||
if not record:
|
||||
return {}
|
||||
data = self.get_decrypt_dict(record.get('data', ''))
|
||||
record['data'] = data
|
||||
return record
|
||||
|
||||
def get_decrypt_dict(self, data):
|
||||
decrypt_str = self.decrypt(data)
|
||||
try:
|
||||
return json.loads(decrypt_str)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
def get_decrypt_data(self) -> dict:
|
||||
self.record = self.get_record()
|
||||
if not self.record:
|
||||
return self.get_from_config()
|
||||
return self.get_decrypt_dict(self.record.data)
|
||||
|
||||
def get_from_config(self):
|
||||
return current_app.config.get(self._type, {})
|
||||
|
||||
def check_by_type(self) -> None:
|
||||
existed = self.get_record()
|
||||
if existed:
|
||||
abort(400, ErrFormat.common_data_already_existed.format(self._type))
|
||||
|
||||
def create(self, data) -> CommonData:
|
||||
self.check_by_type()
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return CommonData.create(data_type=self._type, data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return CommonData.create(data_type=self._type, data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update_by_record(self, record, data) -> CommonData:
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return record.update(data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return record.update(data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update(self, _id, data) -> CommonData:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
|
||||
return self.update_by_record(existed, data)
|
||||
|
||||
@staticmethod
|
||||
def delete(_id) -> None:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def encrypt(data) -> str:
|
||||
if type(data) is dict:
|
||||
try:
|
||||
data = json.dumps(data)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
return AESCrypto().encrypt(data)
|
||||
|
||||
@staticmethod
|
||||
def decrypt(data) -> str:
|
||||
return AESCrypto().decrypt(data)
|
||||
|
||||
@staticmethod
|
||||
def get_enable_list():
|
||||
all_records = CommonData.query.filter(
|
||||
CommonData.data_type.in_(AuthenticateType.all()),
|
||||
CommonData.deleted == 0
|
||||
).all()
|
||||
enable_list = []
|
||||
for auth_type in AuthenticateType.all():
|
||||
record = list(filter(lambda x: x.data_type == auth_type, all_records))
|
||||
if not record:
|
||||
config = current_app.config.get(auth_type, None)
|
||||
if not config:
|
||||
continue
|
||||
|
||||
if config.get('enable', False):
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
continue
|
||||
|
||||
try:
|
||||
decrypt_data = json.loads(AuthenticateDataCRUD.decrypt(record[0].data))
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
continue
|
||||
|
||||
if decrypt_data.get('enable', 0) == 1:
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
auth_auto_redirect = CommonDataCRUD.get_auth_auto_redirect_enable()
|
||||
|
||||
return dict(
|
||||
enable_list=enable_list,
|
||||
auth_auto_redirect=auth_auto_redirect,
|
||||
)
|
||||
|
||||
def test(self, test_type, data):
|
||||
type_lower = self._type.lower()
|
||||
func_name = f'test_{type_lower}'
|
||||
if hasattr(self, func_name):
|
||||
try:
|
||||
return getattr(self, f'test_{type_lower}')(test_type, data)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
abort(400, ErrFormat.not_support_test.format(self._type))
|
||||
|
||||
@staticmethod
|
||||
def test_ldap(test_type, data):
|
||||
ldap_server = data.get('ldap_server')
|
||||
ldap_user_dn = data.get('ldap_user_dn', '{}')
|
||||
|
||||
server = Server(ldap_server, connect_timeout=2)
|
||||
if not server.check_availability():
|
||||
raise Exception(ErrFormat.ldap_server_connect_not_available)
|
||||
else:
|
||||
if test_type == TestType.Connect:
|
||||
return True
|
||||
|
||||
username = data.get('username', None)
|
||||
if not username:
|
||||
raise Exception(ErrFormat.ldap_test_username_required)
|
||||
user = ldap_user_dn.format(username)
|
||||
password = data.get('password', None)
|
||||
|
||||
try:
|
||||
Connection(server, user=user, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
ldap_domain = data.get('ldap_domain')
|
||||
user_with_domain = f"{username}@{ldap_domain}"
|
||||
try:
|
||||
Connection(server, user=user_with_domain, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
except LDAPSocketOpenError:
|
||||
raise Exception(ErrFormat.ldap_server_connect_timeout)
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
return True
|
||||
|
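The AuthenticateDataCRUD hunks above serialise the auth configuration to JSON and run it through the project's AESCrypto helper before it is stored in CommonData.data. A minimal round-trip sketch; the config values are made up, and AESCrypto is shown only as it is used in the diff:

    import json

    from api.lib.utils import AESCrypto  # project helper, as imported in the diff

    config = {"enable": 1, "client_id": "demo", "secret": "not-a-real-secret"}  # illustrative values

    cipher_text = AESCrypto().encrypt(json.dumps(config))    # what create()/update_by_record() store
    restored = json.loads(AESCrypto().decrypt(cipher_text))  # what get_decrypt_dict() returns

    assert restored == config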
@@ -1,4 +1,6 @@
# -*- coding:utf-8 -*-
from urllib.parse import urlparse

from api.extensions import cache
from api.models.common_setting import CompanyInfo

@@ -11,6 +13,7 @@ class CompanyInfoCRUD(object):

@staticmethod
def create(**kwargs):
CompanyInfoCRUD.check_data(**kwargs)
res = CompanyInfo.create(**kwargs)
CompanyInfoCache.refresh(res.info)
return res
@@ -22,10 +25,26 @@ class CompanyInfoCRUD(object):
if not existed:
existed = CompanyInfoCRUD.create(**kwargs)
else:
CompanyInfoCRUD.check_data(**kwargs)
existed = existed.update(**kwargs)
CompanyInfoCache.refresh(existed.info)
return existed

@staticmethod
def check_data(**kwargs):
info = kwargs.get('info', {})
info['messenger'] = CompanyInfoCRUD.check_messenger(info.get('messenger', None))

kwargs['info'] = info

@staticmethod
def check_messenger(messenger):
if not messenger:
return messenger

parsed_url = urlparse(messenger)
return f"{parsed_url.scheme}://{parsed_url.netloc}"


class CompanyInfoCache(object):
key = 'CompanyInfoCache::'
@@ -41,4 +60,4 @@ class CompanyInfoCache(object):

@classmethod
def refresh(cls, info):
cache.set(cls.key, info)
cache.set(cls.key, info)
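check_messenger normalises whatever URL is stored for the messenger down to scheme plus host, dropping any path or query string. For example, with a hypothetical input:

    from urllib.parse import urlparse

    messenger = "https://messenger.example.com/api/v1/send?token=xyz"  # hypothetical input
    parsed = urlparse(messenger)
    print(f"{parsed.scheme}://{parsed.netloc}")  # https://messenger.example.com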
@@ -19,3 +19,19 @@ BotNameMap = {
'feishuApp': 'feishuBot',
'dingdingApp': 'dingdingBot',
}


class AuthenticateType(BaseEnum):
CAS = 'CAS'
OAUTH2 = 'OAUTH2'
OIDC = 'OIDC'
LDAP = 'LDAP'


AuthCommonConfig = 'AuthCommonConfig'
AuthCommonConfigAutoRedirect = 'auto_redirect'


class TestType(BaseEnum):
Connect = 'connect'
Login = 'login'
@@ -1,6 +1,6 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import abort
|
||||
from flask import abort, current_app
|
||||
from treelib import Tree
|
||||
from wtforms import Form
|
||||
from wtforms import IntegerField
|
||||
@@ -9,6 +9,7 @@ from wtforms import validators
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.models.common_setting import Department, Employee
|
||||
|
||||
@@ -152,6 +153,10 @@ class DepartmentForm(Form):
|
||||
|
||||
class DepartmentCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_id(d_id, to_dict=True):
|
||||
return Department.get_by(first=True, department_id=d_id, to_dict=to_dict)
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
DepartmentCRUD.check_department_name_unique(kwargs['department_name'])
|
||||
@@ -186,10 +191,11 @@ class DepartmentCRUD(object):
|
||||
filter(lambda d: d['department_id'] == department_parent_id, allow_p_d_id_list))
|
||||
if len(target) == 0:
|
||||
try:
|
||||
d = Department.get_by(
|
||||
dep = Department.get_by(
|
||||
first=True, to_dict=False, department_id=department_parent_id)
|
||||
name = d.department_name if d else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
name = dep.department_name if dep else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
name = ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
abort(400, ErrFormat.cannot_to_be_parent_department.format(name))
|
||||
|
||||
@@ -253,7 +259,7 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
RoleCRUD.delete_role(existed.acl_rid)
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error(str(e))
|
||||
|
||||
return existed.soft_delete()
|
||||
|
||||
@@ -268,7 +274,7 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
tree.remove_subtree(department_id)
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error(str(e))
|
||||
|
||||
[allow_d_id_list.append({'department_id': int(n.identifier), 'department_name': n.tag}) for n in
|
||||
tree.all_nodes()]
|
||||
@@ -390,6 +396,125 @@ class DepartmentCRUD(object):
|
||||
[id_list.append(int(n.identifier))
|
||||
for n in tmp_tree.all_nodes()]
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error(str(e))
|
||||
|
||||
return id_list
|
||||
|
||||
|
||||
class EditDepartmentInACL(object):
|
||||
|
||||
@staticmethod
|
||||
def add_department_to_acl(department_id, op_uid):
|
||||
db_department = DepartmentCRUD.get_department_by_id(department_id, to_dict=False)
|
||||
if not db_department:
|
||||
return
|
||||
|
||||
from api.models.acl import Role
|
||||
role = Role.get_by(first=True, name=db_department.department_name, app_id=None)
|
||||
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
if role is None:
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'name': db_department.department_name,
|
||||
}
|
||||
role = acl.create_role(payload)
|
||||
|
||||
acl_rid = role.get('id') if role else 0
|
||||
|
||||
db_department.update(
|
||||
acl_rid=acl_rid
|
||||
)
|
||||
info = f"add_department_to_acl, acl_rid: {acl_rid}"
|
||||
current_app.logger.info(info)
|
||||
return info
|
||||
|
||||
@staticmethod
|
||||
def delete_department_from_acl(department_rids, op_uid):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
|
||||
result = []
|
||||
|
||||
for rid in department_rids:
|
||||
try:
|
||||
acl.delete_role(rid)
|
||||
except Exception as e:
|
||||
result.append(f"delete_department_in_acl, rid: {rid}, error: {e}")
|
||||
continue
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def edit_department_name_in_acl(d_rid: int, d_name: str, op_uid: int):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
payload = {
|
||||
'name': d_name
|
||||
}
|
||||
try:
|
||||
acl.edit_role(d_rid, payload)
|
||||
except Exception as e:
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, error: {e}"
|
||||
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, success"
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_department_in_acl(e_list: list, new_d_id: int, op_uid: int):
|
||||
result = []
|
||||
new_department = DepartmentCRUD.get_department_by_id(new_d_id, False)
|
||||
if not new_department:
|
||||
result.append(f"{new_d_id} new_department is None")
|
||||
return result
|
||||
|
||||
from api.models.acl import Role
|
||||
new_role = Role.get_by(first=True, name=new_department.department_name, app_id=None)
|
||||
new_d_rid_in_acl = new_role.get('id') if new_role else 0
|
||||
if new_d_rid_in_acl == 0:
|
||||
return
|
||||
|
||||
if new_d_rid_in_acl != new_department.acl_rid:
|
||||
new_department.update(
|
||||
acl_rid=new_d_rid_in_acl
|
||||
)
|
||||
new_department_acl_rid = new_department.acl_rid if new_d_rid_in_acl == new_department.acl_rid else \
|
||||
new_d_rid_in_acl
|
||||
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
for employee in e_list:
|
||||
old_department = DepartmentCRUD.get_department_by_id(employee.get('department_id'), False)
|
||||
if not old_department:
|
||||
continue
|
||||
employee_acl_rid = employee.get('e_acl_rid')
|
||||
if employee_acl_rid == 0:
|
||||
result.append(f"employee_acl_rid == 0")
|
||||
continue
|
||||
|
||||
old_role = Role.get_by(first=True, name=old_department.department_name, app_id=None)
|
||||
old_d_rid_in_acl = old_role.get('id') if old_role else 0
|
||||
if old_d_rid_in_acl == 0:
|
||||
return
|
||||
if old_d_rid_in_acl != old_department.acl_rid:
|
||||
old_department.update(
|
||||
acl_rid=old_d_rid_in_acl
|
||||
)
|
||||
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'parent_id': d_acl_rid,
|
||||
}
|
||||
try:
|
||||
acl.remove_user_from_role(employee_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"remove_user_from_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
|
||||
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'child_ids': [employee_acl_rid],
|
||||
}
|
||||
try:
|
||||
acl.add_user_to_role(new_department_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"add_user_to_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
|
||||
|
||||
return result
|
||||
|
@@ -15,10 +15,13 @@ from wtforms import validators
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.common_setting.const import COMMON_SETTING_QUEUE, OperatorType
|
||||
from api.lib.common_setting.const import OperatorType
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import Employee, Department
|
||||
|
||||
from api.tasks.common_setting import refresh_employee_acl_info, edit_employee_department_in_acl
|
||||
|
||||
acl_user_columns = [
|
||||
'email',
|
||||
'mobile',
|
||||
@@ -121,10 +124,25 @@ class EmployeeCRUD(object):
|
||||
employee = CreateEmployee().create_single(**data)
|
||||
return employee.to_dict()
|
||||
|
||||
@staticmethod
|
||||
def add_employee_from_acl_created(**kwargs):
|
||||
try:
|
||||
kwargs['acl_uid'] = kwargs.pop('uid')
|
||||
kwargs['acl_rid'] = kwargs.pop('rid')
|
||||
kwargs['department_id'] = 0
|
||||
|
||||
Employee.create(
|
||||
**kwargs
|
||||
)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
try:
|
||||
return CreateEmployee().create_single(**kwargs)
|
||||
res = CreateEmployee().create_single(**kwargs)
|
||||
refresh_employee_acl_info.apply_async(args=(), queue=CMDB_QUEUE)
|
||||
return res
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
@@ -151,10 +169,9 @@ class EmployeeCRUD(object):
|
||||
existed.update(**kwargs)
|
||||
|
||||
if len(e_list) > 0:
|
||||
from api.tasks.common_setting import edit_employee_department_in_acl
|
||||
edit_employee_department_in_acl.apply_async(
|
||||
args=(e_list, new_department_id, current_user.uid),
|
||||
queue=COMMON_SETTING_QUEUE
|
||||
queue=CMDB_QUEUE
|
||||
)
|
||||
|
||||
return existed
|
||||
@@ -165,7 +182,7 @@ class EmployeeCRUD(object):
|
||||
def edit_employee_by_uid(_uid, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
user = edit_acl_user(_uid, **kwargs)
|
||||
edit_acl_user(_uid, **kwargs)
|
||||
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
@@ -177,9 +194,9 @@ class EmployeeCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def change_password_by_uid(_uid, password):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
user = edit_acl_user(_uid, password=password)
|
||||
edit_acl_user(_uid, password=password)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -278,7 +295,7 @@ class EmployeeCRUD(object):
|
||||
employees = []
|
||||
for r in pagination.items:
|
||||
d = r.Employee.to_dict()
|
||||
d['department_name'] = r.Department.department_name
|
||||
d['department_name'] = r.Department.department_name if r.Department else ''
|
||||
employees.append(d)
|
||||
|
||||
return {
|
||||
@@ -346,9 +363,11 @@ class EmployeeCRUD(object):
|
||||
|
||||
if value and column == "last_login":
|
||||
try:
|
||||
value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
except Exception as e:
|
||||
abort(400, ErrFormat.datetime_format_error.format(column))
|
||||
err = f"{ErrFormat.datetime_format_error.format(column)}: {str(e)}"
|
||||
abort(400, err)
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def get_attr_by_column(column):
|
||||
@@ -369,7 +388,7 @@ class EmployeeCRUD(object):
|
||||
relation = condition.get("relation", None)
|
||||
value = condition.get("value", None)
|
||||
|
||||
EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
value = EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
a, o = EmployeeCRUD.get_expr_by_condition(
|
||||
column, operator, value, relation)
|
||||
and_list += a
|
||||
@@ -422,7 +441,7 @@ class EmployeeCRUD(object):
|
||||
employees = []
|
||||
for r in pagination.items:
|
||||
d = r.Employee.to_dict()
|
||||
d['department_name'] = r.Department.department_name
|
||||
d['department_name'] = r.Department.department_name if r.Department else ''
|
||||
employees.append(d)
|
||||
|
||||
return {
|
||||
@@ -548,10 +567,152 @@ class EmployeeCRUD(object):
|
||||
for column in direct_columns:
|
||||
tmp[column] = d.get(column, '')
|
||||
notice_info = d.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
tmp.update(**notice_info)
|
||||
results.append(tmp)
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def import_employee(employee_list):
|
||||
res = CreateEmployee().batch_create(employee_list)
|
||||
refresh_employee_acl_info.apply_async(args=(), queue=CMDB_QUEUE)
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_employee_department(employee_id_list, column_value):
|
||||
err_list = []
|
||||
employee_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
employee = dict(
|
||||
e_acl_rid=existed.acl_rid,
|
||||
department_id=existed.department_id
|
||||
)
|
||||
employee_list.append(employee)
|
||||
existed.update(department_id=column_value)
|
||||
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
from api.lib.common_setting.department import EditDepartmentInACL
|
||||
EditDepartmentInACL.edit_employee_department_in_acl(
|
||||
employee_list, column_value, current_user.uid
|
||||
)
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_password_or_block_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
if column_name == 'block':
|
||||
err_list = []
|
||||
success_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
employee = EmployeeCRUD.edit_employee_block_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
success_list.append(employee)
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
return err_list
|
||||
else:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, is_acl)
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
err_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
EmployeeCRUD.edit_employee_single_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_single_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
if 'direct_supervisor_id' in kwargs.keys():
|
||||
if kwargs['direct_supervisor_id'] == existed.direct_supervisor_id:
|
||||
raise Exception(ErrFormat.direct_supervisor_is_not_self)
|
||||
|
||||
if is_acl:
|
||||
return edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
try:
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
kwargs.pop(column)
|
||||
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_block_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
value = get_block_value(kwargs.get('block'))
|
||||
if value is True:
|
||||
check_department_director_id_or_direct_supervisor_id(_id)
|
||||
value = 1
|
||||
else:
|
||||
value = 0
|
||||
|
||||
if is_acl:
|
||||
kwargs['block'] = value
|
||||
edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
existed.update(block=value)
|
||||
data = existed.to_dict()
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def batch_employee(column_name, column_value, employee_id_list):
|
||||
if column_value is None:
|
||||
abort(400, ErrFormat.value_is_required)
|
||||
if column_name in ['password', 'block']:
|
||||
return EmployeeCRUD.batch_edit_password_or_block_column(column_name, employee_id_list, column_value, True)
|
||||
|
||||
elif column_name in ['department_id']:
|
||||
return EmployeeCRUD.batch_edit_employee_department(employee_id_list, column_value)
|
||||
|
||||
elif column_name in [
|
||||
'direct_supervisor_id', 'position_name'
|
||||
]:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, False)
|
||||
|
||||
else:
|
||||
abort(400, ErrFormat.column_name_not_support)
|
||||
|
||||
@staticmethod
|
||||
def update_last_login_by_uid(uid, last_login=None):
|
||||
employee = Employee.get_by(acl_uid=uid, first=True, to_dict=False)
|
||||
if not employee:
|
||||
return
|
||||
if last_login:
|
||||
try:
|
||||
last_login = datetime.strptime(last_login, '%Y-%m-%d %H:%M:%S')
|
||||
except Exception as e:
|
||||
last_login = datetime.now()
|
||||
else:
|
||||
last_login = datetime.now()
|
||||
|
||||
try:
|
||||
employee.update(
|
||||
last_login=last_login
|
||||
)
|
||||
return last_login
|
||||
except Exception as e:
|
||||
return
|
||||
|
||||
|
||||
def get_user_map(key='uid', acl=None):
|
||||
"""
|
||||
@@ -592,6 +753,7 @@ class CreateEmployee(object):
|
||||
try:
|
||||
existed = self.check_acl_user(user_data)
|
||||
if not existed:
|
||||
user_data['add_from'] = 'common'
|
||||
return self.acl.create_user(user_data)
|
||||
return existed
|
||||
except Exception as e:
|
||||
@@ -628,7 +790,8 @@ class CreateEmployee(object):
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def get_department_by_name(self, d_name):
|
||||
@staticmethod
|
||||
def get_department_by_name(d_name):
|
||||
return Department.get_by(first=True, department_name=d_name)
|
||||
|
||||
def get_end_department_id(self, department_name_list, department_name_map):
|
||||
|
@@ -162,4 +162,4 @@ class NoticeConfigUpdateForm(Form):
info = StringField(validators=[
validators.DataRequired(message="信息 不能为空"),
validators.Length(max=255),
])
])
@@ -8,6 +8,9 @@ class ErrFormat(CommonErrFormat):

no_file_part = "没有文件部分"
file_is_required = "文件是必须的"
file_not_found = "文件不存在"
file_type_not_allowed = "文件类型不允许"
upload_failed = "上传失败: {}"

direct_supervisor_is_not_self = "直属上级不能是自己"
parent_department_is_not_self = "上级部门不能是自己"
@@ -56,6 +59,7 @@ class ErrFormat(CommonErrFormat):
email_send_timeout = "邮件发送超时"

common_data_not_found = "ID {} 找不到记录"
common_data_already_existed = "{} 已存在"
notice_platform_existed = "{} 已存在"
notice_not_existed = "{} 配置项不存在"
notice_please_config_messenger_first = "请先配置 messenger"
@@ -63,3 +67,11 @@ class ErrFormat(CommonErrFormat):
notice_bind_failed = "绑定失败: {}"
notice_bind_success = "绑定成功"
notice_remove_bind_success = "解绑成功"

not_support_test = "不支持的测试类型: {}"
not_support_auth_type = "不支持的认证类型: {}"
ldap_server_connect_timeout = "LDAP服务器连接超时"
ldap_server_connect_not_available = "LDAP服务器连接不可用"
ldap_test_unknown_error = "LDAP测试未知错误: {}"
common_data_not_support_auth_type = "通用数据不支持auth类型: {}"
ldap_test_username_required = "LDAP测试用户名必填"
@@ -1,6 +1,14 @@
import base64
import uuid
import os
from io import BytesIO

from flask import abort, current_app
import lz4.frame

from api.lib.common_setting.utils import get_cur_time_str
from api.models.common_setting import CommonFile
from api.lib.common_setting.resp_format import ErrFormat


def allowed_file(filename, allowed_extensions):
@@ -14,3 +22,73 @@ def generate_new_file_name(name):
cur_str = get_cur_time_str('_')

return f"{prev_name}_{cur_str}_{uid}.{ext}"


class CommonFileCRUD:
@staticmethod
def add_file(**kwargs):
return CommonFile.create(**kwargs)

@staticmethod
def get_file(file_name, to_str=False):
existed = CommonFile.get_by(file_name=file_name, first=True, to_dict=False)
if not existed:
abort(400, ErrFormat.file_not_found)

uncompressed_data = lz4.frame.decompress(existed.binary)

return base64.b64encode(uncompressed_data).decode('utf-8') if to_str else BytesIO(uncompressed_data)

@staticmethod
def sync_file_to_db():
for p in ['UPLOAD_DIRECTORY_FULL']:
upload_path = current_app.config.get(p, None)
if not upload_path:
continue
for root, dirs, files in os.walk(upload_path):
for file in files:
file_path = os.path.join(root, file)
if not os.path.isfile(file_path):
continue

existed = CommonFile.get_by(file_name=file, first=True, to_dict=False)
if existed:
continue
with open(file_path, 'rb') as f:
data = f.read()
compressed_data = lz4.frame.compress(data)
try:
CommonFileCRUD.add_file(
origin_name=file,
file_name=file,
binary=compressed_data
)

current_app.logger.info(f'sync file {file} to db')
except Exception as e:
current_app.logger.error(f'sync file {file} to db error: {e}')

def get_file_binary_str(self, file_name):
return self.get_file(file_name, True)

def save_str_to_file(self, file_name, str_data):
try:
self.get_file(file_name)
current_app.logger.info(f'file {file_name} already exists')
return
except Exception as e:
# file not found
pass

bytes_data = base64.b64decode(str_data)
compressed_data = lz4.frame.compress(bytes_data)

try:
self.add_file(
origin_name=file_name,
file_name=file_name,
binary=compressed_data
)
current_app.logger.info(f'save_str_to_file {file_name} success')
except Exception as e:
current_app.logger.error(f"save_str_to_file error: {e}")
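CommonFileCRUD stores uploads LZ4-compressed and base64-encodes them when a string is requested. The underlying round trip, using only lz4.frame and the standard library (the file contents below are a stand-in):

    import base64
    import lz4.frame

    raw = b"example file contents"                  # stand-in for a real upload
    blob = lz4.frame.compress(raw)                  # what gets written to CommonFile.binary
    restored = lz4.frame.decompress(blob)           # what get_file() hands back
    as_str = base64.b64encode(restored).decode('utf-8')  # get_file(..., to_str=True)

    assert restored == raw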
@@ -10,14 +10,18 @@ from api.lib.exception import CommitException

class FormatMixin(object):
def to_dict(self):
res = dict([(k, getattr(self, k) if not isinstance(
getattr(self, k), (datetime.datetime, datetime.date, datetime.time)) else str(
getattr(self, k))) for k in getattr(self, "__mapper__").c.keys()])
# FIXME: getattr(cls, "__table__").columns k.name
res = dict()
for k in getattr(self, "__mapper__").c.keys():
if k in {'password', '_password', 'secret', '_secret'}:
continue

res.pop('password', None)
res.pop('_password', None)
res.pop('secret', None)
if k.startswith('_'):
k = k[1:]

if not isinstance(getattr(self, k), (datetime.datetime, datetime.date, datetime.time)):
res[k] = getattr(self, k)
else:
res[k] = str(getattr(self, k))

return res

@@ -90,7 +94,7 @@ class CRUDMixin(FormatMixin):
if any((isinstance(_id, six.string_types) and _id.isdigit(),
isinstance(_id, (six.integer_types, float))), ):
obj = getattr(cls, "query").get(int(_id))
if obj and not obj.deleted:
if obj and not getattr(obj, 'deleted', False):
return obj

@classmethod
@@ -4,8 +4,14 @@
from functools import wraps

from flask import abort
from flask import current_app
from flask import request
from sqlalchemy.exc import InvalidRequestError
from sqlalchemy.exc import OperationalError
from sqlalchemy.exc import PendingRollbackError
from sqlalchemy.exc import StatementError

from api.extensions import db
from api.lib.resp_format import CommonErrFormat


@@ -70,3 +76,43 @@ def args_validate(model_cls, exclude_args=None):
return wrapper

return decorate


def reconnect_db(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except (StatementError, OperationalError, InvalidRequestError) as e:
error_msg = str(e)
if 'Lost connection' in error_msg or 'reconnect until invalid transaction' in error_msg or \
'can be emitted within this transaction' in error_msg:
current_app.logger.info('[reconnect_db] lost connect rollback then retry')
db.session.rollback()
return func(*args, **kwargs)
else:
raise e
except Exception as e:
raise e

return wrapper


def _flush_db():
try:
db.session.commit()
except (StatementError, OperationalError, InvalidRequestError, PendingRollbackError):
db.session.rollback()


def flush_db(func):
@wraps(func)
def wrapper(*args, **kwargs):
_flush_db()
return func(*args, **kwargs)

return wrapper


def run_flush_db():
_flush_db()
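A sketch of how the new decorators would typically be applied. The task function and model are hypothetical, and the decorators' module path is not visible in this hunk, so the example assumes they are importable from the surrounding codebase: reconnect_db rolls back the session and retries once after a lost-connection style error, while flush_db commits or rolls back any pending state before the wrapped function runs.

    # Hypothetical task using the decorators defined above.
    @reconnect_db
    @flush_db
    def count_employees():
        # If the connection was dropped, reconnect_db catches the error,
        # rolls back the session, and calls this function a second time.
        return db.session.query(Employee).count()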
@@ -117,15 +117,15 @@ class ACLManager(object):
if group:
PermissionCRUD.grant(role.id, permissions, group_id=group.id)

def grant_resource_to_role_by_rid(self, name, rid, resource_type_name=None, permissions=None):
def grant_resource_to_role_by_rid(self, name, rid, resource_type_name=None, permissions=None, rebuild=True):
resource = self._get_resource(name, resource_type_name)

if resource:
PermissionCRUD.grant(rid, permissions, resource_id=resource.id)
PermissionCRUD.grant(rid, permissions, resource_id=resource.id, rebuild=rebuild)
else:
group = self._get_resource_group(name)
if group:
PermissionCRUD.grant(rid, permissions, group_id=group.id)
PermissionCRUD.grant(rid, permissions, group_id=group.id, rebuild=rebuild)

def revoke_resource_from_role(self, name, role, resource_type_name=None, permissions=None):
resource = self._get_resource(name, resource_type_name)
@@ -138,20 +138,20 @@ class ACLManager(object):
if group:
PermissionCRUD.revoke(role.id, permissions, group_id=group.id)

def revoke_resource_from_role_by_rid(self, name, rid, resource_type_name=None, permissions=None):
def revoke_resource_from_role_by_rid(self, name, rid, resource_type_name=None, permissions=None, rebuild=True):
resource = self._get_resource(name, resource_type_name)

if resource:
PermissionCRUD.revoke(rid, permissions, resource_id=resource.id)
PermissionCRUD.revoke(rid, permissions, resource_id=resource.id, rebuild=rebuild)
else:
group = self._get_resource_group(name)
if group:
PermissionCRUD.revoke(rid, permissions, group_id=group.id)
PermissionCRUD.revoke(rid, permissions, group_id=group.id, rebuild=rebuild)

def del_resource(self, name, resource_type_name=None):
resource = self._get_resource(name, resource_type_name)
if resource:
ResourceCRUD.delete(resource.id)
return ResourceCRUD.delete(resource.id)

def has_permission(self, resource_name, resource_type, perm, resource_id=None):
if is_app_admin(self.app_id):
@@ -1,14 +1,19 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import itertools
|
||||
import json
|
||||
from enum import Enum
|
||||
from typing import List
|
||||
|
||||
from flask import has_request_context, request
|
||||
from flask import has_request_context
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl import AppCache
|
||||
from api.models.acl import AuditLoginLog
|
||||
from api.models.acl import AuditPermissionLog
|
||||
from api.models.acl import AuditResourceLog
|
||||
from api.models.acl import AuditRoleLog
|
||||
@@ -283,6 +288,27 @@ class AuditCRUD(object):
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def search_login(_, q=None, page=1, page_size=10, start=None, end=None):
|
||||
query = db.session.query(AuditLoginLog)
|
||||
|
||||
if start:
|
||||
query = query.filter(AuditLoginLog.login_at >= start)
|
||||
if end:
|
||||
query = query.filter(AuditLoginLog.login_at <= end)
|
||||
|
||||
if q:
|
||||
query = query.filter(AuditLoginLog.username == q)
|
||||
|
||||
records = query.order_by(
|
||||
AuditLoginLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def add_role_log(cls, app_id, operate_type: AuditOperateType,
|
||||
scope: AuditScope, link_id: int, origin: dict, current: dict, extra: dict,
|
||||
@@ -348,3 +374,30 @@ class AuditCRUD(object):
|
||||
AuditTriggerLog.create(app_id=app_id, trigger_id=trigger_id, operate_uid=user_id,
|
||||
operate_type=operate_type.value,
|
||||
origin=origin, current=current, extra=extra, source=source.value)
|
||||
|
||||
@classmethod
|
||||
def add_login_log(cls, username, is_ok, description, _id=None, logout_at=None):
|
||||
if _id is not None:
|
||||
existed = AuditLoginLog.get_by_id(_id)
|
||||
if existed is not None:
|
||||
existed.update(logout_at=logout_at)
|
||||
return
|
||||
|
||||
payload = dict(username=username,
|
||||
is_ok=is_ok,
|
||||
description=description,
|
||||
logout_at=logout_at,
|
||||
ip=request.headers.get('X-Real-IP') or request.remote_addr,
|
||||
browser=request.headers.get('User-Agent'),
|
||||
)
|
||||
|
||||
if logout_at is None:
|
||||
payload['login_at'] = datetime.datetime.now()
|
||||
|
||||
try:
|
||||
from api.lib.common_setting.employee import EmployeeCRUD
|
||||
EmployeeCRUD.update_last_login_by_uid(current_user.uid)
|
||||
except:
|
||||
pass
|
||||
|
||||
return AuditLoginLog.create(**payload).id
|
||||
|
@@ -4,7 +4,7 @@
|
||||
import msgpack
|
||||
|
||||
from api.extensions import cache
|
||||
from api.extensions import db
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.utils import Lock
|
||||
from api.models.acl import App
|
||||
from api.models.acl import Permission
|
||||
@@ -221,9 +221,9 @@ class RoleRelationCache(object):
|
||||
return msgpack.loads(r_g, raw=False)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild(cls, rid, app_id):
|
||||
cls.clean(rid, app_id)
|
||||
db.session.remove()
|
||||
|
||||
cls.get_parent_ids(rid, app_id)
|
||||
cls.get_child_ids(rid, app_id)
|
||||
@@ -235,9 +235,9 @@ class RoleRelationCache(object):
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild2(cls, rid, app_id):
|
||||
cache.delete(cls.PREFIX_RESOURCES2.format(rid, app_id))
|
||||
db.session.remove()
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
|
@@ -260,7 +260,8 @@ class ResourceCRUD(object):
|
||||
numfound = query.count()
|
||||
res = [i.to_dict() for i in query.offset((page - 1) * page_size).limit(page_size)]
|
||||
for i in res:
|
||||
i['user'] = UserCache.get(i['uid']).nickname if i['uid'] else ''
|
||||
user = UserCache.get(i['uid']) if i['uid'] else ''
|
||||
i['user'] = user and user.nickname
|
||||
|
||||
return numfound, res
|
||||
|
||||
@@ -275,7 +276,6 @@ class ResourceCRUD(object):
|
||||
|
||||
from api.tasks.acl import apply_trigger
|
||||
triggers = TriggerCRUD.match_triggers(app_id, r.name, r.resource_type_id, uid)
|
||||
current_app.logger.info(triggers)
|
||||
for trigger in triggers:
|
||||
# auto trigger should be no uid
|
||||
apply_trigger.apply_async(args=(trigger.id,),
|
||||
@@ -328,6 +328,8 @@ class ResourceCRUD(object):
|
||||
AuditCRUD.add_resource_log(resource.app_id, AuditOperateType.delete,
|
||||
AuditScope.resource, resource.id, origin, {}, {})
|
||||
|
||||
return rebuilds
|
||||
|
||||
@classmethod
|
||||
def delete_by_name(cls, name, type_id, app_id):
|
||||
resource = Resource.get_by(name=name, resource_type_id=type_id, app_id=app_id) or abort(
|
||||
|
@@ -4,6 +4,9 @@ from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
class ErrFormat(CommonErrFormat):
|
||||
login_succeed = "登录成功"
|
||||
ldap_connection_failed = "连接LDAP服务失败"
|
||||
invalid_password = "密码验证失败"
|
||||
auth_only_with_app_token_failed = "应用 Token验证失败"
|
||||
session_invalid = "您不是应用管理员 或者 session失效(尝试一下退出重新登录)"
|
||||
|
||||
@@ -17,11 +20,11 @@ class ErrFormat(CommonErrFormat):
|
||||
role_exists = "角色 {} 已经存在!"
|
||||
global_role_not_found = "全局角色 {} 不存在!"
|
||||
global_role_exists = "全局角色 {} 已经存在!"
|
||||
user_role_delete_invalid = "删除用户角色, 请在 用户管理 页面操作!"
|
||||
|
||||
resource_no_permission = "您没有资源: {} 的 {} 权限"
|
||||
admin_required = "需要管理员权限"
|
||||
role_required = "需要角色: {}"
|
||||
user_role_delete_invalid = "删除用户角色, 请在 用户管理 页面操作!"
|
||||
|
||||
app_is_ready_existed = "应用 {} 已经存在"
|
||||
app_not_found = "应用 {} 不存在!"
|
||||
|
@@ -41,6 +41,7 @@ class UserCRUD(object):
|
||||
|
||||
@classmethod
|
||||
def add(cls, **kwargs):
|
||||
add_from = kwargs.pop('add_from', None)
|
||||
existed = User.get_by(username=kwargs['username'])
|
||||
existed and abort(400, ErrFormat.user_exists.format(kwargs['username']))
|
||||
|
||||
@@ -58,11 +59,17 @@ class UserCRUD(object):
|
||||
kwargs['employee_id'] = '{0:04d}'.format(biggest_employee_id + 1)
|
||||
user = User.create(**kwargs)
|
||||
|
||||
RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
role = RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
AuditCRUD.add_role_log(None, AuditOperateType.create,
|
||||
AuditScope.user, user.uid, {}, user.to_dict(), {}, {}
|
||||
)
|
||||
|
||||
if add_from != 'common':
|
||||
from api.lib.common_setting.employee import EmployeeCRUD
|
||||
payload = {column: getattr(user, column) for column in ['uid', 'username', 'nickname', 'email', 'block']}
|
||||
payload['rid'] = role.id
|
||||
EmployeeCRUD.add_employee_from_acl_created(**payload)
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
|
@@ -93,6 +93,9 @@ def _auth_with_token():
|
||||
|
||||
|
||||
def _auth_with_ip_white_list():
|
||||
if request.url.endswith("acl/users/info"):
|
||||
return False
|
||||
|
||||
ip = request.headers.get('X-Real-IP') or request.remote_addr
|
||||
key = request.values.get('_key')
|
||||
secret = request.values.get('_secret')
|
||||
|
cmdb-api/api/lib/perm/authentication/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
|
||||
# -*- coding:utf-8 -*-
|
@@ -15,7 +15,7 @@ try:
|
||||
except ImportError:
|
||||
from flask import _request_ctx_stack as stack
|
||||
|
||||
from api.flask_cas import routing
|
||||
from . import routing
|
||||
|
||||
|
||||
class CAS(object):
|
@@ -119,4 +119,4 @@ def create_cas_validate_url(cas_url, cas_route, service, ticket,
|
||||
('service', service),
|
||||
('ticket', ticket),
|
||||
('renew', renew),
|
||||
)
|
||||
)
|
@@ -1,14 +1,24 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import json
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
import bs4
|
||||
from flask import Blueprint
|
||||
from flask import current_app, session, request, url_for, redirect
|
||||
from flask_login import login_user, logout_user
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from .cas_urls import create_cas_login_url
|
||||
from .cas_urls import create_cas_logout_url
|
||||
from .cas_urls import create_cas_validate_url
|
||||
@@ -16,6 +26,7 @@ from .cas_urls import create_cas_validate_url
|
||||
blueprint = Blueprint('cas', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/login')
|
||||
@blueprint.route('/api/sso/login')
|
||||
def login():
|
||||
"""
|
||||
@@ -29,16 +40,20 @@ def login():
|
||||
If validation was successful the logged in username is saved in
|
||||
the user's session under the key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
_service = url_for('cas.login', _external=True, next=session["next"]) \
|
||||
if session.get("next") else url_for('cas.login', _external=True)
|
||||
# _service = url_for('cas.login', _external=True)
|
||||
_service = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('cas.login'))
|
||||
|
||||
redirect_url = create_cas_login_url(
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
_service)
|
||||
|
||||
if 'ticket' in request.args:
|
||||
@@ -47,30 +62,38 @@ def login():
|
||||
if request.args.get('ticket'):
|
||||
|
||||
if validate(request.args['ticket']):
|
||||
redirect_url = session.get("next") or \
|
||||
current_app.config.get("CAS_AFTER_LOGIN")
|
||||
redirect_url = session.get("next") or config.get("cas_after_login") or "/"
|
||||
username = session.get("CAS_USERNAME")
|
||||
user = UserCache.get(username)
|
||||
login_user(user)
|
||||
|
||||
session.permanent = True
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
else:
|
||||
del session[cas_token_session_key]
|
||||
redirect_url = create_cas_login_url(
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
url_for('cas.login', _external=True),
|
||||
renew=True)
|
||||
|
||||
AuditCRUD.add_login_log(session.get("CAS_USERNAME"), False, ErrFormat.invalid_password)
|
||||
|
||||
current_app.logger.info("redirect to: {0}".format(redirect_url))
|
||||
return redirect(redirect_url)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/logout')
|
||||
@blueprint.route('/api/sso/logout')
|
||||
def logout():
|
||||
"""
|
||||
When the user accesses this route they are logged out.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
current_app.logger.info(config)
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
@@ -82,12 +105,14 @@ def logout():
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = create_cas_logout_url(
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGOUT_ROUTE'],
|
||||
config['cas_server'],
|
||||
config['cas_logout_route'],
|
||||
url_for('cas.login', _external=True, next=request.referrer))
|
||||
|
||||
logout_user()
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
return redirect(redirect_url)
|
||||
@@ -100,14 +125,15 @@ def validate(ticket):
|
||||
and the validated username is saved in the session under the
|
||||
key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
|
||||
current_app.logger.debug("validating token {0}".format(ticket))
|
||||
|
||||
cas_validate_url = create_cas_validate_url(
|
||||
current_app.config['CAS_VALIDATE_SERVER'],
|
||||
current_app.config['CAS_VALIDATE_ROUTE'],
|
||||
config['cas_validate_server'],
|
||||
config['cas_validate_route'],
|
||||
url_for('cas.login', _external=True),
|
||||
ticket)
|
||||
|
||||
@@ -115,23 +141,35 @@ def validate(ticket):
|
||||
|
||||
try:
|
||||
response = urlopen(cas_validate_url).read()
|
||||
ticketid = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticketid.split('|') if s.strip()]
|
||||
ticket_id = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticket_id.split('|') if s.strip()]
|
||||
username, is_valid = None, False
|
||||
if len(strs) == 1:
|
||||
username = strs[0]
|
||||
is_valid = True
|
||||
user_info = json.loads(_parse_tag(response, "cas:other"))
|
||||
current_app.logger.info(user_info)
|
||||
except ValueError:
|
||||
current_app.logger.error("CAS returned unexpected result")
|
||||
is_valid = False
|
||||
return is_valid
|
||||
|
||||
if is_valid:
|
||||
current_app.logger.debug("valid")
|
||||
current_app.logger.debug("{}: {}".format(cas_username_session_key, username))
|
||||
session[cas_username_session_key] = username
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
soup = bs4.BeautifulSoup(response)
|
||||
cas_user_map = config.get('cas_user_map')
|
||||
user_dict = dict()
|
||||
for k in cas_user_map:
|
||||
v = soup.find(cas_user_map[k]['tag'], cas_user_map[k].get('attrs', {}))
|
||||
user_dict[k] = v and v.text or None
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
if "email" not in user_dict:
|
||||
user_dict['email'] = username
|
||||
|
||||
UserCRUD.add(**user_dict)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
@@ -164,4 +202,5 @@ def _parse_tag(string, tag):
|
||||
|
||||
if soup.find(tag) is None:
|
||||
return ''
|
||||
|
||||
return soup.find(tag).string.strip()
|
cmdb-api/api/lib/perm/authentication/ldap.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import uuid
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from ldap3 import ALL
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError
|
||||
from ldap3.core.exceptions import LDAPCertificateError
|
||||
from ldap3.core.exceptions import LDAPSocketOpenError
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from api.models.acl import User
|
||||
|
||||
|
||||
def authenticate_with_ldap(username, password):
|
||||
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
|
||||
|
||||
server = Server(config.get('ldap_server'), get_info=ALL, connect_timeout=3)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = config.get('ldap_user_dn').format(username.split('@')[0])
|
||||
else:
|
||||
who = config.get('ldap_user_dn').format(username)
|
||||
email = "{}@{}".format(who, config.get('ldap_domain'))
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = User.query.get_by_username(username)
|
||||
try:
|
||||
if not password:
|
||||
raise LDAPCertificateError
|
||||
|
||||
try:
|
||||
conn = Connection(server, user=who, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
conn = Connection(server,
|
||||
user=f"{username}@{config.get('ldap_domain')}",
|
||||
password=password,
|
||||
auto_bind=AUTO_BIND_NO_TLS)
|
||||
|
||||
if conn.result['result'] != 0:
|
||||
AuditCRUD.add_login_log(username, False, ErrFormat.invalid_password)
|
||||
raise LDAPBindError
|
||||
else:
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email, password=uuid.uuid4().hex)
|
||||
|
||||
return user, True
|
||||
|
||||
except LDAPBindError as e:
|
||||
current_app.logger.info(e)
|
||||
return user, False
|
||||
|
||||
except LDAPSocketOpenError as e:
|
||||
current_app.logger.info(e)
|
||||
return abort(403, ErrFormat.ldap_connection_failed)
|
cmdb-api/api/lib/perm/authentication/oauth2/__init__.py (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from . import routing
|
||||
|
||||
|
||||
class OAuth2(object):
|
||||
def __init__(self, app=None, url_prefix=None):
|
||||
self._app = app
|
||||
if app is not None:
|
||||
self.init_app(app, url_prefix)
|
||||
|
||||
@staticmethod
|
||||
def init_app(app, url_prefix=None):
|
||||
# Configuration defaults
|
||||
app.config.setdefault('OAUTH2_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OAUTH2_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OAUTH2_AFTER_LOGIN', '/')
|
||||
|
||||
app.config.setdefault('OIDC_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OIDC_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OIDC_AFTER_LOGIN', '/')
|
||||
|
||||
# Register Blueprint
|
||||
app.register_blueprint(routing.blueprint, url_prefix=url_prefix)
|
||||
|
||||
@property
|
||||
def app(self):
|
||||
return self._app or current_app
|
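The OAuth2 class above follows Flask's init_app extension convention: seed configuration defaults, then register the routing blueprint. A small self-contained sketch of the same wiring with a toy blueprint follows; the DemoOAuth2 name, the route, and the config key are illustrative, not part of this diff.

# Sketch of wiring an init_app-style extension into a Flask app; names are illustrative.
from flask import Blueprint, Flask

bp = Blueprint("demo_oauth2", __name__)


@bp.route("/api/demo/login")
def demo_login():
    return "would redirect to the identity provider here"


class DemoOAuth2:
    def __init__(self, app=None, url_prefix=None):
        if app is not None:
            self.init_app(app, url_prefix)

    @staticmethod
    def init_app(app, url_prefix=None):
        app.config.setdefault("OAUTH2_RESPONSE_TYPE", "code")   # config default
        app.register_blueprint(bp, url_prefix=url_prefix)       # mount the routes


app = Flask(__name__)
DemoOAuth2(app)
print(app.url_map)  # shows /api/demo/login registered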
cmdb-api/api/lib/perm/authentication/oauth2/routing.py (new file, 139 lines)
@@ -0,0 +1,139 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import secrets
|
||||
import uuid
|
||||
|
||||
import requests
|
||||
from flask import Blueprint
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlencode
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
blueprint = Blueprint('oauth2', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/login')
|
||||
def login(auth_type):
|
||||
config = AuthenticateDataCRUD(auth_type.upper()).get()
|
||||
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
session[f'{auth_type}_state'] = secrets.token_urlsafe(16)
|
||||
|
||||
auth_type = auth_type.upper()
|
||||
|
||||
redirect_uri = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('oauth2.callback', auth_type=auth_type.lower()))
|
||||
qs = urlencode({
|
||||
'client_id': config['client_id'],
|
||||
'redirect_uri': redirect_uri,
|
||||
'response_type': current_app.config[f'{auth_type}_RESPONSE_TYPE'],
|
||||
'scope': ' '.join(config['scopes'] or []),
|
||||
'state': session[f'{auth_type.lower()}_state'],
|
||||
})
|
||||
|
||||
return redirect("{}?{}".format(config['authorize_url'].split('?')[0], qs))
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/callback')
|
||||
def callback(auth_type):
|
||||
auth_type = auth_type.upper()
|
||||
config = AuthenticateDataCRUD(auth_type).get()
|
||||
|
||||
redirect_url = session.get("next") or config.get('after_login') or '/'
|
||||
|
||||
if request.values['state'] != session.get(f'{auth_type.lower()}_state'):
|
||||
return abort(401, "state is invalid")
|
||||
|
||||
if 'code' not in request.values:
|
||||
return abort(401, 'code is invalid')
|
||||
|
||||
response = requests.post(config['token_url'], data={
|
||||
'client_id': config['client_id'],
|
||||
'client_secret': config['client_secret'],
|
||||
'code': request.values['code'],
|
||||
'grant_type': current_app.config[f'{auth_type}_GRANT_TYPE'],
|
||||
'redirect_uri': url_for('oauth2.callback', auth_type=auth_type.lower(), _external=True),
|
||||
}, headers={'Accept': 'application/json'})
|
||||
if response.status_code != 200:
|
||||
current_app.logger.error(response.text)
|
||||
return abort(401)
|
||||
access_token = response.json().get('access_token')
|
||||
if not access_token:
|
||||
return abort(401)
|
||||
|
||||
response = requests.get(config['user_info']['url'], headers={
|
||||
'Authorization': 'Bearer {}'.format(access_token),
|
||||
'Accept': 'application/json',
|
||||
})
|
||||
if response.status_code != 200:
|
||||
return abort(401)
|
||||
|
||||
res = response.json()
|
||||
email = res.get(config['user_info']['email'])
|
||||
username = res.get(config['user_info']['username'])
|
||||
avatar = res.get(config['user_info'].get('avatar'))
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
user_dict = dict(username=username, email=email, avatar=avatar)
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
|
||||
user = UserCRUD.add(**user_dict)
|
||||
|
||||
# log the user in
|
||||
login_user(user)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
|
||||
session["acl"] = dict(uid=user_info.get("uid"),
|
||||
avatar=user.avatar if user else user_info.get("avatar"),
|
||||
userId=user_info.get("uid"),
|
||||
rid=user_info.get("rid"),
|
||||
userName=user_info.get("username"),
|
||||
nickName=user_info.get("nickname") or user_info.get("username"),
|
||||
parentRoles=user_info.get("parents"),
|
||||
childRoles=user_info.get("children"),
|
||||
roleName=user_info.get("role"))
|
||||
session["uid"] = user_info.get("uid")
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
return redirect(redirect_url)
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/logout')
|
||||
def logout(auth_type):
|
||||
"acl" in session and session.pop("acl")
|
||||
"uid" in session and session.pop("uid")
|
||||
f'{auth_type}_state' in session and session.pop(f'{auth_type}_state')
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = url_for('oauth2.login', auth_type=auth_type, _external=True, next=request.referrer)
|
||||
|
||||
logout_user()
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
return redirect(redirect_url)
|
cmdb-api/api/lib/secrets/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
|
||||
# -*- coding:utf-8 -*-
|
cmdb-api/api/lib/secrets/inner.py (new file, 429 lines)
@@ -0,0 +1,429 @@
|
||||
import os
|
||||
import secrets
|
||||
import sys
|
||||
from base64 import b64decode, b64encode
|
||||
|
||||
from Cryptodome.Protocol.SecretSharing import Shamir
|
||||
from colorama import Back
|
||||
from colorama import Fore
|
||||
from colorama import Style
|
||||
from colorama import init as colorama_init
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher
|
||||
from cryptography.hazmat.primitives.ciphers import algorithms
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from flask import current_app
|
||||
|
||||
global_iv_length = 16
|
||||
global_key_shares = 5 # Number of generated key shares
|
||||
global_key_threshold = 3 # Minimum number of shares required to rebuild the key
|
||||
|
||||
backend_root_key_name = "root_key"
|
||||
backend_encrypt_key_name = "encrypt_key"
|
||||
backend_root_key_salt_name = "root_key_salt"
|
||||
backend_encrypt_key_salt_name = "encrypt_key_salt"
|
||||
backend_seal_key = "seal_status"
|
||||
success = "success"
|
||||
seal_status = True
|
||||
|
||||
|
||||
def string_to_bytes(value):
|
||||
if isinstance(value, bytes):
|
||||
return value
|
||||
if sys.version_info.major == 2:
|
||||
byte_string = value
|
||||
else:
|
||||
byte_string = value.encode("utf-8")
|
||||
return byte_string
|
||||
|
||||
|
||||
class Backend:
|
||||
def __init__(self, backend=None):
|
||||
self.backend = backend
|
||||
|
||||
def get(self, key):
|
||||
return self.backend.get(key)
|
||||
|
||||
def add(self, key, value):
|
||||
return self.backend.add(key, value)
|
||||
|
||||
def update(self, key, value):
|
||||
return self.backend.update(key, value)
|
||||
|
||||
|
||||
class KeyManage:
|
||||
|
||||
def __init__(self, trigger=None, backend=None):
|
||||
self.trigger = trigger
|
||||
self.backend = backend
|
||||
if backend:
|
||||
self.backend = Backend(backend)
|
||||
|
||||
def init_app(self, app, backend=None):
|
||||
if (sys.argv[0].endswith("gunicorn") or
|
||||
(len(sys.argv) > 1 and sys.argv[1] in ("run", "cmdb-password-data-migrate"))):
|
||||
self.trigger = app.config.get("INNER_TRIGGER_TOKEN")
|
||||
if not self.trigger:
|
||||
return
|
||||
|
||||
self.backend = backend
|
||||
resp = self.auto_unseal()
|
||||
self.print_response(resp)
|
||||
|
||||
def hash_root_key(self, value):
|
||||
algorithm = hashes.SHA256()
|
||||
salt = self.backend.get(backend_root_key_salt_name)
|
||||
if not salt:
|
||||
salt = secrets.token_hex(16)
|
||||
msg, ok = self.backend.add(backend_root_key_salt_name, salt)
|
||||
if not ok:
|
||||
return msg, ok
|
||||
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=algorithm,
|
||||
length=32,
|
||||
salt=string_to_bytes(salt),
|
||||
iterations=100000,
|
||||
)
|
||||
key = kdf.derive(string_to_bytes(value))
|
||||
|
||||
return b64encode(key).decode('utf-8'), True
|
||||
|
||||
def generate_encrypt_key(self, key):
|
||||
algorithm = hashes.SHA256()
|
||||
salt = self.backend.get(backend_encrypt_key_salt_name)
|
||||
if not salt:
|
||||
salt = secrets.token_hex(32)
|
||||
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=algorithm,
|
||||
length=32,
|
||||
salt=string_to_bytes(salt),
|
||||
iterations=100000,
|
||||
backend=default_backend()
|
||||
)
|
||||
key = kdf.derive(string_to_bytes(key))
|
||||
msg, ok = self.backend.add(backend_encrypt_key_salt_name, salt)
|
||||
if ok:
|
||||
return b64encode(key).decode('utf-8'), ok
|
||||
else:
|
||||
return msg, ok
|
||||
|
||||
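Both hash_root_key and generate_encrypt_key above derive a 32-byte key with PBKDF2-HMAC-SHA256 over a stored hex salt and 100000 iterations. A stand-alone sketch of that derivation with the same cryptography primitives; the passphrase below is an example value, not part of this diff.

# Stand-alone sketch of the PBKDF2-HMAC-SHA256 derivation used above.
import secrets
from base64 import b64encode

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = secrets.token_hex(16)                      # stored alongside the hash
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                 length=32,
                 salt=salt.encode("utf-8"),
                 iterations=100000)
key = kdf.derive(b"example-root-key")             # example passphrase
print(b64encode(key).decode("utf-8"))             # 32-byte key, base64-encoded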
@classmethod
|
||||
def generate_keys(cls, secret):
|
||||
shares = Shamir.split(global_key_threshold, global_key_shares, secret, False)
|
||||
new_shares = []
|
||||
for share in shares:
|
||||
t = [i for i in share[1]] + [ord(i) for i in "{:0>2}".format(share[0])]
|
||||
new_shares.append(b64encode(bytes(t)))
|
||||
|
||||
return new_shares
|
||||
|
||||
def is_valid_root_key(self, root_key):
|
||||
root_key_hash, ok = self.hash_root_key(root_key)
|
||||
if not ok:
|
||||
return root_key_hash, ok
|
||||
backend_root_key_hash = self.backend.get(backend_root_key_name)
|
||||
if not backend_root_key_hash:
|
||||
return "should init firstly", False
|
||||
elif backend_root_key_hash != root_key_hash:
|
||||
return "invalid root key", False
|
||||
else:
|
||||
return "", True
|
||||
|
||||
def auth_root_secret(self, root_key):
|
||||
msg, ok = self.is_valid_root_key(root_key)
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
encrypt_key_aes = self.backend.get(backend_encrypt_key_name)
|
||||
if not encrypt_key_aes:
|
||||
return {
|
||||
"message": "encrypt key is empty",
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
secrets_encrypt_key, ok = InnerCrypt.aes_decrypt(string_to_bytes(root_key), encrypt_key_aes)
|
||||
if ok:
|
||||
msg, ok = self.backend.update(backend_seal_key, "open")
|
||||
if ok:
|
||||
current_app.config["secrets_encrypt_key"] = secrets_encrypt_key
|
||||
current_app.config["secrets_root_key"] = root_key
|
||||
current_app.config["secrets_shares"] = []
|
||||
return {"message": success, "status": success}
|
||||
return {"message": msg, "status": "failed"}
|
||||
else:
|
||||
return {
|
||||
"message": secrets_encrypt_key,
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
def unseal(self, key):
|
||||
if not self.is_seal():
|
||||
return {
|
||||
"message": "current status is unseal, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
try:
|
||||
t = [i for i in b64decode(key)]
|
||||
v = (int("".join([chr(i) for i in t[-2:]])), bytes(t[:-2]))
|
||||
shares = current_app.config.get("secrets_shares", [])
|
||||
if v not in shares:
|
||||
shares.append(v)
|
||||
current_app.config["secrets_shares"] = shares
|
||||
|
||||
if len(shares) >= global_key_threshold:
|
||||
recovered_secret = Shamir.combine(shares[:global_key_threshold], False)
|
||||
return self.auth_root_secret(b64encode(recovered_secret))
|
||||
else:
|
||||
return {
|
||||
"message": "waiting for inputting other unseal key {0}/{1}".format(len(shares),
|
||||
global_key_threshold),
|
||||
"status": "waiting"
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
"message": "invalid token: " + str(e),
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
def generate_unseal_keys(self):
|
||||
info = self.backend.get(backend_root_key_name)
|
||||
if info:
|
||||
return "already exist", [], False
|
||||
|
||||
secret = AESGCM.generate_key(128)
|
||||
shares = self.generate_keys(secret)
|
||||
|
||||
return b64encode(secret), shares, True
|
||||
|
||||
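generate_unseal_keys creates a random 16-byte secret and generate_keys splits it into 5 Shamir shares with a threshold of 3; unseal later recombines any 3 of them. A stand-alone sketch of that split/recombine round trip with pycryptodome; the random secret here is illustrative.

# Stand-alone sketch of the 3-of-5 Shamir split/recombine used by generate_keys/unseal.
import os

from Cryptodome.Protocol.SecretSharing import Shamir

secret = os.urandom(16)                        # Shamir.split operates on 16-byte secrets
shares = Shamir.split(3, 5, secret, False)     # threshold=3, shares=5 -> [(index, bytes), ...]

recovered = Shamir.combine(shares[:3], False)  # any 3 shares recover the secret
assert recovered == secret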
def init(self):
|
||||
"""
|
||||
init the master key, unseal key and store in backend
|
||||
:return:
|
||||
"""
|
||||
root_key = self.backend.get(backend_root_key_name)
|
||||
if root_key:
|
||||
return {"message": "already init, skip", "status": "skip"}, False
|
||||
else:
|
||||
root_key, shares, status = self.generate_unseal_keys()
|
||||
if not status:
|
||||
return {"message": root_key, "status": "failed"}, False
|
||||
|
||||
# hash root key and store in backend
|
||||
root_key_hash, ok = self.hash_root_key(root_key)
|
||||
if not ok:
|
||||
return {"message": root_key_hash, "status": "failed"}, False
|
||||
|
||||
msg, ok = self.backend.add(backend_root_key_name, root_key_hash)
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
|
||||
# generate encrypt key from root_key and store in backend
|
||||
encrypt_key, ok = self.generate_encrypt_key(root_key)
|
||||
if not ok:
|
||||
return {"message": encrypt_key, "status": "failed"}
|
||||
|
||||
encrypt_key_aes, status = InnerCrypt.aes_encrypt(root_key, encrypt_key)
|
||||
if not status:
|
||||
return {"message": encrypt_key_aes, "status": "failed"}
|
||||
|
||||
msg, ok = self.backend.add(backend_encrypt_key_name, encrypt_key_aes)
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
msg, ok = self.backend.add(backend_seal_key, "open")
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
current_app.config["secrets_root_key"] = root_key
|
||||
current_app.config["secrets_encrypt_key"] = encrypt_key
|
||||
self.print_token(shares, root_token=root_key)
|
||||
|
||||
return {"message": "OK",
|
||||
"details": {
|
||||
"root_token": root_key,
|
||||
"seal_tokens": shares,
|
||||
}}, True
|
||||
|
||||
def auto_unseal(self):
|
||||
if not self.trigger:
|
||||
return {
|
||||
"message": "trigger config is empty, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
if self.trigger.startswith("http"):
|
||||
return {
|
||||
"message": "todo in next step, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
# TODO
|
||||
elif len(self.trigger.strip()) == 24:
|
||||
res = self.auth_root_secret(self.trigger.encode())
|
||||
if res.get("status") == success:
|
||||
return {
|
||||
"message": success,
|
||||
"status": success
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"message": res.get("message"),
|
||||
"status": "failed"
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"message": "trigger config is invalid, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
def seal(self, root_key):
|
||||
root_key = root_key.encode()
|
||||
msg, ok = self.is_valid_root_key(root_key)
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed"
|
||||
}
|
||||
else:
|
||||
msg, ok = self.backend.update(backend_seal_key, "block")
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed",
|
||||
}
|
||||
current_app.config["secrets_root_key"] = ''
|
||||
current_app.config["secrets_encrypt_key"] = ''
|
||||
return {
|
||||
"message": success,
|
||||
"status": success
|
||||
}
|
||||
|
||||
def is_seal(self):
|
||||
"""
|
||||
If there is no initialization or the root key is inconsistent, it is considered to be in a sealed state.
|
||||
:return:
|
||||
"""
|
||||
secrets_root_key = current_app.config.get("secrets_root_key")
|
||||
msg, ok = self.is_valid_root_key(secrets_root_key)
|
||||
if not ok:
|
||||
return True
|
||||
status = self.backend.get(backend_seal_key)
|
||||
return status == "block"
|
||||
|
||||
@classmethod
|
||||
def print_token(cls, shares, root_token):
|
||||
"""
|
||||
data: {"message": "OK",
|
||||
"details": {
|
||||
"root_token": root_key,
|
||||
"seal_tokens": shares,
|
||||
}}
|
||||
"""
|
||||
colorama_init()
|
||||
print(Style.BRIGHT, "Please be sure to store the Unseal Key in a secure location and avoid losing it."
|
||||
" The Unseal Key is required to unseal the system every time when it restarts."
|
||||
" Successful unsealing is necessary to enable the password feature." + Style.RESET_ALL)
|
||||
|
||||
for i, v in enumerate(shares):
|
||||
print(
|
||||
"unseal token " + str(i + 1) + ": " + Fore.RED + Back.BLACK + v.decode("utf-8") + Style.RESET_ALL)
|
||||
print()
|
||||
|
||||
print(Fore.GREEN + "root token: " + root_token.decode("utf-8") + Style.RESET_ALL)
|
||||
|
||||
@classmethod
|
||||
def print_response(cls, data):
|
||||
status = data.get("status", "")
|
||||
message = data.get("message", "")
|
||||
status_colors = {
|
||||
"skip": Style.BRIGHT,
|
||||
"failed": Fore.RED,
|
||||
"waiting": Fore.YELLOW,
|
||||
}
|
||||
print(status_colors.get(status, Fore.GREEN), message, Style.RESET_ALL)
|
||||
|
||||
|
||||
class InnerCrypt:
|
||||
def __init__(self):
|
||||
secrets_encrypt_key = current_app.config.get("secrets_encrypt_key", "")
|
||||
self.encrypt_key = b64decode(secrets_encrypt_key.encode("utf-8"))
|
||||
|
||||
def encrypt(self, plaintext):
|
||||
"""
|
||||
encrypt method contain aes currently
|
||||
"""
|
||||
return self.aes_encrypt(self.encrypt_key, plaintext)
|
||||
|
||||
def decrypt(self, ciphertext):
|
||||
"""
|
||||
decrypt method contain aes currently
|
||||
"""
|
||||
return self.aes_decrypt(self.encrypt_key, ciphertext)
|
||||
|
||||
@classmethod
|
||||
def aes_encrypt(cls, key, plaintext):
|
||||
if isinstance(plaintext, str):
|
||||
plaintext = string_to_bytes(plaintext)
|
||||
iv = os.urandom(global_iv_length)
|
||||
try:
|
||||
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
|
||||
encryptor = cipher.encryptor()
|
||||
v_padder = padding.PKCS7(algorithms.AES.block_size).padder()
|
||||
padded_plaintext = v_padder.update(plaintext) + v_padder.finalize()
|
||||
ciphertext = encryptor.update(padded_plaintext) + encryptor.finalize()
|
||||
|
||||
return b64encode(iv + ciphertext).decode("utf-8"), True
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
|
||||
@classmethod
|
||||
def aes_decrypt(cls, key, ciphertext):
|
||||
try:
|
||||
s = b64decode(ciphertext.encode("utf-8"))
|
||||
iv = s[:global_iv_length]
|
||||
ciphertext = s[global_iv_length:]
|
||||
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
|
||||
decrypter = cipher.decryptor()
|
||||
decrypted_padded_plaintext = decrypter.update(ciphertext) + decrypter.finalize()
|
||||
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
|
||||
plaintext = unpadder.update(decrypted_padded_plaintext) + unpadder.finalize()
|
||||
|
||||
return plaintext.decode('utf-8'), True
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
km = KeyManage()
|
||||
# info, shares, status = km.generate_unseal_keys()
|
||||
# print(info, shares, status)
|
||||
# print("..................")
|
||||
# for i in shares:
|
||||
# print(b64encode(i[1]).decode())
|
||||
|
||||
res1, ok1 = km.init()
|
||||
if not ok1:
|
||||
print(res1)
|
||||
# for j in res["details"]["seal_tokens"]:
|
||||
# r = km.unseal(j)
|
||||
# if r["status"] != "waiting":
|
||||
# if r["status"] != "success":
|
||||
# print("r........", r)
|
||||
# else:
|
||||
# print(r)
|
||||
# break
|
||||
|
||||
t_plaintext = b"Hello, World!" # The plaintext to encrypt
|
||||
c = InnerCrypt()
|
||||
t_ciphertext, status1 = c.encrypt(t_plaintext)
|
||||
print("Ciphertext:", t_ciphertext)
|
||||
decrypted_plaintext, status2 = c.decrypt(t_ciphertext)
|
||||
print("Decrypted plaintext:", decrypted_plaintext)
|
cmdb-api/api/lib/secrets/secrets.py (new file, 35 lines)
@@ -0,0 +1,35 @@
from api.models.cmdb import InnerKV


class InnerKVManger(object):
    def __init__(self):
        pass

    @classmethod
    def add(cls, key, value):
        data = {"key": key, "value": value}
        res = InnerKV.create(**data)
        if res.key == key:
            return "success", True

        return "add failed", False

    @classmethod
    def get(cls, key):
        res = InnerKV.get_by(first=True, to_dict=False, key=key)
        if not res:
            return None

        return res.value

    @classmethod
    def update(cls, key, value):
        res = InnerKV.get_by(first=True, to_dict=False, key=key)
        if not res:
            return cls.add(key, value)

        t = res.update(value=value)
        if t.key == key:
            return "success", True

        return "update failed", True
cmdb-api/api/lib/secrets/vault.py (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
from base64 import b64decode
|
||||
from base64 import b64encode
|
||||
|
||||
import hvac
|
||||
|
||||
|
||||
class VaultClient:
|
||||
def __init__(self, base_url, token, mount_path='cmdb'):
|
||||
self.client = hvac.Client(url=base_url, token=token)
|
||||
self.mount_path = mount_path
|
||||
|
||||
def create_app_role(self, role_name, policies):
|
||||
resp = self.client.create_approle(role_name, policies=policies)
|
||||
|
||||
return resp == 200
|
||||
|
||||
def delete_app_role(self, role_name):
|
||||
resp = self.client.delete_approle(role_name)
|
||||
|
||||
return resp == 204
|
||||
|
||||
def update_app_role_policies(self, role_name, policies):
|
||||
resp = self.client.update_approle_role(role_name, policies=policies)
|
||||
|
||||
return resp == 204
|
||||
|
||||
def get_app_role(self, role_name):
|
||||
resp = self.client.get_approle(role_name)
|
||||
resp.json()
|
||||
if resp.status_code == 200:
|
||||
return resp.json
|
||||
else:
|
||||
return {}
|
||||
|
||||
def enable_secrets_engine(self):
|
||||
resp = self.client.sys.enable_secrets_engine('kv', path=self.mount_path)
|
||||
resp_01 = self.client.sys.enable_secrets_engine('transit')
|
||||
|
||||
if resp.status_code == 200 and resp_01.status_code == 200:
|
||||
return resp.json
|
||||
else:
|
||||
return {}
|
||||
|
||||
def encrypt(self, plaintext):
|
||||
response = self.client.secrets.transit.encrypt_data(name='transit-key', plaintext=plaintext)
|
||||
ciphertext = response['data']['ciphertext']
|
||||
|
||||
return ciphertext
|
||||
|
||||
# decrypt data
|
||||
def decrypt(self, ciphertext):
|
||||
response = self.client.secrets.transit.decrypt_data(name='transit-key', ciphertext=ciphertext)
|
||||
plaintext = response['data']['plaintext']
|
||||
|
||||
return plaintext
|
||||
|
||||
def write(self, path, data, encrypt=None):
|
||||
if encrypt:
|
||||
for k, v in data.items():
|
||||
data[k] = self.encrypt(self.encode_base64(v))
|
||||
response = self.client.secrets.kv.v2.create_or_update_secret(
|
||||
path=path,
|
||||
secret=data,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
# read data
|
||||
def read(self, path, decrypt=True):
|
||||
try:
|
||||
response = self.client.secrets.kv.v2.read_secret_version(
|
||||
path=path, raise_on_deleted_version=False, mount_point=self.mount_path
|
||||
)
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
data = response['data']['data']
|
||||
if decrypt:
|
||||
try:
|
||||
for k, v in data.items():
|
||||
data[k] = self.decode_base64(self.decrypt(v))
|
||||
except:
|
||||
return data, True
|
||||
|
||||
return data, True
|
||||
|
||||
# update data
|
||||
def update(self, path, data, overwrite=True, encrypt=True):
|
||||
if encrypt:
|
||||
for k, v in data.items():
|
||||
data[k] = self.encrypt(self.encode_base64(v))
|
||||
if overwrite:
|
||||
response = self.client.secrets.kv.v2.create_or_update_secret(
|
||||
path=path,
|
||||
secret=data,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
else:
|
||||
response = self.client.secrets.kv.v2.patch(path=path, secret=data, mount_point=self.mount_path)
|
||||
|
||||
return response
|
||||
|
||||
# delete data
|
||||
def delete(self, path):
|
||||
response = self.client.secrets.kv.v2.delete_metadata_and_all_versions(
|
||||
path=path,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
# Base64 encode
|
||||
@classmethod
|
||||
def encode_base64(cls, data):
|
||||
encoded_bytes = b64encode(data.encode())
|
||||
encoded_string = encoded_bytes.decode()
|
||||
|
||||
return encoded_string
|
||||
|
||||
# Base64 decode
|
||||
@classmethod
|
||||
def decode_base64(cls, encoded_string):
|
||||
decoded_bytes = b64decode(encoded_string)
|
||||
decoded_string = decoded_bytes.decode()
|
||||
|
||||
return decoded_string
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
_base_url = "http://localhost:8200"
|
||||
_token = "your token"
|
||||
|
||||
_path = "test001"
|
||||
# Example
|
||||
sdk = VaultClient(_base_url, _token)
|
||||
# sdk.enable_secrets_engine()
|
||||
_data = {"key1": "value1", "key2": "value2", "key3": "value3"}
|
||||
_data = sdk.update(_path, _data, overwrite=True, encrypt=True)
|
||||
print(_data)
|
||||
_data = sdk.read(_path, decrypt=True)
|
||||
print(_data)
|
@@ -12,6 +12,9 @@ from Crypto.Cipher import AES
|
||||
from elasticsearch import Elasticsearch
|
||||
from flask import current_app
|
||||
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
|
||||
|
||||
class BaseEnum(object):
|
||||
_ALL_ = set() # type: Set[str]
|
||||
@@ -286,3 +289,33 @@ class AESCrypto(object):
|
||||
text_decrypted = cipher.decrypt(encode_bytes)
|
||||
|
||||
return cls.unpad(text_decrypted).decode('utf8')
|
||||
|
||||
|
||||
class Crypto(AESCrypto):
|
||||
@classmethod
|
||||
def encrypt(cls, data):
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
|
||||
if not KeyManage(backend=InnerKVManger()).is_seal():
|
||||
res, status = InnerCrypt().encrypt(data)
|
||||
if status:
|
||||
return res
|
||||
|
||||
return AESCrypto().encrypt(data)
|
||||
|
||||
@classmethod
|
||||
def decrypt(cls, data):
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
|
||||
if not KeyManage(backend=InnerKVManger()).is_seal():
|
||||
try:
|
||||
res, status = InnerCrypt().decrypt(data)
|
||||
if status:
|
||||
return res
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
return AESCrypto().decrypt(data)
|
||||
except:
|
||||
return data
|
||||
|
@@ -5,16 +5,18 @@ import copy
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
|
||||
import ldap
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from flask_sqlalchemy import BaseQuery
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.database import CRUDModel
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.database import SoftDeleteMixin
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.const import OperateType
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
|
||||
class App(Model):
|
||||
@@ -27,21 +29,26 @@ class App(Model):
|
||||
|
||||
|
||||
class UserQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(UserQuery, self)._join(*args, **kwargs)
|
||||
|
||||
def authenticate(self, login, password):
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
|
||||
user = self.filter(db.or_(User.username == login,
|
||||
User.email == login)).filter(User.deleted.is_(False)).filter(User.block == 0).first()
|
||||
if user:
|
||||
current_app.logger.info(user)
|
||||
authenticated = user.check_password(password)
|
||||
if authenticated:
|
||||
from api.tasks.acl import op_record
|
||||
op_record.apply_async(args=(None, login, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
|
||||
_id = AuditCRUD.add_login_log(login, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
else:
|
||||
AuditCRUD.add_login_log(login, False, ErrFormat.invalid_password)
|
||||
else:
|
||||
authenticated = False
|
||||
|
||||
AuditCRUD.add_login_log(login, False, ErrFormat.user_not_found.format(login))
|
||||
|
||||
current_app.logger.info(("login", login, user, authenticated))
|
||||
|
||||
return user, authenticated
|
||||
|
||||
def authenticate_with_key(self, key, secret, args, path):
|
||||
@@ -56,37 +63,6 @@ class UserQuery(BaseQuery):
|
||||
|
||||
return user, authenticated
|
||||
|
||||
def authenticate_with_ldap(self, username, password):
|
||||
ldap_conn = ldap.initialize(current_app.config.get('LDAP_SERVER'))
|
||||
ldap_conn.protocol_version = 3
|
||||
ldap_conn.set_option(ldap.OPT_REFERRALS, 0)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = current_app.config.get('LDAP_USER_DN').format(username.split('@')[0])
|
||||
else:
|
||||
who = current_app.config.get('LDAP_USER_DN').format(username)
|
||||
email = "{}@{}".format(who, current_app.config.get('LDAP_DOMAIN'))
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = self.get_by_username(username)
|
||||
try:
|
||||
|
||||
if not password:
|
||||
raise ldap.INVALID_CREDENTIALS
|
||||
|
||||
ldap_conn.simple_bind_s(who, password)
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email)
|
||||
|
||||
from api.tasks.acl import op_record
|
||||
op_record.apply_async(args=(None, username, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
|
||||
|
||||
return user, True
|
||||
except ldap.INVALID_CREDENTIALS:
|
||||
return user, False
|
||||
|
||||
def search(self, key):
|
||||
query = self.filter(db.or_(User.email == key,
|
||||
User.nickname.ilike('%' + key + '%'),
|
||||
@@ -136,6 +112,7 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
wx_id = db.Column(db.String(32))
|
||||
employee_id = db.Column(db.String(16), index=True)
|
||||
avatar = db.Column(db.String(128))
|
||||
|
||||
# apps = db.Column(db.JSON)
|
||||
|
||||
def __str__(self):
|
||||
@@ -166,11 +143,9 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
|
||||
|
||||
class RoleQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(RoleQuery, self)._join(*args, **kwargs)
|
||||
|
||||
def authenticate(self, login, password):
|
||||
role = self.filter(Role.name == login).first()
|
||||
role = self.filter(Role.name == login).filter(Role.deleted.is_(False)).first()
|
||||
if role:
|
||||
authenticated = role.check_password(password)
|
||||
|
||||
@@ -375,3 +350,16 @@ class AuditTriggerLog(Model):
|
||||
current = db.Column(db.JSON, default=dict(), comment='当前数据')
|
||||
extra = db.Column(db.JSON, default=dict(), comment='权限名')
|
||||
source = db.Column(db.String(16), default='', comment='来源')
|
||||
|
||||
|
||||
class AuditLoginLog(Model2):
|
||||
__tablename__ = "acl_audit_login_logs"
|
||||
|
||||
username = db.Column(db.String(64), index=True)
|
||||
channel = db.Column(db.Enum('web', 'api'), default="web")
|
||||
ip = db.Column(db.String(15))
|
||||
browser = db.Column(db.String(256))
|
||||
description = db.Column(db.String(128))
|
||||
is_ok = db.Column(db.Boolean)
|
||||
login_at = db.Column(db.DateTime)
|
||||
logout_at = db.Column(db.DateTime)
|
||||
|
@@ -12,7 +12,9 @@ from api.lib.cmdb.const import CITypeOperateType
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import OperateType
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.database import Model, Model2
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.utils import Crypto
|
||||
|
||||
|
||||
# template
|
||||
@@ -89,13 +91,37 @@ class Attribute(Model):
|
||||
compute_expr = db.Column(db.Text)
|
||||
compute_script = db.Column(db.Text)
|
||||
|
||||
choice_web_hook = db.Column(db.JSON)
|
||||
_choice_web_hook = db.Column('choice_web_hook', db.JSON)
|
||||
choice_other = db.Column(db.JSON)
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
|
||||
option = db.Column(db.JSON)
|
||||
|
||||
def _get_webhook(self):
|
||||
if self._choice_web_hook:
|
||||
if self._choice_web_hook.get('headers') and "Cookie" in self._choice_web_hook['headers']:
|
||||
self._choice_web_hook['headers']['Cookie'] = Crypto.decrypt(self._choice_web_hook['headers']['Cookie'])
|
||||
|
||||
if self._choice_web_hook.get('authorization'):
|
||||
for k, v in self._choice_web_hook['authorization'].items():
|
||||
self._choice_web_hook['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._choice_web_hook
|
||||
|
||||
def _set_webhook(self, data):
|
||||
if data:
|
||||
if data.get('headers') and "Cookie" in data['headers']:
|
||||
data['headers']['Cookie'] = Crypto.encrypt(data['headers']['Cookie'])
|
||||
|
||||
if data.get('authorization'):
|
||||
for k, v in data['authorization'].items():
|
||||
data['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._choice_web_hook = data
|
||||
|
||||
choice_web_hook = db.synonym("_choice_web_hook", descriptor=property(_get_webhook, _set_webhook))
|
||||
|
||||
|
||||
class CITypeAttribute(Model):
|
||||
__tablename__ = "c_ci_type_attributes"
|
||||
@@ -130,7 +156,25 @@ class CITypeTrigger(Model):
|
||||
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
|
||||
option = db.Column('notify', db.JSON)
|
||||
_option = db.Column('notify', db.JSON)
|
||||
|
||||
def _get_option(self):
|
||||
if self._option and self._option.get('webhooks'):
|
||||
if self._option['webhooks'].get('authorization'):
|
||||
for k, v in self._option['webhooks']['authorization'].items():
|
||||
self._option['webhooks']['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._option
|
||||
|
||||
def _set_option(self, data):
|
||||
if data and data.get('webhooks'):
|
||||
if data['webhooks'].get('authorization'):
|
||||
for k, v in data['webhooks']['authorization'].items():
|
||||
data['webhooks']['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._option = data
|
||||
|
||||
option = db.synonym("_option", descriptor=property(_get_option, _set_option))
|
||||
|
||||
|
||||
class CITriggerHistory(Model):
|
||||
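Both choice_web_hook and option above use the same SQLAlchemy technique: the raw column is renamed with a leading underscore, a property pair encrypts on write and decrypts on read, and db.synonym re-exposes the original attribute name. A minimal self-contained sketch of that pattern against in-memory SQLite; toy_encrypt/toy_decrypt stand in for Crypto.encrypt/Crypto.decrypt and are not part of this diff.

# Minimal sketch of the db.synonym + property pattern used for choice_web_hook/option.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker, synonym

Base = declarative_base()


def toy_encrypt(value):
    return value[::-1]   # placeholder for Crypto.encrypt


def toy_decrypt(value):
    return value[::-1]   # placeholder for Crypto.decrypt


class Webhook(Base):
    __tablename__ = "webhooks"

    id = Column(Integer, primary_key=True)
    _cookie = Column("cookie", String(128))   # raw column stores the encrypted value

    def _get_cookie(self):
        return toy_decrypt(self._cookie) if self._cookie else None

    def _set_cookie(self, value):
        self._cookie = toy_encrypt(value) if value else None

    # expose the old attribute name; reads decrypt, writes encrypt
    cookie = synonym("_cookie", descriptor=property(_get_cookie, _set_cookie))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Webhook(cookie="secret-value"))
session.commit()

row = session.query(Webhook).one()
print(row.cookie)    # "secret-value", decrypted on access
print(row._cookie)   # "eulav-terces", as stored in the database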
@@ -174,6 +218,8 @@ class CIRelation(Model):
|
||||
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
|
||||
more = db.Column(db.Integer, db.ForeignKey("c_cis.id"))
|
||||
|
||||
ancestor_ids = db.Column(db.String(128), index=True)
|
||||
|
||||
first_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.first_ci_id")
|
||||
second_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.second_ci_id")
|
||||
relation_type = db.relationship("RelationType", backref="c_ci_relations.relation_type_id")
|
||||
@@ -504,3 +550,10 @@ class CIFilterPerms(Model):
|
||||
attr_filter = db.Column(db.Text)
|
||||
|
||||
rid = db.Column(db.Integer, index=True)
|
||||
|
||||
|
||||
class InnerKV(Model):
|
||||
__tablename__ = "c_kv"
|
||||
|
||||
key = db.Column(db.String(128), index=True)
|
||||
value = db.Column(db.Text)
|
||||
|
@@ -96,3 +96,11 @@ class NoticeConfig(Model):
|
||||
|
||||
platform = db.Column(db.VARCHAR(255), nullable=False)
|
||||
info = db.Column(db.JSON)
|
||||
|
||||
|
||||
class CommonFile(Model):
|
||||
__tablename__ = 'common_file'
|
||||
|
||||
file_name = db.Column(db.VARCHAR(512), nullable=False, index=True)
|
||||
origin_name = db.Column(db.VARCHAR(512), nullable=False)
|
||||
binary = db.Column(db.LargeBinary(16777216), nullable=False)
|
||||
|
@@ -46,5 +46,4 @@ def register_resources(resource_path, rest_api):
|
||||
resource_cls.url_prefix = ("",)
|
||||
if isinstance(resource_cls.url_prefix, six.string_types):
|
||||
resource_cls.url_prefix = (resource_cls.url_prefix,)
|
||||
|
||||
rest_api.add_resource(resource_cls, *resource_cls.url_prefix)
|
||||
|
@@ -9,7 +9,8 @@ from werkzeug.exceptions import BadRequest
from werkzeug.exceptions import NotFound

from api.extensions import celery
from api.extensions import db
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
from api.lib.perm.acl.audit import AuditCRUD
from api.lib.perm.acl.audit import AuditOperateSource
from api.lib.perm.acl.audit import AuditOperateType
@@ -24,10 +25,10 @@ from api.models.acl import Role
from api.models.acl import Trigger


@celery.task(base=QueueOnce,
             name="acl.role_rebuild",
             queue=ACL_QUEUE,
             once={"graceful": True, "unlock_before_run": True})
@celery.task(name="acl.role_rebuild",
             queue=ACL_QUEUE,)
@flush_db
@reconnect_db
def role_rebuild(rids, app_id):
    rids = rids if isinstance(rids, list) else [rids]
    for rid in rids:
@@ -37,6 +38,7 @@ def role_rebuild(rids, app_id):


@celery.task(name="acl.update_resource_to_build_role", queue=ACL_QUEUE)
@reconnect_db
def update_resource_to_build_role(resource_id, app_id, group_id=None):
    rids = [i.id for i in Role.get_by(__func_isnot__key_uid=None, fl='id', to_dict=False)]
    rids += [i.id for i in Role.get_by(app_id=app_id, fl='id', to_dict=False)]
@@ -52,9 +54,9 @@ def update_resource_to_build_role(resource_id, app_id, group_id=None):


@celery.task(name="acl.apply_trigger", queue=ACL_QUEUE)
@flush_db
@reconnect_db
def apply_trigger(_id, resource_id=None, operator_uid=None):
    db.session.remove()

    from api.lib.perm.acl.permission import PermissionCRUD

    trigger = Trigger.get_by_id(_id)
@@ -118,9 +120,9 @@ def apply_trigger(_id, resource_id=None, operator_uid=None):


@celery.task(name="acl.cancel_trigger", queue=ACL_QUEUE)
@flush_db
@reconnect_db
def cancel_trigger(_id, resource_id=None, operator_uid=None):
    db.session.remove()

    from api.lib.perm.acl.permission import PermissionCRUD

    trigger = Trigger.get_by_id(_id)
@@ -186,18 +188,19 @@ def cancel_trigger(_id, resource_id=None, operator_uid=None):


@celery.task(name="acl.op_record", queue=ACL_QUEUE)
def op_record(app, rolename, operate_type, obj):
@reconnect_db
def op_record(app, role_name, operate_type, obj):
    if isinstance(app, int):
        app = AppCache.get(app)
        app = app and app.name

    if isinstance(rolename, int):
        u = UserCache.get(rolename)
    if isinstance(role_name, int):
        u = UserCache.get(role_name)
        if u:
            rolename = u.username
            role_name = u.username
        if not u:
            r = RoleCache.get(rolename)
            r = RoleCache.get(role_name)
            if r:
                rolename = r.name
                role_name = r.name

    OperateRecordCRUD.add(app, rolename, operate_type, obj)
    OperateRecordCRUD.add(app, role_name, operate_type, obj)
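The tasks above drop the QueueOnce-based task definition in favour of plain Celery tasks wrapped in flush_db / reconnect_db, replacing the scattered db.session.remove() calls. The sketch below shows the general shape such session-hygiene decorators can take; only the decorator names and the api.extensions.db import come from the diff, the bodies are an assumption, not the project's actual implementation.

# Hypothetical sketch of session-hygiene decorators for Celery tasks.
# Assumption: decorator bodies are illustrative; only their names and the
# Flask-SQLAlchemy handle `db` appear in the diff itself.
import functools

from sqlalchemy.exc import OperationalError

from api.extensions import db


def flush_db(func):
    """Start the task on a fresh scoped session."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        db.session.remove()          # discard any state left by a previous task
        return func(*args, **kwargs)
    return wrapper


def reconnect_db(func):
    """Retry once if the pooled connection has gone away (e.g. MySQL timeout)."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except OperationalError:
            db.session.rollback()    # drop the broken connection
            db.session.remove()
            return func(*args, **kwargs)
    return wrapper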
@@ -16,6 +16,9 @@ from api.lib.cmdb.cache import CITypeAttributesCache
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
from api.lib.perm.acl.cache import UserCache
from api.lib.utils import Lock
from api.lib.utils import handle_arg_list
@@ -25,11 +28,12 @@ from api.models.cmdb import CITypeAttribute


@celery.task(name="cmdb.ci_cache", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def ci_cache(ci_id, operate_type, record_id):
    from api.lib.cmdb.ci import CITriggerManager

    time.sleep(0.01)
    db.session.remove()

    m = api.lib.cmdb.ci.CIManager()
    ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
@@ -49,9 +53,10 @@ def ci_cache(ci_id, operate_type, record_id):


@celery.task(name="cmdb.batch_ci_cache", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def batch_ci_cache(ci_ids, ):  # only for attribute change index
    time.sleep(1)
    db.session.remove()

    for ci_id in ci_ids:
        m = api.lib.cmdb.ci.CIManager()
@@ -66,6 +71,7 @@ def batch_ci_cache(ci_ids, ):  # only for attribute change index


@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
@reconnect_db
def ci_delete(ci_id):
    current_app.logger.info(ci_id)

@@ -78,6 +84,7 @@ def ci_delete(ci_id):


@celery.task(name="cmdb.ci_delete_trigger", queue=CMDB_QUEUE)
@reconnect_db
def ci_delete_trigger(trigger, operate_type, ci_dict):
    current_app.logger.info('delete ci {} trigger'.format(ci_dict['_id']))
    from api.lib.cmdb.ci import CITriggerManager
@@ -89,23 +96,39 @@ def ci_delete_trigger(trigger, operate_type, ci_dict):


@celery.task(name="cmdb.ci_relation_cache", queue=CMDB_QUEUE)
def ci_relation_cache(parent_id, child_id):
    db.session.remove()

@flush_db
@reconnect_db
def ci_relation_cache(parent_id, child_id, ancestor_ids):
    with Lock("CIRelation_{}".format(parent_id)):
        children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
        children = json.loads(children) if children is not None else {}
        if ancestor_ids is None:
            children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
            children = json.loads(children) if children is not None else {}

        cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, first=True, to_dict=False)
        if str(child_id) not in children:
            children[str(child_id)] = cr.second_ci.type_id
            cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
                                   first=True, to_dict=False)
            if str(child_id) not in children:
                children[str(child_id)] = cr.second_ci.type_id

        rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
            rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)

        else:
            key = "{},{}".format(ancestor_ids, parent_id)
            grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
            grandson = json.loads(grandson) if grandson is not None else {}

            cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
                                   first=True, to_dict=False)
            if cr and str(cr.second_ci_id) not in grandson:
                grandson[str(cr.second_ci_id)] = cr.second_ci.type_id

            rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)

    current_app.logger.info("ADD ci relation cache: {0} -> {1}".format(parent_id, child_id))
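The cache above keeps two layouts: direct children keyed by parent_id under REDIS_PREFIX_CI_RELATION, and many-to-many "grandson" entries keyed by "<ancestor_ids>,<parent_id>" under REDIS_PREFIX_CI_RELATION2. A standalone sketch of that keying scheme using plain redis-py; the prefixes, host and hash layout here are illustrative, not the project's rd helper.

# Standalone sketch of the two-level relation cache keying shown above.
# Assumptions: a local Redis and illustrative prefixes standing in for
# REDIS_PREFIX_CI_RELATION / REDIS_PREFIX_CI_RELATION2.
import json

import redis

r = redis.Redis(host="127.0.0.1", port=6379, db=0)

REL_PREFIX = "CMDB::CIRelation"        # direct parent -> {child_id: type_id}
REL2_PREFIX = "CMDB::CIRelation2"      # many-to-many, keyed by ancestor path


def cache_relation(parent_id, child_id, child_type_id, ancestor_ids=None):
    if ancestor_ids is None:
        key, prefix = str(parent_id), REL_PREFIX
    else:
        # the ancestor path disambiguates the same parent reached through
        # different many-to-many branches
        key, prefix = "{},{}".format(ancestor_ids, parent_id), REL2_PREFIX

    raw = r.hget(prefix, key)
    children = json.loads(raw) if raw else {}
    children.setdefault(str(child_id), child_type_id)
    r.hset(prefix, key, json.dumps(children))


cache_relation(1, 2, 10)                      # direct relation
cache_relation(1, 3, 11, ancestor_ids="7,9")  # many-to-many branch 7 -> 9 -> 1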

@celery.task(name="cmdb.ci_relation_add", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def ci_relation_add(parent_dict, child_id, uid):
    """
    :param parent_dict: key is '$parent_model.attr_name'
@@ -121,8 +144,6 @@ def ci_relation_add(parent_dict, child_id, uid):
    current_app.test_request_context().push()
    login_user(UserCache.get(uid))

    db.session.remove()

    for parent in parent_dict:
        parent_ci_type_name, _attr_name = parent.strip()[1:].split('.', 1)
        attr_name = CITypeAttributeManager.get_attr_name(parent_ci_type_name, _attr_name)
@@ -147,27 +168,43 @@ def ci_relation_add(parent_dict, child_id, uid):
        except Exception as e:
            current_app.logger.warning(e)
        finally:
            db.session.remove()
            try:
                db.session.commit()
            except:
                db.session.rollback()


@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
def ci_relation_delete(parent_id, child_id):
@reconnect_db
def ci_relation_delete(parent_id, child_id, ancestor_ids):
    with Lock("CIRelation_{}".format(parent_id)):
        children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
        children = json.loads(children) if children is not None else {}
        if ancestor_ids is None:
            children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
            children = json.loads(children) if children is not None else {}

        if str(child_id) in children:
            children.pop(str(child_id))
            if str(child_id) in children:
                children.pop(str(child_id))

        rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
            rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)

        else:
            key = "{},{}".format(ancestor_ids, parent_id)
            grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
            grandson = json.loads(grandson) if grandson is not None else {}

            if str(child_id) in grandson:
                grandson.pop(str(child_id))

            rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)

    current_app.logger.info("DELETE ci relation cache: {0} -> {1}".format(parent_id, child_id))


@celery.task(name="cmdb.ci_type_attribute_order_rebuild", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def ci_type_attribute_order_rebuild(type_id, uid):
    current_app.logger.info('rebuild attribute order')
    db.session.remove()

    from api.lib.cmdb.ci_type import CITypeAttributeGroupManager

@@ -188,11 +225,11 @@ def ci_type_attribute_order_rebuild(type_id, uid):


@celery.task(name="cmdb.calc_computed_attribute", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def calc_computed_attribute(attr_id, uid):
    from api.lib.cmdb.ci import CIManager

    db.session.remove()

    current_app.test_request_context().push()
    login_user(UserCache.get(uid))

@@ -1,24 +1,24 @@
# -*- coding:utf-8 -*-
import requests
from flask import current_app

from api.extensions import celery
from api.extensions import db
from api.lib.common_setting.acl import ACLManager
from api.lib.common_setting.const import COMMON_SETTING_QUEUE
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.common_setting.resp_format import ErrFormat
from api.models.common_setting import Department
from api.models.common_setting import Department, Employee
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db


@celery.task(name="common_setting.edit_employee_department_in_acl", queue=COMMON_SETTING_QUEUE)
@celery.task(name="common_setting.edit_employee_department_in_acl", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
    """
    :param e_list:{acl_rid: 11, department_id: 22}
    :param new_d_id
    :param op_uid
    """
    db.session.remove()

    result = []
    new_department = Department.get_by(
        first=True, department_id=new_d_id, to_dict=False)
@@ -75,3 +75,41 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
            result.append(ErrFormat.acl_add_user_to_role_failed.format(str(e)))

    return result


@celery.task(name="common_setting.refresh_employee_acl_info", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def refresh_employee_acl_info():
    acl = ACLManager('acl')
    role_map = {role['name']: role for role in acl.get_all_roles()}

    criterion = [
        Employee.deleted == 0
    ]
    query = Employee.query.filter(*criterion).order_by(
        Employee.created_at.desc()
    )

    for em in query.all():
        if em.acl_uid and em.acl_rid:
            continue
        role = role_map.get(em.username, None)
        if not role:
            continue

        params = dict()
        if not em.acl_uid:
            params['acl_uid'] = role.get('uid', 0)

        if not em.acl_rid:
            params['acl_rid'] = role.get('id', 0)

        try:
            em.update(**params)
            current_app.logger.info(
                f"refresh_employee_acl_info success, employee_id: {em.employee_id}, uid: {em.acl_uid}, "
                f"rid: {em.acl_rid}")
        except Exception as e:
            current_app.logger.error(str(e))
            continue

@@ -24,6 +24,7 @@ class AuditLogView(APIView):
|
||||
'role': AuditCRUD.search_role,
|
||||
'trigger': AuditCRUD.search_trigger,
|
||||
'resource': AuditCRUD.search_resource,
|
||||
'login': AuditCRUD.search_login,
|
||||
}
|
||||
if name not in func_map:
|
||||
abort(400, f'wrong {name}, please use {func_map.keys()}')
|
||||
|
@@ -8,11 +8,15 @@ from flask import abort
|
||||
from flask import current_app
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask_login import login_user, logout_user
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.decorator import args_required
|
||||
from api.lib.decorator import args_validate
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
from api.lib.perm.acl.cache import User
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
@@ -34,8 +38,10 @@ class LoginView(APIView):
|
||||
username = request.values.get("username") or request.values.get("email")
|
||||
password = request.values.get("password")
|
||||
_role = None
|
||||
if current_app.config.get('AUTH_WITH_LDAP'):
|
||||
user, authenticated = User.query.authenticate_with_ldap(username, password)
|
||||
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
|
||||
if config.get('enabled') or config.get('enable'):
|
||||
from api.lib.perm.authentication.ldap import authenticate_with_ldap
|
||||
user, authenticated = authenticate_with_ldap(username, password)
|
||||
else:
|
||||
user, authenticated = User.query.authenticate(username, password)
|
||||
if not user:
|
||||
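The login change above replaces the AUTH_WITH_LDAP config flag with an enable check on the stored LDAP authentication record before delegating to authenticate_with_ldap. As a point of reference, a minimal sketch of what an ldap3-backed bind check of that kind can look like; the server address, DN template and return shape here are assumptions, not the project's implementation.

# Hedged sketch of an ldap3 simple-bind check, roughly the role played by
# api.lib.perm.authentication.ldap.authenticate_with_ldap in the diff.
# The server URI and DN template below are illustrative only.
from ldap3 import Connection, Server
from ldap3.core.exceptions import LDAPException


def authenticate_with_ldap(username, password,
                           server_uri="ldaps://ldap.example.com",
                           user_dn_tpl="cn={},ou=users,dc=example,dc=com"):
    """Return (username, True) on a successful bind, else (None, False)."""
    try:
        server = Server(server_uri)
        conn = Connection(server, user=user_dn_tpl.format(username), password=password)
        ok = conn.bind()            # a simple bind doubles as a credential check
        conn.unbind()
        return (username, True) if ok else (None, False)
    except LDAPException:
        return None, False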
@@ -176,4 +182,7 @@ class LogoutView(APIView):
|
||||
@auth_abandoned
|
||||
def post(self):
|
||||
logout_user()
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
self.jsonify(code=200)
|
||||
|
@@ -84,11 +84,10 @@ class CIView(APIView):
|
||||
ci_dict = self._wrap_ci_dict()
|
||||
|
||||
manager = CIManager()
|
||||
current_app.logger.debug(ci_dict)
|
||||
ci_id = manager.add(ci_type,
|
||||
exist_policy=exist_policy or ExistPolicy.REJECT,
|
||||
_no_attribute_policy=_no_attribute_policy,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
**ci_dict)
|
||||
|
||||
return self.jsonify(ci_id=ci_id)
|
||||
@@ -96,7 +95,6 @@ class CIView(APIView):
|
||||
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type)
|
||||
def put(self, ci_id=None):
|
||||
args = request.values
|
||||
current_app.logger.info(args)
|
||||
ci_type = args.get("ci_type")
|
||||
_no_attribute_policy = args.get("no_attribute_policy", ExistPolicy.IGNORE)
|
||||
|
||||
@@ -104,14 +102,14 @@ class CIView(APIView):
|
||||
manager = CIManager()
|
||||
if ci_id is not None:
|
||||
manager.update(ci_id,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
**ci_dict)
|
||||
else:
|
||||
request.values.pop('exist_policy', None)
|
||||
ci_id = manager.add(ci_type,
|
||||
exist_policy=ExistPolicy.REPLACE,
|
||||
_no_attribute_policy=_no_attribute_policy,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
**ci_dict)
|
||||
|
||||
return self.jsonify(ci_id=ci_id)
|
||||
@@ -228,11 +226,11 @@ class CIFlushView(APIView):
|
||||
from api.tasks.cmdb import ci_cache
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
if ci_id is not None:
|
||||
ci_cache.apply_async([ci_id], queue=CMDB_QUEUE)
|
||||
ci_cache.apply_async(args=(ci_id, None, None), queue=CMDB_QUEUE)
|
||||
else:
|
||||
cis = CI.get_by(to_dict=False)
|
||||
for ci in cis:
|
||||
ci_cache.apply_async([ci.id], queue=CMDB_QUEUE)
|
||||
ci_cache.apply_async(args=(ci.id, None, None), queue=CMDB_QUEUE)
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
||||
@@ -242,3 +240,13 @@ class CIAutoDiscoveryStatisticsView(APIView):
|
||||
|
||||
def get(self):
|
||||
return self.jsonify(CIManager.get_ad_statistics())
|
||||
|
||||
|
||||
class CIPasswordView(APIView):
|
||||
url_prefix = "/ci/<int:ci_id>/attributes/<int:attr_id>/password"
|
||||
|
||||
def get(self, ci_id, attr_id):
|
||||
return self.jsonify(ci_id=ci_id, attr_id=attr_id, value=CIManager.load_password(ci_id, attr_id))
|
||||
|
||||
def post(self, ci_id, attr_id):
|
||||
return self.get(ci_id, attr_id)
|
||||
|
@@ -35,6 +35,7 @@ class CIRelationSearchView(APIView):
|
||||
count = get_page_size(request.values.get("count") or request.values.get("page_size"))
|
||||
|
||||
root_id = request.values.get('root_id')
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None # only for many to many
|
||||
level = list(map(int, handle_arg_list(request.values.get('level', '1'))))
|
||||
|
||||
query = request.values.get('q', "")
|
||||
@@ -42,9 +43,11 @@ class CIRelationSearchView(APIView):
|
||||
facet = handle_arg_list(request.values.get("facet", ""))
|
||||
sort = request.values.get("sort")
|
||||
reverse = request.values.get("reverse") in current_app.config.get('BOOL_TRUE')
|
||||
has_m2m = request.values.get("has_m2m") in current_app.config.get('BOOL_TRUE')
|
||||
|
||||
start = time.time()
|
||||
s = Search(root_id, level, query, fl, facet, page, count, sort, reverse)
|
||||
s = Search(root_id, level, query, fl, facet, page, count, sort, reverse,
|
||||
ancestor_ids=ancestor_ids, has_m2m=has_m2m)
|
||||
try:
|
||||
response, counter, total, page, numfound, facet = s.search()
|
||||
except SearchError as e:
|
||||
@@ -67,9 +70,11 @@ class CIRelationStatisticsView(APIView):
|
||||
root_ids = list(map(int, handle_arg_list(request.values.get('root_ids'))))
|
||||
level = request.values.get('level', 1)
|
||||
type_ids = set(map(int, handle_arg_list(request.values.get('type_ids', []))))
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None # only for many to many
|
||||
has_m2m = request.values.get("has_m2m") in current_app.config.get('BOOL_TRUE')
|
||||
|
||||
start = time.time()
|
||||
s = Search(root_ids, level)
|
||||
s = Search(root_ids, level, ancestor_ids=ancestor_ids, has_m2m=has_m2m)
|
||||
try:
|
||||
result = s.statistics(type_ids)
|
||||
except SearchError as e:
|
||||
@@ -121,14 +126,18 @@ class CIRelationView(APIView):
|
||||
url_prefix = "/ci_relations/<int:first_ci_id>/<int:second_ci_id>"
|
||||
|
||||
def post(self, first_ci_id, second_ci_id):
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None
|
||||
|
||||
manager = CIRelationManager()
|
||||
res = manager.add(first_ci_id, second_ci_id)
|
||||
res = manager.add(first_ci_id, second_ci_id, ancestor_ids=ancestor_ids)
|
||||
|
||||
return self.jsonify(cr_id=res)
|
||||
|
||||
def delete(self, first_ci_id, second_ci_id):
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None
|
||||
|
||||
manager = CIRelationManager()
|
||||
manager.delete_2(first_ci_id, second_ci_id)
|
||||
manager.delete_2(first_ci_id, second_ci_id, ancestor_ids=ancestor_ids)
|
||||
|
||||
return self.jsonify(message="CIType Relation is deleted")
|
||||
|
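For many-to-many relations, callers of the endpoint above now pass ancestor_ids alongside the two CI ids. An illustrative client call follows; the base URL and session cookie are placeholders, only the path and the ancestor_ids parameter come from the code above.

# Illustrative client calls for the CIRelationView endpoint above.
# Assumptions: base URL and auth cookie are placeholders.
import requests

BASE = "http://127.0.0.1:5000/api/v0.1"
HEADERS = {"Cookie": "session=<your-session>"}   # placeholder auth

first_ci_id, second_ci_id = 123, 456

# create the relation along the many-to-many branch 7 -> 9
resp = requests.post(
    f"{BASE}/ci_relations/{first_ci_id}/{second_ci_id}",
    data={"ancestor_ids": "7,9"},
    headers=HEADERS,
)
print(resp.json())          # expected shape: {"cr_id": ...}

# delete it again, scoped to the same branch
requests.delete(
    f"{BASE}/ci_relations/{first_ci_id}/{second_ci_id}",
    data={"ancestor_ids": "7,9"},
    headers=HEADERS,
)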
||||
@@ -151,8 +160,9 @@ class BatchCreateOrUpdateCIRelationView(APIView):
|
||||
ci_ids = list(map(int, request.values.get('ci_ids')))
|
||||
parents = list(map(int, request.values.get('parents', [])))
|
||||
children = list(map(int, request.values.get('children', [])))
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None
|
||||
|
||||
CIRelationManager.batch_update(ci_ids, parents, children)
|
||||
CIRelationManager.batch_update(ci_ids, parents, children, ancestor_ids=ancestor_ids)
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
||||
@@ -166,7 +176,8 @@ class BatchCreateOrUpdateCIRelationView(APIView):
|
||||
def delete(self):
|
||||
ci_ids = list(map(int, request.values.get('ci_ids')))
|
||||
parents = list(map(int, request.values.get('parents', [])))
|
||||
ancestor_ids = request.values.get('ancestor_ids') or None
|
||||
|
||||
CIRelationManager.batch_delete(ci_ids, parents)
|
||||
CIRelationManager.batch_delete(ci_ids, parents, ancestor_ids=ancestor_ids)
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
@@ -166,7 +166,8 @@ class CITypeAttributeView(APIView):
|
||||
t = CITypeCache.get(type_id) or CITypeCache.get(type_name) or abort(404, ErrFormat.ci_type_not_found)
|
||||
type_id = t.id
|
||||
unique_id = t.unique_id
|
||||
unique = AttributeCache.get(unique_id).name
|
||||
unique = AttributeCache.get(unique_id)
|
||||
unique = unique and unique.name
|
||||
|
||||
attr_filter = CIFilterPermsCRUD.get_attr_filter(type_id)
|
||||
attributes = CITypeAttributeManager.get_attributes_by_type_id(type_id)
|
||||
@@ -318,12 +319,14 @@ class CITypeAttributeGroupView(APIView):
|
||||
|
||||
|
||||
class CITypeTemplateView(APIView):
|
||||
url_prefix = ("/ci_types/template/import", "/ci_types/template/export")
|
||||
url_prefix = ("/ci_types/template/import", "/ci_types/template/export", "/ci_types/<int:type_id>/template/export")
|
||||
|
||||
@role_required(RoleEnum.CONFIG)
|
||||
def get(self): # export
|
||||
return self.jsonify(
|
||||
dict(ci_type_template=CITypeTemplateManager.export_template()))
|
||||
def get(self, type_id=None): # export
|
||||
if type_id is not None:
|
||||
return self.jsonify(dict(ci_type_template=CITypeTemplateManager.export_template_by_type(type_id)))
|
||||
|
||||
return self.jsonify(dict(ci_type_template=CITypeTemplateManager.export_template()))
|
||||
|
||||
@role_required(RoleEnum.CONFIG)
|
||||
def post(self): # import
|
||||
@@ -457,13 +460,21 @@ class CITypeGrantView(APIView):
|
||||
_type = CITypeCache.get(type_id)
|
||||
type_name = _type and _type.name or abort(404, ErrFormat.ci_type_not_found)
|
||||
acl = ACLManager('cmdb')
|
||||
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and \
|
||||
not is_app_admin('cmdb'):
|
||||
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and not is_app_admin('cmdb'):
|
||||
return abort(403, ErrFormat.no_permission.format(type_name, PermEnum.GRANT))
|
||||
|
||||
acl.grant_resource_to_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms)
|
||||
acl.grant_resource_to_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms, rebuild=False)
|
||||
|
||||
CIFilterPermsCRUD().add(type_id=type_id, rid=rid, **request.values)
|
||||
if request.values.get('ci_filter') or request.values.get('attr_filter'):
|
||||
CIFilterPermsCRUD().add(type_id=type_id, rid=rid, **request.values)
|
||||
else:
|
||||
from api.tasks.acl import role_rebuild
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
|
||||
app_id = AppCache.get('cmdb').id
|
||||
current_app.logger.info((rid, app_id))
|
||||
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
|
||||
current_app.logger.info('done')
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
||||
@@ -481,21 +492,27 @@ class CITypeRevokeView(APIView):
|
||||
_type = CITypeCache.get(type_id)
|
||||
type_name = _type and _type.name or abort(404, ErrFormat.ci_type_not_found)
|
||||
acl = ACLManager('cmdb')
|
||||
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and \
|
||||
not is_app_admin('cmdb'):
|
||||
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and not is_app_admin('cmdb'):
|
||||
return abort(403, ErrFormat.no_permission.format(type_name, PermEnum.GRANT))
|
||||
|
||||
acl.revoke_resource_from_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms)
|
||||
acl.revoke_resource_from_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms, rebuild=False)
|
||||
|
||||
if PermEnum.READ in perms:
|
||||
CIFilterPermsCRUD().delete(type_id=type_id, rid=rid)
|
||||
app_id = AppCache.get('cmdb').id
|
||||
resource = None
|
||||
if PermEnum.READ in perms or not perms:
|
||||
resource = CIFilterPermsCRUD().delete(type_id=type_id, rid=rid)
|
||||
|
||||
app_id = AppCache.get('cmdb').id
|
||||
users = RoleRelationCRUD.get_users_by_rid(rid, app_id)
|
||||
for i in (users or []):
|
||||
if i.get('role', {}).get('id') and not RoleCRUD.has_permission(
|
||||
i.get('role').get('id'), type_name, ResourceTypeEnum.CI_TYPE, app_id, PermEnum.READ):
|
||||
PreferenceManager.delete_by_type_id(type_id, i.get('uid'))
|
||||
if not resource:
|
||||
from api.tasks.acl import role_rebuild
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
|
||||
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
|
||||
|
||||
users = RoleRelationCRUD.get_users_by_rid(rid, app_id)
|
||||
for i in (users or []):
|
||||
if i.get('role', {}).get('id') and not RoleCRUD.has_permission(
|
||||
i.get('role').get('id'), type_name, ResourceTypeEnum.CI_TYPE, app_id, PermEnum.READ):
|
||||
PreferenceManager.delete_by_type_id(type_id, i.get('uid'))
|
||||
|
||||
return self.jsonify(type_id=type_id, rid=rid)
|
||||
|
||||
|
@@ -9,6 +9,7 @@ from api.lib.cmdb.ci_type import CITypeRelationManager
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.preference import PreferenceManager
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.decorator import args_required
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
@@ -109,3 +110,10 @@ class CITypeRelationRevokeView(APIView):
|
||||
acl.revoke_resource_from_role_by_rid(resource_name, rid, ResourceTypeEnum.CI_TYPE_RELATION, perms)
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
||||
|
||||
class CITypeRelationCanEditView(APIView):
|
||||
url_prefix = "/ci_type_relations/<int:parent_id>/<int:child_id>/can_edit"
|
||||
|
||||
def get(self, parent_id, child_id):
|
||||
return self.jsonify(result=PreferenceManager.can_edit_relation(parent_id, child_id))
|
||||
|
cmdb-api/api/views/cmdb/inner_secrets.py  (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
from flask import request
|
||||
|
||||
from api.lib.perm.auth import auth_abandoned
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
from api.resource import APIView
|
||||
|
||||
|
||||
class InnerSecretUnSealView(APIView):
|
||||
url_prefix = "/secrets/unseal"
|
||||
|
||||
@auth_abandoned
|
||||
def post(self):
|
||||
unseal_key = request.headers.get("Unseal-Token")
|
||||
res = KeyManage(backend=InnerKVManger()).unseal(unseal_key)
|
||||
return self.jsonify(**res)
|
||||
|
||||
|
||||
class InnerSecretSealView(APIView):
|
||||
url_prefix = "/secrets/seal"
|
||||
|
||||
@auth_abandoned
|
||||
def post(self):
|
||||
unseal_key = request.headers.get("Inner-Token")
|
||||
res = KeyManage(backend=InnerKVManger()).seal(unseal_key)
|
||||
return self.jsonify(**res)
|
||||
|
||||
|
||||
class InnerSecretAutoSealView(APIView):
|
||||
url_prefix = "/secrets/auto_seal"
|
||||
|
||||
@auth_abandoned
|
||||
def post(self):
|
||||
root_key = request.headers.get("Inner-Token")
|
||||
res = KeyManage(trigger=root_key,
|
||||
backend=InnerKVManger()).auto_unseal()
|
||||
return self.jsonify(**res)
|
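The three views above expose seal/unseal endpoints guarded only by token headers. A hedged example of driving them; the base URL is a placeholder, the header names are taken from the code above.

# Illustrative calls to the secrets endpoints defined above.
# Assumption: the API base URL is a placeholder for a local deployment.
import requests

BASE = "http://127.0.0.1:5000/api/v0.1"

# unseal with a share handed out at key-generation time
requests.post(f"{BASE}/secrets/unseal", headers={"Unseal-Token": "<unseal-share>"})

# seal again, or auto-unseal with the root key
requests.post(f"{BASE}/secrets/seal", headers={"Inner-Token": "<root-key>"})
requests.post(f"{BASE}/secrets/auto_seal", headers={"Inner-Token": "<root-key>"})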
cmdb-api/api/views/common_setting/auth_config.py  (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
from flask import abort, request
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import TestType
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.perm.acl.acl import role_required
|
||||
from api.resource import APIView
|
||||
|
||||
prefix = '/auth_config'
|
||||
|
||||
|
||||
class AuthConfigView(APIView):
|
||||
url_prefix = (f'{prefix}/<string:auth_type>',)
|
||||
|
||||
@role_required("acl_admin")
|
||||
def get(self, auth_type):
|
||||
cli = AuthenticateDataCRUD(auth_type)
|
||||
|
||||
if auth_type not in cli.get_support_type_list():
|
||||
abort(400, ErrFormat.not_support_auth_type.format(auth_type))
|
||||
|
||||
if auth_type in cli.common_type_list:
|
||||
data = cli.get_record(True)
|
||||
else:
|
||||
data = cli.get_record_with_decrypt()
|
||||
return self.jsonify(data)
|
||||
|
||||
@role_required("acl_admin")
|
||||
def post(self, auth_type):
|
||||
cli = AuthenticateDataCRUD(auth_type)
|
||||
|
||||
if auth_type not in cli.get_support_type_list():
|
||||
abort(400, ErrFormat.not_support_auth_type.format(auth_type))
|
||||
|
||||
params = request.json
|
||||
data = params.get('data', {})
|
||||
if auth_type in cli.common_type_list:
|
||||
data['encrypt'] = False
|
||||
cli.create(data)
|
||||
|
||||
return self.jsonify(params)
|
||||
|
||||
|
||||
class AuthConfigViewWithId(APIView):
|
||||
url_prefix = (f'{prefix}/<string:auth_type>/<int:_id>',)
|
||||
|
||||
@role_required("acl_admin")
|
||||
def put(self, auth_type, _id):
|
||||
cli = AuthenticateDataCRUD(auth_type)
|
||||
|
||||
if auth_type not in cli.get_support_type_list():
|
||||
abort(400, ErrFormat.not_support_auth_type.format(auth_type))
|
||||
|
||||
params = request.json
|
||||
data = params.get('data', {})
|
||||
if auth_type in cli.common_type_list:
|
||||
data['encrypt'] = False
|
||||
|
||||
res = cli.update(_id, data)
|
||||
|
||||
return self.jsonify(res.to_dict())
|
||||
|
||||
@role_required("acl_admin")
|
||||
def delete(self, auth_type, _id):
|
||||
cli = AuthenticateDataCRUD(auth_type)
|
||||
|
||||
if auth_type not in cli.get_support_type_list():
|
||||
abort(400, ErrFormat.not_support_auth_type.format(auth_type))
|
||||
cli.delete(_id)
|
||||
return self.jsonify({})
|
||||
|
||||
|
||||
class AuthEnableListView(APIView):
|
||||
url_prefix = (f'{prefix}/enable_list',)
|
||||
|
||||
method_decorators = []
|
||||
|
||||
def get(self):
|
||||
return self.jsonify(AuthenticateDataCRUD.get_enable_list())
|
||||
|
||||
|
||||
class AuthConfigTestView(APIView):
|
||||
url_prefix = (f'{prefix}/<string:auth_type>/test',)
|
||||
|
||||
def post(self, auth_type):
|
||||
test_type = request.values.get('test_type', TestType.Connect)
|
||||
params = request.json
|
||||
return self.jsonify(AuthenticateDataCRUD(auth_type).test(test_type, params.get('data')))
|
@@ -1,9 +1,7 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from flask import abort
|
||||
from flask import request
|
||||
|
||||
from api.lib.common_setting.company_info import CompanyInfoCRUD
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.resource import APIView
|
||||
|
||||
prefix = '/company'
|
||||
|
@@ -1,7 +1,5 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import os
|
||||
|
||||
from flask import abort, current_app, send_from_directory
|
||||
from flask import abort
|
||||
from flask import request
|
||||
from werkzeug.datastructures import MultiDict
|
||||
|
||||
|
@@ -3,9 +3,10 @@ import os
|
||||
|
||||
from flask import request, abort, current_app, send_from_directory
|
||||
from werkzeug.utils import secure_filename
|
||||
import lz4.frame
|
||||
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.upload_file import allowed_file, generate_new_file_name
|
||||
from api.lib.common_setting.upload_file import allowed_file, generate_new_file_name, CommonFileCRUD
|
||||
from api.resource import APIView
|
||||
|
||||
prefix = '/file'
|
||||
@@ -28,7 +29,8 @@ class GetFileView(APIView):
|
||||
url_prefix = (f'{prefix}/<string:_filename>',)
|
||||
|
||||
def get(self, _filename):
|
||||
return send_from_directory(current_app.config['UPLOAD_DIRECTORY_FULL'], _filename, as_attachment=True)
|
||||
file_stream = CommonFileCRUD.get_file(_filename)
|
||||
return self.send_file(file_stream, as_attachment=True, download_name=_filename)
|
||||
|
||||
|
||||
class PostFileView(APIView):
|
||||
@@ -44,6 +46,8 @@ class PostFileView(APIView):
|
||||
if not file:
|
||||
abort(400, ErrFormat.file_is_required)
|
||||
extension = file.mimetype.split('/')[-1]
|
||||
if '+' in extension:
|
||||
extension = file.filename.split('.')[-1]
|
||||
if file.filename == '':
|
||||
filename = f'.{extension}'
|
||||
else:
|
||||
@@ -53,11 +57,20 @@ class PostFileView(APIView):
|
||||
filename = file.filename
|
||||
|
||||
if allowed_file(filename, current_app.config.get('ALLOWED_EXTENSIONS', ALLOWED_EXTENSIONS)):
|
||||
filename = generate_new_file_name(filename)
|
||||
filename = secure_filename(filename)
|
||||
file.save(os.path.join(
|
||||
current_app.config['UPLOAD_DIRECTORY_FULL'], filename))
|
||||
new_filename = generate_new_file_name(filename)
|
||||
new_filename = secure_filename(new_filename)
|
||||
file_content = file.read()
|
||||
compressed_data = lz4.frame.compress(file_content)
|
||||
try:
|
||||
CommonFileCRUD.add_file(
|
||||
origin_name=filename,
|
||||
file_name=new_filename,
|
||||
binary=compressed_data,
|
||||
)
|
||||
|
||||
return self.jsonify(file_name=filename)
|
||||
return self.jsonify(file_name=new_filename)
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
abort(400, ErrFormat.upload_failed.format(e))
|
||||
|
||||
abort(400, 'Extension not allow')
|
||||
abort(400, ErrFormat.file_type_not_allowed.format(filename))
|
||||
|
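PostFileView above lz4-compresses the uploaded bytes before they are written into CommonFile.binary, so a download handler has to decompress on the way out. A small round-trip sketch of that compression step; the payload here is synthetic.

# Round-trip of the lz4 frame compression used for CommonFile.binary above.
# Assumption: the payload is synthetic; in the view it comes from file.read().
import lz4.frame

raw = b"example upload payload " * 1024

compressed = lz4.frame.compress(raw)           # what gets stored in the BLOB column
restored = lz4.frame.decompress(compressed)    # what a download handler streams back

assert restored == raw
print(f"{len(raw)} bytes -> {len(compressed)} bytes stored")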
@@ -47,7 +47,7 @@ class CheckEmailServer(APIView):
|
||||
|
||||
def post(self):
|
||||
receive_address = request.args.get('receive_address')
|
||||
info = request.values.get('info')
|
||||
info = request.values.get('info', {})
|
||||
|
||||
try:
|
||||
|
||||
|
cmdb-api/migrations/README  (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
cmdb-api/migrations/alembic.ini  (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
cmdb-api/migrations/env.py  (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
from __future__ import with_statement
|
||||
|
||||
import logging
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
fileConfig(config.config_file_name)
|
||||
logger = logging.getLogger('alembic.env')
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
from flask import current_app
|
||||
config.set_main_option(
|
||||
'sqlalchemy.url', current_app.config.get(
|
||||
'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
|
||||
target_metadata = current_app.extensions['migrate'].db.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
# 添加要屏蔽的table列表
|
||||
exclude_tables = ["c_cfp"]
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url, target_metadata=target_metadata, literal_binds=True,
|
||||
include_name=include_name
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
|
||||
# this callback is used to prevent an auto-migration from being generated
|
||||
# when there are no changes to the schema
|
||||
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
|
||||
def process_revision_directives(context, revision, directives):
|
||||
if getattr(config.cmd_opts, 'autogenerate', False):
|
||||
script = directives[0]
|
||||
if script.upgrade_ops.is_empty():
|
||||
directives[:] = []
|
||||
logger.info('No changes in schema detected.')
|
||||
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
process_revision_directives=process_revision_directives,
|
||||
include_name=include_name,
|
||||
**current_app.extensions['migrate'].configure_args
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def include_name(name, type_, parent_names):
|
||||
if type_ == "table":
|
||||
return name not in exclude_tables
|
||||
elif parent_names.get("table_name") in exclude_tables:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
cmdb-api/migrations/script.py.mako  (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
cmdb-api/migrations/versions/6a4df2623057_.py  (new file, 360 lines)
@@ -0,0 +1,360 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 6a4df2623057
|
||||
Revises:
|
||||
Create Date: 2023-10-13 15:17:00.066858
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6a4df2623057'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('common_data',
|
||||
sa.Column('deleted_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('deleted', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('data_type', sa.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('data', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_common_data_deleted'), 'common_data', ['deleted'], unique=False)
|
||||
op.create_table('common_notice_config',
|
||||
sa.Column('deleted_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('deleted', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('platform', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('info', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_common_notice_config_deleted'), 'common_notice_config', ['deleted'], unique=False)
|
||||
op.add_column('c_attributes', sa.Column('choice_other', sa.JSON(), nullable=True))
|
||||
op.drop_index('idx_c_attributes_uid', table_name='c_attributes')
|
||||
op.create_index(op.f('ix_c_attributes_uid'), 'c_attributes', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_custom_dashboard_deleted', table_name='c_c_d')
|
||||
op.create_index(op.f('ix_c_c_d_deleted'), 'c_c_d', ['deleted'], unique=False)
|
||||
op.drop_index('ix_c_ci_type_triggers_deleted', table_name='c_c_t_t')
|
||||
op.create_index(op.f('ix_c_c_t_t_deleted'), 'c_c_t_t', ['deleted'], unique=False)
|
||||
op.drop_index('ix_c_ci_type_unique_constraints_deleted', table_name='c_c_t_u_c')
|
||||
op.create_index(op.f('ix_c_c_t_u_c_deleted'), 'c_c_t_u_c', ['deleted'], unique=False)
|
||||
op.drop_index('c_ci_types_uid', table_name='c_ci_types')
|
||||
op.create_index(op.f('ix_c_ci_types_uid'), 'c_ci_types', ['uid'], unique=False)
|
||||
op.alter_column('c_prv', 'uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
nullable=False)
|
||||
op.drop_index('ix_c_preference_relation_views_deleted', table_name='c_prv')
|
||||
op.drop_index('ix_c_preference_relation_views_name', table_name='c_prv')
|
||||
op.create_index(op.f('ix_c_prv_deleted'), 'c_prv', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_prv_name'), 'c_prv', ['name'], unique=False)
|
||||
op.create_index(op.f('ix_c_prv_uid'), 'c_prv', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_preference_show_attributes_deleted', table_name='c_psa')
|
||||
op.drop_index('ix_c_preference_show_attributes_uid', table_name='c_psa')
|
||||
op.create_index(op.f('ix_c_psa_deleted'), 'c_psa', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_psa_uid'), 'c_psa', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_preference_tree_views_deleted', table_name='c_ptv')
|
||||
op.drop_index('ix_c_preference_tree_views_uid', table_name='c_ptv')
|
||||
op.create_index(op.f('ix_c_ptv_deleted'), 'c_ptv', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_ptv_uid'), 'c_ptv', ['uid'], unique=False)
|
||||
op.alter_column('common_department', 'department_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='部门名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_director_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='部门负责人ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_parent_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='上级部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'sort_value',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='排序值',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'email',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='邮箱',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'username',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='用户名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'nickname',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='姓名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'sex',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=64),
|
||||
comment=None,
|
||||
existing_comment='性别',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'position_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='职位名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'mobile',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='电话号码',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'avatar',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='头像',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'direct_supervisor_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='直接上级ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'department_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中uid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_virtual_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中虚拟角色rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'last_login',
|
||||
existing_type=mysql.TIMESTAMP(),
|
||||
comment=None,
|
||||
existing_comment='上次登录时间',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'block',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='锁定状态',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'info',
|
||||
existing_type=mysql.JSON(),
|
||||
comment=None,
|
||||
existing_comment='员工信息',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'employee_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='员工ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'title',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='标题',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'content',
|
||||
existing_type=mysql.TEXT(charset='utf8mb3', collation='utf8mb3_unicode_ci'),
|
||||
comment=None,
|
||||
existing_comment='内容',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'path',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='跳转路径',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'is_read',
|
||||
existing_type=mysql.TINYINT(display_width=1),
|
||||
comment=None,
|
||||
existing_comment='是否已读',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'app_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment=None,
|
||||
existing_comment='应用名称',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'category',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment=None,
|
||||
existing_comment='分类',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'message_data',
|
||||
existing_type=mysql.JSON(),
|
||||
comment=None,
|
||||
existing_comment='数据',
|
||||
existing_nullable=True)
|
||||
op.drop_column('users', 'apps')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('users', sa.Column('apps', mysql.JSON(), nullable=True))
|
||||
op.alter_column('common_internal_message', 'message_data',
|
||||
existing_type=mysql.JSON(),
|
||||
comment='数据',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'category',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment='分类',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'app_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment='应用名称',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'is_read',
|
||||
existing_type=mysql.TINYINT(display_width=1),
|
||||
comment='是否已读',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'path',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='跳转路径',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'content',
|
||||
existing_type=mysql.TEXT(charset='utf8mb3', collation='utf8mb3_unicode_ci'),
|
||||
comment='内容',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'title',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='标题',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'employee_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='员工ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'info',
|
||||
existing_type=mysql.JSON(),
|
||||
comment='员工信息',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'block',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='锁定状态',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'last_login',
|
||||
existing_type=mysql.TIMESTAMP(),
|
||||
comment='上次登录时间',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_virtual_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中虚拟角色rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中uid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'department_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'direct_supervisor_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='直接上级ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'avatar',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='头像',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'mobile',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='电话号码',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'position_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='职位名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'sex',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=64),
|
||||
comment='性别',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'nickname',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='姓名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'username',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='用户名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'email',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='邮箱',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'sort_value',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='排序值',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_parent_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='上级部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_director_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='部门负责人ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='部门名称',
|
||||
existing_nullable=True)
|
||||
op.drop_index(op.f('ix_c_ptv_uid'), table_name='c_ptv')
|
||||
op.drop_index(op.f('ix_c_ptv_deleted'), table_name='c_ptv')
|
||||
op.create_index('ix_c_preference_tree_views_uid', 'c_ptv', ['uid'], unique=False)
|
||||
op.create_index('ix_c_preference_tree_views_deleted', 'c_ptv', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_psa_uid'), table_name='c_psa')
|
||||
op.drop_index(op.f('ix_c_psa_deleted'), table_name='c_psa')
|
||||
op.create_index('ix_c_preference_show_attributes_uid', 'c_psa', ['uid'], unique=False)
|
||||
op.create_index('ix_c_preference_show_attributes_deleted', 'c_psa', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_prv_uid'), table_name='c_prv')
|
||||
op.drop_index(op.f('ix_c_prv_name'), table_name='c_prv')
|
||||
op.drop_index(op.f('ix_c_prv_deleted'), table_name='c_prv')
|
||||
op.create_index('ix_c_preference_relation_views_name', 'c_prv', ['name'], unique=False)
|
||||
op.create_index('ix_c_preference_relation_views_deleted', 'c_prv', ['deleted'], unique=False)
|
||||
op.alter_column('c_prv', 'uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
nullable=True)
|
||||
op.drop_index(op.f('ix_c_ci_types_uid'), table_name='c_ci_types')
|
||||
op.create_index('c_ci_types_uid', 'c_ci_types', ['uid'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_t_u_c_deleted'), table_name='c_c_t_u_c')
|
||||
op.create_index('ix_c_ci_type_unique_constraints_deleted', 'c_c_t_u_c', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_t_t_deleted'), table_name='c_c_t_t')
|
||||
op.create_index('ix_c_ci_type_triggers_deleted', 'c_c_t_t', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_d_deleted'), table_name='c_c_d')
|
||||
op.create_index('ix_c_custom_dashboard_deleted', 'c_c_d', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_attributes_uid'), table_name='c_attributes')
|
||||
op.create_index('idx_c_attributes_uid', 'c_attributes', ['uid'], unique=False)
|
||||
op.drop_column('c_attributes', 'choice_other')
|
||||
op.drop_index(op.f('ix_common_notice_config_deleted'), table_name='common_notice_config')
|
||||
op.drop_table('common_notice_config')
|
||||
op.drop_index(op.f('ix_common_data_deleted'), table_name='common_data')
|
||||
op.drop_table('common_data')
|
||||
# ### end Alembic commands ###
|
@@ -1,7 +1,7 @@
-i https://mirrors.aliyun.com/pypi/simple
alembic==1.7.7
bs4==0.0.1
celery==5.3.1
celery>=5.3.1
celery-once==3.0.1
click==8.1.3
elasticsearch==7.17.9
@@ -12,28 +12,29 @@ Flask==2.3.2
Flask-Bcrypt==1.0.1
Flask-Caching==2.0.2
Flask-Cors==4.0.0
Flask-Login==0.6.2
Flask-Login>=0.6.2
Flask-Migrate==2.5.2
Flask-RESTful==0.3.10
Flask-SQLAlchemy==2.5.0
future==0.18.3
gunicorn==21.0.1
hvac==2.0.0
itsdangerous==2.1.2
Jinja2==3.1.2
jinja2schema==0.1.4
jsonschema==4.18.0
kombu==5.3.1
kombu>=5.3.1
Mako==1.2.4
MarkupSafe==2.1.3
marshmallow==2.20.2
more-itertools==5.0.0
msgpack-python==0.5.6
Pillow==9.3.0
pycryptodome==3.12.0
Pillow>=10.0.1
cryptography>=41.0.2
PyJWT==2.4.0
PyMySQL==1.1.0
python-ldap==3.4.0
PyYAML==6.0
ldap3==2.9.1
PyYAML==6.0.1
redis==4.6.0
requests==2.31.0
requests_oauthlib==1.3.1
@@ -44,5 +45,9 @@ supervisor==4.0.3
timeout-decorator==0.5.0
toposort==1.10
treelib==1.6.1
Werkzeug==2.3.6
Werkzeug>=2.3.6
WTForms==3.0.0
shamir~=17.12.0
pycryptodomex>=3.19.0
colorama>=0.4.6
lz4>=4.3.2
@@ -11,10 +11,10 @@ from environs import Env
env = Env()
env.read_env()

ENV = env.str("FLASK_ENV", default="production")
DEBUG = ENV == "development"
SECRET_KEY = env.str("SECRET_KEY")
BCRYPT_LOG_ROUNDS = env.int("BCRYPT_LOG_ROUNDS", default=13)
ENV = env.str('FLASK_ENV', default='production')
DEBUG = ENV == 'development'
SECRET_KEY = env.str('SECRET_KEY')
BCRYPT_LOG_ROUNDS = env.int('BCRYPT_LOG_ROUNDS', default=13)
DEBUG_TB_ENABLED = DEBUG
DEBUG_TB_INTERCEPT_REDIRECTS = False

@@ -23,7 +23,7 @@ ERROR_CODES = [400, 401, 403, 404, 405, 500, 502]
# # database
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
SQLALCHEMY_BINDS = {
    "user": 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
    'user': 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
}
SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
@@ -32,11 +32,11 @@ SQLALCHEMY_ENGINE_OPTIONS = {
}

# # cache
CACHE_TYPE = "redis"
CACHE_REDIS_HOST = "127.0.0.1"
CACHE_TYPE = 'redis'
CACHE_REDIS_HOST = '127.0.0.1'
CACHE_REDIS_PORT = 6379
CACHE_REDIS_PASSWORD = ""
CACHE_KEY_PREFIX = "CMDB::"
CACHE_REDIS_PASSWORD = ''
CACHE_KEY_PREFIX = 'CMDB::'
CACHE_DEFAULT_TIMEOUT = 3000

# # log
@@ -55,10 +55,10 @@ DEFAULT_MAIL_SENDER = ''

# # queue
CELERY = {
    "broker_url": 'redis://127.0.0.1:6379/2',
    "result_backend": "redis://127.0.0.1:6379/2",
    "broker_vhost": "/",
    "broker_connection_retry_on_startup": True
    'broker_url': 'redis://127.0.0.1:6379/2',
    'result_backend': 'redis://127.0.0.1:6379/2',
    'broker_vhost': '/',
    'broker_connection_retry_on_startup': True
}
ONCE = {
    'backend': 'celery_once.backends.Redis',
@@ -67,33 +67,87 @@ ONCE = {
    }
}

# # SSO
CAS_SERVER = "http://sso.xxx.com"
CAS_VALIDATE_SERVER = "http://sso.xxx.com"
CAS_LOGIN_ROUTE = "/cas/login"
CAS_LOGOUT_ROUTE = "/cas/logout"
CAS_VALIDATE_ROUTE = "/cas/serviceValidate"
CAS_AFTER_LOGIN = "/"
DEFAULT_SERVICE = "http://127.0.0.1:8000"
# =============================== Authentication ===========================================================

# # ldap
AUTH_WITH_LDAP = False
LDAP_SERVER = ''
LDAP_DOMAIN = ''
LDAP_USER_DN = 'cn={},ou=users,dc=xxx,dc=com'
# # CAS
CAS = dict(
    enabled=False,
    cas_server='https://{your-CASServer-hostname}',
    cas_validate_server='https://{your-CASServer-hostname}',
    cas_login_route='/cas/built-in/cas/login',
    cas_logout_route='/cas/built-in/cas/logout',
    cas_validate_route='/cas/built-in/cas/serviceValidate',
    cas_after_login='/',
    cas_user_map={
        'username': {'tag': 'cas:user'},
        'nickname': {'tag': 'cas:attribute', 'attrs': {'name': 'displayName'}},
        'email': {'tag': 'cas:attribute', 'attrs': {'name': 'email'}},
        'mobile': {'tag': 'cas:attribute', 'attrs': {'name': 'phone'}},
        'avatar': {'tag': 'cas:attribute', 'attrs': {'name': 'avatar'}},
    }
)

# # OAuth2.0
OAUTH2 = dict(
    enabled=False,
    client_id='',
    client_secret='',
    authorize_url='https://{your-OAuth2Server-hostname}/login/oauth/authorize',
    token_url='https://{your-OAuth2Server-hostname}/api/login/oauth/access_token',
    scopes=['profile', 'email'],
    user_info={
        'url': 'https://{your-OAuth2Server-hostname}/api/userinfo',
        'email': 'email',
        'username': 'name',
        'avatar': 'picture'
    },
    after_login='/'
)

# # OIDC
OIDC = dict(
    enabled=False,
    client_id='',
    client_secret='',
    authorize_url='https://{your-OIDCServer-hostname}/login/oauth/authorize',
    token_url='https://{your-OIDCServer-hostname}/api/login/oauth/access_token',
    scopes=['openid', 'profile', 'email'],
    user_info={
        'url': 'https://{your-OIDCServer-hostname}/api/userinfo',
        'email': 'email',
        'username': 'name',
        'avatar': 'picture'
    },
    after_login='/'
)

# # LDAP
LDAP = dict(
    enabled=False,
    ldap_server='',
    ldap_domain='',
    ldap_user_dn='cn={},ou=users,dc=xxx,dc=com'
)
# ==========================================================================================================

# # pagination
DEFAULT_PAGE_COUNT = 50

# # permission
WHITE_LIST = ["127.0.0.1"]
WHITE_LIST = ['127.0.0.1']
USE_ACL = True

# # elastic search
ES_HOST = '127.0.0.1'
USE_ES = False

BOOL_TRUE = ['true', 'TRUE', 'True', True, '1', 1, "Yes", "YES", "yes", 'Y', 'y']
BOOL_TRUE = ['true', 'TRUE', 'True', True, '1', 1, 'Yes', 'YES', 'yes', 'Y', 'y']

# # messenger
USE_MESSENGER = True

# # secrets
SECRETS_ENGINE = 'inner'  # 'inner' or 'vault'
VAULT_URL = ''
VAULT_TOKEN = ''
INNER_TRIGGER_TOKEN = ''
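In the new settings the flat SSO/LDAP constants are replaced by per-protocol dicts (CAS, OAUTH2, OIDC, LDAP), each with an enabled switch plus the endpoints and user-field mappings the backend needs. A rough sketch of how the OAUTH2 block could be consumed with requests_oauthlib (already pinned in requirements.txt); the view names, callback path and fallback route below are assumptions, not the project's actual login code:

from flask import current_app, redirect, request, session
from requests_oauthlib import OAuth2Session


def oauth2_login():
    """Hypothetical login view driven by the OAUTH2 dict above."""
    cfg = current_app.config['OAUTH2']
    if not cfg['enabled']:
        return redirect('/login')  # fall back to the built-in login page (assumed route)
    oauth = OAuth2Session(cfg['client_id'],
                          scope=cfg['scopes'],
                          redirect_uri=request.base_url + '/callback')  # assumed callback path
    auth_url, state = oauth.authorization_url(cfg['authorize_url'])
    session['oauth_state'] = state  # keep the CSRF state for the callback
    return redirect(auth_url)


def oauth2_callback():
    """Hypothetical callback: exchange the code, then fetch the user profile."""
    cfg = current_app.config['OAUTH2']
    oauth = OAuth2Session(cfg['client_id'], state=session.get('oauth_state'))
    oauth.fetch_token(cfg['token_url'],
                      client_secret=cfg['client_secret'],
                      authorization_response=request.url)
    profile = oauth.get(cfg['user_info']['url']).json()
    username = profile.get(cfg['user_info']['username'])  # field names come from the dict above
    return redirect(cfg['after_login'])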
@@ -63,14 +63,15 @@
  },
  "devDependencies": {
    "@ant-design/colors": "^3.2.2",
    "@babel/core": "^7.23.2",
    "@babel/polyfill": "^7.2.5",
    "@babel/preset-env": "^7.23.2",
    "@vue/cli-plugin-babel": "4.5.17",
    "@vue/cli-plugin-eslint": "^4.0.5",
    "@vue/cli-plugin-unit-jest": "^4.0.5",
    "@vue/cli-service": "^4.0.5",
    "@vue/eslint-config-standard": "^4.0.0",
    "@vue/test-utils": "^1.0.0-beta.30",
    "babel-core": "7.0.0-bridge.0",
    "babel-jest": "^23.6.0",
    "babel-plugin-import": "^1.11.0",
    "babel-plugin-transform-remove-console": "^6.9.4",

File diff suppressed because it is too large.
@@ -1,8 +1,8 @@
@font-face {
  font-family: "iconfont"; /* Project id 3857903 */
  src: url('iconfont.woff2?t=1696815443987') format('woff2'),
       url('iconfont.woff?t=1696815443987') format('woff'),
       url('iconfont.ttf?t=1696815443987') format('truetype');
  src: url('iconfont.woff2?t=1702544951995') format('woff2'),
       url('iconfont.woff?t=1702544951995') format('woff'),
       url('iconfont.ttf?t=1702544951995') format('truetype');
}

.iconfont {
@@ -13,6 +13,302 @@
  -moz-osx-font-smoothing: grayscale;
}

.OAUTH2:before { content: "\e8d8"; }
.OIDC:before { content: "\e8d6"; }
.CAS:before { content: "\e8d7"; }
.ops-setting-auth:before { content: "\e8d5"; }
.ops-setting-auth-selected:before { content: "\e8d4"; }
.a-itsm-knowledge2:before { content: "\e8d2"; }
.itsm-qrdownload:before { content: "\e8d3"; }
.oneterm-playback:before { content: "\e8d1"; }
.oneterm-disconnect:before { content: "\e8d0"; }
.ops-oneterm-publickey-selected:before { content: "\e8cf"; }
.ops-oneterm-publickey:before { content: "\e8ce"; }
.ops-oneterm-gateway:before { content: "\e8b9"; }
.ops-oneterm-gateway-selected:before { content: "\e8bf"; }
.ops-oneterm-account:before { content: "\e8c0"; }
.ops-oneterm-account-selected:before { content: "\e8c1"; }
.ops-oneterm-command:before { content: "\e8c2"; }
.ops-oneterm-command-selected:before { content: "\e8c3"; }
.ops-oneterm-assetlist:before { content: "\e8c4"; }
.ops-oneterm-assetlist-selected:before { content: "\e8c5"; }
.ops-oneterm-sessiononline:before { content: "\e8c6"; }
.ops-oneterm-sessiononline-selected:before { content: "\e8c7"; }
.ops-oneterm-sessionhistory-selected:before { content: "\e8c8"; }
.ops-oneterm-sessionhistory:before { content: "\e8c9"; }
.ops-oneterm-login:before { content: "\e8ca"; }
.ops-oneterm-login-selected:before { content: "\e8cb"; }
.ops-oneterm-operation:before { content: "\e8cc"; }
.ops-oneterm-operation-selected:before { content: "\e8cd"; }
.ops-oneterm-workstation-selected:before { content: "\e8b7"; }
.ops-oneterm-workstation:before { content: "\e8b8"; }
.oneterm-file-selected:before { content: "\e8be"; }
.oneterm-file:before { content: "\e8bc"; }
.oneterm-time:before { content: "\e8bd"; }
.oneterm-download:before { content: "\e8bb"; }
.oneterm-commandrecord:before { content: "\e8ba"; }
.oneterm-asset:before { content: "\e8b6"; }
.oneterm-total_asset:before { content: "\e8b5"; }
.oneterm-switch:before { content: "\e8b4"; }
.oneterm-session:before { content: "\e8b3"; }
.oneterm-connect:before { content: "\e8b2"; }
.oneterm-login:before { content: "\e8b1"; }
.ops-oneterm-dashboard:before { content: "\e8af"; }
.ops-oneterm-dashboard-selected:before { content: "\e8b0"; }
.oneterm-recentsession:before { content: "\e8ae"; }
.oneterm-myassets:before { content: "\e8ad"; }
.ops-oneterm-log:before { content: "\e8aa"; }
.ops-oneterm-session-selected:before { content: "\e8ab"; }
.ops-oneterm-session:before { content: "\e8ac"; }
.ops-oneterm-log-selected:before { content: "\e8a9"; }
.ops-oneterm-assets:before { content: "\e8a7"; }
.ops-oneterm-assets-selected:before { content: "\e8a8"; }
.itsm-down:before { content: "\e8a5"; }
.itsm-up:before { content: "\e8a6"; }
.itsm-download:before { content: "\e8a4"; }
.itsm-print:before { content: "\e8a3"; }
.itsm-view:before { content: "\e8a2"; }
.itsm-word:before { content: "\e8a1"; }
.datainsight-custom:before { content: "\e89e"; }
.datainsight-prometheus:before { content: "\e89f"; }
.datainsight-zabbix:before { content: "\e8a0"; }
.setting-mainpeople:before { content: "\e89a"; }
.setting-deputypeople:before { content: "\e89d"; }
.ops-setting-duty:before { content: "\e89c"; }
.ops-setting-duty-selected:before { content: "\e89b"; }
.datainsight-sequential:before { content: "\e899"; }
.datainsight-close:before { content: "\e898"; }
.datainsight-handle:before { content: "\e897"; }
.datainsight-table:before { content: "\e896"; }
.icon-xianxing-password:before { content: "\e894"; }
.icon-xianxing-link:before { content: "\e895"; }
.itsm-download-all:before { content: "\e892"; }
.itsm-download-package:before { content: "\e893"; }
.a-Frame4:before { content: "\e891"; }
.itsm-again:before { content: "\e88f"; }
.itsm-next:before { content: "\e890"; }
.wechatApp:before { content: "\e88e"; }
File diff suppressed because one or more lines are too long
@@ -5,6 +5,524 @@
  "css_prefix_text": "",
  "description": "",
  "glyphs": [
    {"icon_id": "38566548", "name": "OAuth2.0", "font_class": "OAUTH2", "unicode": "e8d8", "unicode_decimal": 59608},
    {"icon_id": "38566584", "name": "OIDC", "font_class": "OIDC", "unicode": "e8d6", "unicode_decimal": 59606},
    {"icon_id": "38566578", "name": "cas", "font_class": "CAS", "unicode": "e8d7", "unicode_decimal": 59607},
    {"icon_id": "38547395", "name": "setting-authentication", "font_class": "ops-setting-auth", "unicode": "e8d5", "unicode_decimal": 59605},
    {"icon_id": "38547389", "name": "setting-authentication-selected", "font_class": "ops-setting-auth-selected", "unicode": "e8d4", "unicode_decimal": 59604},
    {"icon_id": "38533133", "name": "itsm-knowledge (2)", "font_class": "a-itsm-knowledge2", "unicode": "e8d2", "unicode_decimal": 59602},
    {"icon_id": "38531868", "name": "itsm-QRcode", "font_class": "itsm-qrdownload", "unicode": "e8d3", "unicode_decimal": 59603},
    {"icon_id": "38413515", "name": "oneterm-playback", "font_class": "oneterm-playback", "unicode": "e8d1", "unicode_decimal": 59601},
    {"icon_id": "38413481", "name": "oneterm-disconnect", "font_class": "oneterm-disconnect", "unicode": "e8d0", "unicode_decimal": 59600},
    {"icon_id": "38407867", "name": "oneterm-key-selected", "font_class": "ops-oneterm-publickey-selected", "unicode": "e8cf", "unicode_decimal": 59599},
    {"icon_id": "38407915", "name": "oneterm-key", "font_class": "ops-oneterm-publickey", "unicode": "e8ce", "unicode_decimal": 59598},
    {"icon_id": "38311855", "name": "oneterm-gateway", "font_class": "ops-oneterm-gateway", "unicode": "e8b9", "unicode_decimal": 59577},
    {"icon_id": "38311938", "name": "oneterm-gateway-selected", "font_class": "ops-oneterm-gateway-selected", "unicode": "e8bf", "unicode_decimal": 59583},
    {"icon_id": "38311957", "name": "oneterm-account", "font_class": "ops-oneterm-account", "unicode": "e8c0", "unicode_decimal": 59584},
    {"icon_id": "38311961", "name": "oneterm-account-selected", "font_class": "ops-oneterm-account-selected", "unicode": "e8c1", "unicode_decimal": 59585},
    {"icon_id": "38311974", "name": "oneterm-command", "font_class": "ops-oneterm-command", "unicode": "e8c2", "unicode_decimal": 59586},
    {"icon_id": "38311976", "name": "oneterm-command-selected", "font_class": "ops-oneterm-command-selected", "unicode": "e8c3", "unicode_decimal": 59587},
    {"icon_id": "38311979", "name": "oneterm-asset_list", "font_class": "ops-oneterm-assetlist", "unicode": "e8c4", "unicode_decimal": 59588},
    {"icon_id": "38311985", "name": "oneterm-asset_list-selected", "font_class": "ops-oneterm-assetlist-selected", "unicode": "e8c5", "unicode_decimal": 59589},
    {"icon_id": "38312030", "name": "oneterm-online", "font_class": "ops-oneterm-sessiononline", "unicode": "e8c6", "unicode_decimal": 59590},
    {"icon_id": "38312152", "name": "oneterm-online-selected", "font_class": "ops-oneterm-sessiononline-selected", "unicode": "e8c7", "unicode_decimal": 59591},
    {"icon_id": "38312154", "name": "oneterm-history-selected", "font_class": "ops-oneterm-sessionhistory-selected", "unicode": "e8c8", "unicode_decimal": 59592},
    {"icon_id": "38312155", "name": "oneterm-history", "font_class": "ops-oneterm-sessionhistory", "unicode": "e8c9", "unicode_decimal": 59593},
    {"icon_id": "38312404", "name": "oneterm-entry_log", "font_class": "ops-oneterm-login", "unicode": "e8ca", "unicode_decimal": 59594},
    {"icon_id": "38312423", "name": "oneterm-entry_log-selected", "font_class": "ops-oneterm-login-selected", "unicode": "e8cb", "unicode_decimal": 59595},
    {"icon_id": "38312426", "name": "oneterm-operation_log", "font_class": "ops-oneterm-operation", "unicode": "e8cc", "unicode_decimal": 59596},
    {"icon_id": "38312445", "name": "oneterm-operation_log-selected", "font_class": "ops-oneterm-operation-selected", "unicode": "e8cd", "unicode_decimal": 59597},
    {"icon_id": "38307876", "name": "oneterm-workstation-selected", "font_class": "ops-oneterm-workstation-selected", "unicode": "e8b7", "unicode_decimal": 59575},
    {"icon_id": "38307871", "name": "oneterm-workstation", "font_class": "ops-oneterm-workstation", "unicode": "e8b8", "unicode_decimal": 59576},
    {"icon_id": "38302246", "name": "oneterm-file-selected", "font_class": "oneterm-file-selected", "unicode": "e8be", "unicode_decimal": 59582},
    {"icon_id": "38302255", "name": "oneterm-file", "font_class": "oneterm-file", "unicode": "e8bc", "unicode_decimal": 59580},
    {"icon_id": "38203528", "name": "oneterm-time", "font_class": "oneterm-time", "unicode": "e8bd", "unicode_decimal": 59581},
    {"icon_id": "38203331", "name": "oneterm-download", "font_class": "oneterm-download", "unicode": "e8bb", "unicode_decimal": 59579},
    {"icon_id": "38201351", "name": "oneterm-command record", "font_class": "oneterm-commandrecord", "unicode": "e8ba", "unicode_decimal": 59578},
    {"icon_id": "38199341", "name": "oneterm-connected assets", "font_class": "oneterm-asset", "unicode": "e8b6", "unicode_decimal": 59574},
    {"icon_id": "38199350", "name": "oneterm-total assets", "font_class": "oneterm-total_asset", "unicode": "e8b5", "unicode_decimal": 59573},
    {"icon_id": "38199303", "name": "oneterm-switch (3)", "font_class": "oneterm-switch", "unicode": "e8b4", "unicode_decimal": 59572},
    {"icon_id": "38199317", "name": "oneterm-session", "font_class": "oneterm-session", "unicode": "e8b3", "unicode_decimal": 59571},
    {"icon_id": "38199339", "name": "oneterm-connection", "font_class": "oneterm-connect", "unicode": "e8b2", "unicode_decimal": 59570},
    {"icon_id": "38198321", "name": "oneterm-log in", "font_class": "oneterm-login", "unicode": "e8b1", "unicode_decimal": 59569},
    {"icon_id": "38194554", "name": "oneterm-dashboard", "font_class": "ops-oneterm-dashboard", "unicode": "e8af", "unicode_decimal": 59567},
    {"icon_id": "38194525", "name": "oneterm-dashboard-selected", "font_class": "ops-oneterm-dashboard-selected", "unicode": "e8b0", "unicode_decimal": 59568},
    {"icon_id": "38194352", "name": "oneterm-recent session", "font_class": "oneterm-recentsession", "unicode": "e8ae", "unicode_decimal": 59566},
    {"icon_id": "38194383", "name": "oneterm-my assets", "font_class": "oneterm-myassets", "unicode": "e8ad", "unicode_decimal": 59565},
    {"icon_id": "38194089", "name": "oneterm-log", "font_class": "ops-oneterm-log", "unicode": "e8aa", "unicode_decimal": 59562},
    {"icon_id": "38194088", "name": "oneterm-conversation-selected", "font_class": "ops-oneterm-session-selected", "unicode": "e8ab", "unicode_decimal": 59563},
    {"icon_id": "38194065", "name": "oneterm-conversation", "font_class": "ops-oneterm-session", "unicode": "e8ac", "unicode_decimal": 59564},
    {"icon_id": "38194105", "name": "oneterm-log-selected", "font_class": "ops-oneterm-log-selected", "unicode": "e8a9", "unicode_decimal": 59561},
    {"icon_id": "38194054", "name": "oneterm-assets", "font_class": "ops-oneterm-assets", "unicode": "e8a7", "unicode_decimal": 59559},
    {"icon_id": "38194055", "name": "oneterm-assets-selected", "font_class": "ops-oneterm-assets-selected", "unicode": "e8a8", "unicode_decimal": 59560},
    {"icon_id": "38123087", "name": "itsm-down", "font_class": "itsm-down", "unicode": "e8a5", "unicode_decimal": 59557},
    {"icon_id": "38123084", "name": "itsm-up", "font_class": "itsm-up", "unicode": "e8a6", "unicode_decimal": 59558},
    {"icon_id": "38105374", "name": "itsm-download", "font_class": "itsm-download", "unicode": "e8a4", "unicode_decimal": 59556},
    {"icon_id": "38105235", "name": "itsm-print", "font_class": "itsm-print", "unicode": "e8a3", "unicode_decimal": 59555},
    {"icon_id": "38104997", "name": "itsm-view", "font_class": "itsm-view", "unicode": "e8a2", "unicode_decimal": 59554},
    {"icon_id": "38105129", "name": "itsm-word", "font_class": "itsm-word", "unicode": "e8a1", "unicode_decimal": 59553},
    {"icon_id": "38095730", "name": "datainsight-custom", "font_class": "datainsight-custom", "unicode": "e89e", "unicode_decimal": 59550},
    {"icon_id": "38095729", "name": "datainsight-prometheus", "font_class": "datainsight-prometheus", "unicode": "e89f", "unicode_decimal": 59551},
    {"icon_id": "38095728", "name": "datainsight-zabbix", "font_class": "datainsight-zabbix", "unicode": "e8a0", "unicode_decimal": 59552},
    {"icon_id": "37944507", "name": "setting-main people", "font_class": "setting-mainpeople", "unicode": "e89a", "unicode_decimal": 59546},
    {"icon_id": "37944503", "name": "setting-deputy people", "font_class": "setting-deputypeople", "unicode": "e89d", "unicode_decimal": 59549},
    {"icon_id": "37940080", "name": "ops-setting-duty", "font_class": "ops-setting-duty", "unicode": "e89c", "unicode_decimal": 59548},
    {"icon_id": "37940033", "name": "ops-setting-duty-selected", "font_class": "ops-setting-duty-selected", "unicode": "e89b", "unicode_decimal": 59547},
    {"icon_id": "37841524", "name": "datainsight-sequential", "font_class": "datainsight-sequential", "unicode": "e899", "unicode_decimal": 59545},
    {"icon_id": "37841535", "name": "datainsight-close", "font_class": "datainsight-close", "unicode": "e898", "unicode_decimal": 59544},
    {"icon_id": "37841537", "name": "datainsight-handle", "font_class": "datainsight-handle", "unicode": "e897", "unicode_decimal": 59543},
    {"icon_id": "37841515", "name": "datainsight-table", "font_class": "datainsight-table", "unicode": "e896", "unicode_decimal": 59542},
    {"icon_id": "37830610", "name": "icon-xianxing-password", "font_class": "icon-xianxing-password", "unicode": "e894", "unicode_decimal": 59540},
    {"icon_id": "37830609", "name": "icon-xianxing-link", "font_class": "icon-xianxing-link", "unicode": "e895", "unicode_decimal": 59541},
    {"icon_id": "37822199", "name": "itsm-oneclick download", "font_class": "itsm-download-all", "unicode": "e892", "unicode_decimal": 59538},
    {"icon_id": "37822198", "name": "itsm-package download", "font_class": "itsm-download-package", "unicode": "e893", "unicode_decimal": 59539},
    {"icon_id": "37772067", "name": "weixin", "font_class": "a-Frame4", "unicode": "e891", "unicode_decimal": 59537},
    {"icon_id": "37632784", "name": "itsm-again", "font_class": "itsm-again", "unicode": "e88f", "unicode_decimal": 59535},
    {"icon_id": "37632783", "name": "itsm-next", "font_class": "itsm-next", "unicode": "e890", "unicode_decimal": 59536},
    {"icon_id": "37590786", "name": "wechatApp",
Binary files not shown (3 files).
cmdb-ui/src/api/auth.js (new file, 39 lines)
@@ -0,0 +1,39 @@
import { axios } from '@/utils/request'

export function getAuthData(data_type) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}`,
    method: 'get',
  })
}

export function postAuthData(data_type, data) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}`,
    method: 'post',
    data,
  })
}

export function putAuthData(data_type, id, data) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}/${id}`,
    method: 'put',
    data,
  })
}

export function getAuthDataEnable() {
  return axios({
    url: `/common-setting/v1/auth_config/enable_list`,
    method: 'get',
  })
}

export function testLDAP(test_type, data) {
  return axios({
    url: `/common-setting/v1/auth_config/LDAP/test?test_type=${test_type}`,
    method: 'post',
    data,
  })
}
@@ -1,8 +1,6 @@
import config from '@/config/setting'

const api = {
  Login: config.useSSO ? '/api/sso/login' : '/v1/acl/login',
  Logout: config.useSSO ? '/api/sso/logout' : '/v1/acl/logout',
  Login: '/v1/acl/login',
  Logout: '/v1/acl/logout',
  ForgePassword: '/auth/forge-password',
  Register: '/auth/register',
  twoStepCode: '/auth/2step-code',
@@ -1,6 +1,5 @@
import api from './index'
import { axios } from '@/utils/request'
import config from '@/config/setting'
/**
 * login func
 * parameter: {
@@ -12,9 +11,10 @@ import config from '@/config/setting'
 * @param parameter
 * @returns {*}
 */
export function login(data) {
  if (config.useSSO) {
    window.location.href = config.ssoLoginUrl
export function login(data, auth_type) {
  if (auth_type) {
    localStorage.setItem('ops_auth_type', auth_type)
    window.location.href = `/api/${auth_type.toLowerCase()}/login`
  } else {
    return axios({
      url: api.Login,
@@ -43,17 +43,15 @@ export function getInfo() {
}

export function logout() {
  if (config.useSSO) {
    window.location.replace(api.Logout)
  } else {
    return axios({
      url: api.Logout,
      method: 'post',
      headers: {
        'Content-Type': 'application/json;charset=UTF-8'
      }
    })
  }
  const auth_type = localStorage.getItem('ops_auth_type')
  localStorage.clear()
  return axios({
    url: auth_type ? `/${auth_type.toLowerCase()}/logout` : api.Logout,
    method: auth_type ? 'get' : 'post',
    headers: {
      'Content-Type': 'application/json;charset=UTF-8'
    }
  })
}

/**
cmdb-ui/src/assets/ops_logout.png (new binary file, 78 KiB — not shown)
Some files were not shown because too many files have changed in this diff.