Mirror of https://github.com/veops/cmdb.git (synced 2025-09-08 06:22:45 +08:00)
Compare commits
475 Commits
.github/ISSUE_TEMPLATE/1bug.yaml (vendored, 60 lines changed)
@@ -1,60 +0,0 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["☢️ bug"]
assignees:
  - Selina316
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: aspects
    attributes:
      label: This bug is related to UI or API?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
      value: "A bug happened!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
  - type: dropdown
    id: browsers
    attributes:
      label: What browsers are you seeing the problem on?
      multiple: true
      options:
        - Firefox
        - Chrome
        - Safari
        - Microsoft Edge
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
.github/ISSUE_TEMPLATE/2feature.yaml (vendored, 44 lines changed)
@@ -1,44 +0,0 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["✏️ feature"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thank you for your feature suggestion; we will evaluate it carefully!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: aspects
    attributes:
      label: feature is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: feature
    attributes:
      label: What is your advice?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you want!
      value: "everyone wants this feature!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
.github/ISSUE_TEMPLATE/3consultation.yaml (vendored, 36 lines changed)
@@ -1,36 +0,0 @@
name: Help wanted
description: I have a question
title: "[help wanted]: "
labels: ["help wanted"]
assignees:
  - ivonGwy
body:
  - type: markdown
    attributes:
      value: |
        Please tell us what's you need!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: textarea
    id: question
    attributes:
      label: What is your question?
      description: Also tell us, how can we help?
      placeholder: Tell us what you need!
      value: "i have a question!"
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      value: "newest"
    validations:
      required: true
.github/ISSUE_TEMPLATE/bug.yaml (vendored, 60 lines changed)
@@ -1,60 +0,0 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["bug"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: type
    attributes:
      label: bug is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: what-happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you see!
      value: "A bug happened!"
    validations:
      required: true
  - type: textarea
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      default: 2.3.5
    validations:
      required: true
  - type: dropdown
    id: browsers
    attributes:
      label: What browsers are you seeing the problem on?
      multiple: true
      options:
        - Firefox
        - Chrome
        - Safari
        - Microsoft Edge
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
.github/ISSUE_TEMPLATE/config.yml (vendored, 6 lines changed)
@@ -1,6 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: veops official website
    url: https://veops.cn/#hero
    about: you can contact us here.
.github/ISSUE_TEMPLATE/feature.yaml (vendored, 44 lines changed)
@@ -1,44 +0,0 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["feature"]
assignees:
  - pycook
body:
  - type: markdown
    attributes:
      value: |
        Thank you for your feature suggestion; we will evaluate it carefully!
  - type: input
    id: contact
    attributes:
      label: Contact Details
      description: How can we get in touch with you if we need more info?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: dropdown
    id: type
    attributes:
      label: feature is related to UI or API aspects?
      multiple: true
      options:
        - UI
        - API
  - type: textarea
    id: describe the feature
    attributes:
      label: What is your advice?
      description: Also tell us, what did you expect to happen?
      placeholder: Tell us what you want!
      value: "everyone wants this feature!"
    validations:
      required: true
  - type: textarea
    id: version
    attributes:
      label: Version
      description: What version of our software are you running?
      default: 2.3.5
    validations:
      required: true
.github/config.yml (vendored, 0 lines changed)
.gitignore (vendored, 2 lines changed)
@@ -40,7 +40,6 @@ nosetests.xml
.pytest_cache
cmdb-api/test-output
cmdb-api/api/uploaded_files
cmdb-api/migrations/versions

# Translations
*.mo
@@ -70,7 +69,6 @@ settings.py
# UI
cmdb-ui/node_modules
cmdb-ui/dist
cmdb-ui/yarn.lock

# Log files
cmdb-ui/npm-debug.log*
Makefile (4 lines changed)
@@ -9,7 +9,7 @@ help: ## display this help

env: ## create a development environment using pipenv
sudo easy_install pip && \
pip install pipenv -i https://repo.huaweicloud.com/repository/pypi/simple && \
pip install pipenv -i https://pypi.douban.com/simple && \
npm install yarn && \
make deps
.PHONY: env
@@ -36,7 +36,7 @@ api: ## start api server
.PHONY: api

worker: ## start async tasks worker
cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --autoscale=5,2 --logfile=one_cmdb_async.log -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --autoscale=2,1 --logfile=one_acl_async.log -D
cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --concurrency=1 -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --concurrency=1 -D
.PHONY: worker

ui: ## start ui server
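The worker target above now pins each Celery worker to a single named queue (one_cmdb_async, acl_async) with --concurrency=1 instead of autoscaling. A minimal sketch of how a producer ends up on one of those queues; the broker URL and the task body are assumptions, not taken from the repository:

```python
# Hedged sketch: routing work onto the queue consumed by `-Q one_cmdb_async`.
# Requires a running broker; broker URL and task are placeholders.
from celery import Celery

celery = Celery("cmdb", broker="redis://127.0.0.1:6379/0")


@celery.task(name="cmdb.example_async")
def example_async(ci_id):
    # stand-in for a real async job
    return ci_id


# A producer enqueues onto the named queue; the Make target starts the consumer.
example_async.apply_async(args=(1,), queue="one_cmdb_async")
```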
README.md (78 lines changed)
@@ -1,20 +1,12 @@
|
||||

|
||||
|
||||
<p align="center">
|
||||
<a href="https://veops.cn"><img src="docs/images/logo.png" alt="维易CMDB" width="300"/></a>
|
||||
</p>
|
||||
<h3 align="center">简单、轻量、通用的运维配置管理数据库</h3>
|
||||
<p align="center">
|
||||
<a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
|
||||
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen" alt="UI"></a>
|
||||
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-brightgreen" alt="API"></a>
|
||||
</p>
|
||||
|
||||
|
||||
------------------------------
|
||||
[](https://github.com/veops/cmdb/blob/master/LICENSE)
|
||||
[](https://github.com/sendya/ant-design-pro-vue)
|
||||
[](https://github.com/pallets/flask)
|
||||
|
||||
[English](docs/README_en.md) / [中文](README.md)
|
||||
- 产品文档:https://veops.cn/docs/
|
||||
- 在线体验:<a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
|
||||
- 在线体验: <a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
|
||||
- username: demo 或者 admin
|
||||
- password: 123456
|
||||
|
||||
@@ -23,43 +15,45 @@
|
||||
|
||||
## 系统介绍
|
||||
|
||||
### 系统概览
|
||||
### 整体架构
|
||||
|
||||
<img src=docs/images/dashboard.png />
|
||||
<img src=docs/images/view.jpg />
|
||||
|
||||
[查看更多展示](docs/screenshot.md)
|
||||
### 相关文档
|
||||
|
||||
### 相关文章
|
||||
|
||||
- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">概要设计</a>
|
||||
- <a href="https://zhuanlan.zhihu.com/p/98453732" target="_blank">设计文档</a>
|
||||
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">API 文档</a>
|
||||
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">自动发现</a>
|
||||
- 更多文章可以在公众号 **维易科技OneOps** 里查看
|
||||
- <a href="https://mp.weixin.qq.com/s/EflmmJ-qdUkddTx2hRt3pA" target="_blank">树形视图实践</a>
|
||||
|
||||
### 特点
|
||||
|
||||
- 灵活性
|
||||
1. 配置灵活,不设定任何运维场景,有内置模板
|
||||
2. 自动发现、入库 IT 资产
|
||||
1. 规范并统一纳管复杂数据资产
|
||||
2. 自动发现、入库 IT 资产
|
||||
- 安全性
|
||||
1. 细粒度权限控制
|
||||
1. 细粒度访问控制
|
||||
2. 完备操作日志
|
||||
- 多应用
|
||||
1. 丰富视图展示维度
|
||||
2. API简单强大
|
||||
3. 支持定义属性触发器、计算属性
|
||||
2. 提供 Restful API
|
||||
3. 自定义字段触发器
|
||||
|
||||
### 主要功能
|
||||
|
||||
- 模型属性支持索引、多值、默认排序、字体颜色,支持计算属性
|
||||
- 支持自动发现、定时巡检、文件导入
|
||||
- 支持资源、层级、关系视图展示
|
||||
- 支持资源、树形、关系视图展示
|
||||
- 支持模型间关系配置和展示
|
||||
- 细粒度访问控制,完备的操作日志
|
||||
- 支持跨模型搜索
|
||||
|
||||
### 系统概览
|
||||
|
||||
- 服务树
|
||||
|
||||

|
||||
|
||||
[查看更多展示](docs/screenshot.md)
|
||||
|
||||
|
||||
### 更多功能
|
||||
@@ -73,36 +67,22 @@
|
||||
## 安装
|
||||
|
||||
### Docker 一键快速构建
|
||||
> 方法一
|
||||
- 第一步: 先安装 docker 环境, 以及docker-compose
|
||||
- 第二步: 拷贝项目
|
||||
```shell
|
||||
git clone https://github.com/veops/cmdb.git
|
||||
```
|
||||
- 第三步:进入主目录,执行:
|
||||
- 进入主目录(先安装 docker 环境)
|
||||
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
> 方法二, 该方法适用于linux系统
|
||||
- 第一步: 先安装 docker 环境, 以及docker-compose
|
||||
- 第二步: 直接使用项目根目录下的install.sh 文件进行 `安装`、`启动`、`暂停`、`查状态`、`删除`、`卸载`
|
||||
```shell
|
||||
curl -so install.sh https://raw.githubusercontent.com/veops/cmdb/master/install.sh
|
||||
sh install.sh install
|
||||
```
|
||||
|
||||
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
|
||||
- username: demo 或者 admin
|
||||
- password: 123456
|
||||
|
||||
### [本地开发环境搭建](docs/local.md)
|
||||
|
||||
### [Makefile 安装](docs/makefile.md)
|
||||
|
||||
## 验证
|
||||
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
|
||||
- username: demo 或者 admin
|
||||
- password: 123456
|
||||
|
||||
|
||||
---
|
||||
|
||||
_**欢迎关注公众号(维易科技OneOps),关注后可加入微信群,进行产品和技术交流。**_
|
||||
_**欢迎关注我们的公众号,点击联系我们,加入微信、QQ群(336164978),获得更多产品、行业相关资讯**_
|
||||
|
||||

|
||||

|
||||
|
@@ -6,7 +6,7 @@ name = "pypi"
|
||||
[packages]
|
||||
# Flask
|
||||
Flask = "==2.3.2"
|
||||
Werkzeug = ">=2.3.6"
|
||||
Werkzeug = "==2.3.6"
|
||||
click = ">=5.0"
|
||||
# Api
|
||||
Flask-RESTful = "==0.3.10"
|
||||
@@ -21,29 +21,28 @@ Flask-Migrate = "==2.5.2"
|
||||
gunicorn = "==21.0.1"
|
||||
supervisor = "==4.0.3"
|
||||
# Auth
|
||||
Flask-Login = ">=0.6.2"
|
||||
Flask-Login = "==0.6.2"
|
||||
Flask-Bcrypt = "==1.0.1"
|
||||
Flask-Cors = ">=3.0.8"
|
||||
ldap3 = "==2.9.1"
|
||||
python-ldap = "==3.4.0"
|
||||
pycryptodome = "==3.12.0"
|
||||
cryptography = ">=41.0.2"
|
||||
# Caching
|
||||
Flask-Caching = ">=1.0.0"
|
||||
# Environment variable parsing
|
||||
environs = "==4.2.0"
|
||||
marshmallow = "==2.20.2"
|
||||
# async tasks
|
||||
celery = ">=5.3.1"
|
||||
celery = "==5.3.1"
|
||||
celery_once = "==3.0.1"
|
||||
more-itertools = "==5.0.0"
|
||||
kombu = ">=5.3.1"
|
||||
kombu = "==5.3.1"
|
||||
# common setting
|
||||
timeout-decorator = "==0.5.0"
|
||||
WTForms = "==3.0.0"
|
||||
email-validator = "==1.3.1"
|
||||
treelib = "==1.6.1"
|
||||
flasgger = "==0.9.5"
|
||||
Pillow = ">=10.0.1"
|
||||
Pillow = "==9.3.0"
|
||||
# other
|
||||
six = "==1.16.0"
|
||||
bs4 = ">=0.0.1"
|
||||
@@ -59,10 +58,6 @@ Jinja2 = "==3.1.2"
|
||||
jinja2schema = "==0.1.4"
|
||||
msgpack-python = "==0.5.6"
|
||||
alembic = "==1.7.7"
|
||||
hvac = "==2.0.0"
|
||||
colorama = ">=0.4.6"
|
||||
pycryptodomex = ">=3.19.0"
|
||||
lz4 = ">=4.3.2"
|
||||
|
||||
[dev-packages]
|
||||
# Testing
|
||||
@@ -79,3 +74,4 @@ flake8-isort = "==2.7.0"
|
||||
isort = "==4.3.21"
|
||||
pep8-naming = "==0.8.2"
|
||||
pydocstyle = "==3.0.0"
|
||||
|
||||
|
@@ -7,7 +7,6 @@ import os
|
||||
import sys
|
||||
from inspect import getmembers
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from pathlib import Path
|
||||
|
||||
from flask import Flask
|
||||
from flask import jsonify
|
||||
@@ -18,15 +17,11 @@ from flask.json.provider import DefaultJSONProvider
|
||||
|
||||
import api.views.entry
|
||||
from api.extensions import (bcrypt, cache, celery, cors, db, es, login_manager, migrate, rd)
|
||||
from api.extensions import inner_secrets
|
||||
from api.lib.perm.authentication.cas import CAS
|
||||
from api.lib.perm.authentication.oauth2 import OAuth2
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
from api.flask_cas import CAS
|
||||
from api.models.acl import User
|
||||
|
||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||
PROJECT_ROOT = os.path.join(HERE, os.pardir)
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
@@ -81,6 +76,15 @@ class MyJSONEncoder(DefaultJSONProvider):
|
||||
return o
|
||||
|
||||
|
||||
def create_acl_app(config_object="settings"):
|
||||
app = Flask(__name__.split(".")[0])
|
||||
app.config.from_object(config_object)
|
||||
|
||||
register_extensions(app)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def create_app(config_object="settings"):
|
||||
"""Create application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
|
||||
|
||||
@@ -97,7 +101,6 @@ def create_app(config_object="settings"):
|
||||
register_shell_context(app)
|
||||
register_commands(app)
|
||||
CAS(app)
|
||||
OAuth2(app)
|
||||
app.wsgi_app = ReverseProxy(app.wsgi_app)
|
||||
configure_upload_dir(app)
|
||||
|
||||
@@ -122,7 +125,7 @@ def register_extensions(app):
|
||||
db.init_app(app)
|
||||
cors.init_app(app)
|
||||
login_manager.init_app(app)
|
||||
migrate.init_app(app, db, directory=f"{BASE_DIR}/migrations")
|
||||
migrate.init_app(app, db)
|
||||
rd.init_app(app)
|
||||
if app.config.get('USE_ES'):
|
||||
es.init_app(app)
|
||||
@@ -130,10 +133,6 @@ def register_extensions(app):
|
||||
app.config.update(app.config.get("CELERY"))
|
||||
celery.conf.update(app.config)
|
||||
|
||||
if app.config.get('SECRETS_ENGINE') == 'inner':
|
||||
with app.app_context():
|
||||
inner_secrets.init_app(app, InnerKVManger())
|
||||
|
||||
|
||||
def register_blueprints(app):
|
||||
for item in getmembers(api.views.entry):
|
||||
@@ -194,11 +193,10 @@ def configure_logger(app):
|
||||
app.logger.addHandler(handler)
|
||||
|
||||
log_file = app.config['LOG_PATH']
|
||||
if log_file and log_file != "/dev/stdout":
|
||||
file_handler = RotatingFileHandler(log_file,
|
||||
maxBytes=2 ** 30,
|
||||
backupCount=7)
|
||||
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
file_handler.setFormatter(formatter)
|
||||
app.logger.addHandler(file_handler)
|
||||
file_handler = RotatingFileHandler(log_file,
|
||||
maxBytes=2 ** 30,
|
||||
backupCount=7)
|
||||
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
file_handler.setFormatter(formatter)
|
||||
app.logger.addHandler(file_handler)
|
||||
app.logger.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
|
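The hunks above add a second Flask application factory, create_acl_app, alongside create_app, with both binding the same module-level extension objects. A small, self-contained sketch of that factory pattern; the extension set and config keys here are assumptions rather than the project's actual list:

```python
# Hedged sketch of the application-factory pattern shown above.
# Extensions are created once at module level and bound per app instance.
from flask import Flask
from flask_caching import Cache
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()
cache = Cache()


def register_extensions(app):
    db.init_app(app)
    cache.init_app(app)


def create_acl_app(config=None):
    app = Flask(__name__.split(".")[0])
    app.config.from_mapping(config or {
        "SQLALCHEMY_DATABASE_URI": "sqlite://",
        "CACHE_TYPE": "SimpleCache",
    })
    register_extensions(app)
    return app
```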
@@ -1,8 +1,6 @@
|
||||
import click
|
||||
from flask.cli import with_appcontext
|
||||
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
@@ -25,18 +23,50 @@ def init_acl():
|
||||
role_rebuild.apply_async(args=(role.id, app.id), queue=ACL_QUEUE)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def add_user():
|
||||
"""
|
||||
create a user
|
||||
|
||||
is_admin: default is False
|
||||
|
||||
"""
|
||||
|
||||
username = click.prompt('Enter username', confirmation_prompt=False)
|
||||
password = click.prompt('Enter password', hide_input=True, confirmation_prompt=True)
|
||||
email = click.prompt('Enter email ', confirmation_prompt=False)
|
||||
|
||||
UserCRUD.add(username=username, password=password, email=email)
|
||||
# @click.command()
|
||||
# @with_appcontext
|
||||
# def acl_clean():
|
||||
# from api.models.acl import Resource
|
||||
# from api.models.acl import Permission
|
||||
# from api.models.acl import RolePermission
|
||||
#
|
||||
# perms = RolePermission.get_by(to_dict=False)
|
||||
#
|
||||
# for r in perms:
|
||||
# perm = Permission.get_by_id(r.perm_id)
|
||||
# if perm and perm.app_id != r.app_id:
|
||||
# resource_id = r.resource_id
|
||||
# resource = Resource.get_by_id(resource_id)
|
||||
# perm_name = perm.name
|
||||
# existed = Permission.get_by(resource_type_id=resource.resource_type_id, name=perm_name, first=True,
|
||||
# to_dict=False)
|
||||
# if existed is not None:
|
||||
# other = RolePermission.get_by(rid=r.rid, perm_id=existed.id, resource_id=resource_id)
|
||||
# if not other:
|
||||
# r.update(perm_id=existed.id)
|
||||
# else:
|
||||
# r.soft_delete()
|
||||
# else:
|
||||
# r.soft_delete()
|
||||
#
|
||||
#
|
||||
# @click.command()
|
||||
# @with_appcontext
|
||||
# def acl_has_resource_role():
|
||||
# from api.models.acl import Role
|
||||
# from api.models.acl import App
|
||||
# from api.lib.perm.acl.cache import HasResourceRoleCache
|
||||
# from api.lib.perm.acl.role import RoleCRUD
|
||||
#
|
||||
# roles = Role.get_by(to_dict=False)
|
||||
# apps = App.get_by(to_dict=False)
|
||||
# for role in roles:
|
||||
# if role.app_id:
|
||||
# res = RoleCRUD.recursive_resources(role.id, role.app_id)
|
||||
# if res.get('resources') or res.get('groups'):
|
||||
# HasResourceRoleCache.add(role.id, role.app_id)
|
||||
# else:
|
||||
# for app in apps:
|
||||
# res = RoleCRUD.recursive_resources(role.id, app.id)
|
||||
# if res.get('resources') or res.get('groups'):
|
||||
# HasResourceRoleCache.add(role.id, app.id)
|
||||
|
@@ -7,7 +7,6 @@ import json
|
||||
import time
|
||||
|
||||
import click
|
||||
import requests
|
||||
from flask import current_app
|
||||
from flask.cli import with_appcontext
|
||||
from flask_login import login_user
|
||||
@@ -19,7 +18,6 @@ from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
@@ -30,9 +28,7 @@ from api.lib.perm.acl.cache import AppCache
|
||||
from api.lib.perm.acl.resource import ResourceCRUD
|
||||
from api.lib.perm.acl.resource import ResourceTypeCRUD
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
from api.lib.secrets.inner import global_key_threshold
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
from api.models.acl import App
|
||||
from api.models.acl import ResourceType
|
||||
from api.models.cmdb import Attribute
|
||||
@@ -50,19 +46,13 @@ def cmdb_init_cache():
|
||||
|
||||
ci_relations = CIRelation.get_by(to_dict=False)
|
||||
relations = dict()
|
||||
relations2 = dict()
|
||||
for cr in ci_relations:
|
||||
relations.setdefault(cr.first_ci_id, {}).update({cr.second_ci_id: cr.second_ci.type_id})
|
||||
if cr.ancestor_ids:
|
||||
relations2.setdefault(cr.ancestor_ids, {}).update({cr.second_ci_id: cr.second_ci.type_id})
|
||||
for i in relations:
|
||||
relations[i] = json.dumps(relations[i])
|
||||
if relations:
|
||||
rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
|
||||
if relations2:
|
||||
rd.create_or_update(relations2, REDIS_PREFIX_CI_RELATION2)
|
||||
|
||||
es = None
|
||||
if current_app.config.get("USE_ES"):
|
||||
from api.extensions import es
|
||||
from api.models.cmdb import Attribute
|
||||
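The cmdb_init_cache hunk above folds CI relations into per-CI dictionaries (first_ci_id -> {second_ci_id: type_id}), JSON-encodes each one, and writes the result to Redis under a relation prefix. How the project's RedisHandler.create_or_update lays that out is internal to it; the sketch below shows the same structure with plain redis-py and a hash, with the key name and IDs invented for illustration:

```python
# Hedged sketch of the relation cache built above (layout and key names assumed).
import json

import redis

r = redis.Redis(host="127.0.0.1", port=6379, db=0)

# first_ci_id -> {second_ci_id: type_id}, mirroring the dict built in the command
relations = {
    101: {201: 3, 202: 3},
    102: {203: 4},
}

for first_ci_id, children in relations.items():
    r.hset("CMDB_CI_RELATION", str(first_ci_id), json.dumps(children))

# Reading one CI's children back
children = json.loads(r.hget("CMDB_CI_RELATION", "101") or "{}")
```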
@@ -133,10 +123,10 @@ def cmdb_init_acl():
|
||||
|
||||
# 3. add resource and grant
|
||||
ci_types = CIType.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
for ci_type in ci_types:
|
||||
try:
|
||||
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
|
||||
ResourceCRUD.add(ci_type.name, type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
@@ -146,10 +136,10 @@ def cmdb_init_acl():
|
||||
[PermEnum.READ])
|
||||
|
||||
relation_views = PreferenceRelationView.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
|
||||
type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
|
||||
for view in relation_views:
|
||||
try:
|
||||
ResourceCRUD.add(view.name, resource_type_id, app_id)
|
||||
ResourceCRUD.add(view.name, type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
@@ -159,6 +149,57 @@ def cmdb_init_acl():
|
||||
[PermEnum.READ])
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-u',
|
||||
'--user',
|
||||
help='username'
|
||||
)
|
||||
@click.option(
|
||||
'-p',
|
||||
'--password',
|
||||
help='password'
|
||||
)
|
||||
@click.option(
|
||||
'-m',
|
||||
'--mail',
|
||||
help='mail'
|
||||
)
|
||||
@with_appcontext
|
||||
def add_user(user, password, mail):
|
||||
"""
|
||||
create a user
|
||||
|
||||
is_admin: default is False
|
||||
|
||||
Example: flask add-user -u <username> -p <password> -m <mail>
|
||||
"""
|
||||
assert user is not None
|
||||
assert password is not None
|
||||
assert mail is not None
|
||||
UserCRUD.add(username=user, password=password, email=mail)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-u',
|
||||
'--user',
|
||||
help='username'
|
||||
)
|
||||
@with_appcontext
|
||||
def del_user(user):
|
||||
"""
|
||||
delete a user
|
||||
|
||||
Example: flask del-user -u <username>
|
||||
"""
|
||||
assert user is not None
|
||||
from api.models.acl import User
|
||||
|
||||
u = User.get_by(username=user, first=True, to_dict=False)
|
||||
u and UserCRUD.delete(u.uid)
|
||||
|
||||
|
||||
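The add_user and del_user commands above are invoked as `flask add-user -u <username> -p <password> -m <mail>` and `flask del-user -u <username>`. A hedged example of driving a Click command with options from a test, using a stand-in command rather than the real one:

```python
# Hedged sketch: exercising a Click command with options via click's test runner.
import click
from click.testing import CliRunner


@click.command()
@click.option("-u", "--user", required=True, help="username")
@click.option("-p", "--password", required=True, help="password")
@click.option("-m", "--mail", required=True, help="mail")
def add_user(user, password, mail):
    # stand-in body; the real command calls UserCRUD.add(...)
    click.echo(f"would create {user} <{mail}>")


result = CliRunner().invoke(add_user, ["-u", "demo", "-p", "123456", "-m", "demo@example.com"])
print(result.output)
```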
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_counter():
|
||||
@@ -270,197 +311,3 @@ def cmdb_index_table_upgrade():
|
||||
CIIndexValueDateTime.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
|
||||
i.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def valid_address(address):
|
||||
if not address:
|
||||
return False
|
||||
|
||||
if not address.startswith(("http://127.0.0.1", "https://127.0.0.1")):
|
||||
response = {
|
||||
"message": "Address should start with http://127.0.0.1 or https://127.0.0.1",
|
||||
"status": "failed"
|
||||
}
|
||||
KeyManage.print_response(response)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_init(address):
|
||||
"""
|
||||
init inner secrets for password feature
|
||||
"""
|
||||
res, ok = KeyManage(backend=InnerKVManger).init()
|
||||
if not ok:
|
||||
if res.get("status") == "failed":
|
||||
KeyManage.print_response(res)
|
||||
return
|
||||
|
||||
token = res.get("details", {}).get("root_token", "")
|
||||
if valid_address(address):
|
||||
token = current_app.config.get("INNER_TRIGGER_TOKEN", "") if not token else token
|
||||
if not token:
|
||||
token = click.prompt(f'Enter root token', hide_input=True, confirmation_prompt=False)
|
||||
assert token is not None
|
||||
resp = requests.post("{}/api/v0.1/secrets/auto_seal".format(address.strip("/")),
|
||||
headers={"Inner-Token": token})
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.text or resp.status_code, "status": "failed"})
|
||||
else:
|
||||
KeyManage.print_response(res)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
required=True,
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_unseal(address):
|
||||
"""
|
||||
unseal the secrets feature
|
||||
"""
|
||||
if not valid_address(address):
|
||||
return
|
||||
address = "{}/api/v0.1/secrets/unseal".format(address.strip("/"))
|
||||
for i in range(global_key_threshold):
|
||||
token = click.prompt(f'Enter unseal token {i + 1}', hide_input=True, confirmation_prompt=False)
|
||||
assert token is not None
|
||||
resp = requests.post(address, headers={"Unseal-Token": token})
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
if resp.json().get("status") in ["success", "skip"]:
|
||||
return
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
|
||||
return
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
required=True,
|
||||
)
|
||||
@click.option(
|
||||
'-k',
|
||||
'--token',
|
||||
help='root token',
|
||||
prompt=True,
|
||||
hide_input=True,
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_seal(address, token):
|
||||
"""
|
||||
seal the secrets feature
|
||||
"""
|
||||
assert address is not None
|
||||
assert token is not None
|
||||
if not valid_address(address):
|
||||
return
|
||||
address = "{}/api/v0.1/secrets/seal".format(address.strip("/"))
|
||||
resp = requests.post(address, headers={
|
||||
"Inner-Token": token,
|
||||
})
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
|
||||
|
||||
|
||||
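The cmdb_inner_secrets_unseal and cmdb_inner_secrets_seal commands above drive the secrets API by POSTing to /api/v0.1/secrets/unseal and /api/v0.1/secrets/seal with Unseal-Token and Inner-Token headers. A hedged client-side sketch of the unseal loop; the host and key shares are placeholders, while the endpoint path and header name come from the code above:

```python
# Hedged sketch of the unseal flow: submit one key share at a time until the
# API reports success or skip. Host and share values are placeholders.
import requests

ADDRESS = "http://127.0.0.1:8000"
shares = ["share-1", "share-2", "share-3"]  # hypothetical unseal key shares

for share in shares:
    resp = requests.post("{}/api/v0.1/secrets/unseal".format(ADDRESS),
                         headers={"Unseal-Token": share})
    body = resp.json() if resp.status_code == 200 else {"status": "failed", "message": resp.status_code}
    print(body)
    if body.get("status") in ("success", "skip"):
        break
```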
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_password_data_migrate():
|
||||
"""
|
||||
Migrate CI password data, version >= v2.3.6
|
||||
"""
|
||||
from api.models.cmdb import CIIndexValueText
|
||||
from api.models.cmdb import CIValueText
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.vault import VaultClient
|
||||
|
||||
attrs = Attribute.get_by(to_dict=False)
|
||||
for attr in attrs:
|
||||
if attr.is_password:
|
||||
|
||||
value_table = CIIndexValueText if attr.is_index else CIValueText
|
||||
|
||||
failed = False
|
||||
for i in value_table.get_by(attr_id=attr.id, to_dict=False):
|
||||
if current_app.config.get("SECRETS_ENGINE", 'inner') == 'inner':
|
||||
_, status = InnerCrypt().decrypt(i.value)
|
||||
if status:
|
||||
continue
|
||||
|
||||
encrypt_value, status = InnerCrypt().encrypt(i.value)
|
||||
if status:
|
||||
CIValueText.create(ci_id=i.ci_id, attr_id=attr.id, value=encrypt_value)
|
||||
else:
|
||||
failed = True
|
||||
continue
|
||||
elif current_app.config.get("SECRETS_ENGINE") == 'vault':
|
||||
if i.value == '******':
|
||||
continue
|
||||
|
||||
vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
|
||||
try:
|
||||
vault.update("/{}/{}".format(i.ci_id, i.attr_id), dict(v=i.value))
|
||||
except Exception as e:
|
||||
print('save password to vault failed: {}'.format(e))
|
||||
failed = True
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
|
||||
i.delete()
|
||||
|
||||
if not failed and attr.is_index:
|
||||
attr.update(is_index=False)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_agent_init():
|
||||
"""
|
||||
Initialize the agent's permissions and obtain the key and secret
|
||||
"""
|
||||
|
||||
from api.models.acl import User
|
||||
|
||||
user = User.get_by(username="cmdb_agent", first=True, to_dict=False)
|
||||
if user is None:
|
||||
click.echo(
|
||||
click.style('user cmdb_agent does not exist, please use flask add-user to create it first', fg='red'))
|
||||
return
|
||||
|
||||
# grant
|
||||
_app = AppCache.get('cmdb') or App.create(name='cmdb')
|
||||
app_id = _app.id
|
||||
|
||||
ci_types = CIType.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
for ci_type in ci_types:
|
||||
try:
|
||||
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
ACLManager().grant_resource_to_role(ci_type.name,
|
||||
"cmdb_agent",
|
||||
ResourceTypeEnum.CI,
|
||||
[PermEnum.READ, PermEnum.UPDATE, PermEnum.ADD, PermEnum.DELETE])
|
||||
|
||||
click.echo("Key : {}".format(click.style(user.key, bg='red')))
|
||||
click.echo("Secret: {}".format(click.style(user.secret, bg='red')))
|
||||
|
@@ -10,6 +10,9 @@ from api.models.common_setting import Employee, Department
|
||||
|
||||
|
||||
class InitEmployee(object):
|
||||
"""
|
||||
初始化员工
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.log = current_app.logger
|
||||
@@ -55,8 +58,7 @@ class InitEmployee(object):
|
||||
self.log.error(ErrFormat.acl_import_user_failed.format(user['username'], str(e)))
|
||||
self.log.error(e)
|
||||
|
||||
@staticmethod
|
||||
def get_rid_by_uid(uid):
|
||||
def get_rid_by_uid(self, uid):
|
||||
from api.models.acl import Role
|
||||
role = Role.get_by(first=True, uid=uid)
|
||||
return role['id'] if role is not None else 0
|
||||
@@ -69,8 +71,7 @@ class InitDepartment(object):
|
||||
def init(self):
|
||||
self.init_wide_company()
|
||||
|
||||
@staticmethod
|
||||
def hard_delete(department_id, department_name):
|
||||
def hard_delete(self, department_id, department_name):
|
||||
existed_deleted_list = Department.query.filter(
|
||||
Department.department_name == department_name,
|
||||
Department.department_id == department_id,
|
||||
@@ -79,12 +80,11 @@ class InitDepartment(object):
|
||||
for existed in existed_deleted_list:
|
||||
existed.delete()
|
||||
|
||||
@staticmethod
|
||||
def get_department(department_name):
|
||||
def get_department(self, department_name):
|
||||
return Department.query.filter(
|
||||
Department.department_name == department_name,
|
||||
Department.deleted == 0,
|
||||
).first()
|
||||
).order_by(Department.created_at.asc()).first()
|
||||
|
||||
def run(self, department_id, department_name, department_parent_id):
|
||||
self.hard_delete(department_id, department_name)
|
||||
@@ -94,7 +94,7 @@ class InitDepartment(object):
|
||||
if res.department_id == department_id:
|
||||
return
|
||||
else:
|
||||
res.update(
|
||||
new_d = res.update(
|
||||
department_id=department_id,
|
||||
department_parent_id=department_parent_id,
|
||||
)
|
||||
@@ -108,11 +108,11 @@ class InitDepartment(object):
|
||||
new_d = self.get_department(department_name)
|
||||
|
||||
if new_d.department_id != department_id:
|
||||
new_d.update(
|
||||
new_d = new_d.update(
|
||||
department_id=department_id,
|
||||
department_parent_id=department_parent_id,
|
||||
)
|
||||
self.log.info(f"init {department_name} success.")
|
||||
self.log.info(f"初始化 {department_name} 部门成功.")
|
||||
|
||||
def run_common(self, department_id, department_name, department_parent_id):
|
||||
try:
|
||||
@@ -123,14 +123,19 @@ class InitDepartment(object):
|
||||
raise Exception(e)
|
||||
|
||||
def init_wide_company(self):
|
||||
"""
|
||||
创建 id 0, name 全公司 的部门
|
||||
"""
|
||||
department_id = 0
|
||||
department_name = '全公司'
|
||||
department_parent_id = -1
|
||||
|
||||
self.run_common(department_id, department_name, department_parent_id)
|
||||
|
||||
@staticmethod
|
||||
def create_acl_role_with_department():
|
||||
def create_acl_role_with_department(self):
|
||||
"""
|
||||
当前所有部门,在ACL创建 role
|
||||
"""
|
||||
acl = ACLManager('acl')
|
||||
role_name_map = {role['name']: role for role in acl.get_all_roles()}
|
||||
|
||||
@@ -141,7 +146,7 @@ class InitDepartment(object):
|
||||
continue
|
||||
|
||||
role = role_name_map.get(department.department_name)
|
||||
if not role:
|
||||
if role is None:
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'name': department.department_name,
|
||||
@@ -160,65 +165,50 @@ class InitDepartment(object):
|
||||
acl = self.check_app('backend')
|
||||
resources_types = acl.get_all_resources_types()
|
||||
|
||||
perms = ['read', 'grant', 'delete', 'update']
|
||||
|
||||
acl_rid = self.get_admin_user_rid()
|
||||
|
||||
results = list(filter(lambda t: t['name'] == '操作权限', resources_types['groups']))
|
||||
if len(results) == 0:
|
||||
payload = dict(
|
||||
app_id=acl.app_name,
|
||||
name='操作权限',
|
||||
description='',
|
||||
perms=perms
|
||||
perms=['read', 'grant', 'delete', 'update']
|
||||
)
|
||||
resource_type = acl.create_resources_type(payload)
|
||||
else:
|
||||
resource_type = results[0]
|
||||
resource_type_id = resource_type['id']
|
||||
existed_perms = resources_types.get('id2perms', {}).get(resource_type_id, [])
|
||||
existed_perms = [p['name'] for p in existed_perms]
|
||||
new_perms = []
|
||||
for perm in perms:
|
||||
if perm not in existed_perms:
|
||||
new_perms.append(perm)
|
||||
if len(new_perms) > 0:
|
||||
resource_type['perms'] = existed_perms + new_perms
|
||||
acl.update_resources_type(resource_type_id, resource_type)
|
||||
|
||||
resource_list = acl.get_resource_by_type(None, None, resource_type['id'])
|
||||
for name in ['公司信息']:
|
||||
payload = dict(
|
||||
type_id=resource_type['id'],
|
||||
app_id=acl.app_name,
|
||||
name=name,
|
||||
)
|
||||
try:
|
||||
acl.create_resource(payload)
|
||||
except Exception as e:
|
||||
if '已经存在' in str(e):
|
||||
pass
|
||||
else:
|
||||
raise Exception(e)
|
||||
|
||||
for name in ['公司信息', '公司架构', '通知设置']:
|
||||
target = list(filter(lambda r: r['name'] == name, resource_list))
|
||||
if len(target) == 0:
|
||||
payload = dict(
|
||||
type_id=resource_type['id'],
|
||||
app_id=acl.app_name,
|
||||
name=name,
|
||||
)
|
||||
resource = acl.create_resource(payload)
|
||||
else:
|
||||
resource = target[0]
|
||||
|
||||
if acl_rid > 0:
|
||||
acl.grant_resource(acl_rid, resource['id'], perms)
|
||||
|
||||
@staticmethod
|
||||
def check_app(app_name):
|
||||
def check_app(self, app_name):
|
||||
acl = ACLManager(app_name)
|
||||
payload = dict(
|
||||
name=app_name,
|
||||
description=app_name
|
||||
)
|
||||
app = acl.validate_app()
|
||||
if not app:
|
||||
acl.create_app(payload)
|
||||
return acl
|
||||
try:
|
||||
app = acl.validate_app()
|
||||
if app:
|
||||
return acl
|
||||
|
||||
@staticmethod
|
||||
def get_admin_user_rid():
|
||||
admin = Employee.get_by(first=True, username='admin', to_dict=False)
|
||||
return admin.acl_rid if admin else 0
|
||||
acl.create_app(payload)
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
if '不存在' in str(e):
|
||||
acl.create_app(payload)
|
||||
return acl
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
@click.command()
|
||||
@@ -251,19 +241,17 @@ def common_check_new_columns():
|
||||
from api.extensions import db
|
||||
from sqlalchemy import inspect, text
|
||||
|
||||
def get_model_by_table_name(_table_name):
|
||||
registry = getattr(db.Model, 'registry', None)
|
||||
class_registry = getattr(registry, '_class_registry', None)
|
||||
for _model in class_registry.values():
|
||||
if hasattr(_model, '__tablename__') and _model.__tablename__ == _table_name:
|
||||
return _model
|
||||
def get_model_by_table_name(table_name):
|
||||
for model in db.Model.registry._class_registry.values():
|
||||
if hasattr(model, '__tablename__') and model.__tablename__ == table_name:
|
||||
return model
|
||||
return None
|
||||
|
||||
def add_new_column(target_table_name, new_column):
|
||||
def add_new_column(table_name, new_column):
|
||||
column_type = new_column.type.compile(engine.dialect)
|
||||
default_value = new_column.default.arg if new_column.default else None
|
||||
|
||||
sql = "ALTER TABLE " + target_table_name + " ADD COLUMN " + new_column.name + " " + column_type
|
||||
sql = f"ALTER TABLE {table_name} ADD COLUMN {new_column.name} {column_type} "
|
||||
if new_column.comment:
|
||||
sql += f" comment '{new_column.comment}'"
|
||||
|
||||
@@ -289,8 +277,7 @@ def common_check_new_columns():
|
||||
model = get_model_by_table_name(table_name)
|
||||
if model is None:
|
||||
continue
|
||||
|
||||
model_columns = getattr(getattr(getattr(model, '__table__'), 'columns'), '_all_columns')
|
||||
model_columns = model.__table__.columns._all_columns
|
||||
for column in model_columns:
|
||||
if column.name not in existed_column_name_list:
|
||||
try:
|
||||
@@ -299,20 +286,3 @@ def common_check_new_columns():
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"add new column [{column.name}] in table [{table_name}] err:")
|
||||
current_app.logger.error(e)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def common_sync_file_to_db():
|
||||
from api.lib.common_setting.upload_file import CommonFileCRUD
|
||||
CommonFileCRUD.sync_file_to_db()
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
@click.option('--value', type=click.INT, default=-1)
|
||||
def set_auth_auto_redirect_enable(value):
|
||||
if value < 0:
|
||||
return
|
||||
from api.lib.common_setting.common_data import CommonDataCRUD
|
||||
CommonDataCRUD.set_auth_auto_redirect_enable(value)
|
||||
|
@@ -84,6 +84,66 @@ def clean():
|
||||
os.remove(full_pathname)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option("--url", default=None, help="Url to test (ex. /static/image.png)")
|
||||
@click.option(
|
||||
"--order", default="rule", help="Property on Rule to order by (default: rule)"
|
||||
)
|
||||
@with_appcontext
|
||||
def urls(url, order):
|
||||
"""Display all of the url matching routes for the project.
|
||||
|
||||
Borrowed from Flask-Script, converted to use Click.
|
||||
"""
|
||||
rows = []
|
||||
column_headers = ("Rule", "Endpoint", "Arguments")
|
||||
|
||||
if url:
|
||||
try:
|
||||
rule, arguments = current_app.url_map.bind("localhost").match(
|
||||
url, return_rule=True
|
||||
)
|
||||
rows.append((rule.rule, rule.endpoint, arguments))
|
||||
column_length = 3
|
||||
except (NotFound, MethodNotAllowed) as e:
|
||||
rows.append(("<{}>".format(e), None, None))
|
||||
column_length = 1
|
||||
else:
|
||||
rules = sorted(
|
||||
current_app.url_map.iter_rules(), key=lambda rule: getattr(rule, order)
|
||||
)
|
||||
for rule in rules:
|
||||
rows.append((rule.rule, rule.endpoint, None))
|
||||
column_length = 2
|
||||
|
||||
str_template = ""
|
||||
table_width = 0
|
||||
|
||||
if column_length >= 1:
|
||||
max_rule_length = max(len(r[0]) for r in rows)
|
||||
max_rule_length = max_rule_length if max_rule_length > 4 else 4
|
||||
str_template += "{:" + str(max_rule_length) + "}"
|
||||
table_width += max_rule_length
|
||||
|
||||
if column_length >= 2:
|
||||
max_endpoint_length = max(len(str(r[1])) for r in rows)
|
||||
max_endpoint_length = max_endpoint_length if max_endpoint_length > 8 else 8
|
||||
str_template += " {:" + str(max_endpoint_length) + "}"
|
||||
table_width += 2 + max_endpoint_length
|
||||
|
||||
if column_length >= 3:
|
||||
max_arguments_length = max(len(str(r[2])) for r in rows)
|
||||
max_arguments_length = max_arguments_length if max_arguments_length > 9 else 9
|
||||
str_template += " {:" + str(max_arguments_length) + "}"
|
||||
table_width += 2 + max_arguments_length
|
||||
|
||||
click.echo(str_template.format(*column_headers[:column_length]))
|
||||
click.echo("-" * table_width)
|
||||
|
||||
for row in rows:
|
||||
click.echo(str_template.format(*row[:column_length]))
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def db_setup():
|
||||
|
@@ -9,7 +9,6 @@ from flask_login import LoginManager
|
||||
from flask_migrate import Migrate
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
from api.lib.utils import ESHandler
|
||||
from api.lib.utils import RedisHandler
|
||||
|
||||
@@ -22,4 +21,3 @@ celery = Celery()
|
||||
cors = CORS(supports_credentials=True)
|
||||
rd = RedisHandler()
|
||||
es = ESHandler()
|
||||
inner_secrets = KeyManage()
|
||||
|
@@ -15,7 +15,7 @@ try:
|
||||
except ImportError:
|
||||
from flask import _request_ctx_stack as stack
|
||||
|
||||
from . import routing
|
||||
from api.flask_cas import routing
|
||||
|
||||
|
||||
class CAS(object):
|
@@ -119,4 +119,4 @@ def create_cas_validate_url(cas_url, cas_route, service, ticket,
|
||||
('service', service),
|
||||
('ticket', ticket),
|
||||
('renew', renew),
|
||||
)
|
||||
)
|
@@ -1,24 +1,14 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
import json
|
||||
|
||||
import bs4
|
||||
from flask import Blueprint
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from flask import current_app, session, request, url_for, redirect
|
||||
from flask_login import login_user, logout_user
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from .cas_urls import create_cas_login_url
|
||||
from .cas_urls import create_cas_logout_url
|
||||
from .cas_urls import create_cas_validate_url
|
||||
@@ -26,7 +16,6 @@ from .cas_urls import create_cas_validate_url
|
||||
blueprint = Blueprint('cas', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/login')
|
||||
@blueprint.route('/api/sso/login')
|
||||
def login():
|
||||
"""
|
||||
@@ -40,20 +29,16 @@ def login():
|
||||
If validation was successful the logged in username is saved in
|
||||
the user's session under the key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
# _service = url_for('cas.login', _external=True)
|
||||
_service = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('cas.login'))
|
||||
|
||||
_service = url_for('cas.login', _external=True, next=session["next"]) \
|
||||
if session.get("next") else url_for('cas.login', _external=True)
|
||||
redirect_url = create_cas_login_url(
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
_service)
|
||||
|
||||
if 'ticket' in request.args:
|
||||
@@ -62,38 +47,30 @@ def login():
|
||||
if request.args.get('ticket'):
|
||||
|
||||
if validate(request.args['ticket']):
|
||||
redirect_url = session.get("next") or config.get("cas_after_login") or "/"
|
||||
redirect_url = session.get("next") or \
|
||||
current_app.config.get("CAS_AFTER_LOGIN")
|
||||
username = session.get("CAS_USERNAME")
|
||||
user = UserCache.get(username)
|
||||
login_user(user)
|
||||
|
||||
session.permanent = True
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
else:
|
||||
del session[cas_token_session_key]
|
||||
redirect_url = create_cas_login_url(
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
url_for('cas.login', _external=True),
|
||||
renew=True)
|
||||
|
||||
AuditCRUD.add_login_log(session.get("CAS_USERNAME"), False, ErrFormat.invalid_password)
|
||||
|
||||
current_app.logger.info("redirect to: {0}".format(redirect_url))
|
||||
return redirect(redirect_url)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/logout')
|
||||
@blueprint.route('/api/sso/logout')
|
||||
def logout():
|
||||
"""
|
||||
When the user accesses this route they are logged out.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
current_app.logger.info(config)
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
@@ -105,14 +82,12 @@ def logout():
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = create_cas_logout_url(
|
||||
config['cas_server'],
|
||||
config['cas_logout_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGOUT_ROUTE'],
|
||||
url_for('cas.login', _external=True, next=request.referrer))
|
||||
|
||||
logout_user()
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
return redirect(redirect_url)
|
||||
@@ -125,15 +100,14 @@ def validate(ticket):
|
||||
and the validated username is saved in the session under the
|
||||
key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
|
||||
current_app.logger.debug("validating token {0}".format(ticket))
|
||||
|
||||
cas_validate_url = create_cas_validate_url(
|
||||
config['cas_validate_server'],
|
||||
config['cas_validate_route'],
|
||||
current_app.config['CAS_VALIDATE_SERVER'],
|
||||
current_app.config['CAS_VALIDATE_ROUTE'],
|
||||
url_for('cas.login', _external=True),
|
||||
ticket)
|
||||
|
||||
@@ -141,35 +115,23 @@ def validate(ticket):
|
||||
|
||||
try:
|
||||
response = urlopen(cas_validate_url).read()
|
||||
ticket_id = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticket_id.split('|') if s.strip()]
|
||||
ticketid = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticketid.split('|') if s.strip()]
|
||||
username, is_valid = None, False
|
||||
if len(strs) == 1:
|
||||
username = strs[0]
|
||||
is_valid = True
|
||||
user_info = json.loads(_parse_tag(response, "cas:other"))
|
||||
current_app.logger.info(user_info)
|
||||
except ValueError:
|
||||
current_app.logger.error("CAS returned unexpected result")
|
||||
is_valid = False
|
||||
return is_valid
|
||||
|
||||
if is_valid:
|
||||
current_app.logger.debug("{}: {}".format(cas_username_session_key, username))
|
||||
current_app.logger.debug("valid")
|
||||
session[cas_username_session_key] = username
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
soup = bs4.BeautifulSoup(response)
|
||||
cas_user_map = config.get('cas_user_map')
|
||||
user_dict = dict()
|
||||
for k in cas_user_map:
|
||||
v = soup.find(cas_user_map[k]['tag'], cas_user_map[k].get('attrs', {}))
|
||||
user_dict[k] = v and v.text or None
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
if "email" not in user_dict:
|
||||
user_dict['email'] = username
|
||||
|
||||
UserCRUD.add(**user_dict)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
@@ -202,5 +164,4 @@ def _parse_tag(string, tag):
|
||||
|
||||
if soup.find(tag) is None:
|
||||
return ''
|
||||
|
||||
return soup.find(tag).string.strip()
|
@@ -1,5 +1,6 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import requests
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
@@ -22,7 +23,6 @@ from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.decorator import kwargs_required
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.perm.acl.acl import validate_permission
|
||||
from api.lib.webhook import webhook_request
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import CIType
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
@@ -40,11 +40,15 @@ class AttributeManager(object):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def _get_choice_values_from_webhook(choice_webhook, payload=None):
|
||||
ret_key = choice_webhook.get('ret_key')
|
||||
def _get_choice_values_from_web_hook(choice_web_hook):
|
||||
url = choice_web_hook.get('url')
|
||||
ret_key = choice_web_hook.get('ret_key')
|
||||
headers = choice_web_hook.get('headers') or {}
|
||||
payload = choice_web_hook.get('payload') or {}
|
||||
method = (choice_web_hook.get('method') or 'GET').lower()
|
||||
|
||||
try:
|
||||
res = webhook_request(choice_webhook, payload or {}).json()
|
||||
res = getattr(requests, method)(url, headers=headers, data=payload).json()
|
||||
if ret_key:
|
||||
ret_key_list = ret_key.strip().split("##")
|
||||
for key in ret_key_list[:-1]:
|
||||
@@ -59,57 +63,19 @@ class AttributeManager(object):
|
||||
current_app.logger.error("get choice values failed: {}".format(e))
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def _get_choice_values_from_other(choice_other):
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
|
||||
if choice_other.get('type_ids'):
|
||||
type_ids = choice_other.get('type_ids')
|
||||
attr_id = choice_other.get('attr_id')
|
||||
other_filter = choice_other.get('filter') or ''
|
||||
|
||||
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
|
||||
s = search(query, fl=[str(attr_id)], facet=[str(attr_id)], count=1)
|
||||
try:
|
||||
_, _, _, _, _, facet = s.search()
|
||||
return [[i[0], {}] for i in (list(facet.values()) or [[]])[0]]
|
||||
except SearchError as e:
|
||||
current_app.logger.error("get choice values from other ci failed: {}".format(e))
|
||||
return []
|
||||
|
||||
elif choice_other.get('script'):
|
||||
try:
|
||||
x = compile(choice_other['script'], '', "exec")
|
||||
local_ns = {}
|
||||
exec(x, {}, local_ns)
|
||||
res = local_ns['ChoiceValue']().values() or []
|
||||
return [[i, {}] for i in res]
|
||||
except Exception as e:
|
||||
current_app.logger.error("get choice values from script: {}".format(e))
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_other,
|
||||
choice_web_hook_parse=True, choice_other_parse=True):
|
||||
def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_web_hook_parse=True):
|
||||
if choice_web_hook:
|
||||
if choice_web_hook_parse and isinstance(choice_web_hook, dict):
|
||||
return cls._get_choice_values_from_webhook(choice_web_hook)
|
||||
else:
|
||||
return []
|
||||
elif choice_other:
|
||||
if choice_other_parse and isinstance(choice_other, dict):
|
||||
return cls._get_choice_values_from_other(choice_other)
|
||||
if choice_web_hook_parse:
|
||||
if isinstance(choice_web_hook, dict):
|
||||
return cls._get_choice_values_from_web_hook(choice_web_hook)
|
||||
else:
|
||||
return []
|
||||
|
||||
choice_table = ValueTypeMap.choice.get(value_type)
|
||||
if not choice_table:
|
||||
return []
|
||||
choice_values = choice_table.get_by(fl=["value", "option"], attr_id=attr_id)
|
||||
|
||||
return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option']]
|
||||
for choice_value in choice_values]
|
||||
return [[choice_value['value'], choice_value['option']] for choice_value in choice_values]
|
||||
|
||||
@staticmethod
|
||||
def add_choice_values(_id, value_type, choice_values):
|
||||
@@ -156,8 +122,7 @@ class AttributeManager(object):
|
||||
res = list()
|
||||
for attr in attrs:
|
||||
attr["is_choice"] and attr.update(
|
||||
dict(choice_value=cls.get_choice_values(attr["id"], attr["value_type"],
|
||||
attr["choice_web_hook"], attr.get("choice_other"))))
|
||||
dict(choice_value=cls.get_choice_values(attr["id"], attr["value_type"], attr["choice_web_hook"])))
|
||||
attr['is_choice'] and attr.pop('choice_web_hook', None)
|
||||
|
||||
res.append(attr)
|
||||
@@ -167,38 +132,29 @@ class AttributeManager(object):
|
||||
def get_attribute_by_name(self, name):
|
||||
attr = Attribute.get_by(name=name, first=True)
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
|
||||
attr["choice_web_hook"], attr.get("choice_other"))
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"], attr["choice_web_hook"])
|
||||
|
||||
return attr
|
||||
|
||||
def get_attribute_by_alias(self, alias):
|
||||
attr = Attribute.get_by(alias=alias, first=True)
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
|
||||
attr["choice_web_hook"], attr.get("choice_other"))
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"], attr["choice_web_hook"])
|
||||
|
||||
return attr
|
||||
|
||||
def get_attribute_by_id(self, _id):
|
||||
attr = Attribute.get_by_id(_id).to_dict()
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
|
||||
attr["choice_web_hook"], attr.get("choice_other"))
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"], attr["choice_web_hook"])
|
||||
|
||||
return attr
|
||||
|
||||
def get_attribute(self, key, choice_web_hook_parse=True, choice_other_parse=True):
|
||||
def get_attribute(self, key, choice_web_hook_parse=True):
|
||||
attr = AttributeCache.get(key).to_dict()
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(
|
||||
attr["id"],
|
||||
attr["value_type"],
|
||||
attr["choice_web_hook"],
|
||||
attr.get("choice_other"),
|
||||
choice_web_hook_parse=choice_web_hook_parse,
|
||||
choice_other_parse=choice_other_parse,
|
||||
)
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"], choice_web_hook_parse=choice_web_hook_parse)
|
||||
|
||||
return attr
|
||||
|
||||
@@ -225,22 +181,12 @@ class AttributeManager(object):
|
||||
def add(cls, **kwargs):
|
||||
choice_value = kwargs.pop("choice_value", [])
|
||||
kwargs.pop("is_choice", None)
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') else False
|
||||
|
||||
name = kwargs.pop("name")
|
||||
if name in BUILTIN_KEYWORDS:
|
||||
return abort(400, ErrFormat.attribute_name_cannot_be_builtin)
|
||||
|
||||
while kwargs.get('choice_other'):
|
||||
if isinstance(kwargs['choice_other'], dict):
|
||||
if kwargs['choice_other'].get('script'):
|
||||
break
|
||||
|
||||
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
|
||||
break
|
||||
|
||||
return abort(400, ErrFormat.attribute_choice_other_invalid)
|
||||
|
||||
alias = kwargs.pop("alias", "")
|
||||
alias = name if not alias else alias
|
||||
Attribute.get_by(name=name, first=True) and abort(400, ErrFormat.attribute_name_duplicate.format(name))
|
||||
@@ -250,8 +196,6 @@ class AttributeManager(object):
|
||||
|
||||
kwargs.get('is_computed') and cls.can_create_computed_attribute()
|
||||
|
||||
kwargs.get('choice_other') and kwargs['choice_other'].get('script') and cls.can_create_computed_attribute()
|
||||
|
||||
attr = Attribute.create(flush=True,
|
||||
name=name,
|
||||
alias=alias,
|
||||
@@ -337,6 +281,9 @@ class AttributeManager(object):
|
||||
def update(self, _id, **kwargs):
|
||||
attr = Attribute.get_by_id(_id) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_id)))
|
||||
|
||||
if not self._can_edit_attribute(attr):
|
||||
return abort(403, ErrFormat.cannot_edit_attribute)
|
||||
|
||||
if kwargs.get("name"):
|
||||
other = Attribute.get_by(name=kwargs['name'], first=True, to_dict=False)
|
||||
if other and other.id != attr.id:
|
||||
@@ -354,22 +301,12 @@ class AttributeManager(object):
|
||||
|
||||
self._change_index(attr, attr.is_index, kwargs['is_index'])
|
||||
|
||||
while kwargs.get('choice_other'):
|
||||
if isinstance(kwargs['choice_other'], dict):
|
||||
if kwargs['choice_other'].get('script'):
|
||||
break
|
||||
|
||||
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
|
||||
break
|
||||
|
||||
return abort(400, ErrFormat.attribute_choice_other_invalid)
|
||||
|
||||
existed2 = attr.to_dict()
|
||||
if not existed2['choice_web_hook'] and not existed2.get('choice_other') and existed2['is_choice']:
|
||||
existed2['choice_value'] = self.get_choice_values(attr.id, attr.value_type, None, None)
|
||||
if not existed2['choice_web_hook'] and existed2['is_choice']:
|
||||
existed2['choice_value'] = self.get_choice_values(attr.id, attr.value_type, attr.choice_web_hook)
|
||||
|
||||
choice_value = kwargs.pop("choice_value", False)
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') else False
|
||||
kwargs['is_choice'] = is_choice
|
||||
|
||||
if kwargs.get('default') and not (isinstance(kwargs['default'], dict) and 'default' in kwargs['default']):
|
||||
@@ -377,14 +314,6 @@ class AttributeManager(object):
|
||||
|
||||
kwargs.get('is_computed') and self.can_create_computed_attribute()
|
||||
|
||||
is_changed = False
|
||||
for k in kwargs:
|
||||
if kwargs[k] != getattr(attr, k, None):
|
||||
is_changed = True
|
||||
|
||||
if is_changed and not self._can_edit_attribute(attr):
|
||||
return abort(403, ErrFormat.cannot_edit_attribute)
|
||||
|
||||
attr.update(flush=True, filter_none=False, **kwargs)
|
||||
|
||||
if is_choice and choice_value:
|
||||
|
@@ -3,6 +3,11 @@ import datetime
|
||||
import json
|
||||
import os
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.auto_discovery.const import ClOUD_MAP
|
||||
from api.lib.cmdb.cache import CITypeAttributeCache
|
||||
@@ -23,10 +28,6 @@ from api.lib.utils import AESCrypto
|
||||
from api.models.cmdb import AutoDiscoveryCI
|
||||
from api.models.cmdb import AutoDiscoveryCIType
|
||||
from api.models.cmdb import AutoDiscoveryRule
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import func
|
||||
|
||||
PWD = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
@@ -35,10 +36,9 @@ def parse_plugin_script(script):
|
||||
attributes = []
|
||||
try:
|
||||
x = compile(script, '', "exec")
|
||||
local_ns = {}
|
||||
exec(x, {}, local_ns)
|
||||
unique_key = local_ns['AutoDiscovery']().unique_key
|
||||
attrs = local_ns['AutoDiscovery']().attributes() or []
|
||||
exec(x)
|
||||
unique_key = locals()['AutoDiscovery']().unique_key
|
||||
attrs = locals()['AutoDiscovery']().attributes() or []
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -250,17 +250,20 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
current_app.logger.warning(e)
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def _can_add(**kwargs):
|
||||
def _can_add(self, **kwargs):
|
||||
self.cls.get_by(type_id=kwargs['type_id'], adr_id=kwargs.get('adr_id') or None) and abort(
|
||||
400, ErrFormat.ad_duplicate)
|
||||
|
||||
# self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
|
||||
|
||||
if kwargs.get('adr_id'):
|
||||
AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
|
||||
adr = AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
|
||||
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['adr_id'])))
|
||||
# if not adr.is_plugin:
|
||||
# other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
|
||||
# if other:
|
||||
# ci_type = CITypeCache.get(other.type_id)
|
||||
# return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))
|
||||
if not adr.is_plugin:
|
||||
other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
|
||||
if other:
|
||||
ci_type = CITypeCache.get(other.type_id)
|
||||
return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))
|
||||
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
|
@@ -29,7 +29,6 @@ from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RetKey
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.cmdb.history import AttributeHistoryManger
|
||||
from api.lib.cmdb.history import CIRelationHistoryManager
|
||||
from api.lib.cmdb.history import CITriggerHistoryManager
|
||||
@@ -43,8 +42,6 @@ from api.lib.notify import notify_send
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.perm.acl.acl import validate_permission
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.vault import VaultClient
|
||||
from api.lib.utils import Lock
|
||||
from api.lib.utils import handle_arg_list
|
||||
from api.lib.webhook import webhook_request
|
||||
@@ -63,7 +60,6 @@ from api.tasks.cmdb import ci_relation_cache
|
||||
from api.tasks.cmdb import ci_relation_delete
|
||||
|
||||
PRIVILEGED_USERS = {"worker", "cmdb_agent", "agent"}
|
||||
PASSWORD_DEFAULT_SHOW = "******"
|
||||
|
||||
|
||||
class CIManager(object):
|
||||
@@ -182,9 +178,6 @@ class CIManager(object):
|
||||
need_children and res.update(CIRelationManager.get_children(ci_id, ret_key=ret_key)) # one floor
|
||||
|
||||
ci_type = CITypeCache.get(ci.type_id)
|
||||
if not ci_type:
|
||||
return res
|
||||
|
||||
res["ci_type"] = ci_type.name
|
||||
|
||||
fields = CITypeAttributeManager.get_attr_names_by_type_id(ci.type_id) if not fields else fields
|
||||
@@ -330,8 +323,6 @@ class CIManager(object):
|
||||
ci_attr2type_attr = {type_attr.attr_id: type_attr for type_attr, _ in attrs}
|
||||
|
||||
ci = None
|
||||
record_id = None
|
||||
password_dict = {}
|
||||
need_lock = current_user.username not in current_app.config.get('PRIVILEGED_USERS', PRIVILEGED_USERS)
|
||||
with Lock(ci_type_name, need_lock=need_lock):
|
||||
existed = cls.ci_is_exist(unique_key, unique_value, ci_type.id)
|
||||
@@ -360,23 +351,14 @@ class CIManager(object):
|
||||
ci_dict.get(attr.name) is None and ci_dict.get(attr.alias) is None)):
|
||||
ci_dict[attr.name] = attr.default.get('default')
|
||||
|
||||
if (type_attr.is_required and not attr.is_computed and
|
||||
(attr.name not in ci_dict and attr.alias not in ci_dict)):
|
||||
if type_attr.is_required and (attr.name not in ci_dict and attr.alias not in ci_dict):
|
||||
return abort(400, ErrFormat.attribute_value_required.format(attr.name))
|
||||
else:
|
||||
for type_attr, attr in attrs:
|
||||
if attr.default and attr.default.get('default') == AttributeDefaultValueEnum.UPDATED_AT:
|
||||
ci_dict[attr.name] = now
|
||||
|
||||
computed_attrs = []
|
||||
for _, attr in attrs:
|
||||
if attr.is_computed:
|
||||
computed_attrs.append(attr.to_dict())
|
||||
elif attr.is_password:
|
||||
if attr.name in ci_dict:
|
||||
password_dict[attr.id] = ci_dict.pop(attr.name)
|
||||
elif attr.alias in ci_dict:
|
||||
password_dict[attr.id] = ci_dict.pop(attr.alias)
|
||||
computed_attrs = [attr.to_dict() for _, attr in attrs if attr.is_computed] or None
|
||||
|
||||
value_manager = AttributeValueManager()
|
||||
|
||||
@@ -413,10 +395,6 @@ class CIManager(object):
|
||||
cls.delete(ci.id)
|
||||
raise e
|
||||
|
||||
if password_dict:
|
||||
for attr_id in password_dict:
|
||||
record_id = cls.save_password(ci.id, attr_id, password_dict[attr_id], record_id, ci_type.id)
|
||||
|
||||
if record_id: # has change
|
||||
ci_cache.apply_async(args=(ci.id, operate_type, record_id), queue=CMDB_QUEUE)
|
||||
|
||||
@@ -436,16 +414,7 @@ class CIManager(object):
|
||||
if attr.default and attr.default.get('default') == AttributeDefaultValueEnum.UPDATED_AT:
|
||||
ci_dict[attr.name] = now
|
||||
|
||||
password_dict = dict()
|
||||
computed_attrs = list()
|
||||
for _, attr in attrs:
|
||||
if attr.is_computed:
|
||||
computed_attrs.append(attr.to_dict())
|
||||
elif attr.is_password:
|
||||
if attr.name in ci_dict:
|
||||
password_dict[attr.id] = ci_dict.pop(attr.name)
|
||||
elif attr.alias in ci_dict:
|
||||
password_dict[attr.id] = ci_dict.pop(attr.alias)
|
||||
computed_attrs = [attr.to_dict() for _, attr in attrs if attr.is_computed] or None
|
||||
|
||||
value_manager = AttributeValueManager()
|
||||
|
||||
@@ -454,7 +423,6 @@ class CIManager(object):
|
||||
|
||||
limit_attrs = self._valid_ci_for_no_read(ci) if not _is_admin else {}
|
||||
|
||||
record_id = None
|
||||
need_lock = current_user.username not in current_app.config.get('PRIVILEGED_USERS', PRIVILEGED_USERS)
|
||||
with Lock(ci.ci_type.name, need_lock=need_lock):
|
||||
self._valid_unique_constraint(ci.type_id, ci_dict, ci_id)
|
||||
@@ -472,10 +440,6 @@ class CIManager(object):
|
||||
except BadRequest as e:
|
||||
raise e
|
||||
|
||||
if password_dict:
|
||||
for attr_id in password_dict:
|
||||
record_id = self.save_password(ci.id, attr_id, password_dict[attr_id], record_id, ci.type_id)
|
||||
|
||||
if record_id: # has change
|
||||
ci_cache.apply_async(args=(ci_id, OperateType.UPDATE, record_id), queue=CMDB_QUEUE)
|
||||
|
||||
@@ -501,33 +465,30 @@ class CIManager(object):
|
||||
ci_dict = cls.get_cis_by_ids([ci_id])
|
||||
ci_dict = ci_dict and ci_dict[0]
|
||||
|
||||
if ci_dict:
|
||||
triggers = CITriggerManager.get(ci_dict['_type'])
|
||||
for trigger in triggers:
|
||||
option = trigger['option']
|
||||
if not option.get('enable') or option.get('action') != OperateType.DELETE:
|
||||
continue
|
||||
triggers = CITriggerManager.get(ci_dict['_type'])
|
||||
for trigger in triggers:
|
||||
option = trigger['option']
|
||||
if not option.get('enable') or option.get('action') != OperateType.DELETE:
|
||||
continue
|
||||
|
||||
if option.get('filter') and not CITriggerManager.ci_filter(ci_dict.get('_id'), option['filter']):
|
||||
continue
|
||||
if option.get('filter') and not CITriggerManager.ci_filter(ci_dict.get('_id'), option['filter']):
|
||||
continue
|
||||
|
||||
ci_delete_trigger.apply_async(args=(trigger, OperateType.DELETE, ci_dict), queue=CMDB_QUEUE)
|
||||
ci_delete_trigger.apply_async(args=(trigger, OperateType.DELETE, ci_dict), queue=CMDB_QUEUE)
|
||||
|
||||
attrs = CITypeAttribute.get_by(type_id=ci.type_id, to_dict=False)
|
||||
attrs = [AttributeCache.get(attr.attr_id) for attr in attrs]
|
||||
for attr in attrs:
|
||||
value_table = TableMap(attr=attr).table
|
||||
attr_names = set([AttributeCache.get(attr.attr_id).name for attr in attrs])
|
||||
for attr_name in attr_names:
|
||||
value_table = TableMap(attr_name=attr_name).table
|
||||
for item in value_table.get_by(ci_id=ci_id, to_dict=False):
|
||||
item.delete(commit=False)
|
||||
|
||||
for item in CIRelation.get_by(first_ci_id=ci_id, to_dict=False):
|
||||
ci_relation_delete.apply_async(
|
||||
args=(item.first_ci_id, item.second_ci_id, item.ancestor_ids), queue=CMDB_QUEUE)
|
||||
ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
|
||||
item.delete(commit=False)
|
||||
|
||||
for item in CIRelation.get_by(second_ci_id=ci_id, to_dict=False):
|
||||
ci_relation_delete.apply_async(
|
||||
args=(item.first_ci_id, item.second_ci_id, item.ancestor_ids), queue=CMDB_QUEUE)
|
||||
ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
|
||||
item.delete(commit=False)
|
||||
|
||||
ad_ci = AutoDiscoveryCI.get_by(ci_id=ci_id, to_dict=False, first=True)
|
||||
@@ -537,8 +498,7 @@ class CIManager(object):
|
||||
|
||||
db.session.commit()
|
||||
|
||||
if ci_dict:
|
||||
AttributeHistoryManger.add(None, ci_id, [(None, OperateType.DELETE, ci_dict, None)], ci.type_id)
|
||||
AttributeHistoryManger.add(None, ci_id, [(None, OperateType.DELETE, ci_dict, None)], ci.type_id)
|
||||
|
||||
ci_delete.apply_async(args=(ci_id,), queue=CMDB_QUEUE)
|
||||
|
||||
@@ -640,13 +600,10 @@ class CIManager(object):
|
||||
_fields = list()
|
||||
for field in fields:
|
||||
attr = AttributeCache.get(field)
|
||||
if attr is not None and not attr.is_password:
|
||||
if attr is not None:
|
||||
_fields.append(str(attr.id))
|
||||
filter_fields_sql = "WHERE A.attr_id in ({0})".format(",".join(_fields))
|
||||
|
||||
ci2pos = {int(_id): _pos for _pos, _id in enumerate(ci_ids)}
|
||||
res = [None] * len(ci_ids)
|
||||
|
||||
ci_ids = ",".join(map(str, ci_ids))
|
||||
if value_tables is None:
|
||||
value_tables = ValueTypeMap.table_name.values()
|
||||
@@ -657,10 +614,11 @@ class CIManager(object):
|
||||
# current_app.logger.debug(query_sql)
|
||||
cis = db.session.execute(query_sql).fetchall()
|
||||
ci_set = set()
|
||||
res = list()
|
||||
ci_dict = dict()
|
||||
unique_id2obj = dict()
|
||||
excludes = excludes and set(excludes)
|
||||
for ci_id, type_id, attr_id, attr_name, attr_alias, value, value_type, is_list, is_password in cis:
|
||||
for ci_id, type_id, attr_id, attr_name, attr_alias, value, value_type, is_list in cis:
|
||||
if not fields and excludes and (attr_name in excludes or attr_alias in excludes):
|
||||
continue
|
||||
|
||||
@@ -676,7 +634,7 @@ class CIManager(object):
|
||||
ci_dict["unique"] = unique_id2obj[ci_type.unique_id] and unique_id2obj[ci_type.unique_id].name
|
||||
ci_dict["unique_alias"] = unique_id2obj[ci_type.unique_id] and unique_id2obj[ci_type.unique_id].alias
|
||||
ci_set.add(ci_id)
|
||||
res[ci2pos[ci_id]] = ci_dict
|
||||
res.append(ci_dict)
|
||||
|
||||
if ret_key == RetKey.NAME:
|
||||
attr_key = attr_name
|
||||
@@ -687,14 +645,11 @@ class CIManager(object):
|
||||
else:
|
||||
return abort(400, ErrFormat.argument_invalid.format("ret_key"))
|
||||
|
||||
if is_password and value:
|
||||
ci_dict[attr_key] = PASSWORD_DEFAULT_SHOW
|
||||
value = ValueTypeMap.serialize2[value_type](value)
|
||||
if is_list:
|
||||
ci_dict.setdefault(attr_key, []).append(value)
|
||||
else:
|
||||
value = ValueTypeMap.serialize2[value_type](value)
|
||||
if is_list:
|
||||
ci_dict.setdefault(attr_key, []).append(value)
|
||||
else:
|
||||
ci_dict[attr_key] = value
|
||||
ci_dict[attr_key] = value
|
||||
|
||||
return res
|
||||
|
||||
@@ -726,84 +681,6 @@ class CIManager(object):
|
||||
|
||||
return cls._get_cis_from_db(ci_ids, ret_key, fields, value_tables, excludes=excludes)
|
||||
|
||||
@classmethod
|
||||
def save_password(cls, ci_id, attr_id, value, record_id, type_id):
|
||||
changed = None
|
||||
encrypt_value = None
|
||||
value_table = ValueTypeMap.table[ValueTypeEnum.PASSWORD]
|
||||
if current_app.config.get('SECRETS_ENGINE') == 'inner':
|
||||
if value:
|
||||
encrypt_value, status = InnerCrypt().encrypt(value)
|
||||
if not status:
|
||||
current_app.logger.error('save password failed: {}'.format(encrypt_value))
|
||||
return abort(400, ErrFormat.password_save_failed.format(encrypt_value))
|
||||
else:
|
||||
encrypt_value = PASSWORD_DEFAULT_SHOW
|
||||
|
||||
existed = value_table.get_by(ci_id=ci_id, attr_id=attr_id, first=True, to_dict=False)
|
||||
if existed is None:
|
||||
if value:
|
||||
value_table.create(ci_id=ci_id, attr_id=attr_id, value=encrypt_value)
|
||||
changed = [(ci_id, attr_id, OperateType.ADD, '', PASSWORD_DEFAULT_SHOW, type_id)]
|
||||
elif existed.value != encrypt_value:
|
||||
if value:
|
||||
existed.update(ci_id=ci_id, attr_id=attr_id, value=encrypt_value)
|
||||
changed = [(ci_id, attr_id, OperateType.UPDATE, PASSWORD_DEFAULT_SHOW, PASSWORD_DEFAULT_SHOW, type_id)]
|
||||
else:
|
||||
existed.delete()
|
||||
changed = [(ci_id, attr_id, OperateType.DELETE, PASSWORD_DEFAULT_SHOW, '', type_id)]
|
||||
|
||||
if current_app.config.get('SECRETS_ENGINE') == 'vault':
|
||||
vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
|
||||
if value:
|
||||
try:
|
||||
vault.update("/{}/{}".format(ci_id, attr_id), dict(v=value))
|
||||
except Exception as e:
|
||||
current_app.logger.error('save password to vault failed: {}'.format(e))
|
||||
return abort(400, ErrFormat.password_save_failed.format('write vault failed'))
|
||||
else:
|
||||
try:
|
||||
vault.delete("/{}/{}".format(ci_id, attr_id))
|
||||
except Exception as e:
|
||||
current_app.logger.warning('delete password to vault failed: {}'.format(e))
|
||||
|
||||
if changed is not None:
|
||||
return AttributeValueManager.write_change2(changed, record_id)
|
||||
|
||||
@classmethod
|
||||
def load_password(cls, ci_id, attr_id):
|
||||
ci = CI.get_by_id(ci_id) or abort(404, ErrFormat.ci_not_found.format(ci_id))
|
||||
|
||||
limit_attrs = cls._valid_ci_for_no_read(ci, ci.ci_type)
|
||||
if limit_attrs:
|
||||
attr = AttributeCache.get(attr_id)
|
||||
if attr and attr.name not in limit_attrs:
|
||||
return abort(403, ErrFormat.no_permission2)
|
||||
|
||||
if current_app.config.get('SECRETS_ENGINE', 'inner') == 'inner':
|
||||
value_table = ValueTypeMap.table[ValueTypeEnum.PASSWORD]
|
||||
v = value_table.get_by(ci_id=ci_id, attr_id=attr_id, first=True, to_dict=False)
|
||||
|
||||
v = v and v.value
|
||||
if not v:
|
||||
return
|
||||
|
||||
decrypt_value, status = InnerCrypt().decrypt(v)
|
||||
if not status:
|
||||
current_app.logger.error('load password failed: {}'.format(decrypt_value))
|
||||
return abort(400, ErrFormat.password_load_failed.format(decrypt_value))
|
||||
|
||||
return decrypt_value
|
||||
|
||||
elif current_app.config.get('SECRETS_ENGINE') == 'vault':
|
||||
vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
|
||||
data, status = vault.read("/{}/{}".format(ci_id, attr_id))
|
||||
if not status:
|
||||
current_app.logger.error('read password from vault failed: {}'.format(data))
|
||||
return abort(400, ErrFormat.password_load_failed.format(data))
|
||||
|
||||
return data.get('v')
|
||||
|
||||
|
||||
class CIRelationManager(object):
|
||||
"""
|
||||
@@ -891,14 +768,12 @@ class CIRelationManager(object):
|
||||
|
||||
@classmethod
|
||||
def get_ancestor_ids(cls, ci_ids, level=1):
|
||||
level2ids = dict()
|
||||
for _level in range(1, level + 1):
|
||||
cis = db.session.query(CIRelation.first_ci_id, CIRelation.ancestor_ids).filter(
|
||||
for _ in range(level):
|
||||
cis = db.session.query(CIRelation.first_ci_id).filter(
|
||||
CIRelation.second_ci_id.in_(ci_ids)).filter(CIRelation.deleted.is_(False))
|
||||
ci_ids = [i.first_ci_id for i in cis]
|
||||
level2ids[_level + 1] = {int(i.ancestor_ids.split(',')[-1]) for i in cis if i.ancestor_ids}
|
||||
|
||||
return ci_ids, level2ids
|
||||
return ci_ids
|
||||
|
||||
@staticmethod
|
||||
def _check_constraint(first_ci_id, first_type_id, second_ci_id, second_type_id, type_relation):
|
||||
@@ -925,14 +800,13 @@ class CIRelationManager(object):
|
||||
return abort(400, ErrFormat.relation_constraint.format("1-N"))
|
||||
|
||||
@classmethod
|
||||
def add(cls, first_ci_id, second_ci_id, more=None, relation_type_id=None, ancestor_ids=None):
|
||||
def add(cls, first_ci_id, second_ci_id, more=None, relation_type_id=None):
|
||||
|
||||
first_ci = CIManager.confirm_ci_existed(first_ci_id)
|
||||
second_ci = CIManager.confirm_ci_existed(second_ci_id)
|
||||
|
||||
existed = CIRelation.get_by(first_ci_id=first_ci_id,
|
||||
second_ci_id=second_ci_id,
|
||||
ancestor_ids=ancestor_ids,
|
||||
to_dict=False,
|
||||
first=True)
|
||||
if existed is not None:
|
||||
@@ -968,12 +842,11 @@ class CIRelationManager(object):
|
||||
|
||||
existed = CIRelation.create(first_ci_id=first_ci_id,
|
||||
second_ci_id=second_ci_id,
|
||||
relation_type_id=relation_type_id,
|
||||
ancestor_ids=ancestor_ids)
|
||||
relation_type_id=relation_type_id)
|
||||
|
||||
CIRelationHistoryManager().add(existed, OperateType.ADD)
|
||||
|
||||
ci_relation_cache.apply_async(args=(first_ci_id, second_ci_id, ancestor_ids), queue=CMDB_QUEUE)
|
||||
ci_relation_cache.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
|
||||
|
||||
if more is not None:
|
||||
existed.upadte(more=more)
|
||||
@@ -997,56 +870,53 @@ class CIRelationManager(object):
|
||||
his_manager = CIRelationHistoryManager()
|
||||
his_manager.add(cr, operate_type=OperateType.DELETE)
|
||||
|
||||
ci_relation_delete.apply_async(args=(cr.first_ci_id, cr.second_ci_id, cr.ancestor_ids), queue=CMDB_QUEUE)
|
||||
ci_relation_delete.apply_async(args=(cr.first_ci_id, cr.second_ci_id), queue=CMDB_QUEUE)
|
||||
|
||||
return cr_id
|
||||
|
||||
@classmethod
|
||||
def delete_2(cls, first_ci_id, second_ci_id, ancestor_ids=None):
|
||||
def delete_2(cls, first_ci_id, second_ci_id):
|
||||
cr = CIRelation.get_by(first_ci_id=first_ci_id,
|
||||
second_ci_id=second_ci_id,
|
||||
ancestor_ids=ancestor_ids,
|
||||
to_dict=False,
|
||||
first=True)
|
||||
|
||||
ci_relation_delete.apply_async(args=(first_ci_id, second_ci_id, ancestor_ids), queue=CMDB_QUEUE)
|
||||
ci_relation_delete.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
|
||||
|
||||
return cr and cls.delete(cr.id)
|
||||
return cls.delete(cr.id)
|
||||
|
||||
@classmethod
|
||||
def batch_update(cls, ci_ids, parents, children, ancestor_ids=None):
|
||||
def batch_update(cls, ci_ids, parents, children):
|
||||
"""
|
||||
only for many to one
|
||||
:param ci_ids:
|
||||
:param parents:
|
||||
:param children:
|
||||
:param ancestor_ids:
|
||||
:return:
|
||||
"""
|
||||
if isinstance(parents, list):
|
||||
for parent_id in parents:
|
||||
for ci_id in ci_ids:
|
||||
cls.add(parent_id, ci_id, ancestor_ids=ancestor_ids)
|
||||
cls.add(parent_id, ci_id)
|
||||
|
||||
if isinstance(children, list):
|
||||
for child_id in children:
|
||||
for ci_id in ci_ids:
|
||||
cls.add(ci_id, child_id, ancestor_ids=ancestor_ids)
|
||||
cls.add(ci_id, child_id)
|
||||
|
||||
@classmethod
|
||||
def batch_delete(cls, ci_ids, parents, ancestor_ids=None):
|
||||
def batch_delete(cls, ci_ids, parents):
|
||||
"""
|
||||
only for many to one
|
||||
:param ci_ids:
|
||||
:param parents:
|
||||
:param ancestor_ids:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if isinstance(parents, list):
|
||||
for parent_id in parents:
|
||||
for ci_id in ci_ids:
|
||||
cls.delete_2(parent_id, ci_id, ancestor_ids=ancestor_ids)
|
||||
cls.delete_2(parent_id, ci_id)
|
||||
|
||||
|
||||
class CITriggerManager(object):
|
||||
|
@@ -1,6 +1,7 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import copy
|
||||
import datetime
|
||||
|
||||
import toposort
|
||||
from flask import abort
|
||||
@@ -24,6 +25,7 @@ from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.cmdb.history import CITypeHistoryManager
|
||||
from api.lib.cmdb.relation_type import RelationTypeManager
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.cmdb.value import AttributeValueManager
|
||||
from api.lib.decorator import kwargs_required
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
@@ -352,20 +354,19 @@ class CITypeAttributeManager(object):
|
||||
return [AttributeCache.get(attr.attr_id).name for attr in CITypeAttributesCache.get(type_id)]
|
||||
|
||||
@staticmethod
|
||||
def get_attributes_by_type_id(type_id, choice_web_hook_parse=True, choice_other_parse=True):
|
||||
def get_attributes_by_type_id(type_id, choice_web_hook_parse=True):
|
||||
has_config_perm = ACLManager('cmdb').has_permission(
|
||||
CITypeManager.get_name_by_id(type_id), ResourceTypeEnum.CI, PermEnum.CONFIG)
|
||||
|
||||
attrs = CITypeAttributesCache.get(type_id)
|
||||
result = list()
|
||||
for attr in sorted(attrs, key=lambda x: (x.order, x.id)):
|
||||
attr_dict = AttributeManager().get_attribute(attr.attr_id, choice_web_hook_parse, choice_other_parse)
|
||||
attr_dict = AttributeManager().get_attribute(attr.attr_id, choice_web_hook_parse)
|
||||
attr_dict["is_required"] = attr.is_required
|
||||
attr_dict["order"] = attr.order
|
||||
attr_dict["default_show"] = attr.default_show
|
||||
if not has_config_perm:
|
||||
attr_dict.pop('choice_web_hook', None)
|
||||
attr_dict.pop('choice_other', None)
|
||||
|
||||
result.append(attr_dict)
|
||||
|
||||
@@ -373,25 +374,13 @@ class CITypeAttributeManager(object):
|
||||
|
||||
@staticmethod
|
||||
def get_common_attributes(type_ids):
|
||||
has_config_perm = False
|
||||
for type_id in type_ids:
|
||||
has_config_perm |= ACLManager('cmdb').has_permission(
|
||||
CITypeManager.get_name_by_id(type_id), ResourceTypeEnum.CI, PermEnum.CONFIG)
|
||||
|
||||
result = CITypeAttribute.get_by(__func_in___key_type_id=list(map(int, type_ids)), to_dict=False)
|
||||
attr2types = {}
|
||||
for i in result:
|
||||
attr2types.setdefault(i.attr_id, []).append(i.type_id)
|
||||
|
||||
attrs = []
|
||||
for attr_id in attr2types:
|
||||
if len(attr2types[attr_id]) == len(type_ids):
|
||||
attr = AttributeManager().get_attribute_by_id(attr_id)
|
||||
if not has_config_perm:
|
||||
attr.pop('choice_web_hook', None)
|
||||
attrs.append(attr)
|
||||
|
||||
return attrs
|
||||
return [AttributeCache.get(attr_id).to_dict() for attr_id in attr2types
|
||||
if len(attr2types[attr_id]) == len(type_ids)]
|
||||
|
||||
@staticmethod
|
||||
def _check(type_id, attr_ids):
|
||||
@@ -500,7 +489,7 @@ class CITypeAttributeManager(object):
|
||||
for ci in CI.get_by(type_id=type_id, to_dict=False):
|
||||
AttributeValueManager.delete_attr_value(attr_id, ci.id)
|
||||
|
||||
ci_cache.apply_async(args=(ci.id, None, None), queue=CMDB_QUEUE)
|
||||
ci_cache.apply_async([ci.id], queue=CMDB_QUEUE)
|
||||
|
||||
CITypeAttributeCache.clean(type_id, attr_id)
|
||||
|
||||
@@ -533,7 +522,7 @@ class CITypeAttributeManager(object):
|
||||
CITypeAttributesCache.clean(type_id)
|
||||
|
||||
from api.tasks.cmdb import ci_type_attribute_order_rebuild
|
||||
ci_type_attribute_order_rebuild.apply_async(args=(type_id, current_user.uid), queue=CMDB_QUEUE)
|
||||
ci_type_attribute_order_rebuild.apply_async(args=(type_id,), queue=CMDB_QUEUE)
|
||||
|
||||
|
||||
class CITypeRelationManager(object):
|
||||
@@ -637,16 +626,6 @@ class CITypeRelationManager(object):
|
||||
current_app.logger.warning(str(e))
|
||||
return abort(400, ErrFormat.circular_dependency_error)
|
||||
|
||||
if constraint == ConstraintEnum.Many2Many:
|
||||
other_c = CITypeRelation.get_by(parent_id=p.id, constraint=ConstraintEnum.Many2Many,
|
||||
to_dict=False, first=True)
|
||||
other_p = CITypeRelation.get_by(child_id=c.id, constraint=ConstraintEnum.Many2Many,
|
||||
to_dict=False, first=True)
|
||||
if other_c and other_c.child_id != c.id:
|
||||
return abort(400, ErrFormat.m2m_relation_constraint.format(p.name, other_c.child.name))
|
||||
if other_p and other_p.parent_id != p.id:
|
||||
return abort(400, ErrFormat.m2m_relation_constraint.format(other_p.parent.name, c.name))
|
||||
|
||||
existed = cls._get(p.id, c.id)
|
||||
if existed is not None:
|
||||
existed.update(relation_type_id=relation_type_id,
|
||||
@@ -696,24 +675,6 @@ class CITypeRelationManager(object):
|
||||
|
||||
cls.delete(ctr.id)
|
||||
|
||||
@staticmethod
|
||||
def get_level2constraint(root_id, level):
|
||||
level = level + 1 if level == 1 else level
|
||||
ci = CI.get_by_id(root_id)
|
||||
if ci is None:
|
||||
return dict()
|
||||
|
||||
root_id = ci.type_id
|
||||
level2constraint = dict()
|
||||
for lv in range(1, int(level) + 1):
|
||||
for i in CITypeRelation.get_by(parent_id=root_id, to_dict=False):
|
||||
if i.constraint == ConstraintEnum.Many2Many:
|
||||
root_id = i.child_id
|
||||
level2constraint[lv] = ConstraintEnum.Many2Many
|
||||
break
|
||||
|
||||
return level2constraint
|
||||
|
||||
|
||||
class CITypeAttributeGroupManager(object):
|
||||
cls = CITypeAttributeGroup
|
||||
@@ -886,7 +847,7 @@ class CITypeAttributeGroupManager(object):
|
||||
CITypeAttributesCache.clean(type_id)
|
||||
|
||||
from api.tasks.cmdb import ci_type_attribute_order_rebuild
|
||||
ci_type_attribute_order_rebuild.apply_async(args=(type_id, current_user.uid), queue=CMDB_QUEUE)
|
||||
ci_type_attribute_order_rebuild.apply_async(args=(type_id,), queue=CMDB_QUEUE)
|
||||
|
||||
|
||||
class CITypeTemplateManager(object):
|
||||
@@ -1131,7 +1092,7 @@ class CITypeTemplateManager(object):
|
||||
|
||||
for ci_type in tpt['ci_types']:
|
||||
tpt['type2attributes'][ci_type['id']] = CITypeAttributeManager.get_attributes_by_type_id(
|
||||
ci_type['id'], choice_web_hook_parse=False, choice_other_parse=False)
|
||||
ci_type['id'], choice_web_hook_parse=False)
|
||||
|
||||
tpt['type2attribute_group'][ci_type['id']] = CITypeAttributeGroupManager.get_by_type_id(ci_type['id'])
|
||||
|
||||
|
@@ -12,8 +12,6 @@ class ValueTypeEnum(BaseEnum):
|
||||
DATE = "4"
|
||||
TIME = "5"
|
||||
JSON = "6"
|
||||
PASSWORD = TEXT
|
||||
LINK = TEXT
|
||||
|
||||
|
||||
class ConstraintEnum(BaseEnum):
|
||||
@@ -100,7 +98,6 @@ class AttributeDefaultValueEnum(BaseEnum):
|
||||
CMDB_QUEUE = "one_cmdb_async"
|
||||
REDIS_PREFIX_CI = "ONE_CMDB"
|
||||
REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"
|
||||
REDIS_PREFIX_CI_RELATION2 = "CMDB_CI_RELATION2"
|
||||
|
||||
BUILTIN_KEYWORDS = {'id', '_id', 'ci_id', 'type', '_type', 'ci_type'}
|
||||
|
||||
|
@@ -135,7 +135,7 @@ class AttributeHistoryManger(object):
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
cis = CIManager().get_cis_by_ids(list(ci_ids),
|
||||
unique_required=True)
|
||||
cis = {i['_id']: i for i in cis if i}
|
||||
cis = {i['_id']: i for i in cis}
|
||||
|
||||
return total, res, cis
|
||||
|
||||
|
@@ -14,10 +14,7 @@ from api.lib.cmdb.attribute import AttributeManager
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.const import PermEnum, ResourceTypeEnum, RoleEnum
|
||||
from api.lib.cmdb.perms import CIFilterPermsCRUD
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.exception import AbortException
|
||||
@@ -119,7 +116,7 @@ class PreferenceManager(object):
|
||||
for i in result:
|
||||
if i["is_choice"]:
|
||||
i.update(dict(choice_value=AttributeManager.get_choice_values(
|
||||
i["id"], i["value_type"], i["choice_web_hook"], i.get("choice_other"))))
|
||||
i["id"], i["value_type"], i["choice_web_hook"])))
|
||||
|
||||
return is_subscribed, result
|
||||
|
||||
@@ -232,28 +229,14 @@ class PreferenceManager(object):
|
||||
if not parents:
|
||||
return
|
||||
|
||||
for _l in leaf:
|
||||
_find_parent(_l)
|
||||
for l in leaf:
|
||||
_find_parent(l)
|
||||
|
||||
for node_id in node2show_types:
|
||||
node2show_types[node_id] = [CITypeCache.get(i).to_dict() for i in set(node2show_types[node_id])]
|
||||
|
||||
topo_flatten = list(toposort.toposort_flatten(topo))
|
||||
level2constraint = {}
|
||||
for i, _ in enumerate(topo_flatten[1:]):
|
||||
ctr = CITypeRelation.get_by(
|
||||
parent_id=topo_flatten[i], child_id=topo_flatten[i + 1], first=True, to_dict=False)
|
||||
level2constraint[i + 1] = ctr and ctr.constraint
|
||||
|
||||
if leaf2show_types.get(topo_flatten[-1]):
|
||||
ctr = CITypeRelation.get_by(
|
||||
parent_id=topo_flatten[-1],
|
||||
child_id=leaf2show_types[topo_flatten[-1]][0], first=True, to_dict=False)
|
||||
level2constraint[len(topo_flatten)] = ctr and ctr.constraint
|
||||
|
||||
result[view_name] = dict(topo=list(map(list, toposort.toposort(topo))),
|
||||
topo_flatten=topo_flatten,
|
||||
level2constraint=level2constraint,
|
||||
topo_flatten=list(toposort.toposort_flatten(topo)),
|
||||
leaf=leaf,
|
||||
leaf2show_types=leaf2show_types,
|
||||
node2show_types=node2show_types,
|
||||
@@ -355,29 +338,3 @@ class PreferenceManager(object):
|
||||
|
||||
for i in PreferenceTreeView.get_by(type_id=type_id, uid=uid, to_dict=False):
|
||||
i.soft_delete()
|
||||
|
||||
@staticmethod
|
||||
def can_edit_relation(parent_id, child_id):
|
||||
views = PreferenceRelationView.get_by(to_dict=False)
|
||||
for view in views:
|
||||
has_m2m = False
|
||||
last_node_id = None
|
||||
for cr in view.cr_ids:
|
||||
_rel = CITypeRelation.get_by(parent_id=cr['parent_id'], child_id=cr['child_id'],
|
||||
first=True, to_dict=False)
|
||||
if _rel and _rel.constraint == ConstraintEnum.Many2Many:
|
||||
has_m2m = True
|
||||
|
||||
if parent_id == _rel.parent_id and child_id == _rel.child_id:
|
||||
return False
|
||||
|
||||
if _rel:
|
||||
last_node_id = _rel.child_id
|
||||
|
||||
if parent_id == last_node_id:
|
||||
rels = CITypeRelation.get_by(parent_id=last_node_id, to_dict=False)
|
||||
for rel in rels:
|
||||
if rel.child_id == child_id and has_m2m:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
@@ -23,7 +23,6 @@ class ErrFormat(CommonErrFormat):
|
||||
cannot_edit_attribute = "您没有权限修改该属性!"
|
||||
cannot_delete_attribute = "目前只允许 属性创建人、管理员 删除属性!"
|
||||
attribute_name_cannot_be_builtin = "属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type"
|
||||
attribute_choice_other_invalid = "预定义值: 其他模型请求参数不合法!"
|
||||
|
||||
ci_not_found = "CI {} 不存在"
|
||||
unique_constraint = "多属性联合唯一校验不通过: {}"
|
||||
@@ -31,7 +30,6 @@ class ErrFormat(CommonErrFormat):
|
||||
unique_key_required = "主键字段 {} 缺失"
|
||||
ci_is_already_existed = "CI 已经存在!"
|
||||
relation_constraint = "关系约束: {}, 校验失败 "
|
||||
m2m_relation_constraint = "多对多关系 限制: 模型 {} <-> {} 已经存在多对多关系!"
|
||||
relation_not_found = "CI关系: {} 不存在"
|
||||
ci_search_Parentheses_invalid = "搜索表达式里小括号前不支持: 或、非"
|
||||
|
||||
@@ -96,6 +94,3 @@ class ErrFormat(CommonErrFormat):
|
||||
ci_filter_perm_cannot_or_query = "CI过滤授权 暂时不支持 或 查询"
|
||||
ci_filter_perm_attr_no_permission = "您没有属性 {} 的操作权限!"
|
||||
ci_filter_perm_ci_no_permission = "您没有该CI的操作权限!"
|
||||
|
||||
password_save_failed = "保存密码失败: {}"
|
||||
password_load_failed = "获取密码失败: {}"
|
||||
|
@@ -7,7 +7,6 @@ QUERY_CIS_BY_VALUE_TABLE = """
|
||||
attr.alias AS attr_alias,
|
||||
attr.value_type,
|
||||
attr.is_list,
|
||||
attr.is_password,
|
||||
c_cis.type_id,
|
||||
{0}.ci_id,
|
||||
{0}.attr_id,
|
||||
@@ -27,8 +26,7 @@ QUERY_CIS_BY_IDS = """
|
||||
A.attr_alias,
|
||||
A.value,
|
||||
A.value_type,
|
||||
A.is_list,
|
||||
A.is_password
|
||||
A.is_list
|
||||
FROM
|
||||
({1}) AS A {0}
|
||||
ORDER BY A.ci_id;
|
||||
@@ -45,7 +43,7 @@ FACET_QUERY1 = """
|
||||
|
||||
FACET_QUERY = """
|
||||
SELECT {0}.value,
|
||||
count(distinct {0}.ci_id)
|
||||
count({0}.ci_id)
|
||||
FROM {0}
|
||||
INNER JOIN ({1}) AS F ON F.ci_id={0}.ci_id
|
||||
WHERE {0}.attr_id={2:d}
|
||||
|
@@ -9,7 +9,6 @@ import time
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from jinja2 import Template
|
||||
from sqlalchemy import text
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
@@ -29,7 +28,6 @@ from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_NO_ATTR
|
||||
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_TYPE
|
||||
from api.lib.cmdb.search.ci.db.query_sql import QUERY_UNION_CI_ATTRIBUTE_IS_NULL
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.utils import handle_arg_list
|
||||
@@ -313,7 +311,7 @@ class Search(object):
|
||||
start = time.time()
|
||||
execute = db.session.execute
|
||||
# current_app.logger.debug(v_query_sql)
|
||||
res = execute(text(v_query_sql)).fetchall()
|
||||
res = execute(v_query_sql).fetchall()
|
||||
end_time = time.time()
|
||||
current_app.logger.debug("query ci ids time is: {0}".format(end_time - start))
|
||||
|
||||
@@ -526,15 +524,15 @@ class Search(object):
|
||||
if k:
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
query_sql = FACET_QUERY.format(table_name, self.query_sql, attr.id)
|
||||
result = db.session.execute(text(query_sql)).fetchall()
|
||||
# current_app.logger.warning(query_sql)
|
||||
result = db.session.execute(query_sql).fetchall()
|
||||
facet[k] = result
|
||||
|
||||
facet_result = dict()
|
||||
for k, v in facet.items():
|
||||
if not k.startswith('_'):
|
||||
attr = AttributeCache.get(k)
|
||||
a = getattr(attr, self.ret_key)
|
||||
facet_result[a] = [(ValueTypeMap.serialize[attr.value_type](f[0]), f[1], a) for f in v]
|
||||
a = getattr(AttributeCache.get(k), self.ret_key)
|
||||
facet_result[a] = [(f[0], f[1], a) for f in v]
|
||||
|
||||
return facet_result
|
||||
|
||||
|
@@ -1,4 +1,6 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
import json
|
||||
from collections import Counter
|
||||
|
||||
@@ -8,14 +10,11 @@ from flask import current_app
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeRelationManager
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
|
||||
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import CIRelation
|
||||
|
||||
|
||||
class Search(object):
|
||||
@@ -27,8 +26,7 @@ class Search(object):
|
||||
page=1,
|
||||
count=None,
|
||||
sort=None,
|
||||
reverse=False,
|
||||
ancestor_ids=None):
|
||||
reverse=False):
|
||||
self.orig_query = query
|
||||
self.fl = fl
|
||||
self.facet_field = facet_field
|
||||
@@ -37,84 +35,28 @@ class Search(object):
|
||||
self.sort = sort or ("ci_id" if current_app.config.get("USE_ES") else None)
|
||||
|
||||
self.root_id = root_id
|
||||
self.level = level or 0
|
||||
self.level = level
|
||||
self.reverse = reverse
|
||||
|
||||
self.level2constraint = CITypeRelationManager.get_level2constraint(
|
||||
root_id[0] if root_id and isinstance(root_id, list) else root_id,
|
||||
level[0] if isinstance(level, list) and level else level)
|
||||
|
||||
self.ancestor_ids = ancestor_ids
|
||||
self.has_m2m = False
|
||||
if self.ancestor_ids:
|
||||
self.has_m2m = True
|
||||
else:
|
||||
level = level[0] if isinstance(level, list) and level else level
|
||||
for _l, c in self.level2constraint.items():
|
||||
if _l < int(level) and c == ConstraintEnum.Many2Many:
|
||||
self.has_m2m = True
|
||||
|
||||
def _get_ids(self, ids):
|
||||
if self.level[-1] == 1 and len(ids) == 1:
|
||||
if self.ancestor_ids is None:
|
||||
return [i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0], to_dict=False)]
|
||||
|
||||
else:
|
||||
seconds = {i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0],
|
||||
ancestor_ids=self.ancestor_ids,
|
||||
to_dict=False)}
|
||||
|
||||
return list(seconds)
|
||||
|
||||
def _get_ids(self):
|
||||
merge_ids = []
|
||||
key = []
|
||||
_tmp = []
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
if not self.has_m2m:
|
||||
_tmp = map(lambda x: json.loads(x).keys(),
|
||||
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or []))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
|
||||
else:
|
||||
if not self.ancestor_ids:
|
||||
if level == 1:
|
||||
key, prefix = list(map(str, ids)), REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
if level == 1:
|
||||
key, prefix = ["{},{}".format(self.ancestor_ids, i) for i in ids], REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
if not key:
|
||||
return []
|
||||
|
||||
_tmp = list(map(lambda x: json.loads(x).keys() if x else [], rd.get(key, prefix) or []))
|
||||
_tmp = list(map(lambda x: list(json.loads(x).keys()),
|
||||
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or [])))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
|
||||
if level in self.level:
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
def _get_reverse_ids(self, ids):
|
||||
def _get_reverse_ids(self):
|
||||
merge_ids = []
|
||||
level2ids = {}
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
ids, _level2ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
|
||||
if _level2ids.get(2):
|
||||
level2ids[level + 1] = _level2ids[2]
|
||||
|
||||
ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
if level in self.level:
|
||||
if level in level2ids and level2ids[level]:
|
||||
merge_ids.extend(set(ids) & set(level2ids[level]))
|
||||
else:
|
||||
merge_ids.extend(ids)
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
@@ -122,7 +64,7 @@ class Search(object):
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
cis = [CI.get_by_id(_id) or abort(404, ErrFormat.ci_not_found.format("id={}".format(_id))) for _id in ids]
|
||||
|
||||
merge_ids = self._get_ids(ids) if not self.reverse else self._get_reverse_ids(ids)
|
||||
merge_ids = self._get_ids() if not self.reverse else self._get_reverse_ids()
|
||||
|
||||
if not self.orig_query or ("_type:" not in self.orig_query
|
||||
and "type_id:" not in self.orig_query
|
||||
@@ -134,11 +76,11 @@ class Search(object):
|
||||
type_ids.extend(CITypeRelationManager.get_child_type_ids(ci.type_id, level))
|
||||
else:
|
||||
type_ids.extend(CITypeRelationManager.get_parent_type_ids(ci.type_id, level))
|
||||
type_ids = set(type_ids)
|
||||
type_ids = list(set(type_ids))
|
||||
if self.orig_query:
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(map(str, type_ids)), self.orig_query)
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(list(map(str, type_ids))), self.orig_query)
|
||||
else:
|
||||
self.orig_query = "_type:({0})".format(";".join(map(str, type_ids)))
|
||||
self.orig_query = "_type:({0})".format(";".join(list(map(str, type_ids))))
|
||||
|
||||
if not merge_ids:
|
||||
# cis, counter, total, self.page, numfound, facet_
|
||||
@@ -162,66 +104,30 @@ class Search(object):
|
||||
ci_ids=merge_ids).search()
|
||||
|
||||
def statistics(self, type_ids):
|
||||
self.level = int(self.level)
|
||||
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
_tmp = []
|
||||
level2ids = {}
|
||||
for lv in range(1, self.level + 1):
|
||||
level2ids[lv] = []
|
||||
|
||||
if lv == 1:
|
||||
if not self.has_m2m:
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
if not self.ancestor_ids:
|
||||
key, prefix = ids, REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = ["{},{}".format(self.ancestor_ids, _id) for _id in ids]
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
level2ids[lv] = [[i] for i in key]
|
||||
|
||||
if not key:
|
||||
_tmp = []
|
||||
continue
|
||||
|
||||
if type_ids and lv == self.level:
|
||||
_tmp = list(map(lambda x: [i for i in x if i[1] in type_ids],
|
||||
(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(key, prefix) or []]))))
|
||||
else:
|
||||
_tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(key, prefix) or []]))
|
||||
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for l in range(0, int(self.level)):
|
||||
if not l:
|
||||
_tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(ids, REDIS_PREFIX_CI_RELATION) or []]))
|
||||
else:
|
||||
for idx, item in enumerate(_tmp):
|
||||
if item:
|
||||
if not self.has_m2m:
|
||||
key, prefix = [i[0] for i in item], REDIS_PREFIX_CI_RELATION
|
||||
if type_ids and l == self.level - 1:
|
||||
__tmp = list(
|
||||
map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
|
||||
if type_id in type_ids],
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
else:
|
||||
key = list(set(['{},{}'.format(j, i[0]) for i in item for j in level2ids[lv - 1][idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
level2ids[lv].append(key)
|
||||
|
||||
if key:
|
||||
if type_ids and lv == self.level:
|
||||
__tmp = map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
|
||||
if type_id in type_ids],
|
||||
filter(lambda x: x is not None,
|
||||
rd.get(key, prefix) or []))
|
||||
else:
|
||||
__tmp = map(lambda x: list(json.loads(x).items()),
|
||||
filter(lambda x: x is not None,
|
||||
rd.get(key, prefix) or []))
|
||||
else:
|
||||
__tmp = []
|
||||
__tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
|
||||
_tmp[idx] = [j for i in __tmp for j in i]
|
||||
else:
|
||||
_tmp[idx] = []
|
||||
level2ids[lv].append([])
|
||||
|
||||
result = {str(_id): len(_tmp[idx]) for idx, _id in enumerate(ids)}
|
||||
|
||||
|
@@ -12,7 +12,7 @@ import api.models.cmdb as model
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ValueTypeEnum

TIME_RE = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')
TIME_RE = re.compile(r"^(20|21|22|23|[0-1]\d):[0-5]\d:[0-5]\d$")


def string2int(x):
@@ -21,7 +21,7 @@ def string2int(x):

def str2datetime(x):
try:
return datetime.datetime.strptime(x, "%Y-%m-%d").date()
return datetime.datetime.strptime(x, "%Y-%m-%d")
except ValueError:
pass

@@ -44,8 +44,8 @@ class ValueTypeMap(object):
ValueTypeEnum.FLOAT: float,
ValueTypeEnum.TEXT: lambda x: x if isinstance(x, six.string_types) else str(x),
ValueTypeEnum.TIME: lambda x: x if isinstance(x, six.string_types) else str(x),
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d"),
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S"),
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
}
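The two `TIME_RE` lines above are the before/after versions of the same constant; with the diff markers stripped it is easy to miss that one pattern is anchored and the other is not. A quick illustrative check (not project code):

    import re

    loose = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')          # no anchors
    strict = re.compile(r"^(20|21|22|23|[0-1]\d):[0-5]\d:[0-5]\d$")   # anchored to the whole string

    print(bool(strict.match("12:30:45")))            # True
    print(bool(strict.match("12:30:45 and more")))   # False: the trailing text is rejected by $
    print(bool(loose.search("12:30:45 and more")))   # True: the embedded time still matches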
@@ -64,8 +64,6 @@ class ValueTypeMap(object):
ValueTypeEnum.FLOAT: model.FloatChoice,
ValueTypeEnum.TEXT: model.TextChoice,
ValueTypeEnum.TIME: model.TextChoice,
ValueTypeEnum.DATE: model.TextChoice,
ValueTypeEnum.DATETIME: model.TextChoice,
}

table = {
@@ -99,7 +97,7 @@ class ValueTypeMap(object):
ValueTypeEnum.DATE: 'text',
ValueTypeEnum.TIME: 'text',
ValueTypeEnum.FLOAT: 'float',
ValueTypeEnum.JSON: 'object',
ValueTypeEnum.JSON: 'object'
}


@@ -112,9 +110,7 @@ class TableMap(object):
@property
def table(self):
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
if attr.is_password or attr.is_link:
self.is_index = False
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
if attr.value_type != ValueTypeEnum.TEXT and attr.value_type != ValueTypeEnum.JSON:
self.is_index = True
elif self.is_index is None:
self.is_index = attr.is_index
@@ -126,9 +122,7 @@ class TableMap(object):
@property
def table_name(self):
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
if attr.is_password or attr.is_link:
self.is_index = False
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
if attr.value_type != ValueTypeEnum.TEXT and attr.value_type != ValueTypeEnum.JSON:
self.is_index = True
elif self.is_index is None:
self.is_index = attr.is_index
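In the two `TableMap` properties above, the membership test `attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}` and the older pair of `!=` comparisons express the same condition; the set form simply scales better if more non-indexable types appear. A tiny equivalence check, purely illustrative:

    TEXT, JSON, INT = 'text', 'json', 'int'   # stand-in value types for the demo

    for value_type in (TEXT, JSON, INT):
        new_style = value_type not in {TEXT, JSON}
        old_style = value_type != TEXT and value_type != JSON
        assert new_style == old_style
    print('both forms agree for all three sample types')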
@@ -66,10 +66,9 @@ class AttributeValueManager(object):
use_master=use_master,
to_dict=False)
field_name = getattr(attr, ret_key)

if attr.is_list:
res[field_name] = [ValueTypeMap.serialize[attr.value_type](i.value) for i in rs]
elif attr.is_password and rs:
res[field_name] = '******' if rs[0].value else ''
else:
res[field_name] = ValueTypeMap.serialize[attr.value_type](rs[0].value) if rs else None

@@ -93,7 +92,7 @@ class AttributeValueManager(object):

@staticmethod
def _check_is_choice(attr, value_type, value):
choice_values = AttributeManager.get_choice_values(attr.id, value_type, attr.choice_web_hook, attr.choice_other)
choice_values = AttributeManager.get_choice_values(attr.id, value_type, attr.choice_web_hook)
if str(value) not in list(map(str, [i[0] for i in choice_values])):
return abort(400, ErrFormat.not_in_choice_values.format(value))
@@ -132,7 +131,8 @@ class AttributeValueManager(object):
return AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id)

@staticmethod
def write_change2(changed, record_id=None):
def _write_change2(changed):
record_id = None
for ci_id, attr_id, operate_type, old, new, type_id in changed:
record_id = AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id,
commit=False, flush=False)
@@ -284,9 +284,9 @@ class AttributeValueManager(object):
except Exception as e:
db.session.rollback()
current_app.logger.warning(str(e))
return abort(400, ErrFormat.attribute_value_unknown_error.format(e.args[0]))
return abort(400, ErrFormat.attribute_value_unknown_error.format(str(e)))

return self.write_change2(changed)
return self._write_change2(changed)

@staticmethod
def delete_attr_value(attr_id, ci_id):
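`_write_change2` above threads one `record_id` through the loop and passes `commit=False, flush=False`, so the whole batch of attribute changes lands in a single transaction. The sketch below shows that accumulate-then-commit-once pattern in generic SQLAlchemy; the `AttributeHistory` table and its columns are invented for the example and are not the project's model.

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class AttributeHistory(Base):  # hypothetical table, for illustration only
        __tablename__ = 'attribute_history'
        id = Column(Integer, primary_key=True)
        ci_id = Column(Integer)
        attr_id = Column(Integer)
        operate_type = Column(String(16))
        old = Column(String(255))
        new = Column(String(255))

    engine = create_engine('sqlite://')          # in-memory database for the demo
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    def write_changes(changed):
        # Stage every row first (no commit per row), then commit the batch once.
        for ci_id, attr_id, operate_type, old, new in changed:
            session.add(AttributeHistory(ci_id=ci_id, attr_id=attr_id,
                                         operate_type=operate_type, old=old, new=new))
        session.commit()

    write_changes([(1, 7, 'update', 'a', 'b'), (1, 8, 'add', None, 'x')])
    print(session.query(AttributeHistory).count())  # 2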
@@ -1,13 +1,12 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app

from api.lib.common_setting.resp_format import ErrFormat
from api.lib.perm.acl.app import AppCRUD
from api.lib.perm.acl.cache import RoleCache, AppCache
from api.lib.perm.acl.permission import PermissionCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD, ResourceCRUD
from api.lib.perm.acl.role import RoleCRUD, RoleRelationCRUD
from api.lib.perm.acl.user import UserCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD, ResourceCRUD


class ACLManager(object):
@@ -80,22 +79,20 @@ class ACLManager(object):
|
||||
return role.to_dict()
|
||||
|
||||
@staticmethod
|
||||
def delete_role(_id):
|
||||
def delete_role(_id, payload):
|
||||
RoleCRUD.delete_role(_id)
|
||||
return dict(rid=_id)
|
||||
|
||||
def get_user_info(self, username):
|
||||
from api.lib.perm.acl.acl import ACLManager as ACL
|
||||
user_info = ACL().get_user_info(username, self.app_name)
|
||||
result = dict(
|
||||
name=user_info.get('nickname') or username,
|
||||
username=user_info.get('username') or username,
|
||||
email=user_info.get('email'),
|
||||
uid=user_info.get('uid'),
|
||||
rid=user_info.get('rid'),
|
||||
role=dict(permissions=user_info.get('parents')),
|
||||
avatar=user_info.get('avatar')
|
||||
)
|
||||
result = dict(name=user_info.get('nickname') or username,
|
||||
username=user_info.get('username') or username,
|
||||
email=user_info.get('email'),
|
||||
uid=user_info.get('uid'),
|
||||
rid=user_info.get('rid'),
|
||||
role=dict(permissions=user_info.get('parents')),
|
||||
avatar=user_info.get('avatar'))
|
||||
|
||||
return result
|
||||
|
||||
@@ -112,32 +109,8 @@ class ACLManager(object):
|
||||
id2perms=id2perms
|
||||
)
|
||||
|
||||
def create_resources_type(self, payload):
|
||||
payload['app_id'] = self.validate_app().id
|
||||
rt = ResourceTypeCRUD.add(**payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
||||
def update_resources_type(self, _id, payload):
|
||||
rt = ResourceTypeCRUD.update(_id, **payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
||||
def create_resource(self, payload):
|
||||
payload['app_id'] = self.validate_app().id
|
||||
resource = ResourceCRUD.add(**payload)
|
||||
|
||||
return resource.to_dict()
|
||||
|
||||
def get_resource_by_type(self, q, u, rt_id, page=1, page_size=999999):
|
||||
numfound, res = ResourceCRUD.search(q, u, self.validate_app().id, rt_id, page, page_size)
|
||||
return res
|
||||
|
||||
def grant_resource(self, rid, resource_id, perms):
|
||||
PermissionCRUD.grant(rid, perms, resource_id=resource_id, group_id=None)
|
||||
|
||||
@staticmethod
|
||||
def create_app(payload):
|
||||
rt = AppCRUD.add(**payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
@@ -1,24 +1,14 @@
|
||||
import copy
|
||||
import json
|
||||
|
||||
from flask import abort, current_app
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError, LDAPSocketOpenError
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
from flask import abort
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import CommonData
|
||||
from api.lib.utils import AESCrypto
|
||||
from api.lib.common_setting.const import AuthCommonConfig, AuthenticateType, AuthCommonConfigAutoRedirect, TestType
|
||||
|
||||
|
||||
class CommonDataCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_data_by_type(data_type):
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
return CommonData.get_by(data_type=data_type)
|
||||
|
||||
@staticmethod
|
||||
@@ -28,8 +18,6 @@ class CommonDataCRUD(object):
|
||||
@staticmethod
|
||||
def create_new_data(data_type, **kwargs):
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
|
||||
return CommonData.create(data_type=data_type, **kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
@@ -41,7 +29,6 @@ class CommonDataCRUD(object):
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
@@ -53,230 +40,7 @@ class CommonDataCRUD(object):
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def check_auth_type(data_type):
|
||||
if data_type in list(AuthenticateType.all()) + [AuthCommonConfig]:
|
||||
abort(400, ErrFormat.common_data_not_support_auth_type.format(data_type))
|
||||
|
||||
@staticmethod
|
||||
def set_auth_auto_redirect_enable(_value: int):
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig, to_dict=False)
|
||||
if not existed:
|
||||
CommonDataCRUD.create_new_data(AuthCommonConfig, data={AuthCommonConfigAutoRedirect: _value})
|
||||
else:
|
||||
data = existed.data
|
||||
data = copy.deepcopy(existed.data) if data else {}
|
||||
data[AuthCommonConfigAutoRedirect] = _value
|
||||
CommonDataCRUD.update_data(existed.id, data=data)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def get_auth_auto_redirect_enable():
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig)
|
||||
if not existed:
|
||||
return 0
|
||||
data = existed.get('data', {})
|
||||
if not data:
|
||||
return 0
|
||||
return data.get(AuthCommonConfigAutoRedirect, 0)
|
||||
|
||||
|
||||
class AuthenticateDataCRUD(object):
|
||||
common_type_list = [AuthCommonConfig]
|
||||
|
||||
def __init__(self, _type):
|
||||
self._type = _type
|
||||
self.record = None
|
||||
self.decrypt_data = {}
|
||||
|
||||
def get_support_type_list(self):
|
||||
return list(AuthenticateType.all()) + self.common_type_list
|
||||
|
||||
def get(self):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data
|
||||
|
||||
def get_by_key(self, _key):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data.get(_key, None)
|
||||
|
||||
def get_record(self, to_dict=False) -> CommonData:
|
||||
return CommonData.get_by(first=True, data_type=self._type, to_dict=to_dict)
|
||||
|
||||
def get_record_with_decrypt(self) -> dict:
|
||||
record = CommonData.get_by(first=True, data_type=self._type, to_dict=True)
|
||||
if not record:
|
||||
return {}
|
||||
data = self.get_decrypt_dict(record.get('data', ''))
|
||||
record['data'] = data
|
||||
return record
|
||||
|
||||
def get_decrypt_dict(self, data):
|
||||
decrypt_str = self.decrypt(data)
|
||||
try:
|
||||
return json.loads(decrypt_str)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
def get_decrypt_data(self) -> dict:
|
||||
self.record = self.get_record()
|
||||
if not self.record:
|
||||
return self.get_from_config()
|
||||
return self.get_decrypt_dict(self.record.data)
|
||||
|
||||
def get_from_config(self):
|
||||
return current_app.config.get(self._type, {})
|
||||
|
||||
def check_by_type(self) -> None:
|
||||
existed = self.get_record()
|
||||
if existed:
|
||||
abort(400, ErrFormat.common_data_already_existed.format(self._type))
|
||||
|
||||
def create(self, data) -> CommonData:
|
||||
self.check_by_type()
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return CommonData.create(data_type=self._type, data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return CommonData.create(data_type=self._type, data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update_by_record(self, record, data) -> CommonData:
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return record.update(data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return record.update(data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update(self, _id, data) -> CommonData:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
|
||||
return self.update_by_record(existed, data)
|
||||
|
||||
@staticmethod
|
||||
def delete(_id) -> None:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
def encrypt(data) -> str:
if type(data) is dict:
try:
data = json.dumps(data)
except Exception as e:
abort(400, str(e))
return AESCrypto().encrypt(data)

@staticmethod
def decrypt(data) -> str:
return AESCrypto().decrypt(data)
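`encrypt` above JSON-serializes dict payloads before handing them to the project's `AESCrypto` helper, and `decrypt` reverses the process. For readers without that helper, here is a minimal stand-in for the same serialize-encrypt-decrypt round trip; it uses `cryptography.fernet` purely as an assumed substitute for `AESCrypto`:

    import json
    from cryptography.fernet import Fernet

    key = Fernet.generate_key()   # in real use the key would come from configuration
    fernet = Fernet(key)

    def encrypt(data) -> str:
        if isinstance(data, dict):
            data = json.dumps(data)
        return fernet.encrypt(data.encode()).decode()

    def decrypt(token: str) -> dict:
        return json.loads(fernet.decrypt(token.encode()).decode())

    token = encrypt({'ldap_server': 'ldap.example.com', 'enable': 1})
    print(decrypt(token))   # {'ldap_server': 'ldap.example.com', 'enable': 1}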
|
||||
@staticmethod
|
||||
def get_enable_list():
|
||||
all_records = CommonData.query.filter(
|
||||
CommonData.data_type.in_(AuthenticateType.all()),
|
||||
CommonData.deleted == 0
|
||||
).all()
|
||||
enable_list = []
|
||||
for auth_type in AuthenticateType.all():
|
||||
record = list(filter(lambda x: x.data_type == auth_type, all_records))
|
||||
if not record:
|
||||
config = current_app.config.get(auth_type, None)
|
||||
if not config:
|
||||
continue
|
||||
|
||||
if config.get('enable', False):
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
continue
|
||||
|
||||
try:
|
||||
decrypt_data = json.loads(AuthenticateDataCRUD.decrypt(record[0].data))
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
continue
|
||||
|
||||
if decrypt_data.get('enable', 0) == 1:
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
auth_auto_redirect = CommonDataCRUD.get_auth_auto_redirect_enable()
|
||||
|
||||
return dict(
|
||||
enable_list=enable_list,
|
||||
auth_auto_redirect=auth_auto_redirect,
|
||||
)
|
||||
|
||||
def test(self, test_type, data):
|
||||
type_lower = self._type.lower()
|
||||
func_name = f'test_{type_lower}'
|
||||
if hasattr(self, func_name):
|
||||
try:
|
||||
return getattr(self, f'test_{type_lower}')(test_type, data)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
abort(400, ErrFormat.not_support_test.format(self._type))
|
||||
|
||||
@staticmethod
|
||||
def test_ldap(test_type, data):
|
||||
ldap_server = data.get('ldap_server')
|
||||
ldap_user_dn = data.get('ldap_user_dn', '{}')
|
||||
|
||||
server = Server(ldap_server, connect_timeout=2)
|
||||
if not server.check_availability():
|
||||
raise Exception(ErrFormat.ldap_server_connect_not_available)
|
||||
else:
|
||||
if test_type == TestType.Connect:
|
||||
return True
|
||||
|
||||
username = data.get('username', None)
|
||||
if not username:
|
||||
raise Exception(ErrFormat.ldap_test_username_required)
|
||||
user = ldap_user_dn.format(username)
|
||||
password = data.get('password', None)
|
||||
|
||||
try:
|
||||
Connection(server, user=user, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
ldap_domain = data.get('ldap_domain')
|
||||
user_with_domain = f"{username}@{ldap_domain}"
|
||||
try:
|
||||
Connection(server, user=user_with_domain, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
except LDAPSocketOpenError:
|
||||
raise Exception(ErrFormat.ldap_server_connect_timeout)
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
return True
|
||||
|
@@ -1,7 +1,5 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from api.extensions import cache
|
||||
from api.models.common_setting import CompanyInfo
|
||||
|
||||
|
||||
@@ -13,51 +11,14 @@ class CompanyInfoCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def create(**kwargs):
|
||||
CompanyInfoCRUD.check_data(**kwargs)
|
||||
res = CompanyInfo.create(**kwargs)
|
||||
CompanyInfoCache.refresh(res.info)
|
||||
return res
|
||||
return CompanyInfo.create(**kwargs)
|
||||
|
||||
@staticmethod
|
||||
def update(_id, **kwargs):
|
||||
kwargs.pop('id', None)
|
||||
existed = CompanyInfo.get_by_id(_id)
|
||||
if not existed:
|
||||
existed = CompanyInfoCRUD.create(**kwargs)
|
||||
return CompanyInfoCRUD.create(**kwargs)
|
||||
else:
|
||||
CompanyInfoCRUD.check_data(**kwargs)
|
||||
existed = existed.update(**kwargs)
|
||||
CompanyInfoCache.refresh(existed.info)
|
||||
return existed
|
||||
|
||||
@staticmethod
|
||||
def check_data(**kwargs):
|
||||
info = kwargs.get('info', {})
|
||||
info['messenger'] = CompanyInfoCRUD.check_messenger(info.get('messenger', None))
|
||||
|
||||
kwargs['info'] = info
|
||||
|
||||
@staticmethod
|
||||
def check_messenger(messenger):
|
||||
if not messenger:
|
||||
return messenger
|
||||
|
||||
parsed_url = urlparse(messenger)
|
||||
return f"{parsed_url.scheme}://{parsed_url.netloc}"
|
||||
|
||||
|
||||
class CompanyInfoCache(object):
|
||||
key = 'CompanyInfoCache::'
|
||||
|
||||
@classmethod
|
||||
def get(cls):
|
||||
info = cache.get(cls.key)
|
||||
if not info:
|
||||
res = CompanyInfo.get_by(first=True) or {}
|
||||
info = res.get('info', {})
|
||||
cache.set(cls.key, info)
|
||||
return info
|
||||
|
||||
@classmethod
|
||||
def refresh(cls, info):
|
||||
cache.set(cls.key, info)
|
||||
return existed
|
||||
|
@@ -12,26 +12,3 @@ class OperatorType(BaseEnum):
|
||||
LESS_THAN = 6
|
||||
IS_EMPTY = 7
|
||||
IS_NOT_EMPTY = 8
|
||||
|
||||
|
||||
BotNameMap = {
|
||||
'wechatApp': 'wechatBot',
|
||||
'feishuApp': 'feishuBot',
|
||||
'dingdingApp': 'dingdingBot',
|
||||
}
|
||||
|
||||
|
||||
class AuthenticateType(BaseEnum):
|
||||
CAS = 'CAS'
|
||||
OAUTH2 = 'OAUTH2'
|
||||
OIDC = 'OIDC'
|
||||
LDAP = 'LDAP'
|
||||
|
||||
|
||||
AuthCommonConfig = 'AuthCommonConfig'
|
||||
AuthCommonConfigAutoRedirect = 'auto_redirect'
|
||||
|
||||
|
||||
class TestType(BaseEnum):
|
||||
Connect = 'connect'
|
||||
Login = 'login'
|
||||
|
@@ -1,6 +1,6 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import abort, current_app
|
||||
from flask import abort
|
||||
from treelib import Tree
|
||||
from wtforms import Form
|
||||
from wtforms import IntegerField
|
||||
@@ -9,7 +9,6 @@ from wtforms import validators
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.models.common_setting import Department, Employee
|
||||
|
||||
@@ -153,10 +152,6 @@ class DepartmentForm(Form):
|
||||
|
||||
class DepartmentCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_id(d_id, to_dict=True):
|
||||
return Department.get_by(first=True, department_id=d_id, to_dict=to_dict)
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
DepartmentCRUD.check_department_name_unique(kwargs['department_name'])
|
||||
@@ -191,11 +186,10 @@ class DepartmentCRUD(object):
|
||||
filter(lambda d: d['department_id'] == department_parent_id, allow_p_d_id_list))
|
||||
if len(target) == 0:
|
||||
try:
|
||||
dep = Department.get_by(
|
||||
d = Department.get_by(
|
||||
first=True, to_dict=False, department_id=department_parent_id)
|
||||
name = dep.department_name if dep else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
name = d.department_name if d else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
name = ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
abort(400, ErrFormat.cannot_to_be_parent_department.format(name))
|
||||
|
||||
@@ -259,7 +253,7 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
RoleCRUD.delete_role(existed.acl_rid)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
return existed.soft_delete()
|
||||
|
||||
@@ -274,7 +268,7 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
tree.remove_subtree(department_id)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
[allow_d_id_list.append({'department_id': int(n.identifier), 'department_name': n.tag}) for n in
|
||||
tree.all_nodes()]
|
||||
@@ -396,125 +390,6 @@ class DepartmentCRUD(object):
|
||||
[id_list.append(int(n.identifier))
|
||||
for n in tmp_tree.all_nodes()]
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
return id_list
|
||||
|
||||
|
||||
class EditDepartmentInACL(object):
|
||||
|
||||
@staticmethod
|
||||
def add_department_to_acl(department_id, op_uid):
|
||||
db_department = DepartmentCRUD.get_department_by_id(department_id, to_dict=False)
|
||||
if not db_department:
|
||||
return
|
||||
|
||||
from api.models.acl import Role
|
||||
role = Role.get_by(first=True, name=db_department.department_name, app_id=None)
|
||||
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
if role is None:
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'name': db_department.department_name,
|
||||
}
|
||||
role = acl.create_role(payload)
|
||||
|
||||
acl_rid = role.get('id') if role else 0
|
||||
|
||||
db_department.update(
|
||||
acl_rid=acl_rid
|
||||
)
|
||||
info = f"add_department_to_acl, acl_rid: {acl_rid}"
|
||||
current_app.logger.info(info)
|
||||
return info
|
||||
|
||||
@staticmethod
|
||||
def delete_department_from_acl(department_rids, op_uid):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
|
||||
result = []
|
||||
|
||||
for rid in department_rids:
|
||||
try:
|
||||
acl.delete_role(rid)
|
||||
except Exception as e:
|
||||
result.append(f"delete_department_in_acl, rid: {rid}, error: {e}")
|
||||
continue
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def edit_department_name_in_acl(d_rid: int, d_name: str, op_uid: int):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
payload = {
|
||||
'name': d_name
|
||||
}
|
||||
try:
|
||||
acl.edit_role(d_rid, payload)
|
||||
except Exception as e:
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, error: {e}"
|
||||
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, success"
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_department_in_acl(e_list: list, new_d_id: int, op_uid: int):
|
||||
result = []
|
||||
new_department = DepartmentCRUD.get_department_by_id(new_d_id, False)
|
||||
if not new_department:
|
||||
result.append(f"{new_d_id} new_department is None")
|
||||
return result
|
||||
|
||||
from api.models.acl import Role
|
||||
new_role = Role.get_by(first=True, name=new_department.department_name, app_id=None)
|
||||
new_d_rid_in_acl = new_role.get('id') if new_role else 0
|
||||
if new_d_rid_in_acl == 0:
|
||||
return
|
||||
|
||||
if new_d_rid_in_acl != new_department.acl_rid:
|
||||
new_department.update(
|
||||
acl_rid=new_d_rid_in_acl
|
||||
)
|
||||
new_department_acl_rid = new_department.acl_rid if new_d_rid_in_acl == new_department.acl_rid else \
|
||||
new_d_rid_in_acl
|
||||
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
for employee in e_list:
|
||||
old_department = DepartmentCRUD.get_department_by_id(employee.get('department_id'), False)
|
||||
if not old_department:
|
||||
continue
|
||||
employee_acl_rid = employee.get('e_acl_rid')
|
||||
if employee_acl_rid == 0:
|
||||
result.append(f"employee_acl_rid == 0")
|
||||
continue
|
||||
|
||||
old_role = Role.get_by(first=True, name=old_department.department_name, app_id=None)
|
||||
old_d_rid_in_acl = old_role.get('id') if old_role else 0
|
||||
if old_d_rid_in_acl == 0:
|
||||
return
|
||||
if old_d_rid_in_acl != old_department.acl_rid:
|
||||
old_department.update(
|
||||
acl_rid=old_d_rid_in_acl
|
||||
)
|
||||
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'parent_id': d_acl_rid,
|
||||
}
|
||||
try:
|
||||
acl.remove_user_from_role(employee_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"remove_user_from_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
|
||||
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'child_ids': [employee_acl_rid],
|
||||
}
|
||||
try:
|
||||
acl.add_user_to_role(new_department_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"add_user_to_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
|
||||
|
||||
return result
|
||||
|
@@ -1,9 +1,8 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import copy
|
||||
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
from flask import abort
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import or_, literal_column, func, not_, and_
|
||||
@@ -121,19 +120,6 @@ class EmployeeCRUD(object):
|
||||
employee = CreateEmployee().create_single(**data)
|
||||
return employee.to_dict()
|
||||
|
||||
@staticmethod
|
||||
def add_employee_from_acl_created(**kwargs):
|
||||
try:
|
||||
kwargs['acl_uid'] = kwargs.pop('uid')
|
||||
kwargs['acl_rid'] = kwargs.pop('rid')
|
||||
kwargs['department_id'] = 0
|
||||
|
||||
Employee.create(
|
||||
**kwargs
|
||||
)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
try:
|
||||
@@ -178,7 +164,7 @@ class EmployeeCRUD(object):
|
||||
def edit_employee_by_uid(_uid, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
edit_acl_user(_uid, **kwargs)
|
||||
user = edit_acl_user(_uid, **kwargs)
|
||||
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
@@ -190,9 +176,9 @@ class EmployeeCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def change_password_by_uid(_uid, password):
|
||||
EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
edit_acl_user(_uid, password=password)
|
||||
user = edit_acl_user(_uid, password=password)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -359,11 +345,9 @@ class EmployeeCRUD(object):
|
||||
|
||||
if value and column == "last_login":
|
||||
try:
|
||||
return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
except Exception as e:
|
||||
err = f"{ErrFormat.datetime_format_error.format(column)}: {str(e)}"
|
||||
abort(400, err)
|
||||
return value
|
||||
abort(400, ErrFormat.datetime_format_error.format(column))
|
||||
|
||||
@staticmethod
|
||||
def get_attr_by_column(column):
|
||||
@@ -384,7 +368,7 @@ class EmployeeCRUD(object):
|
||||
relation = condition.get("relation", None)
|
||||
value = condition.get("value", None)
|
||||
|
||||
value = EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
a, o = EmployeeCRUD.get_expr_by_condition(
|
||||
column, operator, value, relation)
|
||||
and_list += a
|
||||
@@ -490,60 +474,6 @@ class EmployeeCRUD(object):
|
||||
|
||||
return [r.to_dict() for r in results]
|
||||
|
||||
@staticmethod
|
||||
def remove_bind_notice_by_uid(_platform, _uid):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
employee_data = existed.to_dict()
|
||||
|
||||
notice_info = employee_data.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
|
||||
notice_info[_platform] = ''
|
||||
|
||||
existed.update(
|
||||
notice_info=notice_info
|
||||
)
|
||||
return ErrFormat.notice_remove_bind_success
|
||||
|
||||
@staticmethod
|
||||
def bind_notice_by_uid(_platform, _uid):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
mobile = existed.mobile
|
||||
if not mobile or len(mobile) == 0:
|
||||
abort(400, ErrFormat.notice_bind_err_with_empty_mobile)
|
||||
|
||||
from api.lib.common_setting.notice_config import NoticeConfigCRUD
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
abort(400, ErrFormat.notice_please_config_messenger_first)
|
||||
|
||||
url = f"{messenger}/v1/uid/getbyphone"
|
||||
try:
|
||||
payload = dict(
|
||||
phone=mobile,
|
||||
sender=_platform
|
||||
)
|
||||
res = requests.post(url, json=payload)
|
||||
result = res.json()
|
||||
if res.status_code != 200:
|
||||
raise Exception(result.get('msg', ''))
|
||||
target_id = result.get('uid', '')
|
||||
|
||||
employee_data = existed.to_dict()
|
||||
|
||||
notice_info = employee_data.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
|
||||
notice_info[_platform] = '' if not target_id else target_id
|
||||
|
||||
existed.update(
|
||||
notice_info=notice_info
|
||||
)
|
||||
return ErrFormat.notice_bind_success
|
||||
|
||||
except Exception as e:
|
||||
return abort(400, ErrFormat.notice_bind_failed.format(str(e)))
|
||||
|
||||
@staticmethod
|
||||
def get_employee_notice_by_ids(employee_ids):
|
||||
criterion = [
|
||||
@@ -563,130 +493,10 @@ class EmployeeCRUD(object):
|
||||
for column in direct_columns:
|
||||
tmp[column] = d.get(column, '')
|
||||
notice_info = d.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
tmp.update(**notice_info)
|
||||
results.append(tmp)
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def import_employee(employee_list):
|
||||
res = CreateEmployee().batch_create(employee_list)
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_employee_department(employee_id_list, column_value):
|
||||
err_list = []
|
||||
employee_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
employee = dict(
|
||||
e_acl_rid=existed.acl_rid,
|
||||
department_id=existed.department_id
|
||||
)
|
||||
employee_list.append(employee)
|
||||
existed.update(department_id=column_value)
|
||||
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
from api.lib.common_setting.department import EditDepartmentInACL
|
||||
EditDepartmentInACL.edit_employee_department_in_acl(
|
||||
employee_list, column_value, current_user.uid
|
||||
)
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_password_or_block_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
if column_name == 'block':
|
||||
err_list = []
|
||||
success_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
employee = EmployeeCRUD.edit_employee_block_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
success_list.append(employee)
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
return err_list
|
||||
else:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, is_acl)
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
err_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
EmployeeCRUD.edit_employee_single_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_single_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
if 'direct_supervisor_id' in kwargs.keys():
|
||||
if kwargs['direct_supervisor_id'] == existed.direct_supervisor_id:
|
||||
raise Exception(ErrFormat.direct_supervisor_is_not_self)
|
||||
|
||||
if is_acl:
|
||||
return edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
try:
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
kwargs.pop(column)
|
||||
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_block_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
value = get_block_value(kwargs.get('block'))
|
||||
if value is True:
|
||||
check_department_director_id_or_direct_supervisor_id(_id)
|
||||
value = 1
|
||||
else:
|
||||
value = 0
|
||||
|
||||
if is_acl:
|
||||
kwargs['block'] = value
|
||||
edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
existed.update(block=value)
|
||||
data = existed.to_dict()
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def batch_employee(column_name, column_value, employee_id_list):
|
||||
if column_value is None:
|
||||
abort(400, ErrFormat.value_is_required)
|
||||
if column_name in ['password', 'block']:
|
||||
return EmployeeCRUD.batch_edit_password_or_block_column(column_name, employee_id_list, column_value, True)
|
||||
|
||||
elif column_name in ['department_id']:
|
||||
return EmployeeCRUD.batch_edit_employee_department(employee_id_list, column_value)
|
||||
|
||||
elif column_name in [
|
||||
'direct_supervisor_id', 'position_name'
|
||||
]:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, False)
|
||||
|
||||
else:
|
||||
abort(400, ErrFormat.column_name_not_support)
|
||||
|
||||
|
||||
def get_user_map(key='uid', acl=None):
|
||||
"""
|
||||
@@ -727,7 +537,6 @@ class CreateEmployee(object):
|
||||
try:
|
||||
existed = self.check_acl_user(user_data)
|
||||
if not existed:
|
||||
user_data['add_from'] = 'common'
|
||||
return self.acl.create_user(user_data)
|
||||
return existed
|
||||
except Exception as e:
|
||||
@@ -764,8 +573,7 @@ class CreateEmployee(object):
|
||||
**kwargs
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_name(d_name):
|
||||
def get_department_by_name(self, d_name):
|
||||
return Department.get_by(first=True, department_name=d_name)
|
||||
|
||||
def get_end_department_id(self, department_name_list, department_name_map):
|
||||
|
@@ -1,104 +1,41 @@
|
||||
import requests
|
||||
|
||||
from api.lib.common_setting.const import BotNameMap
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import CompanyInfo, NoticeConfig
|
||||
from api.models.common_setting import NoticeConfig
|
||||
from wtforms import Form
|
||||
from wtforms import StringField
|
||||
from wtforms import validators
|
||||
from flask import abort, current_app
|
||||
from flask import abort
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
from email.utils import formataddr
|
||||
|
||||
|
||||
class NoticeConfigCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def add_notice_config(**kwargs):
|
||||
platform = kwargs.get('platform')
|
||||
NoticeConfigCRUD.check_platform(platform)
|
||||
info = kwargs.get('info', {})
|
||||
if 'name' not in info:
|
||||
info['name'] = platform
|
||||
kwargs['info'] = info
|
||||
NoticeConfigCRUD.check_platform(kwargs.get('platform'))
|
||||
try:
|
||||
NoticeConfigCRUD.update_messenger_config(**info)
|
||||
res = NoticeConfig.create(
|
||||
return NoticeConfig.create(
|
||||
**kwargs
|
||||
)
|
||||
return res
|
||||
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def check_platform(platform):
|
||||
NoticeConfig.get_by(first=True, to_dict=False, platform=platform) and \
|
||||
abort(400, ErrFormat.notice_platform_existed.format(platform))
|
||||
NoticeConfig.get_by(first=True, to_dict=False, platform=platform) and abort(400, f"{platform} 已存在!")
|
||||
|
||||
@staticmethod
|
||||
def edit_notice_config(_id, **kwargs):
|
||||
existed = NoticeConfigCRUD.get_notice_config_by_id(_id)
|
||||
try:
|
||||
info = kwargs.get('info', {})
|
||||
if 'name' not in info:
|
||||
info['name'] = existed.platform
|
||||
kwargs['info'] = info
|
||||
NoticeConfigCRUD.update_messenger_config(**info)
|
||||
|
||||
res = existed.update(**kwargs)
|
||||
return res
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def get_messenger_url():
|
||||
from api.lib.common_setting.company_info import CompanyInfoCache
|
||||
com_info = CompanyInfoCache.get()
|
||||
if not com_info:
|
||||
return
|
||||
messenger = com_info.get('messenger', '')
|
||||
if len(messenger) == 0:
|
||||
return
|
||||
if messenger[-1] == '/':
|
||||
messenger = messenger[:-1]
|
||||
return messenger
|
||||
|
||||
@staticmethod
|
||||
def update_messenger_config(**kwargs):
|
||||
try:
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
raise Exception(ErrFormat.notice_please_config_messenger_first)
|
||||
|
||||
url = f"{messenger}/v1/senders"
|
||||
name = kwargs.get('name')
|
||||
bot_list = kwargs.pop('bot', None)
|
||||
for k, v in kwargs.items():
|
||||
if isinstance(v, bool):
|
||||
kwargs[k] = 'true' if v else 'false'
|
||||
else:
|
||||
kwargs[k] = str(v)
|
||||
|
||||
payload = {name: [kwargs]}
|
||||
current_app.logger.info(f"update_messenger_config: {url}, {payload}")
|
||||
res = requests.put(url, json=payload, timeout=2)
|
||||
current_app.logger.info(f"update_messenger_config: {res.status_code}, {res.text}")
|
||||
|
||||
if not bot_list or len(bot_list) == 0:
|
||||
return
|
||||
bot_name = BotNameMap.get(name)
|
||||
payload = {bot_name: bot_list}
|
||||
current_app.logger.info(f"update_messenger_config: {url}, {payload}")
|
||||
bot_res = requests.put(url, json=payload, timeout=2)
|
||||
current_app.logger.info(f"update_messenger_config: {bot_res.status_code}, {bot_res.text}")
|
||||
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def get_notice_config_by_id(_id):
|
||||
return NoticeConfig.get_by(first=True, to_dict=False, id=_id) or \
|
||||
abort(400,
|
||||
ErrFormat.notice_not_existed.format(_id))
|
||||
return NoticeConfig.get_by(first=True, to_dict=False, id=_id) or abort(400, f"{_id} 配置项不存在!")
|
||||
|
||||
@staticmethod
|
||||
def get_all():
|
||||
@@ -106,46 +43,38 @@ class NoticeConfigCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def test_send_email(receive_address, **kwargs):
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
abort(400, ErrFormat.notice_please_config_messenger_first)
|
||||
url = f"{messenger}/v1/message"
|
||||
|
||||
# 设置发送方和接收方的电子邮件地址
|
||||
sender_email = 'test@test.com'
|
||||
sender_name = 'Test Sender'
|
||||
recipient_email = receive_address
|
||||
recipient_name = receive_address
|
||||
|
||||
subject = 'Test Email'
|
||||
body = 'This is a test email'
|
||||
payload = {
|
||||
"sender": 'email',
|
||||
"msgtype": "text/plain",
|
||||
"title": subject,
|
||||
"content": body,
|
||||
"tos": [recipient_email],
|
||||
}
|
||||
current_app.logger.info(f"test_send_email: {url}, {payload}")
|
||||
response = requests.post(url, json=payload)
|
||||
if response.status_code != 200:
|
||||
abort(400, response.text)
|
||||
|
||||
message = MIMEText(body, 'plain', 'utf-8')
|
||||
message['From'] = formataddr((sender_name, sender_email))
|
||||
message['To'] = formataddr((recipient_name, recipient_email))
|
||||
message['Subject'] = subject
|
||||
|
||||
smtp_server = kwargs.get('server')
|
||||
smtp_port = kwargs.get('port')
|
||||
smtp_username = kwargs.get('username')
|
||||
smtp_password = kwargs.get('password')
|
||||
|
||||
if kwargs.get('mail_type') == 'SMTP':
|
||||
smtp_connection = smtplib.SMTP(smtp_server, smtp_port)
|
||||
else:
|
||||
smtp_connection = smtplib.SMTP_SSL(smtp_server, smtp_port)
|
||||
|
||||
if kwargs.get('is_login'):
|
||||
smtp_connection.login(smtp_username, smtp_password)
|
||||
|
||||
smtp_connection.sendmail(sender_email, recipient_email, message.as_string())
|
||||
smtp_connection.quit()
|
||||
|
||||
return 1
|
||||
|
||||
@staticmethod
|
||||
def get_app_bot():
|
||||
result = []
|
||||
for notice_app in NoticeConfig.get_by(to_dict=False):
|
||||
if notice_app.platform in ['email']:
|
||||
continue
|
||||
info = notice_app.info
|
||||
name = info.get('name', '')
|
||||
if name not in BotNameMap:
|
||||
continue
|
||||
result.append(dict(
|
||||
name=info.get('name', ''),
|
||||
label=info.get('label', ''),
|
||||
bot=info.get('bot', []),
|
||||
))
|
||||
return result
|
||||
|
||||
|
||||
class NoticeConfigForm(Form):
|
||||
platform = StringField(validators=[
|
||||
|
@@ -8,9 +8,6 @@ class ErrFormat(CommonErrFormat):
|
||||
|
||||
no_file_part = "没有文件部分"
|
||||
file_is_required = "文件是必须的"
|
||||
file_not_found = "文件不存在"
|
||||
file_type_not_allowed = "文件类型不允许"
|
||||
upload_failed = "上传失败: {}"
|
||||
|
||||
direct_supervisor_is_not_self = "直属上级不能是自己"
|
||||
parent_department_is_not_self = "上级部门不能是自己"
|
||||
@@ -59,19 +56,3 @@ class ErrFormat(CommonErrFormat):
|
||||
email_send_timeout = "邮件发送超时"
|
||||
|
||||
common_data_not_found = "ID {} 找不到记录"
|
||||
common_data_already_existed = "{} 已存在"
|
||||
notice_platform_existed = "{} 已存在"
|
||||
notice_not_existed = "{} 配置项不存在"
|
||||
notice_please_config_messenger_first = "请先配置 messenger"
|
||||
notice_bind_err_with_empty_mobile = "绑定失败,手机号为空"
|
||||
notice_bind_failed = "绑定失败: {}"
|
||||
notice_bind_success = "绑定成功"
|
||||
notice_remove_bind_success = "解绑成功"
|
||||
|
||||
not_support_test = "不支持的测试类型: {}"
|
||||
not_support_auth_type = "不支持的认证类型: {}"
|
||||
ldap_server_connect_timeout = "LDAP服务器连接超时"
|
||||
ldap_server_connect_not_available = "LDAP服务器连接不可用"
|
||||
ldap_test_unknown_error = "LDAP测试未知错误: {}"
|
||||
common_data_not_support_auth_type = "通用数据不支持auth类型: {}"
|
||||
ldap_test_username_required = "LDAP测试用户名必填"
|
||||
|
@@ -1,13 +1,6 @@
|
||||
import uuid
|
||||
import os
|
||||
from io import BytesIO
|
||||
|
||||
from flask import abort, current_app
|
||||
import lz4.frame
|
||||
|
||||
from api.lib.common_setting.utils import get_cur_time_str
|
||||
from api.models.common_setting import CommonFile
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
|
||||
|
||||
def allowed_file(filename, allowed_extensions):
|
||||
@@ -21,48 +14,3 @@ def generate_new_file_name(name):
|
||||
cur_str = get_cur_time_str('_')
|
||||
|
||||
return f"{prev_name}_{cur_str}_{uid}.{ext}"
|
||||
|
||||
|
||||
class CommonFileCRUD:
|
||||
@staticmethod
|
||||
def add_file(**kwargs):
|
||||
return CommonFile.create(**kwargs)
|
||||
|
||||
@staticmethod
|
||||
def get_file(file_name):
|
||||
existed = CommonFile.get_by(file_name=file_name, first=True, to_dict=False)
|
||||
if not existed:
|
||||
abort(400, ErrFormat.file_not_found)
|
||||
|
||||
uncompressed_data = lz4.frame.decompress(existed.binary)
|
||||
|
||||
return BytesIO(uncompressed_data)
|
||||
|
||||
@staticmethod
|
||||
def sync_file_to_db():
|
||||
for p in ['UPLOAD_DIRECTORY_FULL']:
|
||||
upload_path = current_app.config.get(p, None)
|
||||
if not upload_path:
|
||||
continue
|
||||
for root, dirs, files in os.walk(upload_path):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
if not os.path.isfile(file_path):
|
||||
continue
|
||||
|
||||
existed = CommonFile.get_by(file_name=file, first=True, to_dict=False)
|
||||
if existed:
|
||||
continue
|
||||
with open(file_path, 'rb') as f:
|
||||
data = f.read()
|
||||
compressed_data = lz4.frame.compress(data)
|
||||
try:
|
||||
CommonFileCRUD.add_file(
|
||||
origin_name=file,
|
||||
file_name=file,
|
||||
binary=compressed_data
|
||||
)
|
||||
|
||||
current_app.logger.info(f'sync file {file} to db')
|
||||
except Exception as e:
|
||||
current_app.logger.error(f'sync file {file} to db error: {e}')
|
||||
|
@@ -10,18 +10,14 @@ from api.lib.exception import CommitException

class FormatMixin(object):
def to_dict(self):
res = dict()
for k in getattr(self, "__mapper__").c.keys():
if k in {'password', '_password', 'secret', '_secret'}:
continue
res = dict([(k, getattr(self, k) if not isinstance(
getattr(self, k), (datetime.datetime, datetime.date, datetime.time)) else str(
getattr(self, k))) for k in getattr(self, "__mapper__").c.keys()])
# FIXME: getattr(cls, "__table__").columns k.name

if k.startswith('_'):
k = k[1:]

if not isinstance(getattr(self, k), (datetime.datetime, datetime.date, datetime.time)):
res[k] = getattr(self, k)
else:
res[k] = str(getattr(self, k))
res.pop('password', None)
res.pop('_password', None)
res.pop('secret', None)

return res
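The rewritten `to_dict` above applies two rules per mapped column: skip sensitive keys, and stringify date/time values (also trimming a leading underscore from private column names). A plain-Python sketch of the same rules over a hand-built column dict, for illustration only:

    import datetime

    SENSITIVE = {'password', '_password', 'secret', '_secret'}

    def serialize_row(columns: dict) -> dict:
        res = {}
        for k, v in columns.items():
            if k in SENSITIVE:
                continue
            key = k[1:] if k.startswith('_') else k
            if isinstance(v, (datetime.datetime, datetime.date, datetime.time)):
                res[key] = str(v)
            else:
                res[key] = v
        return res

    row = {'id': 1, '_password': 'x', 'created_at': datetime.datetime(2024, 1, 1, 8, 0)}
    print(serialize_row(row))   # {'id': 1, 'created_at': '2024-01-01 08:00:00'}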
@@ -94,7 +90,7 @@ class CRUDMixin(FormatMixin):
|
||||
if any((isinstance(_id, six.string_types) and _id.isdigit(),
|
||||
isinstance(_id, (six.integer_types, float))), ):
|
||||
obj = getattr(cls, "query").get(int(_id))
|
||||
if obj and not getattr(obj, 'deleted', False):
|
||||
if obj and not obj.deleted:
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
|
@@ -4,14 +4,8 @@
|
||||
from functools import wraps
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import request
|
||||
from sqlalchemy.exc import InvalidRequestError
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlalchemy.exc import PendingRollbackError
|
||||
from sqlalchemy.exc import StatementError
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
@@ -76,43 +70,3 @@ def args_validate(model_cls, exclude_args=None):
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def reconnect_db(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except (StatementError, OperationalError, InvalidRequestError) as e:
error_msg = str(e)
if 'Lost connection' in error_msg or 'reconnect until invalid transaction' in error_msg or \
'can be emitted within this transaction' in error_msg:
current_app.logger.info('[reconnect_db] lost connect rollback then retry')
db.session.rollback()
return func(*args, **kwargs)
else:
raise e
except Exception as e:
raise e

return wrapper


def _flush_db():
try:
db.session.commit()
except (StatementError, OperationalError, InvalidRequestError, PendingRollbackError):
db.session.rollback()


def flush_db(func):
@wraps(func)
def wrapper(*args, **kwargs):
_flush_db()
return func(*args, **kwargs)

return wrapper


def run_flush_db():
_flush_db()
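`reconnect_db` above retries a call once after rolling back the session when the error text points at a lost MySQL connection or a poisoned transaction. The self-contained sketch below mimics that control flow with a fake session and a plain `RuntimeError`, just to show the retry path; it is not the project's decorator.

    from functools import wraps

    class FakeSession:
        def rollback(self):
            print('rollback()')

    db_session = FakeSession()

    def reconnect_db(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except RuntimeError as e:
                if 'Lost connection' in str(e):
                    db_session.rollback()          # clear the broken transaction
                    return func(*args, **kwargs)   # then retry exactly once
                raise
        return wrapper

    calls = {'n': 0}

    @reconnect_db
    def query():
        calls['n'] += 1
        if calls['n'] == 1:
            raise RuntimeError('Lost connection to MySQL server')
        return 'ok'

    print(query())   # prints rollback(), then 'ok'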
@@ -3,12 +3,10 @@
|
||||
import json
|
||||
|
||||
import requests
|
||||
import six
|
||||
from flask import current_app
|
||||
from jinja2 import Template
|
||||
from markdownify import markdownify as md
|
||||
|
||||
from api.lib.common_setting.notice_config import NoticeConfigCRUD
|
||||
from api.lib.mail import send_mail
|
||||
|
||||
|
||||
@@ -19,15 +17,7 @@ def _request_messenger(subject, body, tos, sender, payload):
|
||||
if not params['tos']:
|
||||
raise Exception("no receivers")
|
||||
|
||||
flat_tos = []
|
||||
for i in params['tos']:
|
||||
if i.strip():
|
||||
to = Template(i).render(payload)
|
||||
if isinstance(to, list):
|
||||
flat_tos.extend(to)
|
||||
elif isinstance(to, six.string_types):
|
||||
flat_tos.append(to)
|
||||
params['tos'] = flat_tos
|
||||
params['tos'] = [Template(i).render(payload) for i in params['tos'] if i.strip()]
|
||||
|
||||
if sender == "email":
|
||||
params['msgtype'] = 'text/html'
|
||||
@@ -42,14 +32,7 @@ def _request_messenger(subject, body, tos, sender, payload):
|
||||
|
||||
params['content'] = json.dumps(dict(content=content))
|
||||
|
||||
url = current_app.config.get('MESSENGER_URL') or NoticeConfigCRUD.get_messenger_url()
|
||||
if not url:
|
||||
raise Exception("no messenger url")
|
||||
|
||||
if not url.endswith("message"):
|
||||
url = "{}/v1/message".format(url)
|
||||
|
||||
resp = requests.post(url, json=params)
|
||||
resp = requests.post(current_app.config.get('MESSENGER_URL'), json=params)
|
||||
if resp.status_code != 200:
|
||||
raise Exception(resp.text)
|
||||
|
||||
|
@@ -1,19 +1,14 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import itertools
|
||||
import json
|
||||
from enum import Enum
|
||||
from typing import List
|
||||
|
||||
from flask import has_request_context
|
||||
from flask import request
|
||||
from flask import has_request_context, request
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl import AppCache
|
||||
from api.models.acl import AuditLoginLog
|
||||
from api.models.acl import AuditPermissionLog
|
||||
from api.models.acl import AuditResourceLog
|
||||
from api.models.acl import AuditRoleLog
|
||||
@@ -288,27 +283,6 @@ class AuditCRUD(object):
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def search_login(_, q=None, page=1, page_size=10, start=None, end=None):
|
||||
query = db.session.query(AuditLoginLog)
|
||||
|
||||
if start:
|
||||
query = query.filter(AuditLoginLog.login_at >= start)
|
||||
if end:
|
||||
query = query.filter(AuditLoginLog.login_at <= end)
|
||||
|
||||
if q:
|
||||
query = query.filter(AuditLoginLog.username == q)
|
||||
|
||||
records = query.order_by(
|
||||
AuditLoginLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def add_role_log(cls, app_id, operate_type: AuditOperateType,
|
||||
scope: AuditScope, link_id: int, origin: dict, current: dict, extra: dict,
|
||||
@@ -374,24 +348,3 @@ class AuditCRUD(object):
|
||||
AuditTriggerLog.create(app_id=app_id, trigger_id=trigger_id, operate_uid=user_id,
|
||||
operate_type=operate_type.value,
|
||||
origin=origin, current=current, extra=extra, source=source.value)
|
||||
|
||||
@classmethod
|
||||
def add_login_log(cls, username, is_ok, description, _id=None, logout_at=None):
|
||||
if _id is not None:
|
||||
existed = AuditLoginLog.get_by_id(_id)
|
||||
if existed is not None:
|
||||
existed.update(logout_at=logout_at)
|
||||
return
|
||||
|
||||
payload = dict(username=username,
|
||||
is_ok=is_ok,
|
||||
description=description,
|
||||
logout_at=logout_at,
|
||||
ip=request.headers.get('X-Real-IP') or request.remote_addr,
|
||||
browser=request.headers.get('User-Agent'),
|
||||
)
|
||||
|
||||
if logout_at is None:
|
||||
payload['login_at'] = datetime.datetime.now()
|
||||
|
||||
return AuditLoginLog.create(**payload).id
|
||||
|
@@ -4,7 +4,7 @@
|
||||
import msgpack
|
||||
|
||||
from api.extensions import cache
|
||||
from api.lib.decorator import flush_db
|
||||
from api.extensions import db
|
||||
from api.lib.utils import Lock
|
||||
from api.models.acl import App
|
||||
from api.models.acl import Permission
|
||||
@@ -221,9 +221,9 @@ class RoleRelationCache(object):
|
||||
return msgpack.loads(r_g, raw=False)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild(cls, rid, app_id):
|
||||
cls.clean(rid, app_id)
|
||||
db.session.remove()
|
||||
|
||||
cls.get_parent_ids(rid, app_id)
|
||||
cls.get_child_ids(rid, app_id)
|
||||
@@ -235,9 +235,9 @@ class RoleRelationCache(object):
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild2(cls, rid, app_id):
|
||||
cache.delete(cls.PREFIX_RESOURCES2.format(rid, app_id))
|
||||
db.session.remove()
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
|
@@ -260,8 +260,7 @@ class ResourceCRUD(object):
|
||||
numfound = query.count()
|
||||
res = [i.to_dict() for i in query.offset((page - 1) * page_size).limit(page_size)]
|
||||
for i in res:
|
||||
user = UserCache.get(i['uid']) if i['uid'] else ''
|
||||
i['user'] = user and user.nickname
|
||||
i['user'] = UserCache.get(i['uid']).nickname if i['uid'] else ''
|
||||
|
||||
return numfound, res
|
||||
|
||||
@@ -276,6 +275,7 @@ class ResourceCRUD(object):
|
||||
|
||||
from api.tasks.acl import apply_trigger
|
||||
triggers = TriggerCRUD.match_triggers(app_id, r.name, r.resource_type_id, uid)
|
||||
current_app.logger.info(triggers)
|
||||
for trigger in triggers:
|
||||
# auto trigger should be no uid
|
||||
apply_trigger.apply_async(args=(trigger.id,),
|
||||
|
@@ -4,9 +4,6 @@ from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
class ErrFormat(CommonErrFormat):
|
||||
login_succeed = "登录成功"
|
||||
ldap_connection_failed = "连接LDAP服务失败"
|
||||
invalid_password = "密码验证失败"
|
||||
auth_only_with_app_token_failed = "应用 Token验证失败"
|
||||
session_invalid = "您不是应用管理员 或者 session失效(尝试一下退出重新登录)"
|
||||
|
||||
@@ -20,11 +17,11 @@ class ErrFormat(CommonErrFormat):
|
||||
role_exists = "角色 {} 已经存在!"
|
||||
global_role_not_found = "全局角色 {} 不存在!"
|
||||
global_role_exists = "全局角色 {} 已经存在!"
|
||||
user_role_delete_invalid = "删除用户角色, 请在 用户管理 页面操作!"
|
||||
|
||||
resource_no_permission = "您没有资源: {} 的 {} 权限"
|
||||
admin_required = "需要管理员权限"
|
||||
role_required = "需要角色: {}"
|
||||
user_role_delete_invalid = "删除用户角色, 请在 用户管理 页面操作!"
|
||||
|
||||
app_is_ready_existed = "应用 {} 已经存在"
|
||||
app_not_found = "应用 {} 不存在!"
|
||||
|
@@ -10,7 +10,9 @@ from sqlalchemy import or_
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.app import AppCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditScope
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
from api.lib.perm.acl.cache import HasResourceRoleCache
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
@@ -69,16 +71,16 @@ class RoleRelationCRUD(object):
|
||||
@staticmethod
|
||||
def get_parent_ids(rid, app_id):
|
||||
if app_id is not None:
|
||||
return [i.parent_id for i in RoleRelation.get_by(child_id=rid, app_id=app_id, to_dict=False)] + \
|
||||
[i.parent_id for i in RoleRelation.get_by(child_id=rid, app_id=None, to_dict=False)]
|
||||
return ([i.parent_id for i in RoleRelation.get_by(child_id=rid, app_id=app_id, to_dict=False)] +
|
||||
[i.parent_id for i in RoleRelation.get_by(child_id=rid, app_id=None, to_dict=False)])
|
||||
else:
|
||||
return [i.parent_id for i in RoleRelation.get_by(child_id=rid, app_id=app_id, to_dict=False)]
|
||||
|
||||
@staticmethod
|
||||
def get_child_ids(rid, app_id):
|
||||
if app_id is not None:
|
||||
return [i.child_id for i in RoleRelation.get_by(parent_id=rid, app_id=app_id, to_dict=False)] + \
|
||||
[i.child_id for i in RoleRelation.get_by(parent_id=rid, app_id=None, to_dict=False)]
|
||||
return ([i.child_id for i in RoleRelation.get_by(parent_id=rid, app_id=app_id, to_dict=False)] +
|
||||
[i.child_id for i in RoleRelation.get_by(parent_id=rid, app_id=None, to_dict=False)])
|
||||
else:
|
||||
return [i.child_id for i in RoleRelation.get_by(parent_id=rid, app_id=app_id, to_dict=False)]
|
||||
|
||||
@@ -213,6 +215,7 @@ class RoleCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def search(q, app_id, page=1, page_size=None, user_role=True, is_all=False, user_only=False):
|
||||
|
||||
if user_only: # only user role
|
||||
query = db.session.query(Role).filter(Role.deleted.is_(False)).filter(Role.uid.isnot(None))
|
||||
|
||||
@@ -270,13 +273,6 @@ class RoleCRUD(object):
|
||||
RoleCache.clean(rid)
|
||||
|
||||
role = role.update(**kwargs)
|
||||
|
||||
if origin['uid'] and kwargs.get('name') and kwargs.get('name') != origin['name']:
|
||||
from api.models.acl import User
|
||||
user = User.get_by(uid=origin['uid'], first=True, to_dict=False)
|
||||
if user:
|
||||
user.update(username=kwargs['name'])
|
||||
|
||||
AuditCRUD.add_role_log(role.app_id, AuditOperateType.update,
|
||||
AuditScope.role, role.id, origin, role.to_dict(), {},
|
||||
)
|
||||
@@ -295,11 +291,12 @@ class RoleCRUD(object):
|
||||
from api.lib.perm.acl.acl import is_admin
|
||||
|
||||
role = Role.get_by_id(rid) or abort(404, ErrFormat.role_not_found.format("rid={}".format(rid)))
|
||||
if not role.app_id and not is_admin():
|
||||
return abort(403, ErrFormat.admin_required)
|
||||
|
||||
not force and role.uid and abort(400, ErrFormat.user_role_delete_invalid)
|
||||
|
||||
if not role.app_id and not is_admin():
|
||||
return abort(403, ErrFormat.admin_required)
|
||||
|
||||
origin = role.to_dict()
|
||||
|
||||
child_ids = []
|
||||
@@ -308,18 +305,20 @@ class RoleCRUD(object):
|
||||
|
||||
for i in RoleRelation.get_by(parent_id=rid, to_dict=False):
|
||||
child_ids.append(i.child_id)
|
||||
i.soft_delete()
|
||||
i.soft_delete(commit=False)
|
||||
|
||||
for i in RoleRelation.get_by(child_id=rid, to_dict=False):
|
||||
parent_ids.append(i.parent_id)
|
||||
i.soft_delete()
|
||||
i.soft_delete(commit=False)
|
||||
|
||||
role_permissions = []
|
||||
for i in RolePermission.get_by(rid=rid, to_dict=False):
|
||||
role_permissions.append(i.to_dict())
|
||||
i.soft_delete()
|
||||
i.soft_delete(commit=False)
|
||||
|
||||
role.soft_delete()
|
||||
role.soft_delete(commit=False)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
role_rebuild.apply_async(args=(recursive_child_ids, role.app_id), queue=ACL_QUEUE)
|
||||
|
||||
|
@@ -41,7 +41,6 @@ class UserCRUD(object):
|
||||
|
||||
@classmethod
|
||||
def add(cls, **kwargs):
|
||||
add_from = kwargs.pop('add_from', None)
|
||||
existed = User.get_by(username=kwargs['username'])
|
||||
existed and abort(400, ErrFormat.user_exists.format(kwargs['username']))
|
||||
|
||||
@@ -59,15 +58,10 @@ class UserCRUD(object):
|
||||
kwargs['employee_id'] = '{0:04d}'.format(biggest_employee_id + 1)
|
||||
user = User.create(**kwargs)
|
||||
|
||||
role = RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
AuditCRUD.add_role_log(None, AuditOperateType.create,
|
||||
AuditScope.user, user.uid, {}, user.to_dict(), {}, {}
|
||||
)
|
||||
if add_from != 'common':
|
||||
from api.lib.common_setting.employee import EmployeeCRUD
|
||||
payload = {column: getattr(user, column) for column in ['uid', 'username', 'nickname', 'email', 'block']}
|
||||
payload['rid'] = role.id
|
||||
EmployeeCRUD.add_employee_from_acl_created(**payload)
|
||||
|
||||
return user
|
||||
|
||||
|
@@ -93,9 +93,6 @@ def _auth_with_token():
|
||||
|
||||
|
||||
def _auth_with_ip_white_list():
|
||||
if request.url.endswith("acl/users/info"):
|
||||
return False
|
||||
|
||||
ip = request.headers.get('X-Real-IP') or request.remote_addr
|
||||
key = request.values.get('_key')
|
||||
secret = request.values.get('_secret')
|
||||
|
@@ -1 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
@@ -1,67 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import uuid
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from ldap3 import ALL
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError
|
||||
from ldap3.core.exceptions import LDAPCertificateError
|
||||
from ldap3.core.exceptions import LDAPSocketOpenError
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from api.models.acl import User
|
||||
|
||||
|
||||
def authenticate_with_ldap(username, password):
|
||||
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
|
||||
|
||||
server = Server(config.get('LDAP').get('ldap_server'), get_info=ALL, connect_timeout=3)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = config['LDAP'].get('ldap_user_dn').format(username.split('@')[0])
|
||||
else:
|
||||
who = config['LDAP'].get('ldap_user_dn').format(username)
|
||||
email = "{}@{}".format(who, config['LDAP'].get('ldap_domain'))
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = User.query.get_by_username(username)
|
||||
try:
|
||||
if not password:
|
||||
raise LDAPCertificateError
|
||||
|
||||
try:
|
||||
conn = Connection(server, user=who, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
conn = Connection(server,
|
||||
user=f"{username}@{config['LDAP'].get('ldap_domain')}",
|
||||
password=password,
|
||||
auto_bind=AUTO_BIND_NO_TLS)
|
||||
|
||||
if conn.result['result'] != 0:
|
||||
AuditCRUD.add_login_log(username, False, ErrFormat.invalid_password)
|
||||
raise LDAPBindError
|
||||
else:
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email, password=uuid.uuid4().hex)
|
||||
|
||||
return user, True
|
||||
|
||||
except LDAPBindError as e:
|
||||
current_app.logger.info(e)
|
||||
return user, False
|
||||
|
||||
except LDAPSocketOpenError as e:
|
||||
current_app.logger.info(e)
|
||||
return abort(403, ErrFormat.ldap_connection_failed)
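
A minimal sketch of the bind check used above, assuming a reachable LDAP server at ldap://ldap.example.com:389 and a full user DN (both placeholders):

from ldap3 import ALL, AUTO_BIND_NO_TLS, Connection, Server
from ldap3.core.exceptions import LDAPBindError


def ldap_bind_ok(server_uri, user_dn, password):
    # Empty passwords would otherwise bind anonymously, so reject them up front.
    if not password:
        return False
    server = Server(server_uri, get_info=ALL, connect_timeout=3)
    try:
        conn = Connection(server, user=user_dn, password=password, auto_bind=AUTO_BIND_NO_TLS)
        return conn.result['result'] == 0
    except LDAPBindError:
        return False


# e.g. ldap_bind_ok("ldap://ldap.example.com:389", "cn=alice,dc=example,dc=com", "secret")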
|
@@ -1,30 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from . import routing
|
||||
|
||||
|
||||
class OAuth2(object):
|
||||
def __init__(self, app=None, url_prefix=None):
|
||||
self._app = app
|
||||
if app is not None:
|
||||
self.init_app(app, url_prefix)
|
||||
|
||||
@staticmethod
|
||||
def init_app(app, url_prefix=None):
|
||||
# Configuration defaults
|
||||
app.config.setdefault('OAUTH2_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OAUTH2_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OAUTH2_AFTER_LOGIN', '/')
|
||||
|
||||
app.config.setdefault('OIDC_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OIDC_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OIDC_AFTER_LOGIN', '/')
|
||||
|
||||
# Register Blueprint
|
||||
app.register_blueprint(routing.blueprint, url_prefix=url_prefix)
|
||||
|
||||
@property
|
||||
def app(self):
|
||||
return self._app or current_app
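
A small usage sketch of the extension above; the url_prefix value is illustrative:

from flask import Flask

app = Flask(__name__)
oauth2 = OAuth2(app, url_prefix='/oauth2')   # sets the config defaults and registers the blueprint
# or, with the application-factory pattern:
# oauth2 = OAuth2(); oauth2.init_app(app, url_prefix='/oauth2')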
|
@@ -1,139 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import secrets
|
||||
import uuid
|
||||
|
||||
import requests
|
||||
from flask import Blueprint
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlencode
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
blueprint = Blueprint('oauth2', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/login')
|
||||
def login(auth_type):
|
||||
config = AuthenticateDataCRUD(auth_type.upper()).get()
|
||||
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
session[f'{auth_type}_state'] = secrets.token_urlsafe(16)
|
||||
|
||||
auth_type = auth_type.upper()
|
||||
|
||||
redirect_uri = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('oauth2.callback', auth_type=auth_type.lower()))
|
||||
qs = urlencode({
|
||||
'client_id': config['client_id'],
|
||||
'redirect_uri': redirect_uri,
|
||||
'response_type': current_app.config[f'{auth_type}_RESPONSE_TYPE'],
|
||||
'scope': ' '.join(config['scopes'] or []),
|
||||
'state': session[f'{auth_type.lower()}_state'],
|
||||
})
|
||||
|
||||
return redirect("{}?{}".format(config['authorize_url'].split('?')[0], qs))
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/callback')
|
||||
def callback(auth_type):
|
||||
auth_type = auth_type.upper()
|
||||
config = AuthenticateDataCRUD(auth_type).get()
|
||||
|
||||
redirect_url = session.get("next") or config.get('after_login') or '/'
|
||||
|
||||
if request.values['state'] != session.get(f'{auth_type.lower()}_state'):
|
||||
return abort(401, "state is invalid")
|
||||
|
||||
if 'code' not in request.values:
|
||||
return abort(401, 'code is invalid')
|
||||
|
||||
response = requests.post(config['token_url'], data={
|
||||
'client_id': config['client_id'],
|
||||
'client_secret': config['client_secret'],
|
||||
'code': request.values['code'],
|
||||
'grant_type': current_app.config[f'{auth_type}_GRANT_TYPE'],
|
||||
'redirect_uri': url_for('oauth2.callback', auth_type=auth_type.lower(), _external=True),
|
||||
}, headers={'Accept': 'application/json'})
|
||||
if response.status_code != 200:
|
||||
current_app.logger.error(response.text)
|
||||
return abort(401)
|
||||
access_token = response.json().get('access_token')
|
||||
if not access_token:
|
||||
return abort(401)
|
||||
|
||||
response = requests.get(config['user_info']['url'], headers={
|
||||
'Authorization': 'Bearer {}'.format(access_token),
|
||||
'Accept': 'application/json',
|
||||
})
|
||||
if response.status_code != 200:
|
||||
return abort(401)
|
||||
|
||||
res = response.json()
|
||||
email = res.get(config['user_info']['email'])
|
||||
username = res.get(config['user_info']['username'])
|
||||
avatar = res.get(config['user_info'].get('avatar'))
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
user_dict = dict(username=username, email=email, avatar=avatar)
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
|
||||
user = UserCRUD.add(**user_dict)
|
||||
|
||||
# log the user in
|
||||
login_user(user)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
|
||||
session["acl"] = dict(uid=user_info.get("uid"),
|
||||
avatar=user.avatar if user else user_info.get("avatar"),
|
||||
userId=user_info.get("uid"),
|
||||
rid=user_info.get("rid"),
|
||||
userName=user_info.get("username"),
|
||||
nickName=user_info.get("nickname") or user_info.get("username"),
|
||||
parentRoles=user_info.get("parents"),
|
||||
childRoles=user_info.get("children"),
|
||||
roleName=user_info.get("role"))
|
||||
session["uid"] = user_info.get("uid")
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
return redirect(redirect_url)
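
For reference, the login/callback handlers above expect an auth config of roughly this shape from AuthenticateDataCRUD; every value below is a placeholder:

example_oauth2_config = {
    'client_id': 'my-client-id',
    'client_secret': 'my-client-secret',
    'authorize_url': 'https://idp.example.com/oauth/authorize',
    'token_url': 'https://idp.example.com/oauth/token',
    'scopes': ['openid', 'profile', 'email'],
    'user_info': {
        'url': 'https://idp.example.com/oauth/userinfo',
        'username': 'preferred_username',   # JSON key holding the username
        'email': 'email',
        'avatar': 'picture',
    },
    'after_login': '/',
}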
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/logout')
|
||||
def logout(auth_type):
|
||||
"acl" in session and session.pop("acl")
|
||||
"uid" in session and session.pop("uid")
|
||||
f'{auth_type}_state' in session and session.pop(f'{auth_type}_state')
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = url_for('oauth2.login', auth_type=auth_type, _external=True, next=request.referrer)
|
||||
|
||||
logout_user()
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
return redirect(redirect_url)
|
@@ -1 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
@@ -1,429 +0,0 @@
|
||||
import os
|
||||
import secrets
|
||||
import sys
|
||||
from base64 import b64decode, b64encode
|
||||
|
||||
from Cryptodome.Protocol.SecretSharing import Shamir
|
||||
from colorama import Back
|
||||
from colorama import Fore
|
||||
from colorama import Style
|
||||
from colorama import init as colorama_init
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher
|
||||
from cryptography.hazmat.primitives.ciphers import algorithms
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from flask import current_app
|
||||
|
||||
global_iv_length = 16
|
||||
global_key_shares = 5 # Number of generated key shares
|
||||
global_key_threshold = 3 # Minimum number of shares required to rebuild the key
|
||||
|
||||
backend_root_key_name = "root_key"
|
||||
backend_encrypt_key_name = "encrypt_key"
|
||||
backend_root_key_salt_name = "root_key_salt"
|
||||
backend_encrypt_key_salt_name = "encrypt_key_salt"
|
||||
backend_seal_key = "seal_status"
|
||||
success = "success"
|
||||
seal_status = True
|
||||
|
||||
|
||||
def string_to_bytes(value):
|
||||
if isinstance(value, bytes):
|
||||
return value
|
||||
if sys.version_info.major == 2:
|
||||
byte_string = value
|
||||
else:
|
||||
byte_string = value.encode("utf-8")
|
||||
return byte_string
|
||||
|
||||
|
||||
class Backend:
|
||||
def __init__(self, backend=None):
|
||||
self.backend = backend
|
||||
|
||||
def get(self, key):
|
||||
return self.backend.get(key)
|
||||
|
||||
def add(self, key, value):
|
||||
return self.backend.add(key, value)
|
||||
|
||||
def update(self, key, value):
|
||||
return self.backend.update(key, value)
|
||||
|
||||
|
||||
class KeyManage:
|
||||
|
||||
def __init__(self, trigger=None, backend=None):
|
||||
self.trigger = trigger
|
||||
self.backend = backend
|
||||
if backend:
|
||||
self.backend = Backend(backend)
|
||||
|
||||
def init_app(self, app, backend=None):
|
||||
if (sys.argv[0].endswith("gunicorn") or
|
||||
(len(sys.argv) > 1 and sys.argv[1] in ("run", "cmdb-password-data-migrate"))):
|
||||
self.trigger = app.config.get("INNER_TRIGGER_TOKEN")
|
||||
if not self.trigger:
|
||||
return
|
||||
|
||||
self.backend = backend
|
||||
resp = self.auto_unseal()
|
||||
self.print_response(resp)
|
||||
|
||||
def hash_root_key(self, value):
|
||||
algorithm = hashes.SHA256()
|
||||
salt = self.backend.get(backend_root_key_salt_name)
|
||||
if not salt:
|
||||
salt = secrets.token_hex(16)
|
||||
msg, ok = self.backend.add(backend_root_key_salt_name, salt)
|
||||
if not ok:
|
||||
return msg, ok
|
||||
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=algorithm,
|
||||
length=32,
|
||||
salt=string_to_bytes(salt),
|
||||
iterations=100000,
|
||||
)
|
||||
key = kdf.derive(string_to_bytes(value))
|
||||
|
||||
return b64encode(key).decode('utf-8'), True
|
||||
|
||||
def generate_encrypt_key(self, key):
|
||||
algorithm = hashes.SHA256()
|
||||
salt = self.backend.get(backend_encrypt_key_salt_name)
|
||||
if not salt:
|
||||
salt = secrets.token_hex(32)
|
||||
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=algorithm,
|
||||
length=32,
|
||||
salt=string_to_bytes(salt),
|
||||
iterations=100000,
|
||||
backend=default_backend()
|
||||
)
|
||||
key = kdf.derive(string_to_bytes(key))
|
||||
msg, ok = self.backend.add(backend_encrypt_key_salt_name, salt)
|
||||
if ok:
|
||||
return b64encode(key).decode('utf-8'), ok
|
||||
else:
|
||||
return msg, ok
|
||||
|
||||
@classmethod
|
||||
def generate_keys(cls, secret):
|
||||
shares = Shamir.split(global_key_threshold, global_key_shares, secret, False)
|
||||
new_shares = []
|
||||
for share in shares:
|
||||
t = [i for i in share[1]] + [ord(i) for i in "{:0>2}".format(share[0])]
|
||||
new_shares.append(b64encode(bytes(t)))
|
||||
|
||||
return new_shares
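
A minimal round trip of the Shamir split/combine used above (pycryptodome, 16-byte secret, 3-of-5):

import os

from Cryptodome.Protocol.SecretSharing import Shamir

secret = os.urandom(16)                      # Shamir here operates on 16-byte secrets
shares = Shamir.split(3, 5, secret, False)   # 5 shares, any 3 reconstruct the secret
assert Shamir.combine(shares[:3], False) == secret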
|
||||
|
||||
def is_valid_root_key(self, root_key):
|
||||
root_key_hash, ok = self.hash_root_key(root_key)
|
||||
if not ok:
|
||||
return root_key_hash, ok
|
||||
backend_root_key_hash = self.backend.get(backend_root_key_name)
|
||||
if not backend_root_key_hash:
|
||||
return "should init firstly", False
|
||||
elif backend_root_key_hash != root_key_hash:
|
||||
return "invalid root key", False
|
||||
else:
|
||||
return "", True
|
||||
|
||||
def auth_root_secret(self, root_key):
|
||||
msg, ok = self.is_valid_root_key(root_key)
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
encrypt_key_aes = self.backend.get(backend_encrypt_key_name)
|
||||
if not encrypt_key_aes:
|
||||
return {
|
||||
"message": "encrypt key is empty",
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
secrets_encrypt_key, ok = InnerCrypt.aes_decrypt(string_to_bytes(root_key), encrypt_key_aes)
|
||||
if ok:
|
||||
msg, ok = self.backend.update(backend_seal_key, "open")
|
||||
if ok:
|
||||
current_app.config["secrets_encrypt_key"] = secrets_encrypt_key
|
||||
current_app.config["secrets_root_key"] = root_key
|
||||
current_app.config["secrets_shares"] = []
|
||||
return {"message": success, "status": success}
|
||||
return {"message": msg, "status": "failed"}
|
||||
else:
|
||||
return {
|
||||
"message": secrets_encrypt_key,
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
def unseal(self, key):
|
||||
if not self.is_seal():
|
||||
return {
|
||||
"message": "current status is unseal, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
try:
|
||||
t = [i for i in b64decode(key)]
|
||||
v = (int("".join([chr(i) for i in t[-2:]])), bytes(t[:-2]))
|
||||
shares = current_app.config.get("secrets_shares", [])
|
||||
if v not in shares:
|
||||
shares.append(v)
|
||||
current_app.config["secrets_shares"] = shares
|
||||
|
||||
if len(shares) >= global_key_threshold:
|
||||
recovered_secret = Shamir.combine(shares[:global_key_threshold], False)
|
||||
return self.auth_root_secret(b64encode(recovered_secret))
|
||||
else:
|
||||
return {
|
||||
"message": "waiting for inputting other unseal key {0}/{1}".format(len(shares),
|
||||
global_key_threshold),
|
||||
"status": "waiting"
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
"message": "invalid token: " + str(e),
|
||||
"status": "failed"
|
||||
}
|
||||
|
||||
def generate_unseal_keys(self):
|
||||
info = self.backend.get(backend_root_key_name)
|
||||
if info:
|
||||
return "already exist", [], False
|
||||
|
||||
secret = AESGCM.generate_key(128)
|
||||
shares = self.generate_keys(secret)
|
||||
|
||||
return b64encode(secret), shares, True
|
||||
|
||||
def init(self):
|
||||
"""
|
||||
initialize the root key and unseal key shares, then store them in the backend
|
||||
:return:
|
||||
"""
|
||||
root_key = self.backend.get(backend_root_key_name)
|
||||
if root_key:
|
||||
return {"message": "already init, skip", "status": "skip"}, False
|
||||
else:
|
||||
root_key, shares, status = self.generate_unseal_keys()
|
||||
if not status:
|
||||
return {"message": root_key, "status": "failed"}, False
|
||||
|
||||
# hash root key and store in backend
|
||||
root_key_hash, ok = self.hash_root_key(root_key)
|
||||
if not ok:
|
||||
return {"message": root_key_hash, "status": "failed"}, False
|
||||
|
||||
msg, ok = self.backend.add(backend_root_key_name, root_key_hash)
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
|
||||
# generate encrypt key from root_key and store in backend
|
||||
encrypt_key, ok = self.generate_encrypt_key(root_key)
|
||||
if not ok:
|
||||
return {"message": encrypt_key, "status": "failed"}
|
||||
|
||||
encrypt_key_aes, status = InnerCrypt.aes_encrypt(root_key, encrypt_key)
|
||||
if not status:
|
||||
return {"message": encrypt_key_aes, "status": "failed"}
|
||||
|
||||
msg, ok = self.backend.add(backend_encrypt_key_name, encrypt_key_aes)
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
msg, ok = self.backend.add(backend_seal_key, "open")
|
||||
if not ok:
|
||||
return {"message": msg, "status": "failed"}, False
|
||||
current_app.config["secrets_root_key"] = root_key
|
||||
current_app.config["secrets_encrypt_key"] = encrypt_key
|
||||
self.print_token(shares, root_token=root_key)
|
||||
|
||||
return {"message": "OK",
|
||||
"details": {
|
||||
"root_token": root_key,
|
||||
"seal_tokens": shares,
|
||||
}}, True
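
A sketch of the intended init/unseal flow, assuming a Flask app context and a KV backend such as the InnerKVManger defined elsewhere in this changeset:

km = KeyManage(backend=InnerKVManger())       # backend must provide get/add/update
result, ok = km.init()                        # first run prints the root token and the 5 unseal shares
# after a restart, any 3 of the 5 saved shares unseal the system again:
# for share in saved_shares[:3]:
#     print(km.unseal(share))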
|
||||
|
||||
def auto_unseal(self):
|
||||
if not self.trigger:
|
||||
return {
|
||||
"message": "trigger config is empty, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
if self.trigger.startswith("http"):
|
||||
return {
|
||||
"message": "todo in next step, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
# TODO
|
||||
elif len(self.trigger.strip()) == 24:
|
||||
res = self.auth_root_secret(self.trigger.encode())
|
||||
if res.get("status") == success:
|
||||
return {
|
||||
"message": success,
|
||||
"status": success
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"message": res.get("message"),
|
||||
"status": "failed"
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"message": "trigger config is invalid, skip",
|
||||
"status": "skip"
|
||||
}
|
||||
|
||||
def seal(self, root_key):
|
||||
root_key = root_key.encode()
|
||||
msg, ok = self.is_valid_root_key(root_key)
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed"
|
||||
}
|
||||
else:
|
||||
msg, ok = self.backend.update(backend_seal_key, "block")
|
||||
if not ok:
|
||||
return {
|
||||
"message": msg,
|
||||
"status": "failed",
|
||||
}
|
||||
current_app.config["secrets_root_key"] = ''
|
||||
current_app.config["secrets_encrypt_key"] = ''
|
||||
return {
|
||||
"message": success,
|
||||
"status": success
|
||||
}
|
||||
|
||||
def is_seal(self):
|
||||
"""
|
||||
If there is no initialization or the root key is inconsistent, it is considered to be in a sealed state.
|
||||
:return:
|
||||
"""
|
||||
secrets_root_key = current_app.config.get("secrets_root_key")
|
||||
msg, ok = self.is_valid_root_key(secrets_root_key)
|
||||
if not ok:
|
||||
return True
|
||||
status = self.backend.get(backend_seal_key)
|
||||
return status == "block"
|
||||
|
||||
@classmethod
|
||||
def print_token(cls, shares, root_token):
|
||||
"""
|
||||
data: {"message": "OK",
|
||||
"details": {
|
||||
"root_token": root_key,
|
||||
"seal_tokens": shares,
|
||||
}}
|
||||
"""
|
||||
colorama_init()
|
||||
print(Style.BRIGHT, "Please be sure to store the Unseal Key in a secure location and avoid losing it."
|
||||
" The Unseal Key is required to unseal the system every time when it restarts."
|
||||
" Successful unsealing is necessary to enable the password feature." + Style.RESET_ALL)
|
||||
|
||||
for i, v in enumerate(shares):
|
||||
print(
|
||||
"unseal token " + str(i + 1) + ": " + Fore.RED + Back.BLACK + v.decode("utf-8") + Style.RESET_ALL)
|
||||
print()
|
||||
|
||||
print(Fore.GREEN + "root token: " + root_token.decode("utf-8") + Style.RESET_ALL)
|
||||
|
||||
@classmethod
|
||||
def print_response(cls, data):
|
||||
status = data.get("status", "")
|
||||
message = data.get("message", "")
|
||||
status_colors = {
|
||||
"skip": Style.BRIGHT,
|
||||
"failed": Fore.RED,
|
||||
"waiting": Fore.YELLOW,
|
||||
}
|
||||
print(status_colors.get(status, Fore.GREEN), message, Style.RESET_ALL)
|
||||
|
||||
|
||||
class InnerCrypt:
|
||||
def __init__(self):
|
||||
secrets_encrypt_key = current_app.config.get("secrets_encrypt_key", "")
|
||||
self.encrypt_key = b64decode(secrets_encrypt_key.encode("utf-8"))
|
||||
|
||||
def encrypt(self, plaintext):
|
||||
"""
|
||||
encrypt method; currently AES only
|
||||
"""
|
||||
return self.aes_encrypt(self.encrypt_key, plaintext)
|
||||
|
||||
def decrypt(self, ciphertext):
|
||||
"""
|
||||
decrypt method; currently AES only
|
||||
"""
|
||||
return self.aes_decrypt(self.encrypt_key, ciphertext)
|
||||
|
||||
@classmethod
|
||||
def aes_encrypt(cls, key, plaintext):
|
||||
if isinstance(plaintext, str):
|
||||
plaintext = string_to_bytes(plaintext)
|
||||
iv = os.urandom(global_iv_length)
|
||||
try:
|
||||
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
|
||||
encryptor = cipher.encryptor()
|
||||
v_padder = padding.PKCS7(algorithms.AES.block_size).padder()
|
||||
padded_plaintext = v_padder.update(plaintext) + v_padder.finalize()
|
||||
ciphertext = encryptor.update(padded_plaintext) + encryptor.finalize()
|
||||
|
||||
return b64encode(iv + ciphertext).decode("utf-8"), True
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
|
||||
@classmethod
|
||||
def aes_decrypt(cls, key, ciphertext):
|
||||
try:
|
||||
s = b64decode(ciphertext.encode("utf-8"))
|
||||
iv = s[:global_iv_length]
|
||||
ciphertext = s[global_iv_length:]
|
||||
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
|
||||
decrypter = cipher.decryptor()
|
||||
decrypted_padded_plaintext = decrypter.update(ciphertext) + decrypter.finalize()
|
||||
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
|
||||
plaintext = unpadder.update(decrypted_padded_plaintext) + unpadder.finalize()
|
||||
|
||||
return plaintext.decode('utf-8'), True
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
km = KeyManage()
|
||||
# info, shares, status = km.generate_unseal_keys()
|
||||
# print(info, shares, status)
|
||||
# print("..................")
|
||||
# for i in shares:
|
||||
# print(b64encode(i[1]).decode())
|
||||
|
||||
res1, ok1 = km.init()
|
||||
if not ok1:
|
||||
print(res1)
|
||||
# for j in res["details"]["seal_tokens"]:
|
||||
# r = km.unseal(j)
|
||||
# if r["status"] != "waiting":
|
||||
# if r["status"] != "success":
|
||||
# print("r........", r)
|
||||
# else:
|
||||
# print(r)
|
||||
# break
|
||||
|
||||
t_plaintext = b"Hello, World!" # The plaintext to encrypt
|
||||
c = InnerCrypt()
|
||||
t_ciphertext, status1 = c.encrypt(t_plaintext)
|
||||
print("Ciphertext:", t_ciphertext)
|
||||
decrypted_plaintext, status2 = c.decrypt(t_ciphertext)
|
||||
print("Decrypted plaintext:", decrypted_plaintext)
|
@@ -1,35 +0,0 @@
|
||||
from api.models.cmdb import InnerKV
|
||||
|
||||
|
||||
class InnerKVManger(object):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def add(cls, key, value):
|
||||
data = {"key": key, "value": value}
|
||||
res = InnerKV.create(**data)
|
||||
if res.key == key:
|
||||
return "success", True
|
||||
|
||||
return "add failed", False
|
||||
|
||||
@classmethod
|
||||
def get(cls, key):
|
||||
res = InnerKV.get_by(first=True, to_dict=False, key=key)
|
||||
if not res:
|
||||
return None
|
||||
|
||||
return res.value
|
||||
|
||||
@classmethod
|
||||
def update(cls, key, value):
|
||||
res = InnerKV.get_by(first=True, to_dict=False, key=key)
|
||||
if not res:
|
||||
return cls.add(key, value)
|
||||
|
||||
t = res.update(value=value)
|
||||
if t.key == key:
|
||||
return "success", True
|
||||
|
||||
return "update failed", True
|
@@ -1,141 +0,0 @@
|
||||
from base64 import b64decode
|
||||
from base64 import b64encode
|
||||
|
||||
import hvac
|
||||
|
||||
|
||||
class VaultClient:
|
||||
def __init__(self, base_url, token, mount_path='cmdb'):
|
||||
self.client = hvac.Client(url=base_url, token=token)
|
||||
self.mount_path = mount_path
|
||||
|
||||
def create_app_role(self, role_name, policies):
|
||||
resp = self.client.create_approle(role_name, policies=policies)
|
||||
|
||||
return resp == 200
|
||||
|
||||
def delete_app_role(self, role_name):
|
||||
resp = self.client.delete_approle(role_name)
|
||||
|
||||
return resp == 204
|
||||
|
||||
def update_app_role_policies(self, role_name, policies):
|
||||
resp = self.client.update_approle_role(role_name, policies=policies)
|
||||
|
||||
return resp == 204
|
||||
|
||||
def get_app_role(self, role_name):
|
||||
resp = self.client.get_approle(role_name)
|
||||
resp.json()
|
||||
if resp.status_code == 200:
|
||||
return resp.json()
|
||||
else:
|
||||
return {}
|
||||
|
||||
def enable_secrets_engine(self):
|
||||
resp = self.client.sys.enable_secrets_engine('kv', path=self.mount_path)
|
||||
resp_01 = self.client.sys.enable_secrets_engine('transit')
|
||||
|
||||
if resp.status_code == 200 and resp_01.status_code == 200:
|
||||
return resp.json()
|
||||
else:
|
||||
return {}
|
||||
|
||||
def encrypt(self, plaintext):
|
||||
response = self.client.secrets.transit.encrypt_data(name='transit-key', plaintext=plaintext)
|
||||
ciphertext = response['data']['ciphertext']
|
||||
|
||||
return ciphertext
|
||||
|
||||
# decrypt data
|
||||
def decrypt(self, ciphertext):
|
||||
response = self.client.secrets.transit.decrypt_data(name='transit-key', ciphertext=ciphertext)
|
||||
plaintext = response['data']['plaintext']
|
||||
|
||||
return plaintext
|
||||
|
||||
def write(self, path, data, encrypt=None):
|
||||
if encrypt:
|
||||
for k, v in data.items():
|
||||
data[k] = self.encrypt(self.encode_base64(v))
|
||||
response = self.client.secrets.kv.v2.create_or_update_secret(
|
||||
path=path,
|
||||
secret=data,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
# read data
|
||||
def read(self, path, decrypt=True):
|
||||
try:
|
||||
response = self.client.secrets.kv.v2.read_secret_version(
|
||||
path=path, raise_on_deleted_version=False, mount_point=self.mount_path
|
||||
)
|
||||
except Exception as e:
|
||||
return str(e), False
|
||||
data = response['data']['data']
|
||||
if decrypt:
|
||||
try:
|
||||
for k, v in data.items():
|
||||
data[k] = self.decode_base64(self.decrypt(v))
|
||||
except:
|
||||
return data, True
|
||||
|
||||
return data, True
|
||||
|
||||
# update data
|
||||
def update(self, path, data, overwrite=True, encrypt=True):
|
||||
if encrypt:
|
||||
for k, v in data.items():
|
||||
data[k] = self.encrypt(self.encode_base64(v))
|
||||
if overwrite:
|
||||
response = self.client.secrets.kv.v2.create_or_update_secret(
|
||||
path=path,
|
||||
secret=data,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
else:
|
||||
response = self.client.secrets.kv.v2.patch(path=path, secret=data, mount_point=self.mount_path)
|
||||
|
||||
return response
|
||||
|
||||
# delete data
|
||||
def delete(self, path):
|
||||
response = self.client.secrets.kv.v2.delete_metadata_and_all_versions(
|
||||
path=path,
|
||||
mount_point=self.mount_path
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
# Base64 encode
|
||||
@classmethod
|
||||
def encode_base64(cls, data):
|
||||
encoded_bytes = b64encode(data.encode())
|
||||
encoded_string = encoded_bytes.decode()
|
||||
|
||||
return encoded_string
|
||||
|
||||
# Base64 decode
|
||||
@classmethod
|
||||
def decode_base64(cls, encoded_string):
|
||||
decoded_bytes = b64decode(encoded_string)
|
||||
decoded_string = decoded_bytes.decode()
|
||||
|
||||
return decoded_string
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
_base_url = "http://localhost:8200"
|
||||
_token = "your token"
|
||||
|
||||
_path = "test001"
|
||||
# Example
|
||||
sdk = VaultClient(_base_url, _token)
|
||||
# sdk.enable_secrets_engine()
|
||||
_data = {"key1": "value1", "key2": "value2", "key3": "value3"}
|
||||
_data = sdk.update(_path, _data, overwrite=True, encrypt=True)
|
||||
print(_data)
|
||||
_data = sdk.read(_path, decrypt=True)
|
||||
print(_data)
|
@@ -12,9 +12,6 @@ from Crypto.Cipher import AES
|
||||
from elasticsearch import Elasticsearch
|
||||
from flask import current_app
|
||||
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
|
||||
|
||||
class BaseEnum(object):
|
||||
_ALL_ = set() # type: Set[str]
|
||||
@@ -289,33 +286,3 @@ class AESCrypto(object):
|
||||
text_decrypted = cipher.decrypt(encode_bytes)
|
||||
|
||||
return cls.unpad(text_decrypted).decode('utf8')
|
||||
|
||||
|
||||
class Crypto(AESCrypto):
|
||||
@classmethod
|
||||
def encrypt(cls, data):
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
|
||||
if not KeyManage(backend=InnerKVManger()).is_seal():
|
||||
res, status = InnerCrypt().encrypt(data)
|
||||
if status:
|
||||
return res
|
||||
|
||||
return AESCrypto().encrypt(data)
|
||||
|
||||
@classmethod
|
||||
def decrypt(cls, data):
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
|
||||
if not KeyManage(backend=InnerKVManger()).is_seal():
|
||||
try:
|
||||
res, status = InnerCrypt().decrypt(data)
|
||||
if status:
|
||||
return res
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
return AESCrypto().decrypt(data)
|
||||
except:
|
||||
return data
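
A short sketch of the dispatch above: when the secrets subsystem is unsealed, Crypto uses InnerCrypt, otherwise it falls back to the legacy AESCrypto (app context required); the sample value is a placeholder:

token = Crypto.encrypt("db-password-123")    # InnerCrypt when unsealed, AESCrypto otherwise
plain = Crypto.decrypt(token)                # tries InnerCrypt first, then AESCrypto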
|
||||
|
@@ -5,18 +5,16 @@ import copy
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
|
||||
import ldap
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from flask_sqlalchemy import BaseQuery
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.database import CRUDModel
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.database import SoftDeleteMixin
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.const import OperateType
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
|
||||
class App(Model):
|
||||
@@ -29,26 +27,21 @@ class App(Model):
|
||||
|
||||
|
||||
class UserQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(UserQuery, self)._join(*args, **kwargs)
|
||||
|
||||
def authenticate(self, login, password):
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
|
||||
user = self.filter(db.or_(User.username == login,
|
||||
User.email == login)).filter(User.deleted.is_(False)).filter(User.block == 0).first()
|
||||
if user:
|
||||
current_app.logger.info(user)
|
||||
authenticated = user.check_password(password)
|
||||
if authenticated:
|
||||
_id = AuditCRUD.add_login_log(login, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
else:
|
||||
AuditCRUD.add_login_log(login, False, ErrFormat.invalid_password)
|
||||
from api.tasks.acl import op_record
|
||||
op_record.apply_async(args=(None, login, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
|
||||
else:
|
||||
authenticated = False
|
||||
|
||||
AuditCRUD.add_login_log(login, False, ErrFormat.user_not_found.format(login))
|
||||
|
||||
current_app.logger.info(("login", login, user, authenticated))
|
||||
|
||||
return user, authenticated
|
||||
|
||||
def authenticate_with_key(self, key, secret, args, path):
|
||||
@@ -63,6 +56,37 @@ class UserQuery(BaseQuery):
|
||||
|
||||
return user, authenticated
|
||||
|
||||
def authenticate_with_ldap(self, username, password):
|
||||
ldap_conn = ldap.initialize(current_app.config.get('LDAP_SERVER'))
|
||||
ldap_conn.protocol_version = 3
|
||||
ldap_conn.set_option(ldap.OPT_REFERRALS, 0)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = current_app.config.get('LDAP_USER_DN').format(username.split('@')[0])
|
||||
else:
|
||||
who = current_app.config.get('LDAP_USER_DN').format(username)
|
||||
email = "{}@{}".format(who, current_app.config.get('LDAP_DOMAIN'))
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = self.get_by_username(username)
|
||||
try:
|
||||
|
||||
if not password:
|
||||
raise ldap.INVALID_CREDENTIALS
|
||||
|
||||
ldap_conn.simple_bind_s(who, password)
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email)
|
||||
|
||||
from api.tasks.acl import op_record
|
||||
op_record.apply_async(args=(None, username, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
|
||||
|
||||
return user, True
|
||||
except ldap.INVALID_CREDENTIALS:
|
||||
return user, False
|
||||
|
||||
def search(self, key):
|
||||
query = self.filter(db.or_(User.email == key,
|
||||
User.nickname.ilike('%' + key + '%'),
|
||||
@@ -112,7 +136,6 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
wx_id = db.Column(db.String(32))
|
||||
employee_id = db.Column(db.String(16), index=True)
|
||||
avatar = db.Column(db.String(128))
|
||||
|
||||
# apps = db.Column(db.JSON)
|
||||
|
||||
def __str__(self):
|
||||
@@ -143,6 +166,8 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
|
||||
|
||||
class RoleQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(RoleQuery, self)._join(*args, **kwargs)
|
||||
|
||||
def authenticate(self, login, password):
|
||||
role = self.filter(Role.name == login).first()
|
||||
@@ -350,16 +375,3 @@ class AuditTriggerLog(Model):
|
||||
current = db.Column(db.JSON, default=dict(), comment='当前数据')
|
||||
extra = db.Column(db.JSON, default=dict(), comment='权限名')
|
||||
source = db.Column(db.String(16), default='', comment='来源')
|
||||
|
||||
|
||||
class AuditLoginLog(Model2):
|
||||
__tablename__ = "acl_audit_login_logs"
|
||||
|
||||
username = db.Column(db.String(64), index=True)
|
||||
channel = db.Column(db.Enum('web', 'api'), default="web")
|
||||
ip = db.Column(db.String(15))
|
||||
browser = db.Column(db.String(256))
|
||||
description = db.Column(db.String(128))
|
||||
is_ok = db.Column(db.Boolean)
|
||||
login_at = db.Column(db.DateTime)
|
||||
logout_at = db.Column(db.DateTime)
|
||||
|
@@ -12,9 +12,7 @@ from api.lib.cmdb.const import CITypeOperateType
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import OperateType
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.utils import Crypto
|
||||
from api.lib.database import Model, Model2
|
||||
|
||||
|
||||
# template
|
||||
@@ -91,37 +89,12 @@ class Attribute(Model):
|
||||
compute_expr = db.Column(db.Text)
|
||||
compute_script = db.Column(db.Text)
|
||||
|
||||
_choice_web_hook = db.Column('choice_web_hook', db.JSON)
|
||||
choice_other = db.Column(db.JSON)
|
||||
choice_web_hook = db.Column(db.JSON)
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
|
||||
option = db.Column(db.JSON)
|
||||
|
||||
def _get_webhook(self):
|
||||
if self._choice_web_hook:
|
||||
if self._choice_web_hook.get('headers') and "Cookie" in self._choice_web_hook['headers']:
|
||||
self._choice_web_hook['headers']['Cookie'] = Crypto.decrypt(self._choice_web_hook['headers']['Cookie'])
|
||||
|
||||
if self._choice_web_hook.get('authorization'):
|
||||
for k, v in self._choice_web_hook['authorization'].items():
|
||||
self._choice_web_hook['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._choice_web_hook
|
||||
|
||||
def _set_webhook(self, data):
|
||||
if data:
|
||||
if data.get('headers') and "Cookie" in data['headers']:
|
||||
data['headers']['Cookie'] = Crypto.encrypt(data['headers']['Cookie'])
|
||||
|
||||
if data.get('authorization'):
|
||||
for k, v in data['authorization'].items():
|
||||
data['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._choice_web_hook = data
|
||||
|
||||
choice_web_hook = db.synonym("_choice_web_hook", descriptor=property(_get_webhook, _set_webhook))
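
The pattern above pairs a private JSON column with db.synonym so values are encrypted on write and decrypted on read; a minimal standalone sketch of the same idea, using a hypothetical Secret model:

class Secret(Model):
    __tablename__ = "c_secrets_demo"          # hypothetical table, for illustration only

    _token = db.Column('token', db.Text)

    def _get_token(self):
        return Crypto.decrypt(self._token) if self._token else None

    def _set_token(self, value):
        self._token = Crypto.encrypt(value) if value else None

    token = db.synonym("_token", descriptor=property(_get_token, _set_token))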
|
||||
|
||||
|
||||
class CITypeAttribute(Model):
|
||||
__tablename__ = "c_ci_type_attributes"
|
||||
@@ -156,25 +129,7 @@ class CITypeTrigger(Model):
|
||||
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
|
||||
_option = db.Column('notify', db.JSON)
|
||||
|
||||
def _get_option(self):
|
||||
if self._option and self._option.get('webhooks'):
|
||||
if self._option['webhooks'].get('authorization'):
|
||||
for k, v in self._option['webhooks']['authorization'].items():
|
||||
self._option['webhooks']['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._option
|
||||
|
||||
def _set_option(self, data):
|
||||
if data and data.get('webhooks'):
|
||||
if data['webhooks'].get('authorization'):
|
||||
for k, v in data['webhooks']['authorization'].items():
|
||||
data['webhooks']['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._option = data
|
||||
|
||||
option = db.synonym("_option", descriptor=property(_get_option, _set_option))
|
||||
option = db.Column('notify', db.JSON)
|
||||
|
||||
|
||||
class CITriggerHistory(Model):
|
||||
@@ -218,8 +173,6 @@ class CIRelation(Model):
|
||||
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
|
||||
more = db.Column(db.Integer, db.ForeignKey("c_cis.id"))
|
||||
|
||||
ancestor_ids = db.Column(db.String(128), index=True)
|
||||
|
||||
first_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.first_ci_id")
|
||||
second_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.second_ci_id")
|
||||
relation_type = db.relationship("RelationType", backref="c_ci_relations.relation_type_id")
|
||||
@@ -550,10 +503,3 @@ class CIFilterPerms(Model):
|
||||
attr_filter = db.Column(db.Text)
|
||||
|
||||
rid = db.Column(db.Integer, index=True)
|
||||
|
||||
|
||||
class InnerKV(Model):
|
||||
__tablename__ = "c_kv"
|
||||
|
||||
key = db.Column(db.String(128), index=True)
|
||||
value = db.Column(db.Text)
|
||||
|
@@ -96,11 +96,3 @@ class NoticeConfig(Model):
|
||||
|
||||
platform = db.Column(db.VARCHAR(255), nullable=False)
|
||||
info = db.Column(db.JSON)
|
||||
|
||||
|
||||
class CommonFile(Model):
|
||||
__tablename__ = 'common_file'
|
||||
|
||||
file_name = db.Column(db.VARCHAR(512), nullable=False, index=True)
|
||||
origin_name = db.Column(db.VARCHAR(512), nullable=False)
|
||||
binary = db.Column(db.LargeBinary(16777216), nullable=False)
|
||||
|
@@ -46,4 +46,5 @@ def register_resources(resource_path, rest_api):
|
||||
resource_cls.url_prefix = ("",)
|
||||
if isinstance(resource_cls.url_prefix, six.string_types):
|
||||
resource_cls.url_prefix = (resource_cls.url_prefix,)
|
||||
|
||||
rest_api.add_resource(resource_cls, *resource_cls.url_prefix)
|
||||
|
@@ -9,8 +9,7 @@ from werkzeug.exceptions import BadRequest
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from api.extensions import celery
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.decorator import reconnect_db
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateSource
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
@@ -29,7 +28,6 @@ from api.models.acl import Trigger
|
||||
name="acl.role_rebuild",
|
||||
queue=ACL_QUEUE,
|
||||
once={"graceful": True, "unlock_before_run": True})
|
||||
@reconnect_db
|
||||
def role_rebuild(rids, app_id):
|
||||
rids = rids if isinstance(rids, list) else [rids]
|
||||
for rid in rids:
|
||||
@@ -39,7 +37,6 @@ def role_rebuild(rids, app_id):
|
||||
|
||||
|
||||
@celery.task(name="acl.update_resource_to_build_role", queue=ACL_QUEUE)
|
||||
@reconnect_db
|
||||
def update_resource_to_build_role(resource_id, app_id, group_id=None):
|
||||
rids = [i.id for i in Role.get_by(__func_isnot__key_uid=None, fl='id', to_dict=False)]
|
||||
rids += [i.id for i in Role.get_by(app_id=app_id, fl='id', to_dict=False)]
|
||||
@@ -55,9 +52,9 @@ def update_resource_to_build_role(resource_id, app_id, group_id=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.apply_trigger", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def apply_trigger(_id, resource_id=None, operator_uid=None):
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.perm.acl.permission import PermissionCRUD
|
||||
|
||||
trigger = Trigger.get_by_id(_id)
|
||||
@@ -121,9 +118,9 @@ def apply_trigger(_id, resource_id=None, operator_uid=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.cancel_trigger", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def cancel_trigger(_id, resource_id=None, operator_uid=None):
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.perm.acl.permission import PermissionCRUD
|
||||
|
||||
trigger = Trigger.get_by_id(_id)
|
||||
@@ -189,7 +186,6 @@ def cancel_trigger(_id, resource_id=None, operator_uid=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.op_record", queue=ACL_QUEUE)
|
||||
@reconnect_db
|
||||
def op_record(app, rolename, operate_type, obj):
|
||||
if isinstance(app, int):
|
||||
app = AppCache.get(app)
|
||||
|
@@ -16,9 +16,6 @@ from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.decorator import reconnect_db
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.utils import Lock
|
||||
from api.lib.utils import handle_arg_list
|
||||
@@ -28,12 +25,11 @@ from api.models.cmdb import CITypeAttribute
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_cache(ci_id, operate_type, record_id):
|
||||
from api.lib.cmdb.ci import CITriggerManager
|
||||
|
||||
time.sleep(0.01)
|
||||
db.session.remove()
|
||||
|
||||
m = api.lib.cmdb.ci.CIManager()
|
||||
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
|
||||
@@ -45,18 +41,16 @@ def ci_cache(ci_id, operate_type, record_id):
|
||||
|
||||
current_app.logger.info("{0} flush..........".format(ci_id))
|
||||
|
||||
if operate_type:
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get('worker'))
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get('worker'))
|
||||
|
||||
CITriggerManager.fire(operate_type, ci_dict, record_id)
|
||||
CITriggerManager.fire(operate_type, ci_dict, record_id)
|
||||
|
||||
|
||||
@celery.task(name="cmdb.batch_ci_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def batch_ci_cache(ci_ids, ): # only for attribute change index
|
||||
time.sleep(1)
|
||||
db.session.remove()
|
||||
|
||||
for ci_id in ci_ids:
|
||||
m = api.lib.cmdb.ci.CIManager()
|
||||
@@ -71,7 +65,6 @@ def batch_ci_cache(ci_ids, ): # only for attribute change index
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_delete(ci_id):
|
||||
current_app.logger.info(ci_id)
|
||||
|
||||
@@ -84,7 +77,6 @@ def ci_delete(ci_id):
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_delete_trigger", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_delete_trigger(trigger, operate_type, ci_dict):
|
||||
current_app.logger.info('delete ci {} trigger'.format(ci_dict['_id']))
|
||||
from api.lib.cmdb.ci import CITriggerManager
|
||||
@@ -96,39 +88,23 @@ def ci_delete_trigger(trigger, operate_type, ci_dict):
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_relation_cache(parent_id, child_id, ancestor_ids):
|
||||
def ci_relation_cache(parent_id, child_id):
|
||||
db.session.remove()
|
||||
|
||||
with Lock("CIRelation_{}".format(parent_id)):
|
||||
if ancestor_ids is None:
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
|
||||
first=True, to_dict=False)
|
||||
if str(child_id) not in children:
|
||||
children[str(child_id)] = cr.second_ci.type_id
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, first=True, to_dict=False)
|
||||
if str(child_id) not in children:
|
||||
children[str(child_id)] = cr.second_ci.type_id
|
||||
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
else:
|
||||
key = "{},{}".format(ancestor_ids, parent_id)
|
||||
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
|
||||
grandson = json.loads(grandson) if grandson is not None else {}
|
||||
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
|
||||
first=True, to_dict=False)
|
||||
if cr and str(cr.second_ci_id) not in grandson:
|
||||
grandson[str(cr.second_ci_id)] = cr.second_ci.type_id
|
||||
|
||||
rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
current_app.logger.info("ADD ci relation cache: {0} -> {1}".format(parent_id, child_id))
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_add", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_relation_add(parent_dict, child_id, uid):
|
||||
"""
|
||||
:param parent_dict: key is '$parent_model.attr_name'
|
||||
@@ -144,6 +120,8 @@ def ci_relation_add(parent_dict, child_id, uid):
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
|
||||
db.session.remove()
|
||||
|
||||
for parent in parent_dict:
|
||||
parent_ci_type_name, _attr_name = parent.strip()[1:].split('.', 1)
|
||||
attr_name = CITypeAttributeManager.get_attr_name(parent_ci_type_name, _attr_name)
|
||||
@@ -168,52 +146,33 @@ def ci_relation_add(parent_dict, child_id, uid):
|
||||
except Exception as e:
|
||||
current_app.logger.warning(e)
|
||||
finally:
|
||||
try:
|
||||
db.session.commit()
|
||||
except:
|
||||
db.session.rollback()
|
||||
db.session.remove()
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_relation_delete(parent_id, child_id, ancestor_ids):
|
||||
def ci_relation_delete(parent_id, child_id):
|
||||
with Lock("CIRelation_{}".format(parent_id)):
|
||||
if ancestor_ids is None:
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
|
||||
if str(child_id) in children:
|
||||
children.pop(str(child_id))
|
||||
if str(child_id) in children:
|
||||
children.pop(str(child_id))
|
||||
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
else:
|
||||
key = "{},{}".format(ancestor_ids, parent_id)
|
||||
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
|
||||
grandson = json.loads(grandson) if grandson is not None else {}
|
||||
|
||||
if str(child_id) in grandson:
|
||||
grandson.pop(str(child_id))
|
||||
|
||||
rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
current_app.logger.info("DELETE ci relation cache: {0} -> {1}".format(parent_id, child_id))
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_type_attribute_order_rebuild", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_type_attribute_order_rebuild(type_id, uid):
|
||||
def ci_type_attribute_order_rebuild(type_id):
|
||||
current_app.logger.info('rebuild attribute order')
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeGroupManager
|
||||
|
||||
attrs = CITypeAttributesCache.get(type_id)
|
||||
id2attr = {attr.attr_id: attr for attr in attrs}
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
|
||||
res = CITypeAttributeGroupManager.get_by_type_id(type_id, True)
|
||||
order = 0
|
||||
for group in res:
|
||||
@@ -225,11 +184,11 @@ def ci_type_attribute_order_rebuild(type_id, uid):
|
||||
|
||||
|
||||
@celery.task(name="cmdb.calc_computed_attribute", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def calc_computed_attribute(attr_id, uid):
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
|
||||
db.session.remove()
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
|
||||
|
@@ -24,7 +24,6 @@ class AuditLogView(APIView):
|
||||
'role': AuditCRUD.search_role,
|
||||
'trigger': AuditCRUD.search_trigger,
|
||||
'resource': AuditCRUD.search_resource,
|
||||
'login': AuditCRUD.search_login,
|
||||
}
|
||||
if name not in func_map:
|
||||
abort(400, f'wrong {name}, please use {func_map.keys()}')
|
||||
|
@@ -8,15 +8,11 @@ from flask import abort
|
||||
from flask import current_app
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from flask_login import login_user, logout_user
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.decorator import args_required
|
||||
from api.lib.decorator import args_validate
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
from api.lib.perm.acl.cache import User
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
@@ -38,10 +34,8 @@ class LoginView(APIView):
|
||||
username = request.values.get("username") or request.values.get("email")
|
||||
password = request.values.get("password")
|
||||
_role = None
|
||||
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
|
||||
if config.get('LDAP', {}).get('enabled') or config.get('LDAP', {}).get('enable'):
|
||||
from api.lib.perm.authentication.ldap import authenticate_with_ldap
|
||||
user, authenticated = authenticate_with_ldap(username, password)
|
||||
if current_app.config.get('AUTH_WITH_LDAP'):
|
||||
user, authenticated = User.query.authenticate_with_ldap(username, password)
|
||||
else:
|
||||
user, authenticated = User.query.authenticate(username, password)
|
||||
if not user:
|
||||
@@ -182,7 +176,4 @@ class LogoutView(APIView):
|
||||
@auth_abandoned
|
||||
def post(self):
|
||||
logout_user()
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
self.jsonify(code=200)
|
||||
|
@@ -84,10 +84,11 @@ class CIView(APIView):
|
||||
ci_dict = self._wrap_ci_dict()
|
||||
|
||||
manager = CIManager()
|
||||
current_app.logger.debug(ci_dict)
|
||||
ci_id = manager.add(ci_type,
|
||||
exist_policy=exist_policy or ExistPolicy.REJECT,
|
||||
_no_attribute_policy=_no_attribute_policy,
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
**ci_dict)
|
||||
|
||||
return self.jsonify(ci_id=ci_id)
|
||||
@@ -95,6 +96,7 @@ class CIView(APIView):
|
||||
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type)
|
||||
def put(self, ci_id=None):
|
||||
args = request.values
|
||||
current_app.logger.info(args)
|
||||
ci_type = args.get("ci_type")
|
||||
_no_attribute_policy = args.get("no_attribute_policy", ExistPolicy.IGNORE)
|
||||
|
||||
@@ -102,14 +104,14 @@ class CIView(APIView):
|
||||
manager = CIManager()
|
||||
if ci_id is not None:
|
||||
manager.update(ci_id,
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
**ci_dict)
|
||||
else:
|
||||
request.values.pop('exist_policy', None)
|
||||
ci_id = manager.add(ci_type,
|
||||
exist_policy=ExistPolicy.REPLACE,
|
||||
_no_attribute_policy=_no_attribute_policy,
|
||||
_is_admin=request.values.pop('__is_admin', None) or False,
|
||||
_is_admin=request.values.pop('__is_admin', False),
|
||||
**ci_dict)
|
||||
|
||||
return self.jsonify(ci_id=ci_id)
|
||||
@@ -226,11 +228,11 @@ class CIFlushView(APIView):
|
||||
from api.tasks.cmdb import ci_cache
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
if ci_id is not None:
|
||||
ci_cache.apply_async(args=(ci_id, None, None), queue=CMDB_QUEUE)
|
||||
ci_cache.apply_async([ci_id], queue=CMDB_QUEUE)
|
||||
else:
|
||||
cis = CI.get_by(to_dict=False)
|
||||
for ci in cis:
|
||||
ci_cache.apply_async(args=(ci.id, None, None), queue=CMDB_QUEUE)
|
||||
ci_cache.apply_async([ci.id], queue=CMDB_QUEUE)
|
||||
|
||||
return self.jsonify(code=200)
|
||||
|
||||
@@ -240,13 +242,3 @@ class CIAutoDiscoveryStatisticsView(APIView):
|
||||
|
||||
def get(self):
|
||||
return self.jsonify(CIManager.get_ad_statistics())
|
||||
|
||||
|
||||
class CIPasswordView(APIView):
|
||||
url_prefix = "/ci/<int:ci_id>/attributes/<int:attr_id>/password"
|
||||
|
||||
def get(self, ci_id, attr_id):
|
||||
return self.jsonify(ci_id=ci_id, attr_id=attr_id, value=CIManager.load_password(ci_id, attr_id))
|
||||
|
||||
def post(self, ci_id, attr_id):
|
||||
return self.get(ci_id, attr_id)
|
||||
|
@@ -35,7 +35,6 @@ class CIRelationSearchView(APIView):
        count = get_page_size(request.values.get("count") or request.values.get("page_size"))

        root_id = request.values.get('root_id')
        ancestor_ids = request.values.get('ancestor_ids') or None  # only for many to many
        level = list(map(int, handle_arg_list(request.values.get('level', '1'))))

        query = request.values.get('q', "")
@@ -45,7 +44,7 @@ class CIRelationSearchView(APIView):
        reverse = request.values.get("reverse") in current_app.config.get('BOOL_TRUE')

        start = time.time()
        s = Search(root_id, level, query, fl, facet, page, count, sort, reverse, ancestor_ids=ancestor_ids)
        s = Search(root_id, level, query, fl, facet, page, count, sort, reverse)
        try:
            response, counter, total, page, numfound, facet = s.search()
        except SearchError as e:
@@ -68,10 +67,9 @@ class CIRelationStatisticsView(APIView):
        root_ids = list(map(int, handle_arg_list(request.values.get('root_ids'))))
        level = request.values.get('level', 1)
        type_ids = set(map(int, handle_arg_list(request.values.get('type_ids', []))))
        ancestor_ids = request.values.get('ancestor_ids') or None  # only for many to many

        start = time.time()
        s = Search(root_ids, level, ancestor_ids=ancestor_ids)
        s = Search(root_ids, level)
        try:
            result = s.statistics(type_ids)
        except SearchError as e:
@@ -123,18 +121,14 @@ class CIRelationView(APIView):
    url_prefix = "/ci_relations/<int:first_ci_id>/<int:second_ci_id>"

    def post(self, first_ci_id, second_ci_id):
        ancestor_ids = request.values.get('ancestor_ids') or None

        manager = CIRelationManager()
        res = manager.add(first_ci_id, second_ci_id, ancestor_ids=ancestor_ids)
        res = manager.add(first_ci_id, second_ci_id)

        return self.jsonify(cr_id=res)

    def delete(self, first_ci_id, second_ci_id):
        ancestor_ids = request.values.get('ancestor_ids') or None

        manager = CIRelationManager()
        manager.delete_2(first_ci_id, second_ci_id, ancestor_ids=ancestor_ids)
        manager.delete_2(first_ci_id, second_ci_id)

        return self.jsonify(message="CIType Relation is deleted")

@@ -157,9 +151,8 @@ class BatchCreateOrUpdateCIRelationView(APIView):
        ci_ids = list(map(int, request.values.get('ci_ids')))
        parents = list(map(int, request.values.get('parents', [])))
        children = list(map(int, request.values.get('children', [])))
        ancestor_ids = request.values.get('ancestor_ids') or None

        CIRelationManager.batch_update(ci_ids, parents, children, ancestor_ids=ancestor_ids)
        CIRelationManager.batch_update(ci_ids, parents, children)

        return self.jsonify(code=200)

@@ -173,8 +166,7 @@ class BatchCreateOrUpdateCIRelationView(APIView):
    def delete(self):
        ci_ids = list(map(int, request.values.get('ci_ids')))
        parents = list(map(int, request.values.get('parents', [])))
        ancestor_ids = request.values.get('ancestor_ids') or None

        CIRelationManager.batch_delete(ci_ids, parents, ancestor_ids=ancestor_ids)
        CIRelationManager.batch_delete(ci_ids, parents)

        return self.jsonify(code=200)
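The relation views above add an optional `ancestor_ids` parameter so that many-to-many relations can be scoped to a specific ancestor path. A minimal client-side sketch, assuming a local deployment and an illustrative API prefix:

```python
import requests

BASE = 'http://127.0.0.1:5000/api/v0.1'  # assumed prefix of a local deployment

# create a relation between two CIs; ancestor_ids only matters for many-to-many relations
resp = requests.post(f'{BASE}/ci_relations/1001/2002', data={'ancestor_ids': '1'})
print(resp.json())  # expected shape: {"cr_id": <relation id>}

# delete it again, scoped to the same ancestor path
requests.delete(f'{BASE}/ci_relations/1001/2002', params={'ancestor_ids': '1'})
```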
@@ -105,7 +105,6 @@ class CITypeGroupView(APIView):

        return self.jsonify(group.to_dict())

    @role_required(RoleEnum.CONFIG)
    @args_validate(CITypeGroupManager.cls)
    def put(self, gid=None):
        if "/order" in request.url:

@@ -9,7 +9,6 @@ from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.preference import PreferenceManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import ACLManager
@@ -110,10 +109,3 @@ class CITypeRelationRevokeView(APIView):
        acl.revoke_resource_from_role_by_rid(resource_name, rid, ResourceTypeEnum.CI_TYPE_RELATION, perms)

        return self.jsonify(code=200)


class CITypeRelationCanEditView(APIView):
    url_prefix = "/ci_type_relations/<int:parent_id>/<int:child_id>/can_edit"

    def get(self, parent_id, child_id):
        return self.jsonify(result=PreferenceManager.can_edit_relation(parent_id, child_id))
@@ -1,37 +0,0 @@
from flask import request

from api.lib.perm.auth import auth_abandoned
from api.lib.secrets.inner import KeyManage
from api.lib.secrets.secrets import InnerKVManger
from api.resource import APIView


class InnerSecretUnSealView(APIView):
    url_prefix = "/secrets/unseal"

    @auth_abandoned
    def post(self):
        unseal_key = request.headers.get("Unseal-Token")
        res = KeyManage(backend=InnerKVManger()).unseal(unseal_key)
        return self.jsonify(**res)


class InnerSecretSealView(APIView):
    url_prefix = "/secrets/seal"

    @auth_abandoned
    def post(self):
        unseal_key = request.headers.get("Inner-Token")
        res = KeyManage(backend=InnerKVManger()).seal(unseal_key)
        return self.jsonify(**res)


class InnerSecretAutoSealView(APIView):
    url_prefix = "/secrets/auto_seal"

    @auth_abandoned
    def post(self):
        root_key = request.headers.get("Inner-Token")
        res = KeyManage(trigger=root_key,
                        backend=InnerKVManger()).auto_unseal()
        return self.jsonify(**res)
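The deleted module above exposed seal/unseal endpoints for the built-in secrets engine, authenticated purely by request headers. A hedged sketch of how a client would call the unseal endpoint (host, port and API prefix are assumptions):

```python
import requests

UNSEAL_URL = 'http://127.0.0.1:5000/api/v0.1/secrets/unseal'  # assumed local deployment

# one unseal key share goes into the Unseal-Token header; KeyManage.unseal()
# returns a status payload which the view passes straight through
resp = requests.post(UNSEAL_URL, headers={'Unseal-Token': '<unseal key share>'})
print(resp.json())
```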
@@ -1,88 +0,0 @@
from flask import abort, request

from api.lib.common_setting.common_data import AuthenticateDataCRUD
from api.lib.common_setting.const import TestType
from api.lib.common_setting.resp_format import ErrFormat
from api.lib.perm.acl.acl import role_required
from api.resource import APIView

prefix = '/auth_config'


class AuthConfigView(APIView):
    url_prefix = (f'{prefix}/<string:auth_type>',)

    @role_required("acl_admin")
    def get(self, auth_type):
        cli = AuthenticateDataCRUD(auth_type)

        if auth_type not in cli.get_support_type_list():
            abort(400, ErrFormat.not_support_auth_type.format(auth_type))

        if auth_type in cli.common_type_list:
            data = cli.get_record(True)
        else:
            data = cli.get_record_with_decrypt()
        return self.jsonify(data)

    @role_required("acl_admin")
    def post(self, auth_type):
        cli = AuthenticateDataCRUD(auth_type)

        if auth_type not in cli.get_support_type_list():
            abort(400, ErrFormat.not_support_auth_type.format(auth_type))

        params = request.json
        data = params.get('data', {})
        if auth_type in cli.common_type_list:
            data['encrypt'] = False
        cli.create(data)

        return self.jsonify(params)


class AuthConfigViewWithId(APIView):
    url_prefix = (f'{prefix}/<string:auth_type>/<int:_id>',)

    @role_required("acl_admin")
    def put(self, auth_type, _id):
        cli = AuthenticateDataCRUD(auth_type)

        if auth_type not in cli.get_support_type_list():
            abort(400, ErrFormat.not_support_auth_type.format(auth_type))

        params = request.json
        data = params.get('data', {})
        if auth_type in cli.common_type_list:
            data['encrypt'] = False

        res = cli.update(_id, data)

        return self.jsonify(res.to_dict())

    @role_required("acl_admin")
    def delete(self, auth_type, _id):
        cli = AuthenticateDataCRUD(auth_type)

        if auth_type not in cli.get_support_type_list():
            abort(400, ErrFormat.not_support_auth_type.format(auth_type))
        cli.delete(_id)
        return self.jsonify({})


class AuthEnableListView(APIView):
    url_prefix = (f'{prefix}/enable_list',)

    method_decorators = []

    def get(self):
        return self.jsonify(AuthenticateDataCRUD.get_enable_list())


class AuthConfigTestView(APIView):
    url_prefix = (f'{prefix}/<string:auth_type>/test',)

    def post(self, auth_type):
        test_type = request.values.get('test_type', TestType.Connect)
        params = request.json
        return self.jsonify(AuthenticateDataCRUD(auth_type).test(test_type, params.get('data')))
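The deleted `auth_config` views above formed a small CRUD API keyed by auth type (LDAP, CAS, OAUTH2, ...), with secrets encrypted unless the type is in `common_type_list`. A sketch of using it from a script, assuming an acl_admin session and an illustrative URL prefix:

```python
import requests

BASE = 'http://127.0.0.1:5000/api/common-setting/v1'  # assumed prefix, adjust to the deployment
session = requests.Session()                          # assumed to already carry an acl_admin login

# store an LDAP auth config record; the payload layout mirrors AuthConfigView.post
payload = {'data': {'ldap_server': 'ldap://ldap.example.com',
                    'ldap_domain': 'example.com',
                    'enabled': True}}
session.post(f'{BASE}/auth_config/LDAP', json=payload)

# AuthEnableListView has no method_decorators, so the enable list is readable without a role check
print(session.get(f'{BASE}/auth_config/enable_list').json())
```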
@@ -1,7 +1,9 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import request

from api.lib.common_setting.company_info import CompanyInfoCRUD
from api.lib.common_setting.resp_format import ErrFormat
from api.resource import APIView

prefix = '/company'

@@ -1,5 +1,7 @@
# -*- coding:utf-8 -*-
from flask import abort
import os

from flask import abort, current_app, send_from_directory
from flask import request
from werkzeug.datastructures import MultiDict

@@ -154,15 +156,3 @@ class GetEmployeeNoticeByIds(APIView):
        else:
            result = EmployeeCRUD.get_employee_notice_by_ids(employee_ids)
        return self.jsonify(result)


class EmployeeBindNoticeWithACLID(APIView):
    url_prefix = (f'{prefix}/by_uid/bind_notice/<string:platform>/<int:_uid>',)

    def put(self, platform, _uid):
        data = EmployeeCRUD.bind_notice_by_uid(platform, _uid)
        return self.jsonify(info=data)

    def delete(self, platform, _uid):
        data = EmployeeCRUD.remove_bind_notice_by_uid(platform, _uid)
        return self.jsonify(info=data)
@@ -3,10 +3,9 @@ import os

from flask import request, abort, current_app, send_from_directory
from werkzeug.utils import secure_filename
import lz4.frame

from api.lib.common_setting.resp_format import ErrFormat
from api.lib.common_setting.upload_file import allowed_file, generate_new_file_name, CommonFileCRUD
from api.lib.common_setting.upload_file import allowed_file, generate_new_file_name
from api.resource import APIView

prefix = '/file'
@@ -29,8 +28,7 @@ class GetFileView(APIView):
    url_prefix = (f'{prefix}/<string:_filename>',)

    def get(self, _filename):
        file_stream = CommonFileCRUD.get_file(_filename)
        return self.send_file(file_stream, as_attachment=True, download_name=_filename)
        return send_from_directory(current_app.config['UPLOAD_DIRECTORY_FULL'], _filename, as_attachment=True)


class PostFileView(APIView):
@@ -55,20 +53,11 @@ class PostFileView(APIView):
        filename = file.filename

        if allowed_file(filename, current_app.config.get('ALLOWED_EXTENSIONS', ALLOWED_EXTENSIONS)):
            new_filename = generate_new_file_name(filename)
            new_filename = secure_filename(new_filename)
            file_content = file.read()
            compressed_data = lz4.frame.compress(file_content)
            try:
                CommonFileCRUD.add_file(
                    origin_name=filename,
                    file_name=new_filename,
                    binary=compressed_data,
                )
            filename = generate_new_file_name(filename)
            filename = secure_filename(filename)
            file.save(os.path.join(
                current_app.config['UPLOAD_DIRECTORY_FULL'], filename))

                return self.jsonify(file_name=new_filename)
            except Exception as e:
                current_app.logger.error(e)
                abort(400, ErrFormat.upload_failed.format(e))
            return self.jsonify(file_name=filename)

        abort(400, ErrFormat.file_type_not_allowed.format(filename))
        abort(400, 'Extension not allow')
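The upload hunk replaces saving the raw file to `UPLOAD_DIRECTORY_FULL` with lz4-compressing the bytes and handing them to `CommonFileCRUD.add_file`. A quick round-trip showing what that compression step does (the decompression side is assumed to live behind `CommonFileCRUD.get_file`):

```python
import lz4.frame

raw = b'example upload payload ' * 100

compressed = lz4.frame.compress(raw)         # what PostFileView stores as `binary`
restored = lz4.frame.decompress(compressed)  # what the read path would yield back

assert restored == raw
print(f'{len(raw)} bytes -> {len(compressed)} bytes stored')
```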
@@ -47,7 +47,7 @@ class CheckEmailServer(APIView):

    def post(self):
        receive_address = request.args.get('receive_address')
        info = request.values.get('info', {})
        info = request.values.get('info')

        try:

@@ -69,11 +69,3 @@ class NoticeConfigGetView(APIView):
    def get(self):
        res = NoticeConfigCRUD.get_all()
        return self.jsonify(res)


class NoticeAppBotView(APIView):
    url_prefix = (f'{prefix}/app_bot',)

    def get(self):
        res = NoticeConfigCRUD.get_app_bot()
        return self.jsonify(res)

@@ -1 +0,0 @@
Generic single-database configuration.
@@ -1,45 +0,0 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
@@ -1,110 +0,0 @@
|
||||
from __future__ import with_statement
|
||||
|
||||
import logging
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
fileConfig(config.config_file_name)
|
||||
logger = logging.getLogger('alembic.env')
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
from flask import current_app
|
||||
config.set_main_option(
|
||||
'sqlalchemy.url', current_app.config.get(
|
||||
'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
|
||||
target_metadata = current_app.extensions['migrate'].db.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
# 添加要屏蔽的table列表
|
||||
exclude_tables = ["c_cfp"]
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url, target_metadata=target_metadata, literal_binds=True,
|
||||
include_name=include_name
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
|
||||
# this callback is used to prevent an auto-migration from being generated
|
||||
# when there are no changes to the schema
|
||||
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
|
||||
def process_revision_directives(context, revision, directives):
|
||||
if getattr(config.cmd_opts, 'autogenerate', False):
|
||||
script = directives[0]
|
||||
if script.upgrade_ops.is_empty():
|
||||
directives[:] = []
|
||||
logger.info('No changes in schema detected.')
|
||||
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
process_revision_directives=process_revision_directives,
|
||||
include_name=include_name,
|
||||
**current_app.extensions['migrate'].configure_args
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def include_name(name, type_, parent_names):
|
||||
if type_ == "table":
|
||||
return name not in exclude_tables
|
||||
elif parent_names.get("table_name") in exclude_tables:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
@@ -1,24 +0,0 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
@@ -1,360 +0,0 @@
|
||||
"""empty message
|
||||
|
||||
Revision ID: 6a4df2623057
|
||||
Revises:
|
||||
Create Date: 2023-10-13 15:17:00.066858
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6a4df2623057'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('common_data',
|
||||
sa.Column('deleted_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('deleted', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('data_type', sa.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('data', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_common_data_deleted'), 'common_data', ['deleted'], unique=False)
|
||||
op.create_table('common_notice_config',
|
||||
sa.Column('deleted_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('deleted', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('platform', sa.VARCHAR(length=255), nullable=False),
|
||||
sa.Column('info', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index(op.f('ix_common_notice_config_deleted'), 'common_notice_config', ['deleted'], unique=False)
|
||||
op.add_column('c_attributes', sa.Column('choice_other', sa.JSON(), nullable=True))
|
||||
op.drop_index('idx_c_attributes_uid', table_name='c_attributes')
|
||||
op.create_index(op.f('ix_c_attributes_uid'), 'c_attributes', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_custom_dashboard_deleted', table_name='c_c_d')
|
||||
op.create_index(op.f('ix_c_c_d_deleted'), 'c_c_d', ['deleted'], unique=False)
|
||||
op.drop_index('ix_c_ci_type_triggers_deleted', table_name='c_c_t_t')
|
||||
op.create_index(op.f('ix_c_c_t_t_deleted'), 'c_c_t_t', ['deleted'], unique=False)
|
||||
op.drop_index('ix_c_ci_type_unique_constraints_deleted', table_name='c_c_t_u_c')
|
||||
op.create_index(op.f('ix_c_c_t_u_c_deleted'), 'c_c_t_u_c', ['deleted'], unique=False)
|
||||
op.drop_index('c_ci_types_uid', table_name='c_ci_types')
|
||||
op.create_index(op.f('ix_c_ci_types_uid'), 'c_ci_types', ['uid'], unique=False)
|
||||
op.alter_column('c_prv', 'uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
nullable=False)
|
||||
op.drop_index('ix_c_preference_relation_views_deleted', table_name='c_prv')
|
||||
op.drop_index('ix_c_preference_relation_views_name', table_name='c_prv')
|
||||
op.create_index(op.f('ix_c_prv_deleted'), 'c_prv', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_prv_name'), 'c_prv', ['name'], unique=False)
|
||||
op.create_index(op.f('ix_c_prv_uid'), 'c_prv', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_preference_show_attributes_deleted', table_name='c_psa')
|
||||
op.drop_index('ix_c_preference_show_attributes_uid', table_name='c_psa')
|
||||
op.create_index(op.f('ix_c_psa_deleted'), 'c_psa', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_psa_uid'), 'c_psa', ['uid'], unique=False)
|
||||
op.drop_index('ix_c_preference_tree_views_deleted', table_name='c_ptv')
|
||||
op.drop_index('ix_c_preference_tree_views_uid', table_name='c_ptv')
|
||||
op.create_index(op.f('ix_c_ptv_deleted'), 'c_ptv', ['deleted'], unique=False)
|
||||
op.create_index(op.f('ix_c_ptv_uid'), 'c_ptv', ['uid'], unique=False)
|
||||
op.alter_column('common_department', 'department_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='部门名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_director_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='部门负责人ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_parent_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='上级部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'sort_value',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='排序值',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'email',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='邮箱',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'username',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='用户名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'nickname',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='姓名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'sex',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=64),
|
||||
comment=None,
|
||||
existing_comment='性别',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'position_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='职位名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'mobile',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='电话号码',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'avatar',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='头像',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'direct_supervisor_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='直接上级ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'department_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中uid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_virtual_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='ACL中虚拟角色rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'last_login',
|
||||
existing_type=mysql.TIMESTAMP(),
|
||||
comment=None,
|
||||
existing_comment='上次登录时间',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'block',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='锁定状态',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'info',
|
||||
existing_type=mysql.JSON(),
|
||||
comment=None,
|
||||
existing_comment='员工信息',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'employee_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment=None,
|
||||
existing_comment='员工ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'title',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='标题',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'content',
|
||||
existing_type=mysql.TEXT(charset='utf8mb3', collation='utf8mb3_unicode_ci'),
|
||||
comment=None,
|
||||
existing_comment='内容',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'path',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment=None,
|
||||
existing_comment='跳转路径',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'is_read',
|
||||
existing_type=mysql.TINYINT(display_width=1),
|
||||
comment=None,
|
||||
existing_comment='是否已读',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'app_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment=None,
|
||||
existing_comment='应用名称',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'category',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment=None,
|
||||
existing_comment='分类',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'message_data',
|
||||
existing_type=mysql.JSON(),
|
||||
comment=None,
|
||||
existing_comment='数据',
|
||||
existing_nullable=True)
|
||||
op.drop_column('users', 'apps')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('users', sa.Column('apps', mysql.JSON(), nullable=True))
|
||||
op.alter_column('common_internal_message', 'message_data',
|
||||
existing_type=mysql.JSON(),
|
||||
comment='数据',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'category',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment='分类',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'app_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=128),
|
||||
comment='应用名称',
|
||||
existing_nullable=False)
|
||||
op.alter_column('common_internal_message', 'is_read',
|
||||
existing_type=mysql.TINYINT(display_width=1),
|
||||
comment='是否已读',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'path',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='跳转路径',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'content',
|
||||
existing_type=mysql.TEXT(charset='utf8mb3', collation='utf8mb3_unicode_ci'),
|
||||
comment='内容',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_internal_message', 'title',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='标题',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'employee_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='员工ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee_info', 'info',
|
||||
existing_type=mysql.JSON(),
|
||||
comment='员工信息',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'block',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='锁定状态',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'last_login',
|
||||
existing_type=mysql.TIMESTAMP(),
|
||||
comment='上次登录时间',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_virtual_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中虚拟角色rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'acl_uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中uid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'department_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'direct_supervisor_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='直接上级ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'avatar',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='头像',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'mobile',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='电话号码',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'position_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='职位名称',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'sex',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=64),
|
||||
comment='性别',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'nickname',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='姓名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'username',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='用户名',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_employee', 'email',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='邮箱',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'acl_rid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='ACL中rid',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'sort_value',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='排序值',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_parent_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='上级部门ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_director_id',
|
||||
existing_type=mysql.INTEGER(),
|
||||
comment='部门负责人ID',
|
||||
existing_nullable=True)
|
||||
op.alter_column('common_department', 'department_name',
|
||||
existing_type=mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_unicode_ci', length=255),
|
||||
comment='部门名称',
|
||||
existing_nullable=True)
|
||||
op.drop_index(op.f('ix_c_ptv_uid'), table_name='c_ptv')
|
||||
op.drop_index(op.f('ix_c_ptv_deleted'), table_name='c_ptv')
|
||||
op.create_index('ix_c_preference_tree_views_uid', 'c_ptv', ['uid'], unique=False)
|
||||
op.create_index('ix_c_preference_tree_views_deleted', 'c_ptv', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_psa_uid'), table_name='c_psa')
|
||||
op.drop_index(op.f('ix_c_psa_deleted'), table_name='c_psa')
|
||||
op.create_index('ix_c_preference_show_attributes_uid', 'c_psa', ['uid'], unique=False)
|
||||
op.create_index('ix_c_preference_show_attributes_deleted', 'c_psa', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_prv_uid'), table_name='c_prv')
|
||||
op.drop_index(op.f('ix_c_prv_name'), table_name='c_prv')
|
||||
op.drop_index(op.f('ix_c_prv_deleted'), table_name='c_prv')
|
||||
op.create_index('ix_c_preference_relation_views_name', 'c_prv', ['name'], unique=False)
|
||||
op.create_index('ix_c_preference_relation_views_deleted', 'c_prv', ['deleted'], unique=False)
|
||||
op.alter_column('c_prv', 'uid',
|
||||
existing_type=mysql.INTEGER(),
|
||||
nullable=True)
|
||||
op.drop_index(op.f('ix_c_ci_types_uid'), table_name='c_ci_types')
|
||||
op.create_index('c_ci_types_uid', 'c_ci_types', ['uid'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_t_u_c_deleted'), table_name='c_c_t_u_c')
|
||||
op.create_index('ix_c_ci_type_unique_constraints_deleted', 'c_c_t_u_c', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_t_t_deleted'), table_name='c_c_t_t')
|
||||
op.create_index('ix_c_ci_type_triggers_deleted', 'c_c_t_t', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_c_d_deleted'), table_name='c_c_d')
|
||||
op.create_index('ix_c_custom_dashboard_deleted', 'c_c_d', ['deleted'], unique=False)
|
||||
op.drop_index(op.f('ix_c_attributes_uid'), table_name='c_attributes')
|
||||
op.create_index('idx_c_attributes_uid', 'c_attributes', ['uid'], unique=False)
|
||||
op.drop_column('c_attributes', 'choice_other')
|
||||
op.drop_index(op.f('ix_common_notice_config_deleted'), table_name='common_notice_config')
|
||||
op.drop_table('common_notice_config')
|
||||
op.drop_index(op.f('ix_common_data_deleted'), table_name='common_data')
|
||||
op.drop_table('common_data')
|
||||
# ### end Alembic commands ###
|
@@ -1,7 +1,7 @@
-i https://mirrors.aliyun.com/pypi/simple
alembic==1.7.7
bs4==0.0.1
celery>=5.3.1
celery==5.3.1
celery-once==3.0.1
click==8.1.3
elasticsearch==7.17.9
@@ -12,29 +12,28 @@ Flask==2.3.2
Flask-Bcrypt==1.0.1
Flask-Caching==2.0.2
Flask-Cors==4.0.0
Flask-Login>=0.6.2
Flask-Login==0.6.2
Flask-Migrate==2.5.2
Flask-RESTful==0.3.10
Flask-SQLAlchemy==2.5.0
future==0.18.3
gunicorn==21.0.1
hvac==2.0.0
itsdangerous==2.1.2
Jinja2==3.1.2
jinja2schema==0.1.4
jsonschema==4.18.0
kombu>=5.3.1
kombu==5.3.1
Mako==1.2.4
MarkupSafe==2.1.3
marshmallow==2.20.2
more-itertools==5.0.0
msgpack-python==0.5.6
Pillow>=10.0.1
cryptography>=41.0.2
Pillow==9.3.0
pycryptodome==3.12.0
PyJWT==2.4.0
PyMySQL==1.1.0
ldap3==2.9.1
PyYAML==6.0.1
python-ldap==3.4.0
PyYAML==6.0
redis==4.6.0
requests==2.31.0
requests_oauthlib==1.3.1
@@ -45,9 +44,5 @@ supervisor==4.0.3
timeout-decorator==0.5.0
toposort==1.10
treelib==1.6.1
Werkzeug>=2.3.6
Werkzeug==2.3.6
WTForms==3.0.0
shamir~=17.12.0
pycryptodomex>=3.19.0
colorama>=0.4.6
lz4>=4.3.2
@@ -11,10 +11,10 @@ from environs import Env
|
||||
env = Env()
|
||||
env.read_env()
|
||||
|
||||
ENV = env.str('FLASK_ENV', default='production')
|
||||
DEBUG = ENV == 'development'
|
||||
SECRET_KEY = env.str('SECRET_KEY')
|
||||
BCRYPT_LOG_ROUNDS = env.int('BCRYPT_LOG_ROUNDS', default=13)
|
||||
ENV = env.str("FLASK_ENV", default="production")
|
||||
DEBUG = ENV == "development"
|
||||
SECRET_KEY = env.str("SECRET_KEY")
|
||||
BCRYPT_LOG_ROUNDS = env.int("BCRYPT_LOG_ROUNDS", default=13)
|
||||
DEBUG_TB_ENABLED = DEBUG
|
||||
DEBUG_TB_INTERCEPT_REDIRECTS = False
|
||||
|
||||
@@ -23,7 +23,7 @@ ERROR_CODES = [400, 401, 403, 404, 405, 500, 502]
|
||||
# # database
|
||||
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
|
||||
SQLALCHEMY_BINDS = {
|
||||
'user': 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
|
||||
"user": 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
|
||||
}
|
||||
SQLALCHEMY_ECHO = False
|
||||
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
||||
@@ -32,11 +32,11 @@ SQLALCHEMY_ENGINE_OPTIONS = {
|
||||
}
|
||||
|
||||
# # cache
|
||||
CACHE_TYPE = 'redis'
|
||||
CACHE_REDIS_HOST = '127.0.0.1'
|
||||
CACHE_TYPE = "redis"
|
||||
CACHE_REDIS_HOST = "127.0.0.1"
|
||||
CACHE_REDIS_PORT = 6379
|
||||
CACHE_REDIS_PASSWORD = ''
|
||||
CACHE_KEY_PREFIX = 'CMDB::'
|
||||
CACHE_REDIS_PASSWORD = ""
|
||||
CACHE_KEY_PREFIX = "CMDB::"
|
||||
CACHE_DEFAULT_TIMEOUT = 3000
|
||||
|
||||
# # log
|
||||
@@ -55,10 +55,10 @@ DEFAULT_MAIL_SENDER = ''
|
||||
|
||||
# # queue
|
||||
CELERY = {
|
||||
'broker_url': 'redis://127.0.0.1:6379/2',
|
||||
'result_backend': 'redis://127.0.0.1:6379/2',
|
||||
'broker_vhost': '/',
|
||||
'broker_connection_retry_on_startup': True
|
||||
"broker_url": 'redis://127.0.0.1:6379/2',
|
||||
"result_backend": "redis://127.0.0.1:6379/2",
|
||||
"broker_vhost": "/",
|
||||
"broker_connection_retry_on_startup": True
|
||||
}
|
||||
ONCE = {
|
||||
'backend': 'celery_once.backends.Redis',
|
||||
@@ -67,87 +67,34 @@ ONCE = {
|
||||
}
|
||||
}
|
||||
|
||||
# =============================== Authentication ===========================================================
|
||||
# # SSO
|
||||
CAS_SERVER = "http://sso.xxx.com"
|
||||
CAS_VALIDATE_SERVER = "http://sso.xxx.com"
|
||||
CAS_LOGIN_ROUTE = "/cas/login"
|
||||
CAS_LOGOUT_ROUTE = "/cas/logout"
|
||||
CAS_VALIDATE_ROUTE = "/cas/serviceValidate"
|
||||
CAS_AFTER_LOGIN = "/"
|
||||
DEFAULT_SERVICE = "http://127.0.0.1:8000"
|
||||
|
||||
# # CAS
|
||||
CAS = dict(
|
||||
enabled=False,
|
||||
cas_server='https://{your-CASServer-hostname}',
|
||||
cas_validate_server='https://{your-CASServer-hostname}',
|
||||
cas_login_route='/cas/built-in/cas/login',
|
||||
cas_logout_route='/cas/built-in/cas/logout',
|
||||
cas_validate_route='/cas/built-in/cas/serviceValidate',
|
||||
cas_after_login='/',
|
||||
cas_user_map={
|
||||
'username': {'tag': 'cas:user'},
|
||||
'nickname': {'tag': 'cas:attribute', 'attrs': {'name': 'displayName'}},
|
||||
'email': {'tag': 'cas:attribute', 'attrs': {'name': 'email'}},
|
||||
'mobile': {'tag': 'cas:attribute', 'attrs': {'name': 'phone'}},
|
||||
'avatar': {'tag': 'cas:attribute', 'attrs': {'name': 'avatar'}},
|
||||
}
|
||||
)
|
||||
|
||||
# # OAuth2.0
|
||||
OAUTH2 = dict(
|
||||
enabled=False,
|
||||
client_id='',
|
||||
client_secret='',
|
||||
authorize_url='https://{your-OAuth2Server-hostname}/login/oauth/authorize',
|
||||
token_url='https://{your-OAuth2Server-hostname}/api/login/oauth/access_token',
|
||||
scopes=['profile', 'email'],
|
||||
user_info={
|
||||
'url': 'https://{your-OAuth2Server-hostname}/api/userinfo',
|
||||
'email': 'email',
|
||||
'username': 'name',
|
||||
'avatar': 'picture'
|
||||
},
|
||||
after_login='/'
|
||||
)
|
||||
|
||||
# # OIDC
|
||||
OIDC = dict(
|
||||
enabled=False,
|
||||
client_id='',
|
||||
client_secret='',
|
||||
authorize_url='https://{your-OIDCServer-hostname}/login/oauth/authorize',
|
||||
token_url='https://{your-OIDCServer-hostname}/api/login/oauth/access_token',
|
||||
scopes=['openid', 'profile', 'email'],
|
||||
user_info={
|
||||
'url': 'https://{your-OIDCServer-hostname}/api/userinfo',
|
||||
'email': 'email',
|
||||
'username': 'name',
|
||||
'avatar': 'picture'
|
||||
},
|
||||
after_login='/'
|
||||
)
|
||||
|
||||
# # LDAP
|
||||
LDAP = dict(
|
||||
enabled=False,
|
||||
ldap_server='',
|
||||
ldap_domain='',
|
||||
ldap_user_dn='cn={},ou=users,dc=xxx,dc=com'
|
||||
)
|
||||
# ==========================================================================================================
|
||||
# # ldap
|
||||
AUTH_WITH_LDAP = False
|
||||
LDAP_SERVER = ''
|
||||
LDAP_DOMAIN = ''
|
||||
LDAP_USER_DN = 'cn={},ou=users,dc=xxx,dc=com'
|
||||
|
||||
# # pagination
|
||||
DEFAULT_PAGE_COUNT = 50
|
||||
|
||||
# # permission
|
||||
WHITE_LIST = ['127.0.0.1']
|
||||
WHITE_LIST = ["127.0.0.1"]
|
||||
USE_ACL = True
|
||||
|
||||
# # elastic search
|
||||
ES_HOST = '127.0.0.1'
|
||||
USE_ES = False
|
||||
|
||||
BOOL_TRUE = ['true', 'TRUE', 'True', True, '1', 1, 'Yes', 'YES', 'yes', 'Y', 'y']
|
||||
BOOL_TRUE = ['true', 'TRUE', 'True', True, '1', 1, "Yes", "YES", "yes", 'Y', 'y']
|
||||
|
||||
# # messenger
|
||||
USE_MESSENGER = True
|
||||
|
||||
# # secrets
|
||||
SECRETS_ENGINE = 'inner' # 'inner' or 'vault'
|
||||
VAULT_URL = ''
|
||||
VAULT_TOKEN = ''
|
||||
INNER_TRIGGER_TOKEN = ''
|
||||
MESSENGER_URL = "http://{messenger_url}/v1/message"
|
||||
|
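The settings hunk above replaces the flat `AUTH_WITH_LDAP` / `LDAP_SERVER` style options with per-provider dicts (CAS, OAUTH2, OIDC, LDAP), each carrying its own `enabled` flag. A small sketch of consuming the new LDAP block; the concrete values are illustrative:

```python
# illustrative values; in the application this dict comes from current_app.config['LDAP']
LDAP = dict(
    enabled=True,
    ldap_server='ldap://ldap.example.com',
    ldap_domain='example.com',
    ldap_user_dn='cn={},ou=users,dc=example,dc=com',
)


def build_user_dn(username, cfg=LDAP):
    # expand the cn template from the dict-style LDAP settings block
    return cfg['ldap_user_dn'].format(username)


if LDAP['enabled']:
    print(build_user_dn('alice'))  # cn=alice,ou=users,dc=example,dc=com
```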
@@ -63,15 +63,14 @@
  },
  "devDependencies": {
    "@ant-design/colors": "^3.2.2",
    "@babel/core": "^7.23.2",
    "@babel/polyfill": "^7.2.5",
    "@babel/preset-env": "^7.23.2",
    "@vue/cli-plugin-babel": "4.5.17",
    "@vue/cli-plugin-eslint": "^4.0.5",
    "@vue/cli-plugin-unit-jest": "^4.0.5",
    "@vue/cli-service": "^4.0.5",
    "@vue/eslint-config-standard": "^4.0.0",
    "@vue/test-utils": "^1.0.0-beta.30",
    "babel-core": "7.0.0-bridge.0",
    "babel-jest": "^23.6.0",
    "babel-plugin-import": "^1.11.0",
    "babel-plugin-transform-remove-console": "^6.9.4",

File diff suppressed because it is too large
@@ -1,8 +1,8 @@
|
||||
@font-face {
|
||||
font-family: "iconfont"; /* Project id 3857903 */
|
||||
src: url('iconfont.woff2?t=1702544951995') format('woff2'),
|
||||
url('iconfont.woff?t=1702544951995') format('woff'),
|
||||
url('iconfont.ttf?t=1702544951995') format('truetype');
|
||||
src: url('iconfont.woff2?t=1694508259411') format('woff2'),
|
||||
url('iconfont.woff?t=1694508259411') format('woff'),
|
||||
url('iconfont.ttf?t=1694508259411') format('truetype');
|
||||
}
|
||||
|
||||
.iconfont {
|
||||
@@ -13,330 +13,6 @@
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
.OAUTH2:before {
|
||||
content: "\e8d8";
|
||||
}
|
||||
|
||||
.OIDC:before {
|
||||
content: "\e8d6";
|
||||
}
|
||||
|
||||
.CAS:before {
|
||||
content: "\e8d7";
|
||||
}
|
||||
|
||||
.ops-setting-auth:before {
|
||||
content: "\e8d5";
|
||||
}
|
||||
|
||||
.ops-setting-auth-selected:before {
|
||||
content: "\e8d4";
|
||||
}
|
||||
|
||||
.a-itsm-knowledge2:before {
|
||||
content: "\e8d2";
|
||||
}
|
||||
|
||||
.itsm-qrdownload:before {
|
||||
content: "\e8d3";
|
||||
}
|
||||
|
||||
.oneterm-playback:before {
|
||||
content: "\e8d1";
|
||||
}
|
||||
|
||||
.oneterm-disconnect:before {
|
||||
content: "\e8d0";
|
||||
}
|
||||
|
||||
.ops-oneterm-publickey-selected:before {
|
||||
content: "\e8cf";
|
||||
}
|
||||
|
||||
.ops-oneterm-publickey:before {
|
||||
content: "\e8ce";
|
||||
}
|
||||
|
||||
.ops-oneterm-gateway:before {
|
||||
content: "\e8b9";
|
||||
}
|
||||
|
||||
.ops-oneterm-gateway-selected:before {
|
||||
content: "\e8bf";
|
||||
}
|
||||
|
||||
.ops-oneterm-account:before {
|
||||
content: "\e8c0";
|
||||
}
|
||||
|
||||
.ops-oneterm-account-selected:before {
|
||||
content: "\e8c1";
|
||||
}
|
||||
|
||||
.ops-oneterm-command:before {
|
||||
content: "\e8c2";
|
||||
}
|
||||
|
||||
.ops-oneterm-command-selected:before {
|
||||
content: "\e8c3";
|
||||
}
|
||||
|
||||
.ops-oneterm-assetlist:before {
|
||||
content: "\e8c4";
|
||||
}
|
||||
|
||||
.ops-oneterm-assetlist-selected:before {
|
||||
content: "\e8c5";
|
||||
}
|
||||
|
||||
.ops-oneterm-sessiononline:before {
|
||||
content: "\e8c6";
|
||||
}
|
||||
|
||||
.ops-oneterm-sessiononline-selected:before {
|
||||
content: "\e8c7";
|
||||
}
|
||||
|
||||
.ops-oneterm-sessionhistory-selected:before {
|
||||
content: "\e8c8";
|
||||
}
|
||||
|
||||
.ops-oneterm-sessionhistory:before {
|
||||
content: "\e8c9";
|
||||
}
|
||||
|
||||
.ops-oneterm-login:before {
|
||||
content: "\e8ca";
|
||||
}
|
||||
|
||||
.ops-oneterm-login-selected:before {
|
||||
content: "\e8cb";
|
||||
}
|
||||
|
||||
.ops-oneterm-operation:before {
|
||||
content: "\e8cc";
|
||||
}
|
||||
|
||||
.ops-oneterm-operation-selected:before {
|
||||
content: "\e8cd";
|
||||
}
|
||||
|
||||
.ops-oneterm-workstation-selected:before {
|
||||
content: "\e8b7";
|
||||
}
|
||||
|
||||
.ops-oneterm-workstation:before {
|
||||
content: "\e8b8";
|
||||
}
|
||||
|
||||
.oneterm-file-selected:before {
|
||||
content: "\e8be";
|
||||
}
|
||||
|
||||
.oneterm-file:before {
|
||||
content: "\e8bc";
|
||||
}
|
||||
|
||||
.oneterm-time:before {
|
||||
content: "\e8bd";
|
||||
}
|
||||
|
||||
.oneterm-download:before {
|
||||
content: "\e8bb";
|
||||
}
|
||||
|
||||
.oneterm-commandrecord:before {
|
||||
content: "\e8ba";
|
||||
}
|
||||
|
||||
.oneterm-asset:before {
|
||||
content: "\e8b6";
|
||||
}
|
||||
|
||||
.oneterm-total_asset:before {
|
||||
content: "\e8b5";
|
||||
}
|
||||
|
||||
.oneterm-switch:before {
|
||||
content: "\e8b4";
|
||||
}
|
||||
|
||||
.oneterm-session:before {
|
||||
content: "\e8b3";
|
||||
}
|
||||
|
||||
.oneterm-connect:before {
|
||||
content: "\e8b2";
|
||||
}
|
||||
|
||||
.oneterm-login:before {
|
||||
content: "\e8b1";
|
||||
}
|
||||
|
||||
.ops-oneterm-dashboard:before {
|
||||
content: "\e8af";
|
||||
}
|
||||
|
||||
.ops-oneterm-dashboard-selected:before {
|
||||
content: "\e8b0";
|
||||
}
|
||||
|
||||
.oneterm-recentsession:before {
|
||||
content: "\e8ae";
|
||||
}
|
||||
|
||||
.oneterm-myassets:before {
|
||||
content: "\e8ad";
|
||||
}
|
||||
|
||||
.ops-oneterm-log:before {
|
||||
content: "\e8aa";
|
||||
}
|
||||
|
||||
.ops-oneterm-session-selected:before {
|
||||
content: "\e8ab";
|
||||
}
|
||||
|
||||
.ops-oneterm-session:before {
|
||||
content: "\e8ac";
|
||||
}
|
||||
|
||||
.ops-oneterm-log-selected:before {
|
||||
content: "\e8a9";
|
||||
}
|
||||
|
||||
.ops-oneterm-assets:before {
|
||||
content: "\e8a7";
|
||||
}
|
||||
|
||||
.ops-oneterm-assets-selected:before {
|
||||
content: "\e8a8";
|
||||
}
|
||||
|
||||
.itsm-down:before {
|
||||
content: "\e8a5";
|
||||
}
|
||||
|
||||
.itsm-up:before {
|
||||
content: "\e8a6";
|
||||
}
|
||||
|
||||
.itsm-download:before {
|
||||
content: "\e8a4";
|
||||
}
|
||||
|
||||
.itsm-print:before {
|
||||
content: "\e8a3";
|
||||
}
|
||||
|
||||
.itsm-view:before {
|
||||
content: "\e8a2";
|
||||
}
|
||||
|
||||
.itsm-word:before {
|
||||
content: "\e8a1";
|
||||
}
|
||||
|
||||
.datainsight-custom:before {
|
||||
content: "\e89e";
|
||||
}
|
||||
|
||||
.datainsight-prometheus:before {
|
||||
content: "\e89f";
|
||||
}
|
||||
|
||||
.datainsight-zabbix:before {
|
||||
content: "\e8a0";
|
||||
}
|
||||
|
||||
.setting-mainpeople:before {
|
||||
content: "\e89a";
|
||||
}
|
||||
|
||||
.setting-deputypeople:before {
|
||||
content: "\e89d";
|
||||
}
|
||||
|
||||
.ops-setting-duty:before {
|
||||
content: "\e89c";
|
||||
}
|
||||
|
||||
.ops-setting-duty-selected:before {
|
||||
content: "\e89b";
|
||||
}
|
||||
|
||||
.datainsight-sequential:before {
|
||||
content: "\e899";
|
||||
}
|
||||
|
||||
.datainsight-close:before {
|
||||
content: "\e898";
|
||||
}
|
||||
|
||||
.datainsight-handle:before {
|
||||
content: "\e897";
|
||||
}
|
||||
|
||||
.datainsight-table:before {
|
||||
content: "\e896";
|
||||
}
|
||||
|
||||
.icon-xianxing-password:before {
|
||||
content: "\e894";
|
||||
}
|
||||
|
||||
.icon-xianxing-link:before {
|
||||
content: "\e895";
|
||||
}
|
||||
|
||||
.itsm-download-all:before {
|
||||
content: "\e892";
|
||||
}
|
||||
|
||||
.itsm-download-package:before {
|
||||
content: "\e893";
|
||||
}
|
||||
|
||||
.a-Frame4:before {
|
||||
content: "\e891";
|
||||
}
|
||||
|
||||
.itsm-again:before {
|
||||
content: "\e88f";
|
||||
}
|
||||
|
||||
.itsm-next:before {
|
||||
content: "\e890";
|
||||
}
|
||||
|
||||
.wechatApp:before {
|
||||
content: "\e88e";
|
||||
}
|
||||
|
||||
.robot:before {
|
||||
content: "\e88b";
|
||||
}
|
||||
|
||||
.feishuApp:before {
|
||||
content: "\e88c";
|
||||
}
|
||||
|
||||
.dingdingApp:before {
|
||||
content: "\e88d";
|
||||
}
|
||||
|
||||
.email:before {
|
||||
content: "\e88a";
|
||||
}
|
||||
|
||||
.ops-setting-notice-feishu:before {
|
||||
content: "\e887";
|
||||
}
|
||||
|
||||
.ops-setting-notice-feishu-selected:before {
|
||||
content: "\e888";
|
||||
}
|
||||
|
||||
.cmdb-bar:before {
|
||||
content: "\e886";
|
||||
}
|
||||
@@ -1701,10 +1377,6 @@
|
||||
content: "\e738";
|
||||
}
|
||||
|
||||
.ops-setting-notice-email-selected-copy:before {
|
||||
content: "\e889";
|
||||
}
|
||||
|
||||
.ops-setting-notice:before {
|
||||
content: "\e72f";
|
||||
}
|
||||
|
File diff suppressed because one or more lines are too long
@@ -5,573 +5,6 @@
|
||||
"css_prefix_text": "",
|
||||
"description": "",
|
||||
"glyphs": [
|
||||
{
|
||||
"icon_id": "38566548",
|
||||
"name": "OAuth2.0",
|
||||
"font_class": "OAUTH2",
|
||||
"unicode": "e8d8",
|
||||
"unicode_decimal": 59608
|
||||
},
|
||||
{
|
||||
"icon_id": "38566584",
|
||||
"name": "OIDC",
|
||||
"font_class": "OIDC",
|
||||
"unicode": "e8d6",
|
||||
"unicode_decimal": 59606
|
||||
},
|
||||
{
|
||||
"icon_id": "38566578",
|
||||
"name": "cas",
|
||||
"font_class": "CAS",
|
||||
"unicode": "e8d7",
|
||||
"unicode_decimal": 59607
|
||||
},
|
||||
{
|
||||
"icon_id": "38547395",
|
||||
"name": "setting-authentication",
|
||||
"font_class": "ops-setting-auth",
|
||||
"unicode": "e8d5",
|
||||
"unicode_decimal": 59605
|
||||
},
|
||||
{
|
||||
"icon_id": "38547389",
|
||||
"name": "setting-authentication-selected",
|
||||
"font_class": "ops-setting-auth-selected",
|
||||
"unicode": "e8d4",
|
||||
"unicode_decimal": 59604
|
||||
},
|
||||
{
|
||||
"icon_id": "38533133",
|
||||
"name": "itsm-knowledge (2)",
|
||||
"font_class": "a-itsm-knowledge2",
|
||||
"unicode": "e8d2",
|
||||
"unicode_decimal": 59602
|
||||
},
|
||||
{
|
||||
"icon_id": "38531868",
|
||||
"name": "itsm-QRcode",
|
||||
"font_class": "itsm-qrdownload",
|
||||
"unicode": "e8d3",
|
||||
"unicode_decimal": 59603
|
||||
},
|
||||
{
|
||||
"icon_id": "38413515",
|
||||
"name": "oneterm-playback",
|
||||
"font_class": "oneterm-playback",
|
||||
"unicode": "e8d1",
|
||||
"unicode_decimal": 59601
|
||||
},
|
||||
{
|
||||
"icon_id": "38413481",
|
||||
"name": "oneterm-disconnect",
|
||||
"font_class": "oneterm-disconnect",
|
||||
"unicode": "e8d0",
|
||||
"unicode_decimal": 59600
|
||||
},
|
||||
{
|
||||
"icon_id": "38407867",
|
||||
"name": "oneterm-key-selected",
|
||||
"font_class": "ops-oneterm-publickey-selected",
|
||||
"unicode": "e8cf",
|
||||
"unicode_decimal": 59599
|
||||
},
|
||||
{
|
||||
"icon_id": "38407915",
|
||||
"name": "oneterm-key",
|
||||
"font_class": "ops-oneterm-publickey",
|
||||
"unicode": "e8ce",
|
||||
"unicode_decimal": 59598
|
||||
},
|
||||
{
|
||||
"icon_id": "38311855",
|
||||
"name": "oneterm-gateway",
|
||||
"font_class": "ops-oneterm-gateway",
|
||||
"unicode": "e8b9",
|
||||
"unicode_decimal": 59577
|
||||
},
|
||||
{
|
||||
"icon_id": "38311938",
|
||||
"name": "oneterm-gateway-selected",
|
||||
"font_class": "ops-oneterm-gateway-selected",
|
||||
"unicode": "e8bf",
|
||||
"unicode_decimal": 59583
|
||||
},
|
||||
{
|
||||
"icon_id": "38311957",
|
||||
"name": "oneterm-account",
|
||||
"font_class": "ops-oneterm-account",
|
||||
"unicode": "e8c0",
|
||||
"unicode_decimal": 59584
|
||||
},
|
||||
{
|
||||
"icon_id": "38311961",
|
||||
"name": "oneterm-account-selected",
|
||||
"font_class": "ops-oneterm-account-selected",
|
||||
"unicode": "e8c1",
|
||||
"unicode_decimal": 59585
|
||||
},
|
||||
{
|
||||
"icon_id": "38311974",
|
||||
"name": "oneterm-command",
|
||||
"font_class": "ops-oneterm-command",
|
||||
"unicode": "e8c2",
|
||||
"unicode_decimal": 59586
|
||||
},
|
||||
{
|
||||
"icon_id": "38311976",
|
||||
"name": "oneterm-command-selected",
|
||||
"font_class": "ops-oneterm-command-selected",
|
    "unicode": "e8c3", "unicode_decimal": 59587 },
    { "icon_id": "38311979", "name": "oneterm-asset_list", "font_class": "ops-oneterm-assetlist", "unicode": "e8c4", "unicode_decimal": 59588 },
    { "icon_id": "38311985", "name": "oneterm-asset_list-selected", "font_class": "ops-oneterm-assetlist-selected", "unicode": "e8c5", "unicode_decimal": 59589 },
    { "icon_id": "38312030", "name": "oneterm-online", "font_class": "ops-oneterm-sessiononline", "unicode": "e8c6", "unicode_decimal": 59590 },
    { "icon_id": "38312152", "name": "oneterm-online-selected", "font_class": "ops-oneterm-sessiononline-selected", "unicode": "e8c7", "unicode_decimal": 59591 },
    { "icon_id": "38312154", "name": "oneterm-history-selected", "font_class": "ops-oneterm-sessionhistory-selected", "unicode": "e8c8", "unicode_decimal": 59592 },
    { "icon_id": "38312155", "name": "oneterm-history", "font_class": "ops-oneterm-sessionhistory", "unicode": "e8c9", "unicode_decimal": 59593 },
    { "icon_id": "38312404", "name": "oneterm-entry_log", "font_class": "ops-oneterm-login", "unicode": "e8ca", "unicode_decimal": 59594 },
    { "icon_id": "38312423", "name": "oneterm-entry_log-selected", "font_class": "ops-oneterm-login-selected", "unicode": "e8cb", "unicode_decimal": 59595 },
    { "icon_id": "38312426", "name": "oneterm-operation_log", "font_class": "ops-oneterm-operation", "unicode": "e8cc", "unicode_decimal": 59596 },
    { "icon_id": "38312445", "name": "oneterm-operation_log-selected", "font_class": "ops-oneterm-operation-selected", "unicode": "e8cd", "unicode_decimal": 59597 },
    { "icon_id": "38307876", "name": "oneterm-workstation-selected", "font_class": "ops-oneterm-workstation-selected", "unicode": "e8b7", "unicode_decimal": 59575 },
    { "icon_id": "38307871", "name": "oneterm-workstation", "font_class": "ops-oneterm-workstation", "unicode": "e8b8", "unicode_decimal": 59576 },
    { "icon_id": "38302246", "name": "oneterm-file-selected", "font_class": "oneterm-file-selected", "unicode": "e8be", "unicode_decimal": 59582 },
    { "icon_id": "38302255", "name": "oneterm-file", "font_class": "oneterm-file", "unicode": "e8bc", "unicode_decimal": 59580 },
    { "icon_id": "38203528", "name": "oneterm-time", "font_class": "oneterm-time", "unicode": "e8bd", "unicode_decimal": 59581 },
    { "icon_id": "38203331", "name": "oneterm-download", "font_class": "oneterm-download", "unicode": "e8bb", "unicode_decimal": 59579 },
    { "icon_id": "38201351", "name": "oneterm-command record", "font_class": "oneterm-commandrecord", "unicode": "e8ba", "unicode_decimal": 59578 },
    { "icon_id": "38199341", "name": "oneterm-connected assets", "font_class": "oneterm-asset", "unicode": "e8b6", "unicode_decimal": 59574 },
    { "icon_id": "38199350", "name": "oneterm-total assets", "font_class": "oneterm-total_asset", "unicode": "e8b5", "unicode_decimal": 59573 },
    { "icon_id": "38199303", "name": "oneterm-switch (3)", "font_class": "oneterm-switch", "unicode": "e8b4", "unicode_decimal": 59572 },
    { "icon_id": "38199317", "name": "oneterm-session", "font_class": "oneterm-session", "unicode": "e8b3", "unicode_decimal": 59571 },
    { "icon_id": "38199339", "name": "oneterm-connection", "font_class": "oneterm-connect", "unicode": "e8b2", "unicode_decimal": 59570 },
    { "icon_id": "38198321", "name": "oneterm-log in", "font_class": "oneterm-login", "unicode": "e8b1", "unicode_decimal": 59569 },
    { "icon_id": "38194554", "name": "oneterm-dashboard", "font_class": "ops-oneterm-dashboard", "unicode": "e8af", "unicode_decimal": 59567 },
    { "icon_id": "38194525", "name": "oneterm-dashboard-selected", "font_class": "ops-oneterm-dashboard-selected", "unicode": "e8b0", "unicode_decimal": 59568 },
    { "icon_id": "38194352", "name": "oneterm-recent session", "font_class": "oneterm-recentsession", "unicode": "e8ae", "unicode_decimal": 59566 },
    { "icon_id": "38194383", "name": "oneterm-my assets", "font_class": "oneterm-myassets", "unicode": "e8ad", "unicode_decimal": 59565 },
    { "icon_id": "38194089", "name": "oneterm-log", "font_class": "ops-oneterm-log", "unicode": "e8aa", "unicode_decimal": 59562 },
    { "icon_id": "38194088", "name": "oneterm-conversation-selected", "font_class": "ops-oneterm-session-selected", "unicode": "e8ab", "unicode_decimal": 59563 },
    { "icon_id": "38194065", "name": "oneterm-conversation", "font_class": "ops-oneterm-session", "unicode": "e8ac", "unicode_decimal": 59564 },
    { "icon_id": "38194105", "name": "oneterm-log-selected", "font_class": "ops-oneterm-log-selected", "unicode": "e8a9", "unicode_decimal": 59561 },
    { "icon_id": "38194054", "name": "oneterm-assets", "font_class": "ops-oneterm-assets", "unicode": "e8a7", "unicode_decimal": 59559 },
    { "icon_id": "38194055", "name": "oneterm-assets-selected", "font_class": "ops-oneterm-assets-selected", "unicode": "e8a8", "unicode_decimal": 59560 },
    { "icon_id": "38123087", "name": "itsm-down", "font_class": "itsm-down", "unicode": "e8a5", "unicode_decimal": 59557 },
    { "icon_id": "38123084", "name": "itsm-up", "font_class": "itsm-up", "unicode": "e8a6", "unicode_decimal": 59558 },
    { "icon_id": "38105374", "name": "itsm-download", "font_class": "itsm-download", "unicode": "e8a4", "unicode_decimal": 59556 },
    { "icon_id": "38105235", "name": "itsm-print", "font_class": "itsm-print", "unicode": "e8a3", "unicode_decimal": 59555 },
    { "icon_id": "38104997", "name": "itsm-view", "font_class": "itsm-view", "unicode": "e8a2", "unicode_decimal": 59554 },
    { "icon_id": "38105129", "name": "itsm-word", "font_class": "itsm-word", "unicode": "e8a1", "unicode_decimal": 59553 },
    { "icon_id": "38095730", "name": "datainsight-custom", "font_class": "datainsight-custom", "unicode": "e89e", "unicode_decimal": 59550 },
    { "icon_id": "38095729", "name": "datainsight-prometheus", "font_class": "datainsight-prometheus", "unicode": "e89f", "unicode_decimal": 59551 },
    { "icon_id": "38095728", "name": "datainsight-zabbix", "font_class": "datainsight-zabbix", "unicode": "e8a0", "unicode_decimal": 59552 },
    { "icon_id": "37944507", "name": "setting-main people", "font_class": "setting-mainpeople", "unicode": "e89a", "unicode_decimal": 59546 },
    { "icon_id": "37944503", "name": "setting-deputy people", "font_class": "setting-deputypeople", "unicode": "e89d", "unicode_decimal": 59549 },
    { "icon_id": "37940080", "name": "ops-setting-duty", "font_class": "ops-setting-duty", "unicode": "e89c", "unicode_decimal": 59548 },
    { "icon_id": "37940033", "name": "ops-setting-duty-selected", "font_class": "ops-setting-duty-selected", "unicode": "e89b", "unicode_decimal": 59547 },
    { "icon_id": "37841524", "name": "datainsight-sequential", "font_class": "datainsight-sequential", "unicode": "e899", "unicode_decimal": 59545 },
    { "icon_id": "37841535", "name": "datainsight-close", "font_class": "datainsight-close", "unicode": "e898", "unicode_decimal": 59544 },
    { "icon_id": "37841537", "name": "datainsight-handle", "font_class": "datainsight-handle", "unicode": "e897", "unicode_decimal": 59543 },
    { "icon_id": "37841515", "name": "datainsight-table", "font_class": "datainsight-table", "unicode": "e896", "unicode_decimal": 59542 },
    { "icon_id": "37830610", "name": "icon-xianxing-password", "font_class": "icon-xianxing-password", "unicode": "e894", "unicode_decimal": 59540 },
    { "icon_id": "37830609", "name": "icon-xianxing-link", "font_class": "icon-xianxing-link", "unicode": "e895", "unicode_decimal": 59541 },
    { "icon_id": "37822199", "name": "itsm-oneclick download", "font_class": "itsm-download-all", "unicode": "e892", "unicode_decimal": 59538 },
    { "icon_id": "37822198", "name": "itsm-package download", "font_class": "itsm-download-package", "unicode": "e893", "unicode_decimal": 59539 },
    { "icon_id": "37772067", "name": "weixin", "font_class": "a-Frame4", "unicode": "e891", "unicode_decimal": 59537 },
    { "icon_id": "37632784", "name": "itsm-again", "font_class": "itsm-again", "unicode": "e88f", "unicode_decimal": 59535 },
    { "icon_id": "37632783", "name": "itsm-next", "font_class": "itsm-next", "unicode": "e890", "unicode_decimal": 59536 },
    { "icon_id": "37590786", "name": "wechatApp", "font_class": "wechatApp", "unicode": "e88e", "unicode_decimal": 59534 },
    { "icon_id": "37590798", "name": "robot", "font_class": "robot", "unicode": "e88b", "unicode_decimal": 59531 },
    { "icon_id": "37590794", "name": "feishuApp", "font_class": "feishuApp", "unicode": "e88c", "unicode_decimal": 59532 },
    { "icon_id": "37590791", "name": "dingdingApp", "font_class": "dingdingApp", "unicode": "e88d", "unicode_decimal": 59533 },
    { "icon_id": "37590776", "name": "email", "font_class": "email", "unicode": "e88a", "unicode_decimal": 59530 },
    { "icon_id": "37537876", "name": "setting-feishu", "font_class": "ops-setting-notice-feishu", "unicode": "e887", "unicode_decimal": 59527 },
    { "icon_id": "37537859", "name": "setting-feishu-selected", "font_class": "ops-setting-notice-feishu-selected", "unicode": "e888", "unicode_decimal": 59528 },
    { "icon_id": "37334642", "name": "cmdb-histogram",
@@ -2959,13 +2392,6 @@
    "unicode": "e738", "unicode_decimal": 59192 },
    { "icon_id": "37575490", "name": "ops-setting-notice-email-selected", "font_class": "ops-setting-notice-email-selected-copy", "unicode": "e889", "unicode_decimal": 59529 },
    { "icon_id": "34108346", "name": "ops-setting-notice",
Binary file not shown.
Binary file not shown.
Binary file not shown.
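For context, the JSON above is an iconfont-style glyph manifest (icon_id / name / font_class / unicode entries). A minimal sketch of how such a manifest can be looked up at runtime follows; the top-level "glyphs" key, the import path, and the helper name are assumptions for illustration, not code taken from this repository.

// Sketch: resolve a glyph's unicode character from an iconfont-style manifest.
// Assumes the entries shown above live in a top-level "glyphs" array.
import iconfont from '@/assets/icons/iconfont.json' // hypothetical path

export function glyphChar(fontClass) {
  const glyph = (iconfont.glyphs || []).find((g) => g.font_class === fontClass)
  // e.g. glyphChar('ops-oneterm-assetlist') === '\ue8c4' (decimal 59588)
  return glyph ? String.fromCharCode(glyph.unicode_decimal) : ''
}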
@@ -1,39 +0,0 @@
import { axios } from '@/utils/request'

export function getAuthData(data_type) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}`,
    method: 'get',
  })
}

export function postAuthData(data_type, data) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}`,
    method: 'post',
    data,
  })
}

export function putAuthData(data_type, id, data) {
  return axios({
    url: `/common-setting/v1/auth_config/${data_type}/${id}`,
    method: 'put',
    data,
  })
}

export function getAuthDataEnable() {
  return axios({
    url: `/common-setting/v1/auth_config/enable_list`,
    method: 'get',
  })
}

export function testLDAP(test_type, data) {
  return axios({
    url: `/common-setting/v1/auth_config/LDAP/test?test_type=${test_type}`,
    method: 'post',
    data,
  })
}
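A hypothetical call site for the wrappers in the removed file above, assuming the shared axios instance resolves to the parsed response body; the import path and the 'connect' test_type value are illustrative only, not taken from the repository.

// Sketch: read the stored LDAP settings and run a connectivity test with them.
import { getAuthData, testLDAP } from '@/api/auth_config' // path assumed

async function verifyLdap() {
  const ldapConfig = await getAuthData('LDAP')   // GET /common-setting/v1/auth_config/LDAP
  return testLDAP('connect', ldapConfig)         // POST .../auth_config/LDAP/test?test_type=connect
}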
@@ -1,134 +1,127 @@
// ----- old file content (the "-1,134" side of the hunk) -----
import { axios } from '@/utils/request'

export function getEmployeeList(params) {
  return axios({
    url: '/common-setting/v1/employee',
    method: 'get',
    params: params,
  })
}
// export function getEmployeeList(params, orderBy) {
//   return axios({
//     url: '/common-setting/v1/employee' + '/' + orderBy,
//     method: 'get',
//     params: params,
//   })
// }
export function postEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee',
    method: 'post',
    data: data,
  })
}
export function getEmployeeCount(params) {
  return axios({
    url: '/common-setting/v1/employee/count',
    method: 'get',
    params: params,
  })
}
export function deleteEmployee(_id) {
  return axios({
    url: `/common-setting/v1/employee/${_id}`,
    method: 'delete',
  })
}
export function putEmployee(_id, data) {
  return axios({
    url: `/common-setting/v1/employee/${_id}`,
    method: 'put',
    data: data,
  })
}
export function batchEditEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee/batch',
    method: 'post',
    data: data,
  })
}
export function importEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee/import',
    method: 'post',
    data
  })
}

export function getEmployeeByUid(uid) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/${uid}`,
    method: 'get',
  })
}

export function updateEmployeeByUid(uid, data) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/${uid}`,
    method: 'put',
    data
  })
}

export function updatePasswordByUid(uid, data) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/change_password/${uid}`,
    method: 'put',
    data
  })
}

export function bindPlatformByUid(platform, uid) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/bind_notice/${platform}/${uid}`,
    method: 'put',
  })
}

export function unbindPlatformByUid(platform, uid) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/bind_notice/${platform}/${uid}`,
    method: 'delete',
  })
}

export function getAllPosition() {
  return axios({
    url: `/common-setting/v1/employee/position`,
    method: 'get',
  })
}

export function getEmployeeByEmployeeId(employee_id) {
  return axios({
    url: `/common-setting/v1/employee/${employee_id}`,
    method: 'get',
  })
}

// Download the employee list
export function downloadAllEmployee(params) {
  return axios({
    url: `/common-setting/v1/employee/export_all`,
    method: 'get',
    params,
    responseType: 'blob'
  })
}

export function getEmployeeListByFilter(data) {
  return axios({
    url: '/common-setting/v1/employee/filter',
    method: 'post',
    data
  })
}

export function getNoticeByEmployeeIds(data) {
  return axios({
    url: '/common-setting/v1/employee/get_notice_by_ids',
    method: 'post',
    data
  })
}

// ----- new file content (the "+1,127" side of the hunk) -----
import { axios } from '@/utils/request'

export function getEmployeeList(params) {
  return axios({
    url: '/common-setting/v1/employee',
    method: 'get',
    params: params,
  })
}
// export function getEmployeeList(params, orderBy) {
//   return axios({
//     url: '/common-setting/v1/employee' + '/' + orderBy,
//     method: 'get',
//     params: params,
//   })
// }
export function postEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee',
    method: 'post',
    data: data,
  })
}
export function getEmployeeCount(params) {
  return axios({
    url: '/common-setting/v1/employee/count',
    method: 'get',
    params: params,
  })
}
export function deleteEmployee(_id) {
  return axios({
    url: `/common-setting/v1/employee/${_id}`,
    method: 'delete',
  })
}
export function putEmployee(_id, data) {
  return axios({
    url: `/common-setting/v1/employee/${_id}`,
    method: 'put',
    data: data,
  })
}
export function batchEditEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee/batch',
    method: 'post',
    data: data,
  })
}
export function importEmployee(data) {
  return axios({
    url: '/common-setting/v1/employee/import',
    method: 'post',
    data
  })
}

export function getEmployeeByUid(uid) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/${uid}`,
    method: 'get',
  })
}

export function updateEmployeeByUid(uid, data) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/${uid}`,
    method: 'put',
    data
  })
}

export function updatePasswordByUid(uid, data) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/change_password/${uid}`,
    method: 'put',
    data
  })
}

export function bindWxByUid(uid) {
  return axios({
    url: `/common-setting/v1/employee/by_uid/bind_work_wechat/${uid}`,
    method: 'put',
  })
}

export function getAllPosition() {
  return axios({
    url: `/common-setting/v1/employee/position`,
    method: 'get',
  })
}

export function getEmployeeByEmployeeId(employee_id) {
  return axios({
    url: `/common-setting/v1/employee/${employee_id}`,
    method: 'get',
  })
}

// Download the employee list
export function downloadAllEmployee(params) {
  return axios({
    url: `/common-setting/v1/employee/export_all`,
    method: 'get',
    params,
    responseType: 'blob'
  })
}

export function getEmployeeListByFilter(data) {
  return axios({
    url: '/common-setting/v1/employee/filter',
    method: 'post',
    data
  })
}

export function getNoticeByEmployeeIds(data) {
  return axios({
    url: '/common-setting/v1/employee/get_notice_by_ids',
    method: 'post',
    data
  })
}
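Because downloadAllEmployee requests a blob, the caller has to turn the response into a file itself. A browser-side sketch follows, assuming the wrapper resolves to the raw Blob; the query parameter and file name are made up for illustration.

// Sketch: trigger a browser download of the exported employee list.
async function exportEmployees() {
  const blob = await downloadAllEmployee({ block_status: 0 }) // query param is illustrative
  const url = window.URL.createObjectURL(blob)
  const link = document.createElement('a')
  link.href = url
  link.download = 'employees.xlsx' // hypothetical file name
  link.click()
  window.URL.revokeObjectURL(url)
}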
@@ -1,6 +1,8 @@
+import config from '@/config/setting'
+
 const api = {
-  Login: '/v1/acl/login',
-  Logout: '/v1/acl/logout',
+  Login: config.useSSO ? '/api/sso/login' : '/v1/acl/login',
+  Logout: config.useSSO ? '/api/sso/logout' : '/v1/acl/logout',
   ForgePassword: '/auth/forge-password',
   Register: '/auth/register',
   twoStepCode: '/auth/2step-code',
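The hunk above assumes '@/config/setting' exposes a useSSO switch (and, in the login code further down, an ssoLoginUrl). One possible shape for that module, written here as an assumption rather than the repository's actual file:

// Sketch of '@/config/setting' as implied by the diff; the env-var wiring is assumed.
export default {
  useSSO: process.env.VUE_APP_USE_SSO === 'true',
  ssoLoginUrl: '/api/sso/login',
}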
@@ -1,5 +1,6 @@
 import api from './index'
 import { axios } from '@/utils/request'
+import config from '@/config/setting'
 /**
  * login func
  * parameter: {
@@ -11,10 +12,9 @@ import { axios } from '@/utils/request'
  * @param parameter
  * @returns {*}
  */
-export function login(data, auth_type) {
-  if (auth_type) {
-    localStorage.setItem('ops_auth_type', auth_type)
-    window.location.href = `/api/${auth_type.toLowerCase()}/login`
+export function login(data) {
+  if (config.useSSO) {
+    window.location.href = config.ssoLoginUrl
   } else {
     return axios({
       url: api.Login,
@@ -43,15 +43,17 @@ export function getInfo() {
 }
 
 export function logout() {
-  const auth_type = localStorage.getItem('ops_auth_type')
-  localStorage.clear()
-  return axios({
-    url: auth_type ? `/${auth_type.toLowerCase()}/logout` : api.Logout,
-    method: auth_type ? 'get' : 'post',
-    headers: {
-      'Content-Type': 'application/json;charset=UTF-8'
-    }
-  })
+  if (config.useSSO) {
+    window.location.replace(api.Logout)
+  } else {
+    return axios({
+      url: api.Logout,
+      method: 'post',
+      headers: {
+        'Content-Type': 'application/json;charset=UTF-8'
+      }
+    })
+  }
 }
 
 /**
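A hypothetical caller of the reworked login() above. Note that with useSSO enabled the function redirects the browser and returns nothing, so only the non-SSO branch yields a promise; handleToken and the credentials are placeholders.

// Sketch: handle both branches of the new login() signature.
const result = login({ username: 'demo', password: 'secret' })
if (result) {
  // non-SSO branch: the axios promise resolves with the backend response
  result.then((res) => handleToken(res)) // handleToken is a placeholder for the caller's own logic
}
// SSO branch: the browser has already been redirected to config.ssoLoginUrl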
@@ -1,40 +0,0 @@
import { axios } from '@/utils/request'

export function sendTestEmail(receive_address, data) {
  return axios({
    url: `/common-setting/v1/notice_config/send_test_email?receive_address=${receive_address}`,
    method: 'post',
    data
  })
}

export const getNoticeConfigByPlatform = (platform) => {
  return axios({
    url: '/common-setting/v1/notice_config',
    method: 'get',
    params: { ...platform },
  })
}

export const postNoticeConfigByPlatform = (data) => {
  return axios({
    url: '/common-setting/v1/notice_config',
    method: 'post',
    data
  })
}

export const putNoticeConfigByPlatform = (id, info) => {
  return axios({
    url: `/common-setting/v1/notice_config/${id}`,
    method: 'put',
    data: info
  })
}

export const getNoticeConfigAppBot = () => {
  return axios({
    url: `/common-setting/v1/notice_config/app_bot`,
    method: 'get',
  })
}
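A hypothetical use of the removed notice_config wrappers above, e.g. fetching the email platform's configuration and sending a test message with it; the platform value and recipient address are illustrative assumptions.

// Sketch: load the email notice config, then send a test mail with it.
async function testEmailNotice() {
  const cfg = await getNoticeConfigByPlatform({ platform: 'email' }) // platform value assumed
  await sendTestEmail('ops@example.com', cfg)                        // test recipient is illustrative
}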
Some files were not shown because too many files have changed in this diff.