mirror of
https://github.com/veops/cmdb.git
synced 2025-09-13 23:16:54 +08:00
Compare commits
314 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fb904b01a6 | ||
|
|
63af79ec45 | ||
|
|
38af86317a | ||
|
|
03bac86588 | ||
|
|
130b68cadd | ||
|
|
65000f8141 | ||
|
|
23692ad50b | ||
|
|
16cd34e8b8 | ||
|
|
985f67ee47 | ||
|
|
8d95f8d57d | ||
|
|
cf6230008d | ||
|
|
ec97fa84d8 | ||
|
|
76f074704b | ||
|
|
e5addab3af | ||
|
|
1c6be9e281 | ||
|
|
9552892c68 | ||
|
|
b59e1af318 | ||
|
|
d164d883ab | ||
|
|
1fef160d9e | ||
|
|
2e537d390a | ||
|
|
5b9fe15afa | ||
|
|
89fa5f2243 | ||
|
|
652a5c7fb8 | ||
|
|
afb6adec89 | ||
|
|
a9db4285ab | ||
|
|
a04bdc29a5 | ||
|
|
91e0e076a7 | ||
|
|
339a7b857e | ||
|
|
e86e5ad1fd | ||
|
|
c50a69de77 | ||
|
|
4d16e9e6d9 | ||
|
|
fcea4dcb9f | ||
|
|
f98fd24c62 | ||
|
|
f10eeb8439 | ||
|
|
f070948122 | ||
|
|
4112bcf547 | ||
|
|
2292756bf7 | ||
|
|
93e2483974 | ||
|
|
fbb4fcc255 | ||
|
|
fc77241006 | ||
|
|
0d04ad7d90 | ||
|
|
e6290e49ea | ||
|
|
97aa2e0ebe | ||
|
|
939d9dc3cd | ||
|
|
576d2e3bc4 | ||
|
|
9a40246d29 | ||
|
|
044f95c3be | ||
|
|
a386de355e | ||
|
|
b93afc1790 | ||
|
|
77d89677ef | ||
|
|
7ec6775f03 | ||
|
|
98cc853dbc | ||
|
|
f57ff80099 | ||
|
|
51e4b5dd8f | ||
|
|
dbf44a020b | ||
|
|
8e578797ef | ||
|
|
158de4b946 | ||
|
|
3cf234d49e | ||
|
|
a7debc1b3b | ||
|
|
9268da2ffa | ||
|
|
cfcb092478 | ||
|
|
0d8b41b64a | ||
|
|
d85715793f | ||
|
|
afbdbe4682 | ||
|
|
e629abebb7 | ||
|
|
029c12365a | ||
|
|
4d000d9805 | ||
|
|
f1fc66bd2c | ||
|
|
d6af4af1d1 | ||
|
|
7fe2bdca5f | ||
|
|
1432131d2b | ||
|
|
bc94d039f5 | ||
|
|
5abafed9c8 | ||
|
|
04e249feac | ||
|
|
ef3e6bc6b0 | ||
|
|
d9d5f8f818 | ||
|
|
578da0807c | ||
|
|
3eb35f5497 | ||
|
|
9669ad04cd | ||
|
|
70214807ca | ||
|
|
7c1c309f7a | ||
|
|
9b9799ff5e | ||
|
|
b2578b61fa | ||
|
|
619f47ae13 | ||
|
|
37c5e31799 | ||
|
|
ab70b2a655 | ||
|
|
c285606f4a | ||
|
|
6d3611bd73 | ||
|
|
764f6a07e0 | ||
|
|
ae8d487af4 | ||
|
|
87c6554555 | ||
|
|
f5671c2a2a | ||
|
|
43ad3dfa7b | ||
|
|
29fa17a0b8 | ||
|
|
5191d6ed73 | ||
|
|
8348f8e7b1 | ||
|
|
75c48a0807 | ||
|
|
5b38385f7e | ||
|
|
036e1d236b | ||
|
|
c31be0f753 | ||
|
|
764d2fac3f | ||
|
|
f4079e9c3e | ||
|
|
2a0ed72235 | ||
|
|
9e803ae4c7 | ||
|
|
bebdb61adf | ||
|
|
f49cad771b | ||
|
|
a5b4fbda40 | ||
|
|
2cce2d5cf2 | ||
|
|
e720b7af66 | ||
|
|
09e4a5111b | ||
|
|
3539b12503 | ||
|
|
21d8673b5d | ||
|
|
7154426dc7 | ||
|
|
ca75c7dcd0 | ||
|
|
194a2254a6 | ||
|
|
26abad14d0 | ||
|
|
1521a71f9c | ||
|
|
d425b455f1 | ||
|
|
230307474b | ||
|
|
69d6b40e39 | ||
|
|
5dc2f89e7f | ||
|
|
9eaca4d6a0 | ||
|
|
3680a462f5 | ||
|
|
3ac50e7cd8 | ||
|
|
21b2cc1d5d | ||
|
|
cd5448cc7d | ||
|
|
10610bdb4b | ||
|
|
b5c2156387 | ||
|
|
b05ae0d1a7 | ||
|
|
bbf6138d43 | ||
|
|
1ba3e6a680 | ||
|
|
64045c1f93 | ||
|
|
5a3e55813c | ||
|
|
bc72e58886 | ||
|
|
9e78955ba1 | ||
|
|
136853d9a4 | ||
|
|
036e3ad00d | ||
|
|
5ce6c93237 | ||
|
|
43dba7f7ed | ||
|
|
f4879d20d6 | ||
|
|
740e4c6034 | ||
|
|
0f2baa1d94 | ||
|
|
405b0af72c | ||
|
|
a4e5178979 | ||
|
|
c14fe23283 | ||
|
|
b3a058f908 | ||
|
|
bd82a0e27c | ||
|
|
f22a5c3543 | ||
|
|
ed81c3f091 | ||
|
|
07814b85f9 | ||
|
|
db52b28d6b | ||
|
|
fc85ba21c8 | ||
|
|
6c5ee3fcd9 | ||
|
|
40f1ef88a9 | ||
|
|
bce422ffc8 | ||
|
|
7c79066532 | ||
|
|
1129ac93fb | ||
|
|
5ab0e7e737 | ||
|
|
23319c7417 | ||
|
|
c74f85cabb | ||
|
|
fce2b689fb | ||
|
|
105327bb0c | ||
|
|
745c43d0a4 | ||
|
|
3130d94568 | ||
|
|
04a66eb239 | ||
|
|
68390ec6f1 | ||
|
|
17392be138 | ||
|
|
f2fdb29221 | ||
|
|
4a18698423 | ||
|
|
95ccee04f9 | ||
|
|
b60628247b | ||
|
|
a6d7699ab4 | ||
|
|
4b21bcc438 | ||
|
|
33dce2f0f3 | ||
|
|
d43b827fe5 | ||
|
|
aec8bade41 | ||
|
|
89ae89a449 | ||
|
|
945f90e386 | ||
|
|
2ba6a16613 | ||
|
|
6089039366 | ||
|
|
e1e5307084 | ||
|
|
2ff7fce9dd | ||
|
|
fc4d3e0c1a | ||
|
|
f66a94712e | ||
|
|
24664c7686 | ||
|
|
1d668bab6e | ||
|
|
3d4b84909e | ||
|
|
8341e742eb | ||
|
|
a71ba83de0 | ||
|
|
9668131c18 | ||
|
|
4a744dcad9 | ||
|
|
2a420225e2 | ||
|
|
ff67785618 | ||
|
|
dfe1ba55d5 | ||
|
|
90b1b6b7af | ||
|
|
d5fbe42ed7 | ||
|
|
f424ad6864 | ||
|
|
16b724bd40 | ||
|
|
f70ed54cad | ||
|
|
dd64564160 | ||
|
|
cc2cdbcc9f | ||
|
|
81fe850627 | ||
|
|
487d9f76f6 | ||
|
|
92dd4c5dfe | ||
|
|
8ee7c6daf8 | ||
|
|
882b158d18 | ||
|
|
85222443c0 | ||
|
|
1696ecf49d | ||
|
|
73b92ff533 | ||
|
|
e977bb15a5 | ||
|
|
7c46d6cdbf | ||
|
|
4d11c1f7db | ||
|
|
0a563deb11 | ||
|
|
ba80ec4403 | ||
|
|
3b7cc4595b | ||
|
|
9fe47657a6 | ||
|
|
5a4a6caa07 | ||
|
|
9dadbe1599 | ||
|
|
40d016f513 | ||
|
|
655edaa7c8 | ||
|
|
7fa5cff919 | ||
|
|
d19834ed5d | ||
|
|
b6be430aa3 | ||
|
|
63792c242f | ||
|
|
10f7029722 | ||
|
|
ba176542dc | ||
|
|
aae3b6e2ff | ||
|
|
b370c7d46e | ||
|
|
efa5a8ea5d | ||
|
|
fd532626ac | ||
|
|
617337c614 | ||
|
|
9a3d24ac81 | ||
|
|
454dd4c56b | ||
|
|
88ad72d4dc | ||
|
|
8d1517d550 | ||
|
|
d3a8ef5966 | ||
|
|
e5baa5012d | ||
|
|
a1f63b00dd | ||
|
|
47ded84231 | ||
|
|
224a48a5f3 | ||
|
|
0e7c52df71 | ||
|
|
ff701cc770 | ||
|
|
6a7bb725cc | ||
|
|
0a13186c13 | ||
|
|
a0ffeb9950 | ||
|
|
6c70ec6d53 | ||
|
|
4b5f82699a | ||
|
|
f78c3b928b | ||
|
|
332659c1d5 | ||
|
|
3beb2706dc | ||
|
|
a14111e1ce | ||
|
|
c4320c14f9 | ||
|
|
4c5442748f | ||
|
|
a81750acba | ||
|
|
0439e2462b | ||
|
|
3b62bd7ac9 | ||
|
|
f6add52721 | ||
|
|
c85e535288 | ||
|
|
c0c6d116b5 | ||
|
|
39153e92d1 | ||
|
|
42bcc2e510 | ||
|
|
398fbb25dc | ||
|
|
4b312d4f99 | ||
|
|
10414155a5 | ||
|
|
feda0c37e7 | ||
|
|
173c120b64 | ||
|
|
5f2a0d1a7b | ||
|
|
50f894a01d | ||
|
|
66e93e73af | ||
|
|
58ad9d3f05 | ||
|
|
08c96039e9 | ||
|
|
ca0dd97626 | ||
|
|
7810ee3974 | ||
|
|
2cfea7ef08 | ||
|
|
0cee6cea25 | ||
|
|
5d13ba2f26 | ||
|
|
a583433530 | ||
|
|
733ac3b2b4 | ||
|
|
ef6300255a | ||
|
|
aad37dcf0b | ||
|
|
cce10d39ea | ||
|
|
c521dd447e | ||
|
|
4d0cd4ba56 | ||
|
|
7291274cb1 | ||
|
|
44f2e383c3 | ||
|
|
1f8219b418 | ||
|
|
cb2f170ded | ||
|
|
1241a23ba8 | ||
|
|
7d7744b7dc | ||
|
|
9c7d51127a | ||
|
|
b5a987f6b4 | ||
|
|
7bbc68bfd5 | ||
|
|
99d11e11ce | ||
|
|
7b96ac4638 | ||
|
|
0a36330852 | ||
|
|
9105f92c82 | ||
|
|
57541ab486 | ||
|
|
a0fcbd220e | ||
|
|
d54b404eb6 | ||
|
|
620c5bb5eb | ||
|
|
0fde1d699d | ||
|
|
61f77cf311 | ||
|
|
13476128d5 | ||
|
|
5cdb4ecd2a | ||
|
|
64c3b9da3b | ||
|
|
55dad7a58c | ||
|
|
38dabc35e5 | ||
|
|
5b4f95a50e | ||
|
|
f3046d3c91 | ||
|
|
5faae9af67 | ||
|
|
c0b50642e0 | ||
|
|
12ca296879 | ||
|
|
420c6cea2b | ||
|
|
ccc4bb48fa |
6
.env
6
.env
@@ -1,6 +0,0 @@
|
||||
MYSQL_ROOT_PASSWORD='123456'
|
||||
MYSQL_HOST='mysql'
|
||||
MYSQL_PORT=3306
|
||||
MYSQL_USER='cmdb'
|
||||
MYSQL_DATABASE='cmdb'
|
||||
MYSQL_PASSWORD='123456'
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -1 +0,0 @@
|
||||
*.vue linguist-language=python
|
||||
60
.github/ISSUE_TEMPLATE/1bug.yaml
vendored
60
.github/ISSUE_TEMPLATE/1bug.yaml
vendored
@@ -1,60 +0,0 @@
|
||||
name: Bug Report
|
||||
description: File a bug report
|
||||
title: "[Bug]: "
|
||||
labels: ["☢️ bug"]
|
||||
assignees:
|
||||
- Selina316
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report!
|
||||
- type: input
|
||||
id: contact
|
||||
attributes:
|
||||
label: Contact Details
|
||||
description: How can we get in touch with you if we need more info?
|
||||
placeholder: ex. email@example.com
|
||||
validations:
|
||||
required: false
|
||||
- type: dropdown
|
||||
id: aspects
|
||||
attributes:
|
||||
label: This bug is related to UI or API?
|
||||
multiple: true
|
||||
options:
|
||||
- UI
|
||||
- API
|
||||
- type: textarea
|
||||
id: happened
|
||||
attributes:
|
||||
label: What happened?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you see!
|
||||
value: "A bug happened!"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of our software are you running?
|
||||
value: "newest"
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: browsers
|
||||
attributes:
|
||||
label: What browsers are you seeing the problem on?
|
||||
multiple: true
|
||||
options:
|
||||
- Firefox
|
||||
- Chrome
|
||||
- Safari
|
||||
- Microsoft Edge
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
44
.github/ISSUE_TEMPLATE/2feature.yaml
vendored
44
.github/ISSUE_TEMPLATE/2feature.yaml
vendored
@@ -1,44 +0,0 @@
|
||||
name: Feature wanted
|
||||
description: A new feature would be good
|
||||
title: "[Feature]: "
|
||||
labels: ["✏️ feature"]
|
||||
assignees:
|
||||
- pycook
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your feature suggestion; we will evaluate it carefully!
|
||||
- type: input
|
||||
id: contact
|
||||
attributes:
|
||||
label: Contact Details
|
||||
description: How can we get in touch with you if we need more info?
|
||||
placeholder: ex. email@example.com
|
||||
validations:
|
||||
required: false
|
||||
- type: dropdown
|
||||
id: aspects
|
||||
attributes:
|
||||
label: feature is related to UI or API aspects?
|
||||
multiple: true
|
||||
options:
|
||||
- UI
|
||||
- API
|
||||
- type: textarea
|
||||
id: feature
|
||||
attributes:
|
||||
label: What is your advice?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you want!
|
||||
value: "everyone wants this feature!"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of our software are you running?
|
||||
value: "newest"
|
||||
validations:
|
||||
required: true
|
||||
36
.github/ISSUE_TEMPLATE/3consultation.yaml
vendored
36
.github/ISSUE_TEMPLATE/3consultation.yaml
vendored
@@ -1,36 +0,0 @@
|
||||
name: Help wanted
|
||||
description: I have a question
|
||||
title: "[help wanted]: "
|
||||
labels: ["help wanted"]
|
||||
assignees:
|
||||
- ivonGwy
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Please tell us what's you need!
|
||||
- type: input
|
||||
id: contact
|
||||
attributes:
|
||||
label: Contact Details
|
||||
description: How can we get in touch with you if we need more info?
|
||||
placeholder: ex. email@example.com
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: question
|
||||
attributes:
|
||||
label: What is your question?
|
||||
description: Also tell us, how can we help?
|
||||
placeholder: Tell us what you need!
|
||||
value: "i have a question!"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of our software are you running?
|
||||
value: "newest"
|
||||
validations:
|
||||
required: true
|
||||
60
.github/ISSUE_TEMPLATE/bug.yaml
vendored
60
.github/ISSUE_TEMPLATE/bug.yaml
vendored
@@ -1,60 +0,0 @@
|
||||
name: Bug Report
|
||||
description: File a bug report
|
||||
title: "[Bug]: "
|
||||
labels: ["bug"]
|
||||
assignees:
|
||||
- pycook
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report!
|
||||
- type: input
|
||||
id: contact
|
||||
attributes:
|
||||
label: Contact Details
|
||||
description: How can we get in touch with you if we need more info?
|
||||
placeholder: ex. email@example.com
|
||||
validations:
|
||||
required: false
|
||||
- type: dropdown
|
||||
id: type
|
||||
attributes:
|
||||
label: bug is related to UI or API aspects?
|
||||
multiple: true
|
||||
options:
|
||||
- UI
|
||||
- API
|
||||
- type: textarea
|
||||
id: what-happened
|
||||
attributes:
|
||||
label: What happened?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you see!
|
||||
value: "A bug happened!"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of our software are you running?
|
||||
default: 2.3.5
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: browsers
|
||||
attributes:
|
||||
label: What browsers are you seeing the problem on?
|
||||
multiple: true
|
||||
options:
|
||||
- Firefox
|
||||
- Chrome
|
||||
- Safari
|
||||
- Microsoft Edge
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
6
.github/ISSUE_TEMPLATE/config.yml
vendored
6
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,6 +0,0 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: veops official website
|
||||
url: https://veops.cn/#hero
|
||||
about: you can contact us here.
|
||||
|
||||
44
.github/ISSUE_TEMPLATE/feature.yaml
vendored
44
.github/ISSUE_TEMPLATE/feature.yaml
vendored
@@ -1,44 +0,0 @@
|
||||
name: Feature wanted
|
||||
description: A new feature would be good
|
||||
title: "[Feature]: "
|
||||
labels: ["feature"]
|
||||
assignees:
|
||||
- pycook
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your feature suggestion; we will evaluate it carefully!
|
||||
- type: input
|
||||
id: contact
|
||||
attributes:
|
||||
label: Contact Details
|
||||
description: How can we get in touch with you if we need more info?
|
||||
placeholder: ex. email@example.com
|
||||
validations:
|
||||
required: false
|
||||
- type: dropdown
|
||||
id: type
|
||||
attributes:
|
||||
label: feature is related to UI or API aspects?
|
||||
multiple: true
|
||||
options:
|
||||
- UI
|
||||
- API
|
||||
- type: textarea
|
||||
id: describe the feature
|
||||
attributes:
|
||||
label: What is your advice?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you want!
|
||||
value: "everyone wants this feature!"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: What version of our software are you running?
|
||||
default: 2.3.5
|
||||
validations:
|
||||
required: true
|
||||
79
.github/workflows/docker-build-and-release.yaml
vendored
79
.github/workflows/docker-build-and-release.yaml
vendored
@@ -1,79 +0,0 @@
|
||||
name: docker-images-build-and-release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags: ["v*"]
|
||||
# pull_request:
|
||||
# branches:
|
||||
# - master
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY_SERVER_ADDRESS: ghcr.io/veops
|
||||
TAG: ${{ github.sha }}
|
||||
|
||||
jobs:
|
||||
setup-environment:
|
||||
timeout-minutes: 30
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
release-api-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [setup-environment]
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
timeout-minutes: 90
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Login to GitHub Package Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Build and push CMDB-API Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
file: docker/Dockerfile-API
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-api:${{ env.TAG }}
|
||||
# release-ui-images:
|
||||
# runs-on: ubuntu-latest
|
||||
# needs: [setup-environment]
|
||||
# permissions:
|
||||
# contents: read
|
||||
# packages: write
|
||||
# timeout-minutes: 90
|
||||
# steps:
|
||||
# - name: Checkout Repo
|
||||
# uses: actions/checkout@v4
|
||||
# - name: Login to GitHub Package Registry
|
||||
# uses: docker/login-action@v2
|
||||
# with:
|
||||
# registry: ghcr.io
|
||||
# username: ${{ github.repository_owner }}
|
||||
# password: ${{ secrets.GITHUB_TOKEN }}
|
||||
# - name: Set up QEMU
|
||||
# uses: docker/setup-qemu-action@v3
|
||||
# - name: Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v3
|
||||
# - name: Build and push CMDB-UI Docker image
|
||||
# uses: docker/build-push-action@v6
|
||||
# with:
|
||||
# file: docker/Dockerfile-UI
|
||||
# context: .
|
||||
# platforms: linux/amd64,linux/arm64
|
||||
# push: true
|
||||
# tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-ui:${{ env.TAG }}
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -39,12 +39,9 @@ pip-log.txt
|
||||
nosetests.xml
|
||||
.pytest_cache
|
||||
cmdb-api/test-output
|
||||
cmdb-api/api/uploaded_files
|
||||
cmdb-api/migrations/versions
|
||||
|
||||
# Translations
|
||||
#*.mo
|
||||
messages.pot
|
||||
*.mo
|
||||
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
@@ -71,11 +68,9 @@ settings.py
|
||||
# UI
|
||||
cmdb-ui/node_modules
|
||||
cmdb-ui/dist
|
||||
cmdb-ui/yarn.lock
|
||||
|
||||
# Log files
|
||||
cmdb-ui/npm-debug.log*
|
||||
cmdb-ui/yarn-debug.log*
|
||||
cmdb-ui/yarn-error.log*
|
||||
cmdb-ui/package-lock.json
|
||||
start.sh
|
||||
|
||||
48
Dockerfile
Normal file
48
Dockerfile
Normal file
@@ -0,0 +1,48 @@
|
||||
# ================================= UI ================================
|
||||
FROM node:16.0.0-alpine AS builder
|
||||
|
||||
LABEL description="cmdb-ui"
|
||||
|
||||
COPY cmdb-ui /data/apps/cmdb-ui
|
||||
|
||||
WORKDIR /data/apps/cmdb-ui
|
||||
|
||||
RUN sed -i "s#http://127.0.0.1:5000##g" .env && yarn install && yarn build
|
||||
|
||||
|
||||
FROM nginx:alpine AS cmdb-ui
|
||||
|
||||
RUN mkdir /etc/nginx/html && rm -f /etc/nginx/conf.d/default.conf
|
||||
|
||||
COPY --from=builder /data/apps/cmdb-ui/dist /etc/nginx/html/
|
||||
|
||||
|
||||
# ================================= API ================================
|
||||
FROM python:3.8-alpine AS cmdb-api
|
||||
|
||||
LABEL description="Python3.8,cmdb"
|
||||
|
||||
COPY cmdb-api /data/apps/cmdb
|
||||
|
||||
WORKDIR /data/apps/cmdb
|
||||
|
||||
RUN apk add --no-cache tzdata gcc musl-dev libffi-dev openldap-dev python3-dev jpeg-dev zlib-dev build-base
|
||||
|
||||
ENV TZ=Asia/Shanghai
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt \
|
||||
&& cp ./settings.example.py settings.py \
|
||||
&& sed -i "s#{user}:{password}@127.0.0.1:3306/{db}#cmdb:123456@mysql:3306/cmdb#g" settings.py \
|
||||
&& sed -i "s#redis://127.0.0.1#redis://redis#g" settings.py \
|
||||
&& sed -i 's#CACHE_REDIS_HOST = "127.0.0.1"#CACHE_REDIS_HOST = "redis"#g' settings.py
|
||||
|
||||
ADD https://github.com/ufoscout/docker-compose-wait/releases/download/2.7.3/wait /wait
|
||||
RUN chmod +x /wait
|
||||
|
||||
CMD ["bash", "-c", "flask run"]
|
||||
|
||||
|
||||
# ================================= Search ================================
|
||||
FROM docker.elastic.co/elasticsearch/elasticsearch:7.4.2 AS cmdb-search
|
||||
|
||||
RUN yes | ./bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v7.4.2/elasticsearch-analysis-ik-7.4.2.zip
|
||||
71
Makefile
71
Makefile
@@ -1,72 +1,37 @@
|
||||
include ./Makefile.variable
|
||||
.PHONY: env clean api ui worker
|
||||
|
||||
default: help
|
||||
help: ## display this help
|
||||
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo " env create a development environment using pipenv"
|
||||
@echo " deps install dependencies using pip"
|
||||
@echo " clean remove unwanted files like .pyc's"
|
||||
@echo " lint check style with flake8"
|
||||
@echo " api start api server"
|
||||
@echo " ui start ui server"
|
||||
@echo " worker start async tasks worker"
|
||||
|
||||
env: ## create a development environment using pipenv
|
||||
env:
|
||||
sudo easy_install pip && \
|
||||
pip install pipenv -i https://repo.huaweicloud.com/repository/pypi/simple && \
|
||||
pip install pipenv -i https://pypi.douban.com/simple && \
|
||||
npm install yarn && \
|
||||
make deps
|
||||
.PHONY: env
|
||||
|
||||
docker-mysql: ## deploy MySQL use docker
|
||||
@docker run --name mysql -p ${MYSQL_PORT}:3306 -e MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD} -d mysql:latest
|
||||
.PHONY: docker-mysql
|
||||
|
||||
docker-redis: ## deploy Redis use docker
|
||||
@docker run --name redis -p ${REDIS_PORT}:6379 -d redis:latest
|
||||
.PHONY: docker-redis
|
||||
|
||||
deps: ## install dependencies using pip
|
||||
cd cmdb-api && \
|
||||
deps:
|
||||
pipenv install --dev && \
|
||||
pipenv run flask db-setup && \
|
||||
pipenv run flask cmdb-init-cache && \
|
||||
cd .. && \
|
||||
cd cmdb-ui && yarn install && cd ..
|
||||
.PHONY: deps
|
||||
|
||||
api: ## start api server
|
||||
api:
|
||||
cd cmdb-api && pipenv run flask run -h 0.0.0.0
|
||||
.PHONY: api
|
||||
|
||||
worker: ## start async tasks worker
|
||||
cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --autoscale=5,2 --logfile=one_cmdb_async.log -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --autoscale=2,1 --logfile=one_acl_async.log -D
|
||||
.PHONY: worker
|
||||
worker:
|
||||
cd cmdb-api && pipenv run celery worker -A celery_worker.celery -E -Q one_cmdb_async --concurrency=1 -D && pipenv run celery worker -A celery_worker.celery -E -Q acl_async --concurrency=1 -D
|
||||
|
||||
ui: ## start ui server
|
||||
ui:
|
||||
cd cmdb-ui && yarn run serve
|
||||
.PHONY: ui
|
||||
|
||||
clean: ## remove unwanted files like .pyc's
|
||||
clean:
|
||||
pipenv run flask clean
|
||||
.PHONY: clean
|
||||
|
||||
lint: ## check style with flake8
|
||||
lint:
|
||||
flake8 --exclude=env .
|
||||
.PHONY: lint
|
||||
|
||||
api-docker-build:
|
||||
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
|
||||
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
|
||||
docker buildx build \
|
||||
--builder multi-platform-builder \
|
||||
--platform=$(BUILD_ARCH) \
|
||||
--tag $(REGISTRY)/cmdb-api:$(CMDB_DOCKER_VERSION) \
|
||||
--tag $(REGISTRY)/cmdb-api:latest \
|
||||
-f docker/Dockerfile-API \
|
||||
.
|
||||
|
||||
ui-docker-build:
|
||||
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
|
||||
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
|
||||
docker buildx build \
|
||||
--builder multi-platform-builder \
|
||||
--platform=$(BUILD_ARCH) \
|
||||
--tag $(REGISTRY)/cmdb-ui:$(CMDB_DOCKER_VERSION) \
|
||||
--tag $(REGISTRY)/cmdb-ui:latest \
|
||||
-f docker/Dockerfile-UI \
|
||||
.
|
||||
@@ -1,21 +0,0 @@
|
||||
SHELL := /bin/bash -o pipefail
|
||||
|
||||
MYSQL_ROOT_PASSWORD ?= root
|
||||
MYSQL_PORT ?= 3306
|
||||
REDIS_PORT ?= 6379
|
||||
|
||||
LATEST_TAG_DIFF:=$(shell git describe --tags --abbrev=8)
|
||||
LATEST_COMMIT:=$(VERSION)-dev-$(shell git rev-parse --short=8 HEAD)
|
||||
BUILD_ARCH ?= linux/amd64,linux/arm64
|
||||
|
||||
# Set your version by env or using latest tags from git
|
||||
CMDB_VERSION?=$(LATEST_TAG_DIFF)
|
||||
ifeq ($(CMDB_VERSION),)
|
||||
#fall back to last commit
|
||||
CMDB_VERSION=$(LATEST_COMMIT)
|
||||
endif
|
||||
COMMIT_VERSION:=$(LATEST_COMMIT)
|
||||
CMDB_DOCKER_VERSION:=${CMDB_VERSION}
|
||||
CMDB_CHART_VERSION:=$(shell echo ${CMDB_VERSION} | sed 's/^v//g' )
|
||||
|
||||
REGISTRY ?= local
|
||||
93
README.md
93
README.md
@@ -1,21 +1,13 @@
|
||||

|
||||
|
||||
<p align="center">
|
||||
<a href="https://veops.cn"><img src="docs/images/logo.png" alt="维易CMDB" width="300"/></a>
|
||||
</p>
|
||||
<h3 align="center">简单、轻量、通用的运维配置管理数据库</h3>
|
||||
<p align="center">
|
||||
<a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
|
||||
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen" alt="UI"></a>
|
||||
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-brightgreen" alt="API"></a>
|
||||
</p>
|
||||
[](https://github.com/veops/cmdb/blob/master/LICENSE)
|
||||
[](https://github.com/sendya/ant-design-pro-vue)
|
||||
[](https://github.com/pallets/flask)
|
||||
|
||||
[English](README_en.md) / [中文](README.md)
|
||||
|
||||
------------------------------
|
||||
|
||||
[English](docs/README_en.md) / [中文](README.md)
|
||||
- 产品文档:https://veops.cn/docs/
|
||||
- 在线体验:<a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
|
||||
- username: demo 或者 admin
|
||||
- 在线体验: <a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
|
||||
- username: demo
|
||||
- password: 123456
|
||||
|
||||
> **重要提示**: `master` 分支在开发过程中可能处于 _不稳定的状态_ 。
|
||||
@@ -23,43 +15,45 @@
|
||||
|
||||
## 系统介绍
|
||||
|
||||
### 系统概览
|
||||
### 整体架构
|
||||
|
||||
<img src=docs/images/dashboard.png />
|
||||
<img src=docs/images/view.jpg />
|
||||
|
||||
[查看更多展示](docs/screenshot.md)
|
||||
### 相关文档
|
||||
|
||||
### 相关文章
|
||||
|
||||
- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">概要设计</a>
|
||||
- <a href="https://zhuanlan.zhihu.com/p/98453732" target="_blank">设计文档</a>
|
||||
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">API 文档</a>
|
||||
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">自动发现</a>
|
||||
- 更多文章可以在公众号 **维易科技OneOps** 里查看
|
||||
- <a href="https://mp.weixin.qq.com/s/EflmmJ-qdUkddTx2hRt3pA" target="_blank">树形视图实践</a>
|
||||
|
||||
### 特点
|
||||
|
||||
- 灵活性
|
||||
1. 配置灵活,不设定任何运维场景,有内置模板
|
||||
2. 自动发现、入库 IT 资产
|
||||
1. 规范并统一纳管复杂数据资产
|
||||
2. 自动发现、入库 IT 资产
|
||||
- 安全性
|
||||
1. 细粒度权限控制
|
||||
1. 细粒度访问控制
|
||||
2. 完备操作日志
|
||||
- 多应用
|
||||
1. 丰富视图展示维度
|
||||
2. API简单强大
|
||||
3. 支持定义属性触发器、计算属性
|
||||
2. 提供 Restful API
|
||||
3. 自定义字段触发器
|
||||
|
||||
### 主要功能
|
||||
|
||||
- 模型属性支持索引、多值、默认排序、字体颜色,支持计算属性
|
||||
- 支持自动发现、定时巡检、文件导入
|
||||
- 支持资源、层级、关系视图展示
|
||||
- 支持资源、树形、关系视图展示
|
||||
- 支持模型间关系配置和展示
|
||||
- 细粒度访问控制,完备的操作日志
|
||||
- 支持跨模型搜索
|
||||
|
||||
### 系统概览
|
||||
|
||||
- 服务树
|
||||
|
||||

|
||||
|
||||
[查看更多展示](docs/screenshot.md)
|
||||
|
||||
|
||||
### 更多功能
|
||||
@@ -68,52 +62,27 @@
|
||||
|
||||
## 接入公司
|
||||
|
||||
> 欢迎使用开源CMDB的公司,在 [#112](https://github.com/veops/cmdb/issues/112) 登记
|
||||
> 欢迎使用CMDB的公司,在 [#112](https://github.com/veops/cmdb/issues/112) 登记
|
||||
|
||||
## 安装
|
||||
|
||||
### Docker 一键快速构建
|
||||
- 进入主目录(先安装 docker 环境)
|
||||
|
||||
[//]: # (> 方法一)
|
||||
- 第一步: 先安装 Docker 环境, 以及Docker Compose (v2)
|
||||
- 第二步: 拷贝项目
|
||||
```shell
|
||||
git clone https://github.com/veops/cmdb.git
|
||||
```
|
||||
- 第三步:进入主目录,执行:
|
||||
```
|
||||
docker compose up -d
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
[//]: # (> 方法二, 该方法适用于linux系统)
|
||||
|
||||
[//]: # (- 第一步: 先安装 Docker 环境, 以及Docker Compose (v2))
|
||||
|
||||
[//]: # (- 第二步: 直接使用项目根目录下的install.sh 文件进行 `安装`、`启动`、`暂停`、`查状态`、`删除`、`卸载`)
|
||||
|
||||
[//]: # (```shell)
|
||||
|
||||
[//]: # (curl -so install.sh https://raw.githubusercontent.com/veops/cmdb/deploy_on_kylin_docker/install.sh)
|
||||
|
||||
[//]: # (sh install.sh install)
|
||||
|
||||
[//]: # (```)
|
||||
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
|
||||
- username: demo 或者 admin
|
||||
- password: 123456
|
||||
|
||||
### [本地开发环境搭建](docs/local.md)
|
||||
|
||||
### [Makefile 安装](docs/makefile.md)
|
||||
|
||||
## 验证
|
||||
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
|
||||
- username: demo 或者 admin
|
||||
- password: 123456
|
||||
|
||||
|
||||
---
|
||||
|
||||
_**欢迎关注公众号(维易科技OneOps),关注后可加入微信群,进行产品和技术交流。**_
|
||||
_**欢迎关注我们的公众号,点击联系我们,加入微信、qq运维群(336164978),获得更多产品、行业相关资讯**_
|
||||
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/images/wechat.png" alt="公众号: 维易科技OneOps" />
|
||||
</p>
|
||||

|
||||
|
||||
@@ -1,21 +1,13 @@
|
||||

|
||||
|
||||
<p align="center">
|
||||
<a href="https://veops.cn"><img src="images/logo.png" alt="维易CMDB" width="300"/></a>
|
||||
</p>
|
||||
<h3 align="center">Simple, lightweight, and versatile operational CMDB</h3>
|
||||
<p align="center">
|
||||
<a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
|
||||
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen" alt="UI"></a>
|
||||
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-brightgreen" alt="API"></a>
|
||||
</p>
|
||||
[](https://github.com/veops/cmdb/blob/master/LICENSE)
|
||||
[](https://github.com/sendya/ant-design-pro-vue)
|
||||
[](https://github.com/pallets/flask)
|
||||
|
||||
|
||||
------------------------------
|
||||
|
||||
[English](README_en.md) / [中文](../README.md)
|
||||
[English](README_en.md) / [中文](README.md)
|
||||
|
||||
## DEMO ONLINE
|
||||
- Product document:https://veops.cn/docs/
|
||||
|
||||
- Preview online: <a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
|
||||
- username: demo
|
||||
- password: 123456
|
||||
@@ -24,11 +16,9 @@
|
||||
|
||||
## Overview
|
||||
|
||||
### System Overview
|
||||
### Technical Architecture
|
||||
|
||||
<img src=images/dashboard.png />
|
||||
|
||||
[View more screenshots](screenshot.md)
|
||||
<img src=docs/images/view.jpg />
|
||||
|
||||
### Document
|
||||
|
||||
@@ -58,7 +48,12 @@
|
||||
- Fine-grained access control and comprehensive operation logs.
|
||||
- Support cross-model search.
|
||||
|
||||
### System Overview
|
||||
|
||||
- Service Tree
|
||||

|
||||
|
||||
[View more screenshots](docs/screenshot.md)
|
||||
|
||||
### More Features
|
||||
|
||||
@@ -68,35 +63,20 @@
|
||||
|
||||
### One-Click Docker Quick Build
|
||||
|
||||
> Method 1
|
||||
- step 1: **Prepare: install Docker and Docker Compose (v2)**
|
||||
- step 2: copy the repository
|
||||
```shell
|
||||
git clone https://github.com/veops/cmdb.git
|
||||
```
|
||||
- step 3: In directory cmdb:
|
||||
```
|
||||
docker compose up -d
|
||||
```
|
||||
> Method 2 Usefull for linux os.
|
||||
- step 1: **Prepare: install Docker and Docker Compose (v2)**
|
||||
- step 2: directly use the install.sh file in the project's root directory to `install`, `start`, `pause`, `status`, `delete`, and `uninstall` the application.
|
||||
```shell
|
||||
curl -so install.sh https://raw.githubusercontent.com/veops/cmdb/master/install.sh
|
||||
sh install.sh install
|
||||
```
|
||||
|
||||
|
||||
### [Local Setup](local_en.md)
|
||||
|
||||
### [Installation with Makefile](makefile_en.md)
|
||||
|
||||
## Validation
|
||||
|
||||
- Prepare: install docker and docker-compose
|
||||
- In directory cmdb
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
- View: [http://127.0.0.1:8000](http://127.0.0.1:8000)
|
||||
- username: demo or admin
|
||||
- password: 123456
|
||||
|
||||
|
||||
### [Local Setup](docs/local_en.md)
|
||||
|
||||
### [Installation with Makefile](docs/makefile_en.md)
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork it
|
||||
@@ -109,4 +89,4 @@ sh install.sh install
|
||||
|
||||
_**Welcome to pay attention to our public account, click to contact us, join WeChat, QQ operation and maintenance group, and get more product and industry related information**_
|
||||
|
||||

|
||||

|
||||
16
cmdb-api/Makefile
Normal file
16
cmdb-api/Makefile
Normal file
@@ -0,0 +1,16 @@
|
||||
default: help
|
||||
|
||||
test: ## test in local environment
|
||||
pytest -s --html=test-output/test/index.html --cov-report html:test-output/coverage --cov=api tests
|
||||
|
||||
clean_test: ## clean test output
|
||||
rm -f .coverage
|
||||
rm -rf .pytest_cache
|
||||
rm -rf test-output
|
||||
|
||||
|
||||
docker_test: ## test all case in docker container
|
||||
@echo "TODO"
|
||||
|
||||
help:
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' ./Makefile | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
||||
@@ -5,70 +5,60 @@ name = "pypi"
|
||||
|
||||
[packages]
|
||||
# Flask
|
||||
Flask = "==2.2.5"
|
||||
Werkzeug = "==2.2.3"
|
||||
Flask = "==1.0.3"
|
||||
Werkzeug = "==0.15.5"
|
||||
click = ">=5.0"
|
||||
# Api
|
||||
Flask-RESTful = "==0.3.10"
|
||||
Flask-RESTful = "==0.3.7"
|
||||
# Database
|
||||
Flask-SQLAlchemy = "==2.5.0"
|
||||
SQLAlchemy = "==1.4.49"
|
||||
PyMySQL = "==1.1.0"
|
||||
redis = "==4.6.0"
|
||||
python-redis-lock = "==4.0.0"
|
||||
Flask-SQLAlchemy = "==2.4.0"
|
||||
SQLAlchemy = "==1.3.5"
|
||||
PyMySQL = "==0.9.3"
|
||||
redis = "==3.2.1"
|
||||
# Migrations
|
||||
Flask-Migrate = "==2.5.2"
|
||||
# Deployment
|
||||
gunicorn = "==21.0.1"
|
||||
gunicorn = "==19.5.0"
|
||||
supervisor = "==4.0.3"
|
||||
# Auth
|
||||
Flask-Login = ">=0.6.2"
|
||||
Flask-Bcrypt = "==1.0.1"
|
||||
Flask-Login = "==0.4.1"
|
||||
Flask-Bcrypt = "==0.7.1"
|
||||
Flask-Cors = ">=3.0.8"
|
||||
ldap3 = "==2.9.1"
|
||||
python-ldap = "==3.2.0"
|
||||
pycryptodome = "==3.12.0"
|
||||
cryptography = ">=41.0.2"
|
||||
# i18n
|
||||
flask-babel = "==4.0.0"
|
||||
# Caching
|
||||
Flask-Caching = ">=1.0.0"
|
||||
# Environment variable parsing
|
||||
environs = "==4.2.0"
|
||||
marshmallow = "==2.20.2"
|
||||
# async tasks
|
||||
celery = "==5.3.1"
|
||||
celery = "==4.3.0"
|
||||
celery_once = "==3.0.1"
|
||||
more-itertools = "==5.0.0"
|
||||
kombu = ">=5.3.1"
|
||||
kombu = "==4.4.0"
|
||||
# common setting
|
||||
Flask-APScheduler = "==1.12.4"
|
||||
timeout-decorator = "==0.5.0"
|
||||
numpy = "==1.18.5"
|
||||
pandas = "==1.3.2"
|
||||
WTForms = "==3.0.0"
|
||||
email-validator = "==1.3.1"
|
||||
treelib = "==1.6.1"
|
||||
flasgger = "==0.9.5"
|
||||
Pillow = ">=10.0.1"
|
||||
Pillow = "==8.3.2"
|
||||
# other
|
||||
six = "==1.16.0"
|
||||
six = "==1.12.0"
|
||||
bs4 = ">=0.0.1"
|
||||
toposort = ">=1.5"
|
||||
requests = ">=2.22.0"
|
||||
requests_oauthlib = "==1.3.1"
|
||||
markdownify = "==0.11.6"
|
||||
PyJWT = "==2.4.0"
|
||||
elasticsearch = "==7.17.9"
|
||||
future = "==0.18.3"
|
||||
itsdangerous = "==2.1.2"
|
||||
Jinja2 = "==3.1.2"
|
||||
future = "==0.18.2"
|
||||
itsdangerous = "==2.0.1"
|
||||
Jinja2 = "==3.0.1"
|
||||
jinja2schema = "==0.1.4"
|
||||
msgpack-python = "==0.5.6"
|
||||
alembic = "==1.7.7"
|
||||
hvac = "==2.0.0"
|
||||
colorama = ">=0.4.6"
|
||||
pycryptodomex = ">=3.19.0"
|
||||
lz4 = ">=4.3.2"
|
||||
python-magic = "==0.4.27"
|
||||
jsonpath = "==0.82.2"
|
||||
networkx = ">=3.1"
|
||||
|
||||
[dev-packages]
|
||||
# Testing
|
||||
@@ -85,3 +75,4 @@ flake8-isort = "==2.7.0"
|
||||
isort = "==4.3.21"
|
||||
pep8-naming = "==0.8.2"
|
||||
pydocstyle = "==3.0.0"
|
||||
|
||||
|
||||
@@ -7,28 +7,31 @@ import os
|
||||
import sys
|
||||
from inspect import getmembers
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from pathlib import Path
|
||||
|
||||
from flask import Flask
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask import request
|
||||
from flask import make_response, jsonify
|
||||
from flask.blueprints import Blueprint
|
||||
from flask.cli import click
|
||||
from flask.json.provider import DefaultJSONProvider
|
||||
from flask_babel.speaklater import LazyString
|
||||
from flask.json import JSONEncoder
|
||||
|
||||
import api.views.entry
|
||||
from api.extensions import (bcrypt, babel, cache, celery, cors, db, es, login_manager, migrate, rd)
|
||||
from api.extensions import inner_secrets
|
||||
from api.lib.perm.authentication.cas import CAS
|
||||
from api.lib.perm.authentication.oauth2 import OAuth2
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
from api.extensions import (
|
||||
bcrypt,
|
||||
cors,
|
||||
cache,
|
||||
db,
|
||||
login_manager,
|
||||
migrate,
|
||||
celery,
|
||||
rd,
|
||||
es,
|
||||
)
|
||||
from api.flask_cas import CAS
|
||||
from api.models.acl import User
|
||||
|
||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||
PROJECT_ROOT = os.path.join(HERE, os.pardir)
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
API_PACKAGE = "api"
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
@@ -72,9 +75,9 @@ class ReverseProxy(object):
|
||||
return self.app(environ, start_response)
|
||||
|
||||
|
||||
class MyJSONEncoder(DefaultJSONProvider):
|
||||
class MyJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
if isinstance(o, (decimal.Decimal, datetime.date, datetime.time, LazyString)):
|
||||
if isinstance(o, (decimal.Decimal, datetime.date, datetime.time)):
|
||||
return str(o)
|
||||
|
||||
if isinstance(o, datetime.datetime):
|
||||
@@ -83,6 +86,15 @@ class MyJSONEncoder(DefaultJSONProvider):
|
||||
return o
|
||||
|
||||
|
||||
def create_acl_app(config_object="settings"):
|
||||
app = Flask(__name__.split(".")[0])
|
||||
app.config.from_object(config_object)
|
||||
|
||||
register_extensions(app)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def create_app(config_object="settings"):
|
||||
"""Create application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
|
||||
|
||||
@@ -91,7 +103,7 @@ def create_app(config_object="settings"):
|
||||
app = Flask(__name__.split(".")[0])
|
||||
|
||||
app.config.from_object(config_object)
|
||||
app.json = MyJSONEncoder(app)
|
||||
app.json_encoder = MyJSONEncoder
|
||||
configure_logger(app)
|
||||
register_extensions(app)
|
||||
register_blueprints(app)
|
||||
@@ -99,7 +111,6 @@ def create_app(config_object="settings"):
|
||||
register_shell_context(app)
|
||||
register_commands(app)
|
||||
CAS(app)
|
||||
OAuth2(app)
|
||||
app.wsgi_app = ReverseProxy(app.wsgi_app)
|
||||
configure_upload_dir(app)
|
||||
|
||||
@@ -119,29 +130,17 @@ def configure_upload_dir(app):
|
||||
|
||||
def register_extensions(app):
|
||||
"""Register Flask extensions."""
|
||||
|
||||
def get_locale():
|
||||
accept_languages = app.config.get('ACCEPT_LANGUAGES', ['en', 'zh'])
|
||||
return request.accept_languages.best_match(accept_languages)
|
||||
|
||||
bcrypt.init_app(app)
|
||||
babel.init_app(app, locale_selector=get_locale)
|
||||
cache.init_app(app)
|
||||
db.init_app(app)
|
||||
cors.init_app(app)
|
||||
login_manager.init_app(app)
|
||||
migrate.init_app(app, db, directory=f"{BASE_DIR}/migrations")
|
||||
migrate.init_app(app, db)
|
||||
rd.init_app(app)
|
||||
if app.config.get('USE_ES'):
|
||||
es.init_app(app)
|
||||
|
||||
app.config.update(app.config.get("CELERY"))
|
||||
celery.conf.update(app.config)
|
||||
|
||||
if app.config.get('SECRETS_ENGINE') == 'inner':
|
||||
with app.app_context():
|
||||
inner_secrets.init_app(app, InnerKVManger())
|
||||
|
||||
|
||||
def register_blueprints(app):
|
||||
for item in getmembers(api.views.entry):
|
||||
@@ -159,8 +158,10 @@ def register_error_handlers(app):
|
||||
error_code = getattr(error, "code", 500)
|
||||
if not str(error_code).isdigit():
|
||||
error_code = 400
|
||||
|
||||
return make_response(jsonify(message=str(error)), error_code)
|
||||
if error_code != 500:
|
||||
return make_response(jsonify(message=str(error)), error_code)
|
||||
else:
|
||||
return make_response(jsonify(message=traceback.format_exc(-1)), error_code)
|
||||
|
||||
for errcode in app.config.get("ERROR_CODES") or [400, 401, 403, 404, 405, 500, 502]:
|
||||
app.errorhandler(errcode)(render_error)
|
||||
@@ -183,8 +184,9 @@ def register_commands(app):
|
||||
for root, _, files in os.walk(os.path.join(HERE, "commands")):
|
||||
for filename in files:
|
||||
if not filename.startswith("_") and filename.endswith("py"):
|
||||
if root not in sys.path:
|
||||
sys.path.insert(1, root)
|
||||
module_path = os.path.join(API_PACKAGE, root[root.index("commands"):])
|
||||
if module_path not in sys.path:
|
||||
sys.path.insert(1, module_path)
|
||||
command = __import__(os.path.splitext(filename)[0])
|
||||
func_list = [o[0] for o in getmembers(command) if isinstance(o[1], click.core.Command)]
|
||||
for func_name in func_list:
|
||||
@@ -202,11 +204,10 @@ def configure_logger(app):
|
||||
app.logger.addHandler(handler)
|
||||
|
||||
log_file = app.config['LOG_PATH']
|
||||
if log_file and log_file != "/dev/stdout":
|
||||
file_handler = RotatingFileHandler(log_file,
|
||||
maxBytes=2 ** 30,
|
||||
backupCount=7)
|
||||
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
file_handler.setFormatter(formatter)
|
||||
app.logger.addHandler(file_handler)
|
||||
file_handler = RotatingFileHandler(log_file,
|
||||
maxBytes=2 ** 30,
|
||||
backupCount=7)
|
||||
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
file_handler.setFormatter(formatter)
|
||||
app.logger.addHandler(file_handler)
|
||||
app.logger.setLevel(getattr(logging, app.config['LOG_LEVEL']))
|
||||
|
||||
@@ -1,15 +1,10 @@
|
||||
import click
|
||||
from flask.cli import with_appcontext
|
||||
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def init_acl():
|
||||
"""
|
||||
acl init
|
||||
"""
|
||||
from api.models.acl import Role
|
||||
from api.models.acl import App
|
||||
from api.tasks.acl import role_rebuild
|
||||
@@ -25,32 +20,50 @@ def init_acl():
|
||||
role_rebuild.apply_async(args=(role.id, app.id), queue=ACL_QUEUE)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def add_user():
|
||||
"""
|
||||
create a user
|
||||
|
||||
is_admin: default is False
|
||||
|
||||
"""
|
||||
|
||||
from api.models.acl import App
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.lib.perm.acl.role import RoleRelationCRUD
|
||||
|
||||
username = click.prompt('Enter username', confirmation_prompt=False)
|
||||
password = click.prompt('Enter password', hide_input=True, confirmation_prompt=True)
|
||||
email = click.prompt('Enter email ', confirmation_prompt=False)
|
||||
is_admin = click.prompt('Admin (Y/N) ', confirmation_prompt=False, type=bool, default=False)
|
||||
|
||||
UserCRUD.add(username=username, password=password, email=email)
|
||||
|
||||
if is_admin:
|
||||
app = AppCache.get('acl') or App.create(name='acl')
|
||||
acl_admin = RoleCache.get_by_name(app.id, 'acl_admin') or RoleCRUD.add_role('acl_admin', app.id, True)
|
||||
rid = RoleCache.get_by_name(None, username).id
|
||||
|
||||
RoleRelationCRUD.add(acl_admin, acl_admin.id, [rid], app.id)
|
||||
# @click.command()
|
||||
# @with_appcontext
|
||||
# def acl_clean():
|
||||
# from api.models.acl import Resource
|
||||
# from api.models.acl import Permission
|
||||
# from api.models.acl import RolePermission
|
||||
#
|
||||
# perms = RolePermission.get_by(to_dict=False)
|
||||
#
|
||||
# for r in perms:
|
||||
# perm = Permission.get_by_id(r.perm_id)
|
||||
# if perm and perm.app_id != r.app_id:
|
||||
# resource_id = r.resource_id
|
||||
# resource = Resource.get_by_id(resource_id)
|
||||
# perm_name = perm.name
|
||||
# existed = Permission.get_by(resource_type_id=resource.resource_type_id, name=perm_name, first=True,
|
||||
# to_dict=False)
|
||||
# if existed is not None:
|
||||
# other = RolePermission.get_by(rid=r.rid, perm_id=existed.id, resource_id=resource_id)
|
||||
# if not other:
|
||||
# r.update(perm_id=existed.id)
|
||||
# else:
|
||||
# r.soft_delete()
|
||||
# else:
|
||||
# r.soft_delete()
|
||||
#
|
||||
#
|
||||
# @click.command()
|
||||
# @with_appcontext
|
||||
# def acl_has_resource_role():
|
||||
# from api.models.acl import Role
|
||||
# from api.models.acl import App
|
||||
# from api.lib.perm.acl.cache import HasResourceRoleCache
|
||||
# from api.lib.perm.acl.role import RoleCRUD
|
||||
#
|
||||
# roles = Role.get_by(to_dict=False)
|
||||
# apps = App.get_by(to_dict=False)
|
||||
# for role in roles:
|
||||
# if role.app_id:
|
||||
# res = RoleCRUD.recursive_resources(role.id, role.app_id)
|
||||
# if res.get('resources') or res.get('groups'):
|
||||
# HasResourceRoleCache.add(role.id, role.app_id)
|
||||
# else:
|
||||
# for app in apps:
|
||||
# res = RoleCRUD.recursive_resources(role.id, app.id)
|
||||
# if res.get('resources') or res.get('groups'):
|
||||
# HasResourceRoleCache.add(role.id, app.id)
|
||||
|
||||
@@ -5,38 +5,30 @@ import copy
|
||||
import datetime
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import click
|
||||
import requests
|
||||
from flask import current_app
|
||||
from flask.cli import with_appcontext
|
||||
from flask_login import login_user
|
||||
|
||||
import api.lib.cmdb.ci
|
||||
from api.extensions import db
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.ci_type import CITypeTriggerManager
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.exception import AbortException
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import UserCache
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
from api.lib.perm.acl.resource import ResourceCRUD
|
||||
from api.lib.perm.acl.resource import ResourceTypeCRUD
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
from api.lib.secrets.inner import global_key_threshold
|
||||
from api.lib.secrets.secrets import InnerKVManger
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
from api.models.acl import App
|
||||
from api.models.acl import ResourceType
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import CIRelation
|
||||
from api.models.cmdb import CIType
|
||||
@@ -51,22 +43,13 @@ def cmdb_init_cache():
|
||||
|
||||
ci_relations = CIRelation.get_by(to_dict=False)
|
||||
relations = dict()
|
||||
relations2 = dict()
|
||||
for cr in ci_relations:
|
||||
relations.setdefault(cr.first_ci_id, {}).update({cr.second_ci_id: cr.second_ci.type_id})
|
||||
if cr.ancestor_ids:
|
||||
relations2.setdefault('{},{}'.format(cr.ancestor_ids, cr.first_ci_id), {}).update(
|
||||
{cr.second_ci_id: cr.second_ci.type_id})
|
||||
for i in relations:
|
||||
relations[i] = json.dumps(relations[i])
|
||||
for i in relations2:
|
||||
relations2[i] = json.dumps(relations2[i])
|
||||
if relations:
|
||||
rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
|
||||
if relations2:
|
||||
rd.create_or_update(relations2, REDIS_PREFIX_CI_RELATION2)
|
||||
|
||||
es = None
|
||||
if current_app.config.get("USE_ES"):
|
||||
from api.extensions import es
|
||||
from api.models.cmdb import Attribute
|
||||
@@ -118,20 +101,10 @@ def cmdb_init_acl():
|
||||
_app = AppCache.get('cmdb') or App.create(name='cmdb')
|
||||
app_id = _app.id
|
||||
|
||||
current_app.test_request_context().push()
|
||||
|
||||
# 1. add resource type
|
||||
for resource_type in ResourceTypeEnum.all():
|
||||
try:
|
||||
perms = PermEnum.all()
|
||||
if resource_type in (ResourceTypeEnum.CI_FILTER, ResourceTypeEnum.PAGE):
|
||||
perms = [PermEnum.READ]
|
||||
elif resource_type == ResourceTypeEnum.CI_TYPE_RELATION:
|
||||
perms = [PermEnum.ADD, PermEnum.DELETE, PermEnum.GRANT]
|
||||
elif resource_type in (ResourceTypeEnum.RELATION_VIEW, ResourceTypeEnum.TOPOLOGY_VIEW):
|
||||
perms = [PermEnum.READ, PermEnum.UPDATE, PermEnum.DELETE, PermEnum.GRANT]
|
||||
|
||||
ResourceTypeCRUD.add(app_id, resource_type, '', perms)
|
||||
ResourceTypeCRUD.add(app_id, resource_type, '', PermEnum.all())
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
@@ -147,10 +120,10 @@ def cmdb_init_acl():
|
||||
|
||||
# 3. add resource and grant
|
||||
ci_types = CIType.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
for ci_type in ci_types:
|
||||
try:
|
||||
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
|
||||
ResourceCRUD.add(ci_type.name, type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
@@ -160,10 +133,10 @@ def cmdb_init_acl():
|
||||
[PermEnum.READ])
|
||||
|
||||
relation_views = PreferenceRelationView.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
|
||||
type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
|
||||
for view in relation_views:
|
||||
try:
|
||||
ResourceCRUD.add(view.name, resource_type_id, app_id)
|
||||
ResourceCRUD.add(view.name, type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
@@ -173,41 +146,67 @@ def cmdb_init_acl():
|
||||
[PermEnum.READ])
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-u',
|
||||
'--user',
|
||||
help='username'
|
||||
)
|
||||
@click.option(
|
||||
'-p',
|
||||
'--password',
|
||||
help='password'
|
||||
)
|
||||
@click.option(
|
||||
'-m',
|
||||
'--mail',
|
||||
help='mail'
|
||||
)
|
||||
@with_appcontext
|
||||
def add_user(user, password, mail):
|
||||
"""
|
||||
create a user
|
||||
|
||||
is_admin: default is False
|
||||
|
||||
Example: flask add-user -u <username> -p <password> -m <mail>
|
||||
"""
|
||||
assert user is not None
|
||||
assert password is not None
|
||||
assert mail is not None
|
||||
UserCRUD.add(username=user, password=password, email=mail)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-u',
|
||||
'--user',
|
||||
help='username'
|
||||
)
|
||||
@with_appcontext
|
||||
def del_user(user):
|
||||
"""
|
||||
delete a user
|
||||
|
||||
Example: flask del-user -u <username>
|
||||
"""
|
||||
assert user is not None
|
||||
from api.models.acl import User
|
||||
|
||||
u = User.get_by(username=user, first=True, to_dict=False)
|
||||
u and UserCRUD.delete(u.uid)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_counter():
|
||||
"""
|
||||
Dashboard calculations
|
||||
"""
|
||||
from api.lib.cmdb.cache import CMDBCounterCache
|
||||
|
||||
current_app.test_request_context().push()
|
||||
if not UserCache.get('worker'):
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
UserCRUD.add(username='worker', password=uuid.uuid4().hex, email='worker@xxx.com')
|
||||
|
||||
login_user(UserCache.get('worker'))
|
||||
|
||||
i = 0
|
||||
today = datetime.date.today()
|
||||
while True:
|
||||
try:
|
||||
db.session.remove()
|
||||
|
||||
CMDBCounterCache.reset()
|
||||
|
||||
if i % 5 == 0:
|
||||
CMDBCounterCache.flush_adc_counter()
|
||||
i = 0
|
||||
|
||||
if datetime.date.today() != today:
|
||||
CMDBCounterCache.clear_ad_exec_history()
|
||||
today = datetime.date.today()
|
||||
|
||||
CMDBCounterCache.flush_sub_counter()
|
||||
|
||||
i += 1
|
||||
except:
|
||||
import traceback
|
||||
print(traceback.format_exc())
|
||||
@@ -218,344 +217,45 @@ def cmdb_counter():
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_trigger():
|
||||
"""
|
||||
Trigger execution for date attribute
|
||||
"""
|
||||
from api.lib.cmdb.ci import CITriggerManager
|
||||
|
||||
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
|
||||
trigger2cis = dict()
|
||||
trigger2completed = dict()
|
||||
|
||||
i = 0
|
||||
while True:
|
||||
try:
|
||||
db.session.remove()
|
||||
db.session.remove()
|
||||
if datetime.datetime.today().strftime("%Y-%m-%d") != current_day:
|
||||
trigger2cis = dict()
|
||||
trigger2completed = dict()
|
||||
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
|
||||
|
||||
if datetime.datetime.today().strftime("%Y-%m-%d") != current_day:
|
||||
trigger2cis = dict()
|
||||
trigger2completed = dict()
|
||||
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
|
||||
if i == 360 or i == 0:
|
||||
i = 0
|
||||
try:
|
||||
triggers = CITypeTrigger.get_by(to_dict=False)
|
||||
|
||||
if i == 3 or i == 0:
|
||||
i = 0
|
||||
triggers = CITypeTrigger.get_by(to_dict=False, __func_isnot__key_attr_id=None)
|
||||
for trigger in triggers:
|
||||
try:
|
||||
ready_cis = CITriggerManager.waiting_cis(trigger)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
continue
|
||||
|
||||
ready_cis = CITypeTriggerManager.waiting_cis(trigger)
|
||||
if trigger.id not in trigger2cis:
|
||||
trigger2cis[trigger.id] = (trigger, ready_cis)
|
||||
else:
|
||||
cur = trigger2cis[trigger.id]
|
||||
cur_ci_ids = {i.ci_id for i in cur[1]}
|
||||
trigger2cis[trigger.id] = (
|
||||
trigger, cur[1] + [i for i in ready_cis if i.ci_id not in cur_ci_ids
|
||||
and i.ci_id not in trigger2completed.get(trigger.id, {})])
|
||||
trigger2cis[trigger.id] = (trigger, cur[1] + [i for i in ready_cis if i.ci_id not in cur_ci_ids
|
||||
and i.ci_id not in trigger2completed[trigger.id]])
|
||||
|
||||
for tid in trigger2cis:
|
||||
trigger, cis = trigger2cis[tid]
|
||||
for ci in copy.deepcopy(cis):
|
||||
if CITriggerManager.trigger_notify(trigger, ci):
|
||||
trigger2completed.setdefault(trigger.id, set()).add(ci.ci_id)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
for _ci in cis:
|
||||
if _ci.ci_id == ci.ci_id:
|
||||
cis.remove(_ci)
|
||||
for tid in trigger2cis:
|
||||
trigger, cis = trigger2cis[tid]
|
||||
for ci in copy.deepcopy(cis):
|
||||
if CITypeTriggerManager.trigger_notify(trigger, ci):
|
||||
trigger2completed.setdefault(trigger.id, set()).add(ci.ci_id)
|
||||
|
||||
i += 1
|
||||
time.sleep(10)
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print(traceback.format_exc())
|
||||
current_app.logger.error("cmdb trigger exception: {}".format(e))
|
||||
time.sleep(60)
|
||||
for _ci in cis:
|
||||
if _ci.ci_id == ci.ci_id:
|
||||
cis.remove(_ci)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_index_table_upgrade():
|
||||
"""
|
||||
Migrate data from tables c_value_integers, c_value_floats, and c_value_datetime
|
||||
"""
|
||||
for attr in Attribute.get_by(to_dict=False):
|
||||
if attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON} and not attr.is_index:
|
||||
attr.update(is_index=True)
|
||||
AttributeCache.clean(attr)
|
||||
|
||||
from api.models.cmdb import CIValueInteger, CIIndexValueInteger
|
||||
from api.models.cmdb import CIValueFloat, CIIndexValueFloat
|
||||
from api.models.cmdb import CIValueDateTime, CIIndexValueDateTime
|
||||
|
||||
for i in CIValueInteger.get_by(to_dict=False):
|
||||
CIIndexValueInteger.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
|
||||
i.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
for i in CIValueFloat.get_by(to_dict=False):
|
||||
CIIndexValueFloat.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
|
||||
i.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
for i in CIValueDateTime.get_by(to_dict=False):
|
||||
CIIndexValueDateTime.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
|
||||
i.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def valid_address(address):
|
||||
if not address:
|
||||
return False
|
||||
|
||||
if not address.startswith(("http://127.0.0.1", "https://127.0.0.1")):
|
||||
response = {
|
||||
"message": "Address should start with http://127.0.0.1 or https://127.0.0.1",
|
||||
"status": "failed"
|
||||
}
|
||||
KeyManage.print_response(response)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_init(address):
|
||||
"""
|
||||
init inner secrets for password feature
|
||||
"""
|
||||
res, ok = KeyManage(backend=InnerKVManger()).init()
|
||||
if not ok:
|
||||
if res.get("status") == "failed":
|
||||
KeyManage.print_response(res)
|
||||
return
|
||||
|
||||
token = res.get("details", {}).get("root_token", "")
|
||||
if valid_address(address):
|
||||
token = current_app.config.get("INNER_TRIGGER_TOKEN", "") if not token else token
|
||||
if not token:
|
||||
token = click.prompt(f'Enter root token', hide_input=True, confirmation_prompt=False)
|
||||
assert token is not None
|
||||
resp = requests.post("{}/api/v0.1/secrets/auto_seal".format(address.strip("/")),
|
||||
headers={"Inner-Token": token})
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.text or resp.status_code, "status": "failed"})
|
||||
else:
|
||||
KeyManage.print_response(res)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
required=True,
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_unseal(address):
|
||||
"""
|
||||
unseal the secrets feature
|
||||
"""
|
||||
# if not valid_address(address):
|
||||
# return
|
||||
address = "{}/api/v0.1/secrets/unseal".format(address.strip("/"))
|
||||
for i in range(global_key_threshold):
|
||||
token = click.prompt(f'Enter unseal token {i + 1}', hide_input=True, confirmation_prompt=False)
|
||||
assert token is not None
|
||||
resp = requests.post(address, headers={"Unseal-Token": token}, timeout=5)
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
if resp.json().get("status") in ["success", "skip"]:
|
||||
return
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
|
||||
return
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-a',
|
||||
'--address',
|
||||
help='inner cmdb api, http://127.0.0.1:8000',
|
||||
required=True,
|
||||
)
|
||||
@click.option(
|
||||
'-k',
|
||||
'--token',
|
||||
help='root token',
|
||||
prompt=True,
|
||||
hide_input=True,
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_inner_secrets_seal(address, token):
|
||||
"""
|
||||
seal the secrets feature
|
||||
"""
|
||||
assert address is not None
|
||||
assert token is not None
|
||||
if not valid_address(address):
|
||||
return
|
||||
address = "{}/api/v0.1/secrets/seal".format(address.strip("/"))
|
||||
resp = requests.post(address, headers={
|
||||
"Inner-Token": token,
|
||||
})
|
||||
if resp.status_code == 200:
|
||||
KeyManage.print_response(resp.json())
|
||||
else:
|
||||
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_password_data_migrate():
|
||||
"""
|
||||
Migrate CI password data, version >= v2.3.6
|
||||
"""
|
||||
from api.models.cmdb import CIIndexValueText
|
||||
from api.models.cmdb import CIValueText
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.vault import VaultClient
|
||||
|
||||
attrs = Attribute.get_by(to_dict=False)
|
||||
for attr in attrs:
|
||||
if attr.is_password:
|
||||
|
||||
value_table = CIIndexValueText if attr.is_index else CIValueText
|
||||
|
||||
failed = False
|
||||
for i in value_table.get_by(attr_id=attr.id, to_dict=False):
|
||||
if current_app.config.get("SECRETS_ENGINE", 'inner') == 'inner':
|
||||
_, status = InnerCrypt().decrypt(i.value)
|
||||
if status:
|
||||
continue
|
||||
|
||||
encrypt_value, status = InnerCrypt().encrypt(i.value)
|
||||
if status:
|
||||
CIValueText.create(ci_id=i.ci_id, attr_id=attr.id, value=encrypt_value)
|
||||
else:
|
||||
failed = True
|
||||
continue
|
||||
elif current_app.config.get("SECRETS_ENGINE") == 'vault':
|
||||
if i.value == '******':
|
||||
continue
|
||||
|
||||
vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
|
||||
try:
|
||||
vault.update("/{}/{}".format(i.ci_id, i.attr_id), dict(v=i.value))
|
||||
except Exception as e:
|
||||
print('save password to vault failed: {}'.format(e))
|
||||
failed = True
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
|
||||
i.delete()
|
||||
|
||||
if not failed and attr.is_index:
|
||||
attr.update(is_index=False)
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def cmdb_agent_init():
|
||||
"""
|
||||
Initialize the agent's permissions and obtain the key and secret
|
||||
"""
|
||||
|
||||
from api.models.acl import User
|
||||
|
||||
user = User.get_by(username="cmdb_agent", first=True, to_dict=False)
|
||||
if user is None:
|
||||
click.echo(
|
||||
click.style('user cmdb_agent does not exist, please use flask add-user to create it first', fg='red'))
|
||||
return
|
||||
|
||||
# grant
|
||||
_app = AppCache.get('cmdb') or App.create(name='cmdb')
|
||||
app_id = _app.id
|
||||
|
||||
ci_types = CIType.get_by(to_dict=False)
|
||||
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
|
||||
for ci_type in ci_types:
|
||||
try:
|
||||
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
|
||||
except AbortException:
|
||||
pass
|
||||
|
||||
ACLManager().grant_resource_to_role(ci_type.name,
|
||||
"cmdb_agent",
|
||||
ResourceTypeEnum.CI,
|
||||
[PermEnum.READ, PermEnum.UPDATE, PermEnum.ADD, PermEnum.DELETE])
|
||||
|
||||
click.echo("Key : {}".format(click.style(user.key, bg='red')))
|
||||
click.echo("Secret: {}".format(click.style(user.secret, bg='red')))
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
'-v',
|
||||
'--version',
|
||||
help='input cmdb version, e.g. 2.4.6',
|
||||
required=True,
|
||||
)
|
||||
@with_appcontext
|
||||
def cmdb_patch(version):
|
||||
"""
|
||||
CMDB upgrade patch
|
||||
"""
|
||||
|
||||
version = version[1:] if version.lower().startswith("v") else version
|
||||
|
||||
try:
|
||||
if version >= '2.4.6':
|
||||
|
||||
from api.models.cmdb import CITypeRelation
|
||||
for cr in CITypeRelation.get_by(to_dict=False):
|
||||
if hasattr(cr, 'parent_attr_id') and cr.parent_attr_id and not cr.parent_attr_ids:
|
||||
parent_attr_ids, child_attr_ids = [cr.parent_attr_id], [cr.child_attr_id]
|
||||
cr.update(parent_attr_ids=parent_attr_ids, child_attr_ids=child_attr_ids, commit=False)
|
||||
db.session.commit()
|
||||
|
||||
from api.models.cmdb import AutoDiscoveryCIType, AutoDiscoveryCITypeRelation
|
||||
from api.lib.cmdb.cache import CITypeCache, AttributeCache
|
||||
for adt in AutoDiscoveryCIType.get_by(to_dict=False):
|
||||
if adt.relation:
|
||||
if not AutoDiscoveryCITypeRelation.get_by(ad_type_id=adt.type_id):
|
||||
peer_type = CITypeCache.get(list(adt.relation.values())[0]['type_name'])
|
||||
peer_type_id = peer_type and peer_type.id
|
||||
peer_attr = AttributeCache.get(list(adt.relation.values())[0]['attr_name'])
|
||||
peer_attr_id = peer_attr and peer_attr.id
|
||||
if peer_type_id and peer_attr_id:
|
||||
AutoDiscoveryCITypeRelation.create(ad_type_id=adt.type_id,
|
||||
ad_key=list(adt.relation.keys())[0],
|
||||
peer_type_id=peer_type_id,
|
||||
peer_attr_id=peer_attr_id,
|
||||
commit=False)
|
||||
if hasattr(adt, 'interval') and adt.interval and not adt.cron:
|
||||
adt.cron = "*/{} * * * *".format(adt.interval // 60 or 1)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
if version >= "2.4.7":
|
||||
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
|
||||
from api.models.cmdb import AutoDiscoveryRule
|
||||
for i in DEFAULT_INNER:
|
||||
existed = AutoDiscoveryRule.get_by(name=i['name'], first=True, to_dict=False)
|
||||
if existed is not None:
|
||||
if "en" in i['option'] and 'en' not in (existed.option or {}):
|
||||
option = copy.deepcopy(existed.option)
|
||||
option['en'] = i['option']['en']
|
||||
existed.update(option=option, commit=False)
|
||||
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
print("cmdb patch failed: {}".format(e))
|
||||
i += 1
|
||||
time.sleep(10)
|
||||
|
||||
@@ -5,7 +5,9 @@ from glob import glob
|
||||
from subprocess import call
|
||||
|
||||
import click
|
||||
from flask import current_app
|
||||
from flask.cli import with_appcontext
|
||||
from werkzeug.exceptions import MethodNotAllowed, NotFound
|
||||
|
||||
from api.extensions import db
|
||||
|
||||
@@ -82,59 +84,69 @@ def clean():
|
||||
os.remove(full_pathname)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option("--url", default=None, help="Url to test (ex. /static/image.png)")
|
||||
@click.option(
|
||||
"--order", default="rule", help="Property on Rule to order by (default: rule)"
|
||||
)
|
||||
@with_appcontext
|
||||
def urls(url, order):
|
||||
"""Display all of the url matching routes for the project.
|
||||
|
||||
Borrowed from Flask-Script, converted to use Click.
|
||||
"""
|
||||
rows = []
|
||||
column_headers = ("Rule", "Endpoint", "Arguments")
|
||||
|
||||
if url:
|
||||
try:
|
||||
rule, arguments = current_app.url_map.bind("localhost").match(
|
||||
url, return_rule=True
|
||||
)
|
||||
rows.append((rule.rule, rule.endpoint, arguments))
|
||||
column_length = 3
|
||||
except (NotFound, MethodNotAllowed) as e:
|
||||
rows.append(("<{}>".format(e), None, None))
|
||||
column_length = 1
|
||||
else:
|
||||
rules = sorted(
|
||||
current_app.url_map.iter_rules(), key=lambda rule: getattr(rule, order)
|
||||
)
|
||||
for rule in rules:
|
||||
rows.append((rule.rule, rule.endpoint, None))
|
||||
column_length = 2
|
||||
|
||||
str_template = ""
|
||||
table_width = 0
|
||||
|
||||
if column_length >= 1:
|
||||
max_rule_length = max(len(r[0]) for r in rows)
|
||||
max_rule_length = max_rule_length if max_rule_length > 4 else 4
|
||||
str_template += "{:" + str(max_rule_length) + "}"
|
||||
table_width += max_rule_length
|
||||
|
||||
if column_length >= 2:
|
||||
max_endpoint_length = max(len(str(r[1])) for r in rows)
|
||||
max_endpoint_length = max_endpoint_length if max_endpoint_length > 8 else 8
|
||||
str_template += " {:" + str(max_endpoint_length) + "}"
|
||||
table_width += 2 + max_endpoint_length
|
||||
|
||||
if column_length >= 3:
|
||||
max_arguments_length = max(len(str(r[2])) for r in rows)
|
||||
max_arguments_length = max_arguments_length if max_arguments_length > 9 else 9
|
||||
str_template += " {:" + str(max_arguments_length) + "}"
|
||||
table_width += 2 + max_arguments_length
|
||||
|
||||
click.echo(str_template.format(*column_headers[:column_length]))
|
||||
click.echo("-" * table_width)
|
||||
|
||||
for row in rows:
|
||||
click.echo(str_template.format(*row[:column_length]))
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def db_setup():
|
||||
"""create tables
|
||||
"""
|
||||
db.create_all()
|
||||
|
||||
try:
|
||||
db.session.execute("set global sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,"
|
||||
"ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'")
|
||||
db.session.commit()
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
db.session.execute("set global tidb_enable_noop_functions='ON'")
|
||||
db.session.commit()
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
@click.group()
|
||||
def translate():
|
||||
"""Translation and localization commands."""
|
||||
|
||||
|
||||
@translate.command()
|
||||
@click.argument('lang')
|
||||
def init(lang):
|
||||
"""Initialize a new language."""
|
||||
|
||||
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
|
||||
raise RuntimeError('extract command failed')
|
||||
if os.system(
|
||||
'pybabel init -i messages.pot -d api/translations -l ' + lang):
|
||||
raise RuntimeError('init command failed')
|
||||
os.remove('messages.pot')
|
||||
|
||||
|
||||
@translate.command()
|
||||
def update():
|
||||
"""Update all languages."""
|
||||
|
||||
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
|
||||
raise RuntimeError('extract command failed')
|
||||
if os.system('pybabel update -i messages.pot -d api/translations'):
|
||||
raise RuntimeError('update command failed')
|
||||
os.remove('messages.pot')
|
||||
|
||||
|
||||
@translate.command()
|
||||
def compile():
|
||||
"""Compile all languages."""
|
||||
|
||||
if os.system('pybabel compile -d api/translations'):
|
||||
raise RuntimeError('compile command failed')
|
||||
|
||||
@@ -4,40 +4,31 @@ from flask.cli import with_appcontext
|
||||
from werkzeug.datastructures import MultiDict
|
||||
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.common_setting.employee import EmployeeAddForm, GrantEmployeeACLPerm
|
||||
from api.lib.common_setting.employee import EmployeeAddForm
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.utils import CheckNewColumn
|
||||
from api.models.common_setting import Employee, Department
|
||||
|
||||
|
||||
class InitEmployee(object):
|
||||
"""
|
||||
初始化员工
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.log = current_app.logger
|
||||
|
||||
def import_user_from_acl(self):
|
||||
"""
|
||||
Import users from ACL
|
||||
从ACL导入用户
|
||||
"""
|
||||
|
||||
InitDepartment().init()
|
||||
acl = ACLManager('acl')
|
||||
user_list = acl.get_all_users()
|
||||
|
||||
username_list = [e['username'] for e in Employee.get_by()]
|
||||
|
||||
for user in user_list:
|
||||
acl_uid = user['uid']
|
||||
block = 1 if user['block'] else 0
|
||||
acl_rid = self.get_rid_by_uid(acl_uid)
|
||||
if user['username'] in username_list:
|
||||
existed = Employee.get_by(first=True, username=user['username'], to_dict=False)
|
||||
if existed:
|
||||
existed.update(
|
||||
acl_uid=acl_uid,
|
||||
acl_rid=acl_rid,
|
||||
block=block,
|
||||
)
|
||||
continue
|
||||
try:
|
||||
form = EmployeeAddForm(MultiDict(user))
|
||||
@@ -45,9 +36,8 @@ class InitEmployee(object):
|
||||
raise Exception(
|
||||
','.join(['{}: {}'.format(filed, ','.join(msg)) for filed, msg in form.errors.items()]))
|
||||
data = form.data
|
||||
data['acl_uid'] = acl_uid
|
||||
data['acl_rid'] = acl_rid
|
||||
data['block'] = block
|
||||
data['acl_uid'] = user['uid']
|
||||
data['block'] = 1 if user['block'] else 0
|
||||
data.pop('password')
|
||||
Employee.create(
|
||||
**data
|
||||
@@ -56,12 +46,6 @@ class InitEmployee(object):
|
||||
self.log.error(ErrFormat.acl_import_user_failed.format(user['username'], str(e)))
|
||||
self.log.error(e)
|
||||
|
||||
@staticmethod
|
||||
def get_rid_by_uid(uid):
|
||||
from api.models.acl import Role
|
||||
role = Role.get_by(first=True, uid=uid)
|
||||
return role['id'] if role is not None else 0
|
||||
|
||||
|
||||
class InitDepartment(object):
|
||||
def __init__(self):
|
||||
@@ -70,8 +54,7 @@ class InitDepartment(object):
|
||||
def init(self):
|
||||
self.init_wide_company()
|
||||
|
||||
@staticmethod
|
||||
def hard_delete(department_id, department_name):
|
||||
def hard_delete(self, department_id, department_name):
|
||||
existed_deleted_list = Department.query.filter(
|
||||
Department.department_name == department_name,
|
||||
Department.department_id == department_id,
|
||||
@@ -80,12 +63,11 @@ class InitDepartment(object):
|
||||
for existed in existed_deleted_list:
|
||||
existed.delete()
|
||||
|
||||
@staticmethod
|
||||
def get_department(department_name):
|
||||
def get_department(self, department_name):
|
||||
return Department.query.filter(
|
||||
Department.department_name == department_name,
|
||||
Department.deleted == 0,
|
||||
).first()
|
||||
).order_by(Department.created_at.asc()).first()
|
||||
|
||||
def run(self, department_id, department_name, department_parent_id):
|
||||
self.hard_delete(department_id, department_name)
|
||||
@@ -95,7 +77,7 @@ class InitDepartment(object):
|
||||
if res.department_id == department_id:
|
||||
return
|
||||
else:
|
||||
res.update(
|
||||
new_d = res.update(
|
||||
department_id=department_id,
|
||||
department_parent_id=department_parent_id,
|
||||
)
|
||||
@@ -109,11 +91,11 @@ class InitDepartment(object):
|
||||
new_d = self.get_department(department_name)
|
||||
|
||||
if new_d.department_id != department_id:
|
||||
new_d.update(
|
||||
new_d = new_d.update(
|
||||
department_id=department_id,
|
||||
department_parent_id=department_parent_id,
|
||||
)
|
||||
self.log.info(f"init {department_name} success.")
|
||||
self.log.info(f"初始化 {department_name} 部门成功.")
|
||||
|
||||
def run_common(self, department_id, department_name, department_parent_id):
|
||||
try:
|
||||
@@ -124,14 +106,19 @@ class InitDepartment(object):
|
||||
raise Exception(e)
|
||||
|
||||
def init_wide_company(self):
|
||||
"""
|
||||
创建 id 0, name 全公司 的部门
|
||||
"""
|
||||
department_id = 0
|
||||
department_name = '全公司'
|
||||
department_parent_id = -1
|
||||
|
||||
self.run_common(department_id, department_name, department_parent_id)
|
||||
|
||||
@staticmethod
|
||||
def create_acl_role_with_department():
|
||||
def create_acl_role_with_department(self):
|
||||
"""
|
||||
当前所有部门,在ACL创建 role
|
||||
"""
|
||||
acl = ACLManager('acl')
|
||||
role_name_map = {role['name']: role for role in acl.get_all_roles()}
|
||||
|
||||
@@ -142,7 +129,7 @@ class InitDepartment(object):
|
||||
continue
|
||||
|
||||
role = role_name_map.get(department.department_name)
|
||||
if not role:
|
||||
if role is None:
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'name': department.department_name,
|
||||
@@ -157,37 +144,12 @@ class InitDepartment(object):
|
||||
info = f"update department acl_rid: {acl_rid}"
|
||||
current_app.logger.info(info)
|
||||
|
||||
def init_backend_resource(self):
|
||||
acl = self.check_app('backend')
|
||||
acl_rid = self.get_admin_user_rid()
|
||||
|
||||
if acl_rid == 0:
|
||||
return
|
||||
GrantEmployeeACLPerm(acl).grant_by_rid(acl_rid, True)
|
||||
|
||||
@staticmethod
|
||||
def check_app(app_name):
|
||||
acl = ACLManager(app_name)
|
||||
payload = dict(
|
||||
name=app_name,
|
||||
description=app_name
|
||||
)
|
||||
app = acl.validate_app()
|
||||
if not app:
|
||||
acl.create_app(payload)
|
||||
return acl
|
||||
|
||||
@staticmethod
|
||||
def get_admin_user_rid():
|
||||
admin = Employee.get_by(first=True, username='admin', to_dict=False)
|
||||
return admin.acl_rid if admin else 0
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def init_import_user_from_acl():
|
||||
"""
|
||||
Import users from ACL
|
||||
从ACL导入用户
|
||||
"""
|
||||
InitEmployee().import_user_from_acl()
|
||||
|
||||
@@ -196,35 +158,7 @@ def init_import_user_from_acl():
|
||||
@with_appcontext
|
||||
def init_department():
|
||||
"""
|
||||
Department initialization
|
||||
初始化 部门
|
||||
"""
|
||||
cli = InitDepartment()
|
||||
cli.init_wide_company()
|
||||
cli.create_acl_role_with_department()
|
||||
cli.init_backend_resource()
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def common_check_new_columns():
|
||||
"""
|
||||
add new columns to tables
|
||||
"""
|
||||
CheckNewColumn().run()
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
def common_sync_file_to_db():
|
||||
from api.lib.common_setting.upload_file import CommonFileCRUD
|
||||
CommonFileCRUD.sync_file_to_db()
|
||||
|
||||
|
||||
@click.command()
|
||||
@with_appcontext
|
||||
@click.option('--value', type=click.INT, default=-1)
|
||||
def set_auth_auto_redirect_enable(value):
|
||||
if value < 0:
|
||||
return
|
||||
from api.lib.common_setting.common_data import CommonDataCRUD
|
||||
CommonDataCRUD.set_auth_auto_redirect_enable(value)
|
||||
InitDepartment().init()
|
||||
InitDepartment().create_acl_role_with_department()
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
|
||||
from celery import Celery
|
||||
from flask_babel import Babel
|
||||
from flask_bcrypt import Bcrypt
|
||||
from flask_caching import Cache
|
||||
from flask_cors import CORS
|
||||
@@ -10,12 +9,10 @@ from flask_login import LoginManager
|
||||
from flask_migrate import Migrate
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
from api.lib.utils import ESHandler
|
||||
from api.lib.utils import RedisHandler
|
||||
|
||||
bcrypt = Bcrypt()
|
||||
babel = Babel()
|
||||
login_manager = LoginManager()
|
||||
db = SQLAlchemy(session_options={"autoflush": False})
|
||||
migrate = Migrate()
|
||||
@@ -24,4 +21,3 @@ celery = Celery()
|
||||
cors = CORS(supports_credentials=True)
|
||||
rd = RedisHandler()
|
||||
es = ESHandler()
|
||||
inner_secrets = KeyManage()
|
||||
|
||||
@@ -15,7 +15,7 @@ try:
|
||||
except ImportError:
|
||||
from flask import _request_ctx_stack as stack
|
||||
|
||||
from . import routing
|
||||
from api.flask_cas import routing
|
||||
|
||||
|
||||
class CAS(object):
|
||||
@@ -119,4 +119,4 @@ def create_cas_validate_url(cas_url, cas_route, service, ticket,
|
||||
('service', service),
|
||||
('ticket', ticket),
|
||||
('renew', renew),
|
||||
)
|
||||
)
|
||||
@@ -1,24 +1,14 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
import json
|
||||
|
||||
import bs4
|
||||
from flask import Blueprint
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from flask import current_app, session, request, url_for, redirect
|
||||
from flask_login import login_user, logout_user
|
||||
from six.moves.urllib_request import urlopen
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from .cas_urls import create_cas_login_url
|
||||
from .cas_urls import create_cas_logout_url
|
||||
from .cas_urls import create_cas_validate_url
|
||||
@@ -26,7 +16,6 @@ from .cas_urls import create_cas_validate_url
|
||||
blueprint = Blueprint('cas', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/login')
|
||||
@blueprint.route('/api/sso/login')
|
||||
def login():
|
||||
"""
|
||||
@@ -40,20 +29,16 @@ def login():
|
||||
If validation was successful the logged in username is saved in
|
||||
the user's session under the key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
# _service = url_for('cas.login', _external=True)
|
||||
_service = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('cas.login'))
|
||||
|
||||
_service = url_for('cas.login', _external=True, next=session["next"]) \
|
||||
if session.get("next") else url_for('cas.login', _external=True)
|
||||
redirect_url = create_cas_login_url(
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
_service)
|
||||
|
||||
if 'ticket' in request.args:
|
||||
@@ -62,38 +47,30 @@ def login():
|
||||
if request.args.get('ticket'):
|
||||
|
||||
if validate(request.args['ticket']):
|
||||
redirect_url = session.get("next") or config.get("cas_after_login") or "/"
|
||||
redirect_url = session.get("next") or \
|
||||
current_app.config.get("CAS_AFTER_LOGIN")
|
||||
username = session.get("CAS_USERNAME")
|
||||
user = UserCache.get(username)
|
||||
login_user(user)
|
||||
|
||||
session.permanent = True
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
else:
|
||||
del session[cas_token_session_key]
|
||||
redirect_url = create_cas_login_url(
|
||||
config['cas_server'],
|
||||
config['cas_login_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGIN_ROUTE'],
|
||||
url_for('cas.login', _external=True),
|
||||
renew=True)
|
||||
|
||||
AuditCRUD.add_login_log(session.get("CAS_USERNAME"), False, ErrFormat.invalid_password)
|
||||
|
||||
current_app.logger.info("redirect to: {0}".format(redirect_url))
|
||||
return redirect(redirect_url)
|
||||
|
||||
|
||||
@blueprint.route('/api/cas/logout')
|
||||
@blueprint.route('/api/sso/logout')
|
||||
def logout():
|
||||
"""
|
||||
When the user accesses this route they are logged out.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
current_app.logger.info(config)
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
|
||||
@@ -105,14 +82,12 @@ def logout():
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = create_cas_logout_url(
|
||||
config['cas_server'],
|
||||
config['cas_logout_route'],
|
||||
current_app.config['CAS_SERVER'],
|
||||
current_app.config['CAS_LOGOUT_ROUTE'],
|
||||
url_for('cas.login', _external=True, next=request.referrer))
|
||||
|
||||
logout_user()
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
return redirect(redirect_url)
|
||||
@@ -125,15 +100,14 @@ def validate(ticket):
|
||||
and the validated username is saved in the session under the
|
||||
key `CAS_USERNAME_SESSION_KEY`.
|
||||
"""
|
||||
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
|
||||
|
||||
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
|
||||
|
||||
current_app.logger.debug("validating token {0}".format(ticket))
|
||||
|
||||
cas_validate_url = create_cas_validate_url(
|
||||
config['cas_validate_server'],
|
||||
config['cas_validate_route'],
|
||||
current_app.config['CAS_VALIDATE_SERVER'],
|
||||
current_app.config['CAS_VALIDATE_ROUTE'],
|
||||
url_for('cas.login', _external=True),
|
||||
ticket)
|
||||
|
||||
@@ -141,35 +115,23 @@ def validate(ticket):
|
||||
|
||||
try:
|
||||
response = urlopen(cas_validate_url).read()
|
||||
ticket_id = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticket_id.split('|') if s.strip()]
|
||||
ticketid = _parse_tag(response, "cas:user")
|
||||
strs = [s.strip() for s in ticketid.split('|') if s.strip()]
|
||||
username, is_valid = None, False
|
||||
if len(strs) == 1:
|
||||
username = strs[0]
|
||||
is_valid = True
|
||||
user_info = json.loads(_parse_tag(response, "cas:other"))
|
||||
current_app.logger.info(user_info)
|
||||
except ValueError:
|
||||
current_app.logger.error("CAS returned unexpected result")
|
||||
is_valid = False
|
||||
return is_valid
|
||||
|
||||
if is_valid:
|
||||
current_app.logger.debug("{}: {}".format(cas_username_session_key, username))
|
||||
current_app.logger.debug("valid")
|
||||
session[cas_username_session_key] = username
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
soup = bs4.BeautifulSoup(response)
|
||||
cas_user_map = config.get('cas_user_map')
|
||||
user_dict = dict()
|
||||
for k in cas_user_map:
|
||||
v = soup.find(cas_user_map[k]['tag'], cas_user_map[k].get('attrs', {}))
|
||||
user_dict[k] = v and v.text or None
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
if "email" not in user_dict:
|
||||
user_dict['email'] = username
|
||||
|
||||
UserCRUD.add(**user_dict)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
@@ -194,7 +156,7 @@ def validate(ticket):
|
||||
|
||||
def _parse_tag(string, tag):
|
||||
"""
|
||||
Used for parsing xml. Search string for the first occurrence of
|
||||
Used for parsing xml. Search string for the first occurence of
|
||||
<tag>.....</tag> and return text (stripped of leading and tailing
|
||||
whitespace) between tags. Return "" if tag not found.
|
||||
"""
|
||||
@@ -202,5 +164,4 @@ def _parse_tag(string, tag):
|
||||
|
||||
if soup.find(tag) is None:
|
||||
return ''
|
||||
|
||||
return soup.find(tag).string.strip()
|
||||
@@ -1,20 +1,15 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import requests
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import session
|
||||
from flask_login import current_user
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.const import BUILTIN_KEYWORDS
|
||||
from api.lib.cmdb.const import CITypeOperateType
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum, RoleEnum, PermEnum
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.cmdb.history import CITypeHistoryManager
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
@@ -22,7 +17,6 @@ from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.decorator import kwargs_required
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.perm.acl.acl import validate_permission
|
||||
from api.lib.webhook import webhook_request
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import CIType
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
@@ -40,11 +34,15 @@ class AttributeManager(object):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def _get_choice_values_from_webhook(choice_webhook, payload=None):
|
||||
ret_key = choice_webhook.get('ret_key')
|
||||
def _get_choice_values_from_web_hook(choice_web_hook):
|
||||
url = choice_web_hook.get('url')
|
||||
ret_key = choice_web_hook.get('ret_key')
|
||||
headers = choice_web_hook.get('headers') or {}
|
||||
payload = choice_web_hook.get('payload') or {}
|
||||
method = choice_web_hook.get('method', 'GET').lower()
|
||||
|
||||
try:
|
||||
res = webhook_request(choice_webhook, payload or {}).json()
|
||||
res = getattr(requests, method)(url, headers=headers, data=payload).json()
|
||||
if ret_key:
|
||||
ret_key_list = ret_key.strip().split("##")
|
||||
for key in ret_key_list[:-1]:
|
||||
@@ -56,102 +54,52 @@ class AttributeManager(object):
|
||||
return [[i, {}] for i in (res.get(ret_key_list[-1]) or [])]
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.error("get choice values failed: {}".format(e))
|
||||
current_app.logger.error(str(e))
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def _get_choice_values_from_other(choice_other):
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
|
||||
if choice_other.get('type_ids'):
|
||||
type_ids = choice_other.get('type_ids')
|
||||
attr_id = choice_other.get('attr_id')
|
||||
other_filter = choice_other.get('filter') or ''
|
||||
|
||||
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
|
||||
s = search(query, fl=[str(attr_id)], facet=[str(attr_id)], count=1)
|
||||
try:
|
||||
_, _, _, _, _, facet = s.search()
|
||||
return [[i[0], {}] for i in (list(facet.values()) or [[]])[0]]
|
||||
except SearchError as e:
|
||||
current_app.logger.error("get choice values from other ci failed: {}".format(e))
|
||||
return []
|
||||
|
||||
elif choice_other.get('script'):
|
||||
try:
|
||||
x = compile(choice_other['script'], '', "exec")
|
||||
local_ns = {}
|
||||
exec(x, {}, local_ns)
|
||||
res = local_ns['ChoiceValue']().values() or []
|
||||
return [[i, {}] for i in res]
|
||||
except Exception as e:
|
||||
current_app.logger.error("get choice values from script: {}".format(e))
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_other,
|
||||
choice_web_hook_parse=True, choice_other_parse=True):
|
||||
if choice_web_hook:
|
||||
if choice_web_hook_parse and isinstance(choice_web_hook, dict):
|
||||
return cls._get_choice_values_from_webhook(choice_web_hook)
|
||||
else:
|
||||
return []
|
||||
elif choice_other:
|
||||
if choice_other_parse and isinstance(choice_other, dict):
|
||||
return cls._get_choice_values_from_other(choice_other)
|
||||
else:
|
||||
return []
|
||||
def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_web_hook_parse=True):
|
||||
if choice_web_hook and isinstance(choice_web_hook, dict) and choice_web_hook_parse:
|
||||
return cls._get_choice_values_from_web_hook(choice_web_hook)
|
||||
elif choice_web_hook and not choice_web_hook_parse:
|
||||
return []
|
||||
|
||||
choice_table = ValueTypeMap.choice.get(value_type)
|
||||
if not choice_table:
|
||||
return []
|
||||
choice_values = choice_table.get_by(fl=["value", "option"], attr_id=attr_id)
|
||||
|
||||
return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option'] or
|
||||
{"label": ValueTypeMap.serialize[value_type](choice_value['value'])}]
|
||||
for choice_value in choice_values]
|
||||
return [[choice_value['value'], choice_value['option']] for choice_value in choice_values]
|
||||
|
||||
@staticmethod
|
||||
def add_choice_values(_id, value_type, choice_values):
|
||||
choice_table = ValueTypeMap.choice.get(value_type)
|
||||
if choice_table is None:
|
||||
return
|
||||
|
||||
choice_table.get_by(attr_id=_id, only_query=True).delete()
|
||||
|
||||
db.session.query(choice_table).filter(choice_table.attr_id == _id).delete()
|
||||
db.session.flush()
|
||||
choice_values = choice_values
|
||||
for v, option in choice_values:
|
||||
choice_table.create(attr_id=_id, value=v, option=option, commit=False)
|
||||
table = choice_table(attr_id=_id, value=v, option=option)
|
||||
|
||||
db.session.add(table)
|
||||
|
||||
try:
|
||||
db.session.flush()
|
||||
except Exception as e:
|
||||
current_app.logger.warning("add choice values failed: {}".format(e))
|
||||
except:
|
||||
return abort(400, ErrFormat.invalid_choice_values)
|
||||
|
||||
@staticmethod
|
||||
def _del_choice_values(_id, value_type):
|
||||
choice_table = ValueTypeMap.choice.get(value_type)
|
||||
|
||||
choice_table and choice_table.get_by(attr_id=_id, only_query=True).delete()
|
||||
db.session.query(choice_table).filter(choice_table.attr_id == _id).delete()
|
||||
db.session.flush()
|
||||
|
||||
@classmethod
|
||||
def get_enum_map(cls, _attr_id, _attr=None):
|
||||
attr = AttributeCache.get(_attr_id) if _attr_id else _attr
|
||||
if attr and attr.is_choice:
|
||||
choice_values = cls.get_choice_values(attr.id, attr.value_type, None, None)
|
||||
return {i[0]: i[1]['label'] for i in choice_values if i[1] and i[1].get('label')}
|
||||
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
def search_attributes(cls, name=None, alias=None, page=1, page_size=None):
|
||||
"""
|
||||
:param name:
|
||||
:param alias:
|
||||
:param page:
|
||||
:param page_size:
|
||||
:param name:
|
||||
:param alias:
|
||||
:param page:
|
||||
:param page_size:
|
||||
:return: attribute, if name is None, then return all attributes
|
||||
"""
|
||||
if name is not None:
|
||||
@@ -165,9 +113,8 @@ class AttributeManager(object):
|
||||
attrs = attrs[(page - 1) * page_size:][:page_size]
|
||||
res = list()
|
||||
for attr in attrs:
|
||||
attr["is_choice"] and attr.update(
|
||||
dict(choice_value=cls.get_choice_values(attr["id"], attr["value_type"],
|
||||
attr["choice_web_hook"], attr.get("choice_other"))))
|
||||
attr["is_choice"] and attr.update(dict(choice_value=cls.get_choice_values(
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"])))
|
||||
attr['is_choice'] and attr.pop('choice_web_hook', None)
|
||||
|
||||
res.append(attr)
|
||||
@@ -176,47 +123,30 @@ class AttributeManager(object):
|
||||
|
||||
def get_attribute_by_name(self, name):
|
||||
attr = Attribute.get_by(name=name, first=True)
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"],
|
||||
attr["value_type"],
|
||||
attr["choice_web_hook"],
|
||||
attr.get("choice_other"))
|
||||
|
||||
if attr and attr["is_choice"]:
|
||||
attr.update(dict(choice_value=self.get_choice_values(
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"])))
|
||||
return attr
|
||||
|
||||
def get_attribute_by_alias(self, alias):
|
||||
attr = Attribute.get_by(alias=alias, first=True)
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"],
|
||||
attr["value_type"],
|
||||
attr["choice_web_hook"],
|
||||
attr.get("choice_other"))
|
||||
|
||||
if attr and attr["is_choice"]:
|
||||
attr.update(dict(choice_value=self.get_choice_values(
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"])))
|
||||
return attr
|
||||
|
||||
def get_attribute_by_id(self, _id):
|
||||
attr = Attribute.get_by_id(_id).to_dict()
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(attr["id"],
|
||||
attr["value_type"],
|
||||
attr["choice_web_hook"],
|
||||
attr.get("choice_other"))
|
||||
|
||||
if attr and attr["is_choice"]:
|
||||
attr.update(dict(choice_value=self.get_choice_values(
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"])))
|
||||
return attr
|
||||
|
||||
def get_attribute(self, key, choice_web_hook_parse=True, choice_other_parse=True):
|
||||
attr = AttributeCache.get(key) or dict()
|
||||
attr = attr and attr.to_dict()
|
||||
if attr.get("is_choice"):
|
||||
attr["choice_value"] = self.get_choice_values(
|
||||
attr["id"],
|
||||
attr["value_type"],
|
||||
attr["choice_web_hook"],
|
||||
attr.get("choice_other"),
|
||||
choice_web_hook_parse=choice_web_hook_parse,
|
||||
choice_other_parse=choice_other_parse,
|
||||
)
|
||||
|
||||
def get_attribute(self, key, choice_web_hook_parse=True):
|
||||
attr = AttributeCache.get(key).to_dict()
|
||||
if attr and attr["is_choice"]:
|
||||
attr.update(dict(choice_value=self.get_choice_values(
|
||||
attr["id"], attr["value_type"], attr["choice_web_hook"])), choice_web_hook_parse=choice_web_hook_parse)
|
||||
return attr
|
||||
|
||||
@staticmethod
|
||||
@@ -224,40 +154,16 @@ class AttributeManager(object):
|
||||
if RoleEnum.CONFIG not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin('cmdb'):
|
||||
return abort(403, ErrFormat.role_required.format(RoleEnum.CONFIG))
|
||||
|
||||
@classmethod
|
||||
def calc_computed_attribute(cls, attr_id):
|
||||
"""
|
||||
calculate computed attribute for all ci
|
||||
:param attr_id:
|
||||
:return:
|
||||
"""
|
||||
cls.can_create_computed_attribute()
|
||||
|
||||
from api.tasks.cmdb import calc_computed_attribute
|
||||
|
||||
calc_computed_attribute.apply_async(args=(attr_id, current_user.uid), queue=CMDB_QUEUE)
|
||||
|
||||
@classmethod
|
||||
@kwargs_required("name")
|
||||
def add(cls, **kwargs):
|
||||
choice_value = kwargs.pop("choice_value", [])
|
||||
kwargs.pop("is_choice", None)
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') else False
|
||||
|
||||
name = kwargs.pop("name")
|
||||
if name in BUILTIN_KEYWORDS or kwargs.get('alias') in BUILTIN_KEYWORDS:
|
||||
if name in {'id', '_id', 'ci_id', 'type', '_type', 'ci_type'}:
|
||||
return abort(400, ErrFormat.attribute_name_cannot_be_builtin)
|
||||
|
||||
while kwargs.get('choice_other'):
|
||||
if isinstance(kwargs['choice_other'], dict):
|
||||
if kwargs['choice_other'].get('script'):
|
||||
break
|
||||
|
||||
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
|
||||
break
|
||||
|
||||
return abort(400, ErrFormat.attribute_choice_other_invalid)
|
||||
|
||||
alias = kwargs.pop("alias", "")
|
||||
alias = name if not alias else alias
|
||||
Attribute.get_by(name=name, first=True) and abort(400, ErrFormat.attribute_name_duplicate.format(name))
|
||||
@@ -267,13 +173,11 @@ class AttributeManager(object):
|
||||
|
||||
kwargs.get('is_computed') and cls.can_create_computed_attribute()
|
||||
|
||||
kwargs.get('choice_other') and kwargs['choice_other'].get('script') and cls.can_create_computed_attribute()
|
||||
|
||||
attr = Attribute.create(flush=True,
|
||||
name=name,
|
||||
alias=alias,
|
||||
is_choice=is_choice,
|
||||
uid=current_user.uid,
|
||||
uid=g.user.uid,
|
||||
**kwargs)
|
||||
|
||||
if choice_value:
|
||||
@@ -307,11 +211,6 @@ class AttributeManager(object):
|
||||
|
||||
return attr.id
|
||||
|
||||
@staticmethod
|
||||
def _clean_ci_type_attributes_cache(attr_id):
|
||||
for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False):
|
||||
CITypeAttributesCache.clean(i.type_id)
|
||||
|
||||
@staticmethod
|
||||
def _change_index(attr, old, new):
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
@@ -322,11 +221,11 @@ class AttributeManager(object):
|
||||
new_table = TableMap(attr=attr, is_index=new).table
|
||||
|
||||
ci_ids = []
|
||||
for i in old_table.get_by(attr_id=attr.id, to_dict=False):
|
||||
for i in db.session.query(old_table).filter(getattr(old_table, 'attr_id') == attr.id):
|
||||
new_table.create(ci_id=i.ci_id, attr_id=attr.id, value=i.value, flush=True)
|
||||
ci_ids.append(i.ci_id)
|
||||
|
||||
old_table.get_by(attr_id=attr.id, only_query=True).delete()
|
||||
db.session.query(old_table).filter(getattr(old_table, 'attr_id') == attr.id).delete()
|
||||
|
||||
try:
|
||||
db.session.commit()
|
||||
@@ -341,7 +240,7 @@ class AttributeManager(object):
|
||||
def _can_edit_attribute(attr):
|
||||
from api.lib.cmdb.ci_type import CITypeManager
|
||||
|
||||
if attr.uid == current_user.uid:
|
||||
if attr.uid == g.user.uid:
|
||||
return True
|
||||
|
||||
for i in CITypeAttribute.get_by(attr_id=attr.id, to_dict=False):
|
||||
@@ -354,6 +253,9 @@ class AttributeManager(object):
|
||||
def update(self, _id, **kwargs):
|
||||
attr = Attribute.get_by_id(_id) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_id)))
|
||||
|
||||
if not self._can_edit_attribute(attr):
|
||||
return abort(403, ErrFormat.cannot_edit_attribute)
|
||||
|
||||
if kwargs.get("name"):
|
||||
other = Attribute.get_by(name=kwargs['name'], first=True, to_dict=False)
|
||||
if other and other.id != attr.id:
|
||||
@@ -371,22 +273,12 @@ class AttributeManager(object):
|
||||
|
||||
self._change_index(attr, attr.is_index, kwargs['is_index'])
|
||||
|
||||
while kwargs.get('choice_other'):
|
||||
if isinstance(kwargs['choice_other'], dict):
|
||||
if kwargs['choice_other'].get('script'):
|
||||
break
|
||||
|
||||
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
|
||||
break
|
||||
|
||||
return abort(400, ErrFormat.attribute_choice_other_invalid)
|
||||
|
||||
existed2 = attr.to_dict()
|
||||
if not existed2['choice_web_hook'] and not existed2.get('choice_other') and existed2['is_choice']:
|
||||
existed2['choice_value'] = self.get_choice_values(attr.id, attr.value_type, None, None)
|
||||
if not existed2['choice_web_hook'] and existed2['is_choice']:
|
||||
existed2['choice_value'] = self.get_choice_values(attr.id, attr.value_type, attr.choice_web_hook)
|
||||
|
||||
choice_value = kwargs.pop("choice_value", False)
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
|
||||
is_choice = True if choice_value or kwargs.get('choice_web_hook') else False
|
||||
kwargs['is_choice'] = is_choice
|
||||
|
||||
if kwargs.get('default') and not (isinstance(kwargs['default'], dict) and 'default' in kwargs['default']):
|
||||
@@ -394,19 +286,11 @@ class AttributeManager(object):
|
||||
|
||||
kwargs.get('is_computed') and self.can_create_computed_attribute()
|
||||
|
||||
is_changed = False
|
||||
for k in kwargs:
|
||||
if kwargs[k] != getattr(attr, k, None):
|
||||
is_changed = True
|
||||
|
||||
if is_changed and not self._can_edit_attribute(attr):
|
||||
return abort(403, ErrFormat.cannot_edit_attribute)
|
||||
|
||||
attr.update(flush=True, filter_none=False, **kwargs)
|
||||
|
||||
if is_choice and choice_value:
|
||||
self.add_choice_values(attr.id, attr.value_type, choice_value)
|
||||
elif existed2['is_choice']:
|
||||
elif is_choice:
|
||||
self._del_choice_values(attr.id, attr.value_type)
|
||||
|
||||
try:
|
||||
@@ -425,8 +309,6 @@ class AttributeManager(object):
|
||||
|
||||
AttributeCache.clean(attr)
|
||||
|
||||
self._clean_ci_type_attributes_cache(_id)
|
||||
|
||||
return attr.id
|
||||
|
||||
@staticmethod
|
||||
@@ -437,28 +319,25 @@ class AttributeManager(object):
|
||||
if CIType.get_by(unique_id=attr.id, first=True, to_dict=False) is not None:
|
||||
return abort(400, ErrFormat.attribute_is_unique_id)
|
||||
|
||||
ref = CITypeAttribute.get_by(attr_id=_id, to_dict=False, first=True)
|
||||
if ref is not None:
|
||||
ci_type = CITypeCache.get(ref.type_id)
|
||||
return abort(400, ErrFormat.attribute_is_ref_by_type.format(ci_type and ci_type.alias or ref.type_id))
|
||||
|
||||
if attr.uid != current_user.uid and not is_app_admin('cmdb'):
|
||||
if attr.uid and attr.uid != g.user.uid:
|
||||
return abort(403, ErrFormat.cannot_delete_attribute)
|
||||
|
||||
if attr.is_choice:
|
||||
choice_table = ValueTypeMap.choice.get(attr.value_type)
|
||||
choice_table.get_by(attr_id=_id, only_query=True).delete()
|
||||
|
||||
attr.soft_delete()
|
||||
db.session.query(choice_table).filter(choice_table.attr_id == _id).delete() # FIXME: session conflict
|
||||
db.session.flush()
|
||||
|
||||
AttributeCache.clean(attr)
|
||||
|
||||
attr.soft_delete()
|
||||
|
||||
for i in CITypeAttribute.get_by(attr_id=_id, to_dict=False):
|
||||
i.soft_delete()
|
||||
|
||||
for i in PreferenceShowAttributes.get_by(attr_id=_id, to_dict=False):
|
||||
i.soft_delete(commit=False)
|
||||
i.soft_delete()
|
||||
|
||||
for i in CITypeAttributeGroupItem.get_by(attr_id=_id, to_dict=False):
|
||||
i.soft_delete(commit=False)
|
||||
|
||||
db.session.commit()
|
||||
i.soft_delete()
|
||||
|
||||
return name
|
||||
|
||||
@@ -1,61 +1,44 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import copy
|
||||
import datetime
|
||||
import json
|
||||
import jsonpath
|
||||
import os
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from flask import g
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.auto_discovery.const import CLOUD_MAP
|
||||
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
|
||||
from api.lib.cmdb.auto_discovery.const import PRIVILEGED_USERS
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import AutoDiscoveryMappingCache
|
||||
from api.lib.cmdb.auto_discovery.const import ClOUD_MAP
|
||||
from api.lib.cmdb.cache import CITypeAttributeCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeGroupManager
|
||||
from api.lib.cmdb.const import AutoDiscoveryType
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.custom_dashboard import SystemConfigManager
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search as ci_search
|
||||
from api.lib.common_setting.role_perm_base import CMDBApp
|
||||
from api.lib.cmdb.search.ci import search
|
||||
from api.lib.mixin import DBMixin
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.perm.acl.acl import validate_permission
|
||||
from api.lib.utils import AESCrypto
|
||||
from api.models.cmdb import AutoDiscoveryAccount
|
||||
from api.models.cmdb import AutoDiscoveryCI
|
||||
from api.models.cmdb import AutoDiscoveryCIType
|
||||
from api.models.cmdb import AutoDiscoveryCITypeRelation
|
||||
from api.models.cmdb import AutoDiscoveryCounter
|
||||
from api.models.cmdb import AutoDiscoveryExecHistory
|
||||
from api.models.cmdb import AutoDiscoveryRule
|
||||
from api.models.cmdb import AutoDiscoveryRuleSyncHistory
|
||||
from api.tasks.cmdb import build_relations_for_ad_accept
|
||||
from api.tasks.cmdb import write_ad_rule_sync_history
|
||||
|
||||
PWD = os.path.abspath(os.path.dirname(__file__))
|
||||
app_cli = CMDBApp()
|
||||
|
||||
|
||||
def parse_plugin_script(script):
|
||||
attributes = []
|
||||
try:
|
||||
x = compile(script, '', "exec")
|
||||
local_ns = {}
|
||||
exec(x, {}, local_ns)
|
||||
unique_key = local_ns['AutoDiscovery']().unique_key
|
||||
attrs = local_ns['AutoDiscovery']().attributes() or []
|
||||
exec(x)
|
||||
unique_key = locals()['AutoDiscovery']().unique_key
|
||||
attrs = locals()['AutoDiscovery']().attributes() or []
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -113,30 +96,14 @@ class AutoDiscoveryRuleCRUD(DBMixin):
|
||||
else:
|
||||
self.cls.create(**rule)
|
||||
|
||||
def _can_add(self, valid=True, **kwargs):
|
||||
def _can_add(self, **kwargs):
|
||||
self.cls.get_by(name=kwargs['name']) and abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script') and valid:
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
acl = ACLManager(app_cli.app_name)
|
||||
has_perm = True
|
||||
try:
|
||||
if not acl.has_permission(app_cli.op.Auto_Discovery,
|
||||
app_cli.resource_type_name,
|
||||
app_cli.op.create_plugin) and not is_app_admin(app_cli.app_name):
|
||||
has_perm = False
|
||||
except Exception:
|
||||
if not is_app_admin(app_cli.app_name):
|
||||
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
|
||||
|
||||
if not has_perm:
|
||||
return abort(403, ErrFormat.no_permission.format(
|
||||
app_cli.op.Auto_Discovery, app_cli.op.create_plugin))
|
||||
|
||||
kwargs['owner'] = current_user.uid
|
||||
|
||||
return kwargs
|
||||
|
||||
def _can_update(self, valid=True, **kwargs):
|
||||
def _can_update(self, **kwargs):
|
||||
existed = self.cls.get_by_id(kwargs['_id']) or abort(
|
||||
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
|
||||
|
||||
@@ -148,22 +115,6 @@ class AutoDiscoveryRuleCRUD(DBMixin):
|
||||
if other and other.id != existed.id:
|
||||
return abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
|
||||
|
||||
if existed.is_plugin and valid:
|
||||
acl = ACLManager(app_cli.app_name)
|
||||
has_perm = True
|
||||
try:
|
||||
if not acl.has_permission(app_cli.op.Auto_Discovery,
|
||||
app_cli.resource_type_name,
|
||||
app_cli.op.update_plugin) and not is_app_admin(app_cli.app_name):
|
||||
has_perm = False
|
||||
except Exception:
|
||||
if not is_app_admin(app_cli.app_name):
|
||||
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
|
||||
|
||||
if not has_perm:
|
||||
return abort(403, ErrFormat.no_permission.format(
|
||||
app_cli.op.Auto_Discovery, app_cli.op.update_plugin))
|
||||
|
||||
return existed
|
||||
|
||||
def update(self, _id, **kwargs):
|
||||
@@ -171,44 +122,21 @@ class AutoDiscoveryRuleCRUD(DBMixin):
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
|
||||
for item in AutoDiscoveryCIType.get_by(adr_id=_id, to_dict=False):
|
||||
item.update(updated_at=datetime.datetime.now())
|
||||
|
||||
return super(AutoDiscoveryRuleCRUD, self).update(_id, filter_none=False, **kwargs)
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
if AutoDiscoveryCIType.get_by(adr_id=kwargs['_id'], first=True):
|
||||
return abort(400, ErrFormat.adr_referenced)
|
||||
|
||||
existed = self.cls.get_by_id(kwargs['_id']) or abort(
|
||||
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
|
||||
|
||||
if existed.is_plugin:
|
||||
acl = ACLManager(app_cli.app_name)
|
||||
has_perm = True
|
||||
try:
|
||||
if not acl.has_permission(app_cli.op.Auto_Discovery,
|
||||
app_cli.resource_type_name,
|
||||
app_cli.op.delete_plugin) and not is_app_admin(app_cli.app_name):
|
||||
has_perm = False
|
||||
except Exception:
|
||||
if not is_app_admin(app_cli.app_name):
|
||||
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
|
||||
|
||||
if not has_perm:
|
||||
return abort(403, ErrFormat.no_permission.format(
|
||||
app_cli.op.Auto_Discovery, app_cli.op.delete_plugin))
|
||||
|
||||
return existed
|
||||
return self._can_update(**kwargs)
|
||||
|
||||
|
||||
class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
cls = AutoDiscoveryCIType
|
||||
|
||||
@classmethod
|
||||
def get_all(cls, type_ids=None):
|
||||
res = cls.cls.get_by(to_dict=False)
|
||||
return [i for i in res if type_ids is None or i.type_id in type_ids]
|
||||
def get_all(cls):
|
||||
return cls.cls.get_by(to_dict=False)
|
||||
|
||||
@classmethod
|
||||
def get_by_id(cls, _id):
|
||||
@@ -219,59 +147,25 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
return cls.cls.get_by(type_id=type_id, to_dict=False)
|
||||
|
||||
@classmethod
|
||||
def get_ad_attributes(cls, type_id):
|
||||
result = []
|
||||
adts = cls.get_by_type_id(type_id)
|
||||
for adt in adts:
|
||||
adr = AutoDiscoveryRuleCRUD.get_by_id(adt.adr_id)
|
||||
if not adr:
|
||||
continue
|
||||
if adr.type == AutoDiscoveryType.HTTP:
|
||||
for i in DEFAULT_INNER:
|
||||
if adr.name == i['name']:
|
||||
attrs = AutoDiscoveryHTTPManager.get_attributes(
|
||||
i['en'], (adt.extra_option or {}).get('category')) or []
|
||||
result.extend([i.get('name') for i in attrs])
|
||||
break
|
||||
elif adr.type == AutoDiscoveryType.SNMP:
|
||||
attributes = AutoDiscoverySNMPManager.get_attributes()
|
||||
result.extend([i.get('name') for i in (attributes or [])])
|
||||
else:
|
||||
result.extend([i.get('name') for i in (adr.attributes or [])])
|
||||
|
||||
return sorted(list(set(result)))
|
||||
|
||||
@classmethod
|
||||
def get(cls, ci_id, oneagent_id, oneagent_name, last_update_at=None):
|
||||
"""
|
||||
OneAgent sync rules
|
||||
:param ci_id:
|
||||
:param oneagent_id:
|
||||
:param oneagent_name:
|
||||
:param last_update_at:
|
||||
:return:
|
||||
"""
|
||||
def get(cls, ci_id, oneagent_id, last_update_at=None):
|
||||
result = []
|
||||
rules = cls.cls.get_by(to_dict=True)
|
||||
|
||||
for rule in rules:
|
||||
if not rule['enabled']:
|
||||
if rule.get('relation'):
|
||||
continue
|
||||
|
||||
if isinstance(rule.get("extra_option"), dict):
|
||||
decrypt_account(rule['extra_option'], rule['uid'])
|
||||
|
||||
if rule['extra_option'].get('_reference'):
|
||||
rule['extra_option'].pop('password', None)
|
||||
if isinstance(rule.get("extra_option"), dict) and rule['extra_option'].get('secret'):
|
||||
if not (g.user.username == "cmdb_agent" or g.user.uid == rule['uid']):
|
||||
rule['extra_option'].pop('secret', None)
|
||||
rule['extra_option'].update(
|
||||
AutoDiscoveryAccountCRUD().get_config_by_id(rule['extra_option']['_reference']))
|
||||
else:
|
||||
rule['extra_option']['secret'] = AESCrypto.decrypt(rule['extra_option']['secret'])
|
||||
|
||||
if oneagent_id and rule['agent_id'] == oneagent_id:
|
||||
result.append(rule)
|
||||
elif rule['query_expr']:
|
||||
query = rule['query_expr'].lstrip('q').lstrip('=')
|
||||
s = ci_search(query, fl=['_id'], count=1000000)
|
||||
s = search(query, fl=['_id'], count=1000000)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
@@ -282,32 +176,25 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
result.append(rule)
|
||||
break
|
||||
elif not rule['agent_id'] and not rule['query_expr'] and rule['adr_id']:
|
||||
try:
|
||||
if not int(oneagent_id, 16): # excludes master
|
||||
continue
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
adr = AutoDiscoveryRuleCRUD.get_by_id(rule['adr_id'])
|
||||
if not adr:
|
||||
continue
|
||||
if adr.type in (AutoDiscoveryType.SNMP, AutoDiscoveryType.HTTP):
|
||||
continue
|
||||
|
||||
if not rule['updated_at']:
|
||||
continue
|
||||
|
||||
result.append(rule)
|
||||
|
||||
ad_rules_updated_at = (SystemConfigManager.get('ad_rules_updated_at') or {}).get('option', {}).get('v') or ""
|
||||
new_last_update_at = ""
|
||||
for i in result:
|
||||
i['adr'] = AutoDiscoveryRule.get_by_id(i['adr_id']).to_dict()
|
||||
i['adr'].pop("attributes", None)
|
||||
__last_update_at = max([i['updated_at'] or "", i['created_at'] or "",
|
||||
i['adr']['created_at'] or "", i['adr']['updated_at'] or "", ad_rules_updated_at])
|
||||
i['adr']['created_at'] or "", i['adr']['updated_at'] or ""])
|
||||
if new_last_update_at < __last_update_at:
|
||||
new_last_update_at = __last_update_at
|
||||
|
||||
write_ad_rule_sync_history.apply_async(args=(result, oneagent_id, oneagent_name, datetime.datetime.now()),
|
||||
queue=CMDB_QUEUE)
|
||||
if not last_update_at or new_last_update_at > last_update_at:
|
||||
return result, new_last_update_at
|
||||
else:
|
||||
@@ -326,7 +213,7 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
agent_id = agent_id.strip()
|
||||
q = "op_duty:{0},-rd_duty:{0},oneagent_id:{1}"
|
||||
|
||||
s = ci_search(q.format(current_user.username, agent_id.strip()))
|
||||
s = search(q.format(g.user.username, agent_id.strip()))
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
if response:
|
||||
@@ -335,7 +222,7 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
current_app.logger.warning(e)
|
||||
return abort(400, str(e))
|
||||
|
||||
s = ci_search(q.format(current_user.nickname, agent_id.strip()))
|
||||
s = search(q.format(g.user.nickname, agent_id.strip()))
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
if response:
|
||||
@@ -349,59 +236,41 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
if query_expr.startswith('q='):
|
||||
query_expr = query_expr[2:]
|
||||
|
||||
s = ci_search(query_expr, count=1000000)
|
||||
s = search(query_expr, count=1000000)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
for i in response:
|
||||
if (current_user.username not in (i.get('rd_duty') or []) and
|
||||
current_user.username not in (i.get('op_duty') or []) and
|
||||
current_user.nickname not in (i.get('rd_duty') or []) and
|
||||
current_user.nickname not in (i.get('op_duty') or [])):
|
||||
if g.user.username not in (i.get('rd_duty') or []) and g.user.username not in \
|
||||
(i.get('op_duty') or []) and g.user.nickname not in (i.get('rd_duty') or []) and \
|
||||
g.user.nickname not in (i.get('op_duty') or []):
|
||||
return abort(403, ErrFormat.adt_target_expr_no_permission.format(
|
||||
i.get("{}_name".format(i.get('ci_type')))))
|
||||
except SearchError as e:
|
||||
current_app.logger.warning(e)
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def _can_add(**kwargs):
|
||||
def _can_add(self, **kwargs):
|
||||
self.cls.get_by(type_id=kwargs['type_id'], adr_id=kwargs.get('adr_id') or None) and abort(
|
||||
400, ErrFormat.ad_duplicate)
|
||||
|
||||
# self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
|
||||
|
||||
if kwargs.get('adr_id'):
|
||||
adr = AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
|
||||
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['adr_id'])))
|
||||
if adr.type == AutoDiscoveryType.HTTP:
|
||||
kwargs.setdefault('extra_option', dict())
|
||||
en_name = None
|
||||
for i in DEFAULT_INNER:
|
||||
if i['name'] == adr.name:
|
||||
en_name = i['en']
|
||||
break
|
||||
if en_name and kwargs['extra_option'].get('category'):
|
||||
for item in CLOUD_MAP[en_name]:
|
||||
if item["collect_key_map"].get(kwargs['extra_option']['category']):
|
||||
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
|
||||
kwargs['extra_option']['category']]
|
||||
kwargs["extra_option"]["provider"] = en_name
|
||||
break
|
||||
|
||||
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
|
||||
for i in DEFAULT_INNER:
|
||||
if i['name'] == adr.name:
|
||||
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
|
||||
break
|
||||
if not adr.is_plugin:
|
||||
other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
|
||||
if other:
|
||||
ci_type = CITypeCache.get(other.type_id)
|
||||
return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))
|
||||
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
|
||||
encrypt_account(kwargs.get('extra_option'))
|
||||
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
|
||||
kwargs['extra_option']['secret'] = AESCrypto.encrypt(kwargs['extra_option']['secret'])
|
||||
|
||||
ci_type = CITypeCache.get(kwargs['type_id'])
|
||||
unique = AttributeCache.get(ci_type.unique_id)
|
||||
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
|
||||
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
|
||||
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
|
||||
|
||||
kwargs['uid'] = current_user.uid
|
||||
kwargs['uid'] = g.user.uid
|
||||
|
||||
return kwargs
|
||||
|
||||
@@ -409,43 +278,10 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
existed = self.cls.get_by_id(kwargs['_id']) or abort(
|
||||
404, ErrFormat.ad_not_found.format("id={}".format(kwargs['_id'])))
|
||||
|
||||
adr = AutoDiscoveryRule.get_by_id(existed.adr_id) or abort(
|
||||
404, ErrFormat.adr_not_found.format("id={}".format(existed.adr_id)))
|
||||
if adr.type == AutoDiscoveryType.HTTP:
|
||||
kwargs.setdefault('extra_option', dict())
|
||||
en_name = None
|
||||
for i in DEFAULT_INNER:
|
||||
if i['name'] == adr.name:
|
||||
en_name = i['en']
|
||||
break
|
||||
if en_name and kwargs['extra_option'].get('category'):
|
||||
for item in CLOUD_MAP[en_name]:
|
||||
if item["collect_key_map"].get(kwargs['extra_option']['category']):
|
||||
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
|
||||
kwargs['extra_option']['category']]
|
||||
kwargs["extra_option"]["provider"] = en_name
|
||||
break
|
||||
|
||||
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
|
||||
for i in DEFAULT_INNER:
|
||||
if i['name'] == adr.name:
|
||||
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
|
||||
break
|
||||
|
||||
if 'attributes' in kwargs:
|
||||
self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
|
||||
|
||||
ci_type = CITypeCache.get(existed.type_id)
|
||||
unique = AttributeCache.get(ci_type.unique_id)
|
||||
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
|
||||
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
|
||||
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
|
||||
self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
|
||||
|
||||
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
|
||||
if current_user.uid != existed.uid:
|
||||
return abort(403, ErrFormat.adt_secret_no_permission)
|
||||
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('password'):
|
||||
if current_user.uid != existed.uid:
|
||||
if g.user.uid != existed.uid:
|
||||
return abort(403, ErrFormat.adt_secret_no_permission)
|
||||
|
||||
return existed
|
||||
@@ -455,22 +291,10 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
|
||||
encrypt_account(kwargs.get('extra_option'))
|
||||
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
|
||||
kwargs['extra_option']['secret'] = AESCrypto.encrypt(kwargs['extra_option']['secret'])
|
||||
|
||||
inst = self._can_update(_id=_id, **kwargs)
|
||||
if len(kwargs) == 1 and 'enabled' in kwargs: # enable or disable
|
||||
pass
|
||||
elif inst.agent_id != kwargs.get('agent_id') or inst.query_expr != kwargs.get('query_expr'):
|
||||
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
|
||||
item.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
SystemConfigManager.create_or_update("ad_rules_updated_at",
|
||||
dict(v=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
|
||||
|
||||
obj = inst.update(_id=_id, filter_none=False, **kwargs)
|
||||
|
||||
return obj
|
||||
return super(AutoDiscoveryCITypeCRUD, self).update(_id, filter_none=False, **kwargs)
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
if AutoDiscoveryCICRUD.get_by_adt_id(kwargs['_id']):
|
||||
@@ -481,61 +305,6 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
|
||||
|
||||
return existed
|
||||
|
||||
def delete(self, _id):
|
||||
inst = self._can_delete(_id=_id)
|
||||
|
||||
inst.soft_delete()
|
||||
|
||||
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
|
||||
item.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
attributes = self.get_ad_attributes(inst.type_id)
|
||||
for item in AutoDiscoveryCITypeRelationCRUD.get_by_type_id(inst.type_id):
|
||||
if item.ad_key not in attributes:
|
||||
item.soft_delete()
|
||||
|
||||
return inst
|
||||
|
||||
|
||||
class AutoDiscoveryCITypeRelationCRUD(DBMixin):
|
||||
cls = AutoDiscoveryCITypeRelation
|
||||
|
||||
@classmethod
|
||||
def get_all(cls, type_ids=None):
|
||||
res = cls.cls.get_by(to_dict=False)
|
||||
return [i for i in res if type_ids is None or i.ad_type_id in type_ids]
|
||||
|
||||
@classmethod
|
||||
def get_by_type_id(cls, type_id, to_dict=False):
|
||||
return cls.cls.get_by(ad_type_id=type_id, to_dict=to_dict)
|
||||
|
||||
def upsert(self, ad_type_id, relations):
|
||||
existed = self.cls.get_by(ad_type_id=ad_type_id, to_dict=False)
|
||||
existed = {(i.ad_key, i.peer_type_id, i.peer_attr_id): i for i in existed}
|
||||
|
||||
new = []
|
||||
for r in relations:
|
||||
k = (r.get('ad_key'), r.get('peer_type_id'), r.get('peer_attr_id'))
|
||||
if len(list(filter(lambda x: x, k))) == 3 and k not in existed:
|
||||
self.cls.create(ad_type_id=ad_type_id, **r)
|
||||
|
||||
new.append(k)
|
||||
|
||||
for deleted in set(existed.keys()) - set(new):
|
||||
existed[deleted].soft_delete()
|
||||
|
||||
return self.get_by_type_id(ad_type_id, to_dict=True)
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
class AutoDiscoveryCICRUD(DBMixin):
|
||||
cls = AutoDiscoveryCI
|
||||
@@ -563,14 +332,15 @@ class AutoDiscoveryCICRUD(DBMixin):
|
||||
|
||||
@staticmethod
|
||||
def get_attributes_by_type_id(type_id):
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeManager
|
||||
attributes = [i for i in CITypeAttributeManager.get_attributes_by_type_id(type_id) or []]
|
||||
from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
attributes = [i[1] for i in CITypeAttributesCache.get2(type_id) or []]
|
||||
|
||||
attr_names = set()
|
||||
adts = AutoDiscoveryCITypeCRUD.get_by_type_id(type_id)
|
||||
for adt in adts:
|
||||
attr_names |= set((adt.attributes or {}).values())
|
||||
return [attr for attr in attributes if attr['name'] in attr_names]
|
||||
|
||||
return [attr.to_dict() for attr in attributes if attr.name in attr_names]
|
||||
|
||||
@classmethod
|
||||
def search(cls, page, page_size, fl=None, **kwargs):
|
||||
@@ -623,24 +393,16 @@ class AutoDiscoveryCICRUD(DBMixin):
|
||||
changed = False
|
||||
if existed is not None:
|
||||
if existed.instance != kwargs['instance']:
|
||||
instance = copy.deepcopy(existed.instance) or {}
|
||||
instance.update(kwargs['instance'])
|
||||
kwargs['instance'] = instance
|
||||
existed.update(filter_none=False, **kwargs)
|
||||
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
|
||||
stdout="update resource: {}".format(kwargs.get('unique_value')))
|
||||
changed = True
|
||||
else:
|
||||
existed = self.cls.create(**kwargs)
|
||||
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
|
||||
stdout="add resource: {}".format(kwargs.get('unique_value')))
|
||||
changed = True
|
||||
|
||||
if adt.auto_accept and changed:
|
||||
try:
|
||||
self.accept(existed)
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
return abort(400, str(e))
|
||||
elif changed:
|
||||
existed.update(is_accept=False, accept_time=None, accept_by=None, filter_none=False)
|
||||
@@ -660,13 +422,6 @@ class AutoDiscoveryCICRUD(DBMixin):
|
||||
|
||||
inst.delete()
|
||||
|
||||
adt = AutoDiscoveryCIType.get_by_id(inst.adt_id)
|
||||
if adt:
|
||||
adt.update(updated_at=datetime.datetime.now())
|
||||
|
||||
AutoDiscoveryExecHistoryCRUD().add(type_id=inst.type_id,
|
||||
stdout="delete resource: {}".format(inst.unique_value))
|
||||
|
||||
self._after_delete(inst)
|
||||
|
||||
return inst
|
||||
@@ -682,13 +437,6 @@ class AutoDiscoveryCICRUD(DBMixin):
|
||||
not is_app_admin("cmdb") and validate_permission(ci_type.name, ResourceTypeEnum.CI, PermEnum.DELETE, "cmdb")
|
||||
|
||||
existed.delete()
|
||||
|
||||
adt = AutoDiscoveryCIType.get_by_id(existed.adt_id)
|
||||
if adt:
|
||||
adt.update(updated_at=datetime.datetime.now())
|
||||
|
||||
AutoDiscoveryExecHistoryCRUD().add(type_id=type_id,
|
||||
stdout="delete resource: {}".format(unique_value))
|
||||
# TODO: delete ci
|
||||
|
||||
@classmethod
|
||||
@@ -699,103 +447,52 @@ class AutoDiscoveryCICRUD(DBMixin):
|
||||
adt = AutoDiscoveryCITypeCRUD.get_by_id(adc.adt_id) or abort(404, ErrFormat.adt_not_found)
|
||||
|
||||
ci_id = None
|
||||
if adt.attributes:
|
||||
ci_dict = {adt.attributes[k]: v for k, v in adc.instance.items() if k in adt.attributes}
|
||||
ci_id = CIManager.add(adc.type_id, is_auto_discovery=True, **ci_dict)
|
||||
|
||||
ad_key2attr = adt.attributes or {}
|
||||
if ad_key2attr:
|
||||
ci_dict = {ad_key2attr[k]: None if not v and isinstance(v, (list, dict)) else v
|
||||
for k, v in adc.instance.items() if k in ad_key2attr}
|
||||
extra_option = adt.extra_option or {}
|
||||
mapping, path_mapping = AutoDiscoveryHTTPManager.get_predefined_value_mapping(
|
||||
extra_option.get('provider'), extra_option.get('category'))
|
||||
if mapping:
|
||||
ci_dict = {k: (mapping.get(k) or {}).get(str(v), v) for k, v in ci_dict.items()}
|
||||
if path_mapping:
|
||||
ci_dict = {k: jsonpath.jsonpath(v, path_mapping[k]) if k in path_mapping else v
|
||||
for k, v in ci_dict.items()}
|
||||
ci_id = CIManager.add(adc.type_id, is_auto_discovery=True, _is_admin=True, **ci_dict)
|
||||
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
|
||||
stdout="accept resource: {}".format(adc.unique_value))
|
||||
relation_adts = AutoDiscoveryCIType.get_by(type_id=adt.type_id, adr_id=None, to_dict=False)
|
||||
for r_adt in relation_adts:
|
||||
if r_adt.relation and ci_id is not None:
|
||||
ad_key, cmdb_key = None, {}
|
||||
for ad_key in r_adt.relation:
|
||||
cmdb_key = r_adt.relation[ad_key]
|
||||
query = "_type:{},{}:{}".format(cmdb_key.get('type_name'), cmdb_key.get('attr_name'),
|
||||
adc.instance.get(ad_key))
|
||||
s = search(query)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.warning(e)
|
||||
return abort(400, str(e))
|
||||
|
||||
build_relations_for_ad_accept.apply_async(args=(adc.to_dict(), ci_id, ad_key2attr), queue=CMDB_QUEUE)
|
||||
relation_ci_id = response and response[0]['_id']
|
||||
if relation_ci_id:
|
||||
try:
|
||||
CIRelationManager.add(ci_id, relation_ci_id)
|
||||
except:
|
||||
try:
|
||||
CIRelationManager.add(relation_ci_id, ci_id)
|
||||
except:
|
||||
pass
|
||||
|
||||
adc.update(is_accept=True,
|
||||
accept_by=nickname or current_user.nickname,
|
||||
accept_time=datetime.datetime.now(),
|
||||
ci_id=ci_id)
|
||||
adc.update(is_accept=True, accept_by=nickname or g.user.nickname, accept_time=datetime.datetime.now())
|
||||
|
||||
|
||||
class AutoDiscoveryHTTPManager(object):
|
||||
@staticmethod
|
||||
def get_categories(name):
|
||||
categories = (CLOUD_MAP.get(name) or {}) or []
|
||||
for item in copy.deepcopy(categories):
|
||||
item.pop('map', None)
|
||||
item.pop('collect_key_map', None)
|
||||
return (ClOUD_MAP.get(name) or {}).get('categories') or []
|
||||
|
||||
return categories
|
||||
|
||||
def get_resources(self, name):
|
||||
en_name = None
|
||||
for i in DEFAULT_INNER:
|
||||
if i['name'] == name:
|
||||
en_name = i['en']
|
||||
break
|
||||
|
||||
if en_name:
|
||||
categories = self.get_categories(en_name)
|
||||
|
||||
return [j for i in categories for j in i['items']]
|
||||
@staticmethod
|
||||
def get_attributes(name, category):
|
||||
tpt = ((ClOUD_MAP.get(name) or {}).get('map') or {}).get(category)
|
||||
if tpt and os.path.exists(os.path.join(PWD, tpt)):
|
||||
with open(os.path.join(PWD, tpt)) as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def get_attributes(provider, resource):
|
||||
for item in (CLOUD_MAP.get(provider) or {}):
|
||||
for _resource in (item.get('map') or {}):
|
||||
if _resource == resource:
|
||||
tpt = item['map'][_resource]
|
||||
if isinstance(tpt, dict):
|
||||
tpt = tpt.get('template')
|
||||
if tpt and os.path.exists(os.path.join(PWD, tpt)):
|
||||
with open(os.path.join(PWD, tpt)) as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def get_mapping(provider, resource):
|
||||
for item in (CLOUD_MAP.get(provider) or {}):
|
||||
for _resource in (item.get('map') or {}):
|
||||
if _resource == resource:
|
||||
mapping = item['map'][_resource]
|
||||
if not isinstance(mapping, dict):
|
||||
return {}
|
||||
name = mapping.get('mapping')
|
||||
mapping = AutoDiscoveryMappingCache.get(name)
|
||||
if isinstance(mapping, dict):
|
||||
return {mapping[key][provider]['key'].split('.')[0]: key for key in mapping if
|
||||
(mapping[key].get(provider) or {}).get('key')}
|
||||
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def get_predefined_value_mapping(provider, resource):
|
||||
for item in (CLOUD_MAP.get(provider) or {}):
|
||||
for _resource in (item.get('map') or {}):
|
||||
if _resource == resource:
|
||||
mapping = item['map'][_resource]
|
||||
if not isinstance(mapping, dict):
|
||||
return {}, {}
|
||||
name = mapping.get('mapping')
|
||||
mapping = AutoDiscoveryMappingCache.get(name)
|
||||
if isinstance(mapping, dict):
|
||||
return ({key: mapping[key][provider].get('map') for key in mapping if
|
||||
mapping[key].get(provider, {}).get('map')},
|
||||
{key: mapping[key][provider]['key'].split('.', 1)[1] for key in mapping if
|
||||
((mapping[key].get(provider) or {}).get('key') or '').split('.')[1:]})
|
||||
|
||||
return {}, {}
|
||||
|
||||
|
||||
class AutoDiscoverySNMPManager(object):
|
||||
|
||||
@@ -806,191 +503,3 @@ class AutoDiscoverySNMPManager(object):
|
||||
return json.loads(f.read())
|
||||
|
||||
return []
|
||||
|
||||
|
||||
class AutoDiscoveryComponentsManager(object):
|
||||
|
||||
@staticmethod
|
||||
def get_attributes(name):
|
||||
if os.path.exists(os.path.join(PWD, "templates/{}.json".format(name))):
|
||||
with open(os.path.join(PWD, "templates/{}.json".format(name))) as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
return []
|
||||
|
||||
|
||||
class AutoDiscoveryRuleSyncHistoryCRUD(DBMixin):
|
||||
cls = AutoDiscoveryRuleSyncHistory
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
pass
|
||||
|
||||
def upsert(self, **kwargs):
|
||||
existed = self.cls.get_by(adt_id=kwargs.get('adt_id'),
|
||||
oneagent_id=kwargs.get('oneagent_id'),
|
||||
oneagent_name=kwargs.get('oneagent_name'),
|
||||
first=True,
|
||||
to_dict=False)
|
||||
|
||||
if existed is not None:
|
||||
existed.update(**kwargs)
|
||||
else:
|
||||
self.cls.create(**kwargs)
|
||||
|
||||
|
||||
class AutoDiscoveryExecHistoryCRUD(DBMixin):
|
||||
cls = AutoDiscoveryExecHistory
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
class AutoDiscoveryCounterCRUD(DBMixin):
|
||||
cls = AutoDiscoveryCounter
|
||||
|
||||
def get(self, type_id):
|
||||
res = self.cls.get_by(type_id=type_id, first=True, to_dict=True)
|
||||
if res is None:
|
||||
return dict(rule_count=0, exec_target_count=0, instance_count=0, accept_count=0,
|
||||
this_month_count=0, this_week_count=0, last_month_count=0, last_week_count=0)
|
||||
|
||||
return res
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
pass
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
def encrypt_account(config):
|
||||
if isinstance(config, dict):
|
||||
if config.get('secret'):
|
||||
config['secret'] = AESCrypto.encrypt(config['secret'])
|
||||
if config.get('password'):
|
||||
config['password'] = AESCrypto.encrypt(config['password'])
|
||||
|
||||
|
||||
def decrypt_account(config, uid):
|
||||
if isinstance(config, dict):
|
||||
if config.get('password'):
|
||||
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
|
||||
config.pop('password', None)
|
||||
else:
|
||||
try:
|
||||
config['password'] = AESCrypto.decrypt(config['password'])
|
||||
except Exception as e:
|
||||
current_app.logger.error('decrypt account failed: {}'.format(e))
|
||||
|
||||
if config.get('secret'):
|
||||
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
|
||||
config.pop('secret', None)
|
||||
else:
|
||||
try:
|
||||
config['secret'] = AESCrypto.decrypt(config['secret'])
|
||||
except Exception as e:
|
||||
current_app.logger.error('decrypt account failed: {}'.format(e))
|
||||
|
||||
|
||||
class AutoDiscoveryAccountCRUD(DBMixin):
|
||||
cls = AutoDiscoveryAccount
|
||||
|
||||
def get(self, adr_id):
|
||||
res = self.cls.get_by(adr_id=adr_id, to_dict=True)
|
||||
|
||||
for i in res:
|
||||
decrypt_account(i.get('config'), i['uid'])
|
||||
|
||||
return res
|
||||
|
||||
def get_config_by_id(self, _id):
|
||||
res = self.cls.get_by_id(_id)
|
||||
if not res:
|
||||
return {}
|
||||
|
||||
config = res.to_dict().get('config') or {}
|
||||
|
||||
decrypt_account(config, res.uid)
|
||||
|
||||
return config
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
encrypt_account(kwargs.get('config'))
|
||||
|
||||
kwargs['uid'] = current_user.uid
|
||||
|
||||
return kwargs
|
||||
|
||||
def upsert(self, adr_id, accounts):
|
||||
existed_all = self.cls.get_by(adr_id=adr_id, to_dict=False)
|
||||
account_names = {i['name'] for i in accounts}
|
||||
|
||||
name_changed = dict()
|
||||
for account in accounts:
|
||||
existed = None
|
||||
if account.get('id'):
|
||||
existed = self.cls.get_by_id(account.get('id'))
|
||||
if existed is None:
|
||||
continue
|
||||
|
||||
account.pop('id')
|
||||
name_changed[existed.name] = account.get('name')
|
||||
else:
|
||||
account = self._can_add(**account)
|
||||
|
||||
if existed is not None:
|
||||
if current_user.uid == existed.uid:
|
||||
config = copy.deepcopy(existed.config) or {}
|
||||
config.update(account.get('config') or {})
|
||||
account['config'] = config
|
||||
existed.update(**account)
|
||||
else:
|
||||
self.cls.create(adr_id=adr_id, **account)
|
||||
|
||||
for item in existed_all:
|
||||
if name_changed.get(item.name, item.name) not in account_names:
|
||||
if current_user.uid == item.uid:
|
||||
item.soft_delete()
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
existed = self.cls.get_by_id(kwargs['_id']) or abort(404, ErrFormat.not_found)
|
||||
|
||||
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('secret'):
|
||||
if current_user.uid != existed.uid:
|
||||
return abort(403, ErrFormat.adt_secret_no_permission)
|
||||
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('password'):
|
||||
if current_user.uid != existed.uid:
|
||||
return abort(403, ErrFormat.adt_secret_no_permission)
|
||||
|
||||
return existed
|
||||
|
||||
def update(self, _id, **kwargs):
|
||||
|
||||
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
|
||||
kwargs = check_plugin_script(**kwargs)
|
||||
|
||||
encrypt_account(kwargs.get('config'))
|
||||
|
||||
inst = self._can_update(_id=_id, **kwargs)
|
||||
|
||||
obj = inst.update(_id=_id, filter_none=False, **kwargs)
|
||||
|
||||
return obj
|
||||
|
||||
def _can_delete(self, **kwargs):
|
||||
pass
|
||||
|
||||
@@ -2,38 +2,15 @@
|
||||
|
||||
from api.lib.cmdb.const import AutoDiscoveryType
|
||||
|
||||
PRIVILEGED_USERS = ("cmdb_agent", "worker", "admin")
|
||||
|
||||
DEFAULT_INNER = [
|
||||
dict(name="阿里云", en="aliyun", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-aliyun'}, "en": "aliyun"}),
|
||||
dict(name="腾讯云", en="tencentcloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-tengxunyun'}, "en": "tencentcloud"}),
|
||||
dict(name="华为云", en="huaweicloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-huaweiyun'}, "en": "huaweicloud"}),
|
||||
dict(name="AWS", en="aws", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-aws'}, "en": "aws"}),
|
||||
|
||||
dict(name="VCenter", en="vcenter", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'cmdb-vcenter'}, "category": "private_cloud", "en": "vcenter"}),
|
||||
dict(name="KVM", en="kvm", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'ops-KVM'}, "category": "private_cloud", "en": "kvm"}),
|
||||
|
||||
|
||||
dict(name="Nginx", en="nginx", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-nginx'}, "en": "nginx", "collect_key": "nginx"}),
|
||||
dict(name="Apache", en="apache", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-apache'}, "en": "apache", "collect_key": "apache"}),
|
||||
dict(name="Tomcat", en="tomcat", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-tomcat'}, "en": "tomcat", "collect_key": "tomcat"}),
|
||||
dict(name="MySQL", en="mysql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-mySQL'}, "en": "mysql", "collect_key": "mysql"}),
|
||||
dict(name="MSSQL", en="mssql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-SQLServer'}, "en": "mssql", "collect_key": "sqlserver"}),
|
||||
dict(name="Oracle", en="oracle", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-oracle'}, "en": "oracle", "collect_key": "oracle"}),
|
||||
dict(name="Redis", en="redis", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-redis'}, "en": "redis", "collect_key": "redis"}),
|
||||
DEFAULT_HTTP = [
|
||||
dict(name="阿里云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-aliyun'}}),
|
||||
dict(name="腾讯云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-tengxunyun'}}),
|
||||
dict(name="华为云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-huaweiyun'}}),
|
||||
dict(name="AWS", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-aws'}}),
|
||||
|
||||
dict(name="交换机", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
|
||||
option={'icon': {'name': 'caise-jiaohuanji'}}),
|
||||
@@ -45,307 +22,32 @@ DEFAULT_INNER = [
|
||||
option={'icon': {'name': 'caise-dayinji'}}),
|
||||
]
|
||||
|
||||
CLOUD_MAP = {
|
||||
"aliyun": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": ["云服务器 ECS", "云服务器 Disk"],
|
||||
"map": {
|
||||
"云服务器 ECS": {"template": "templates/aliyun_ecs.json", "mapping": "ecs"},
|
||||
"云服务器 Disk": {"template": "templates/aliyun_ecs_disk.json", "mapping": "evs"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云服务器 ECS": "ali.ecs",
|
||||
"云服务器 Disk": "ali.ecs_disk",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "网络与CDN",
|
||||
"items": [
|
||||
"内容分发CDN",
|
||||
"负载均衡SLB",
|
||||
"专有网络VPC",
|
||||
"交换机Switch",
|
||||
],
|
||||
"map": {
|
||||
"内容分发CDN": {"template": "templates/aliyun_cdn.json", "mapping": "CDN"},
|
||||
"负载均衡SLB": {"template": "templates/aliyun_slb.json", "mapping": "loadbalancer"},
|
||||
"专有网络VPC": {"template": "templates/aliyun_vpc.json", "mapping": "vpc"},
|
||||
"交换机Switch": {"template": "templates/aliyun_switch.json", "mapping": "vswitch"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"内容分发CDN": "ali.cdn",
|
||||
"负载均衡SLB": "ali.slb",
|
||||
"专有网络VPC": "ali.vpc",
|
||||
"交换机Switch": "ali.switch",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "存储",
|
||||
"items": ["块存储EBS", "对象存储OSS"],
|
||||
"map": {
|
||||
"块存储EBS": {"template": "templates/aliyun_ebs.json", "mapping": "evs"},
|
||||
"对象存储OSS": {"template": "templates/aliyun_oss.json", "mapping": "objectStorage"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"块存储EBS": "ali.ebs",
|
||||
"对象存储OSS": "ali.oss",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "数据库",
|
||||
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL", "云数据库 Redis"],
|
||||
"map": {
|
||||
"云数据库RDS MySQL": {"template": "templates/aliyun_rds_mysql.json", "mapping": "mysql"},
|
||||
"云数据库RDS PostgreSQL": {"template": "templates/aliyun_rds_postgre.json", "mapping": "postgresql"},
|
||||
"云数据库 Redis": {"template": "templates/aliyun_redis.json", "mapping": "redis"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云数据库RDS MySQL": "ali.rds_mysql",
|
||||
"云数据库RDS PostgreSQL": "ali.rds_postgre",
|
||||
"云数据库 Redis": "ali.redis",
|
||||
},
|
||||
},
|
||||
],
|
||||
"tencentcloud": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": ["云服务器 CVM"],
|
||||
"map": {
|
||||
"云服务器 CVM": {"template": "templates/tencent_cvm.json", "mapping": "ecs"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云服务器 CVM": "tencent.cvm",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "CDN与边缘",
|
||||
"items": ["内容分发CDN"],
|
||||
"map": {
|
||||
"内容分发CDN": {"template": "templates/tencent_cdn.json", "mapping": "CDN"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"内容分发CDN": "tencent.cdn",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "网络",
|
||||
"items": ["负载均衡CLB", "私有网络VPC", "子网"],
|
||||
"map": {
|
||||
"负载均衡CLB": {"template": "templates/tencent_clb.json", "mapping": "loadbalancer"},
|
||||
"私有网络VPC": {"template": "templates/tencent_vpc.json", "mapping": "vpc"},
|
||||
"子网": {"template": "templates/tencent_subnet.json", "mapping": "vswitch"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"负载均衡CLB": "tencent.clb",
|
||||
"私有网络VPC": "tencent.vpc",
|
||||
"子网": "tencent.subnet",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "存储",
|
||||
"items": ["云硬盘CBS", "对象存储COS"],
|
||||
"map": {
|
||||
"云硬盘CBS": {"template": "templates/tencent_cbs.json", "mapping": "evs"},
|
||||
"对象存储COS": {"template": "templates/tencent_cos.json", "mapping": "objectStorage"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云硬盘CBS": "tencent.cbs",
|
||||
"对象存储COS": "tencent.cos",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "数据库",
|
||||
"items": ["云数据库 MySQL", "云数据库 PostgreSQL", "云数据库 Redis"],
|
||||
"map": {
|
||||
"云数据库 MySQL": {"template": "templates/tencent_rdb.json", "mapping": "mysql"},
|
||||
"云数据库 PostgreSQL": {"template": "templates/tencent_postgres.json", "mapping": "postgresql"},
|
||||
"云数据库 Redis": {"template": "templates/tencent_redis.json", "mapping": "redis"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云数据库 MySQL": "tencent.rdb",
|
||||
"云数据库 PostgreSQL": "tencent.rds_postgres",
|
||||
"云数据库 Redis": "tencent.redis",
|
||||
},
|
||||
},
|
||||
],
|
||||
"huaweicloud": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": ["云服务器 ECS"],
|
||||
"map": {
|
||||
"云服务器 ECS": {"template": "templates/huaweicloud_ecs.json", "mapping": "ecs"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云服务器 ECS": "huawei.ecs",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "CDN与智能边缘",
|
||||
"items": ["内容分发网络CDN"],
|
||||
"map": {
|
||||
"内容分发网络CDN": {"template": "templates/huawei_cdn.json", "mapping": "CDN"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"内容分发网络CDN": "huawei.cdn",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "网络",
|
||||
"items": ["弹性负载均衡ELB", "虚拟私有云VPC", "子网"],
|
||||
"map": {
|
||||
"弹性负载均衡ELB": {"template": "templates/huawei_elb.json", "mapping": "loadbalancer"},
|
||||
"虚拟私有云VPC": {"template": "templates/huawei_vpc.json", "mapping": "vpc"},
|
||||
"子网": {"template": "templates/huawei_subnet.json", "mapping": "vswitch"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"弹性负载均衡ELB": "huawei.elb",
|
||||
"虚拟私有云VPC": "huawei.vpc",
|
||||
"子网": "huawei.subnet",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "存储",
|
||||
"items": ["云硬盘EVS", "对象存储OBS"],
|
||||
"map": {
|
||||
"云硬盘EVS": {"template": "templates/huawei_evs.json", "mapping": "evs"},
|
||||
"对象存储OBS": {"template": "templates/huawei_obs.json", "mapping": "objectStorage"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云硬盘EVS": "huawei.evs",
|
||||
"对象存储OBS": "huawei.obs",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "数据库",
|
||||
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL"],
|
||||
"map": {
|
||||
"云数据库RDS MySQL": {"template": "templates/huawei_rds_mysql.json", "mapping": "mysql"},
|
||||
"云数据库RDS PostgreSQL": {"template": "templates/huawei_rds_postgre.json", "mapping": "postgresql"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云数据库RDS MySQL": "huawei.rds_mysql",
|
||||
"云数据库RDS PostgreSQL": "huawei.rds_postgre",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "应用中间件",
|
||||
"items": ["分布式缓存Redis"],
|
||||
"map": {
|
||||
"分布式缓存Redis": {"template": "templates/huawei_dcs.json", "mapping": "redis"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"分布式缓存Redis": "huawei.dcs",
|
||||
},
|
||||
},
|
||||
],
|
||||
"aws": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": ["云服务器 EC2"],
|
||||
"map": {
|
||||
"云服务器 EC2": {"template": "templates/aws_ec2.json", "mapping": "ecs"},
|
||||
},
|
||||
"collect_key_map": {
|
||||
"云服务器 EC2": "aws.ec2",
|
||||
},
|
||||
},
|
||||
{"category": "网络与CDN", "items": [], "map": {}, "collect_key_map": {}},
|
||||
],
|
||||
"vcenter": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": [
|
||||
"主机",
|
||||
"虚拟机",
|
||||
"主机集群"
|
||||
],
|
||||
"map": {
|
||||
"主机": "templates/vsphere_host.json",
|
||||
"虚拟机": "templates/vsphere_vm.json",
|
||||
"主机集群": "templates/vsphere_cluster.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"主机": "vsphere.host",
|
||||
"虚拟机": "vsphere.vm",
|
||||
"主机集群": "vsphere.cluster",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "网络",
|
||||
"items": [
|
||||
"网络",
|
||||
"标准交换机",
|
||||
"分布式交换机",
|
||||
],
|
||||
"map": {
|
||||
"网络": "templates/vsphere_network.json",
|
||||
"标准交换机": "templates/vsphere_standard_switch.json",
|
||||
"分布式交换机": "templates/vsphere_distributed_switch.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"网络": "vsphere.network",
|
||||
"标准交换机": "vsphere.standard_switch",
|
||||
"分布式交换机": "vsphere.distributed_switch",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "存储",
|
||||
"items": ["数据存储", "数据存储集群"],
|
||||
"map": {
|
||||
"数据存储": "templates/vsphere_datastore.json",
|
||||
"数据存储集群": "templates/vsphere_storage_pod.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"数据存储": "vsphere.datastore",
|
||||
"数据存储集群": "vsphere.storage_pod",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "其他",
|
||||
"items": ["资源池", "数据中心", "文件夹"],
|
||||
"map": {
|
||||
"资源池": "templates/vsphere_pool.json",
|
||||
"数据中心": "templates/vsphere_datacenter.json",
|
||||
"文件夹": "templates/vsphere_folder.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"资源池": "vsphere.pool",
|
||||
"数据中心": "vsphere.datacenter",
|
||||
"文件夹": "vsphere.folder",
|
||||
},
|
||||
},
|
||||
],
|
||||
"kvm": [
|
||||
{
|
||||
"category": "计算",
|
||||
"items": ["虚拟机"],
|
||||
"map": {
|
||||
"虚拟机": "templates/kvm_vm.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"虚拟机": "kvm.vm",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "存储",
|
||||
"items": ["存储"],
|
||||
"map": {
|
||||
"存储": "templates/kvm_storage.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"存储": "kvm.storage",
|
||||
},
|
||||
},
|
||||
{
|
||||
"category": "network",
|
||||
"items": ["网络"],
|
||||
"map": {
|
||||
"网络": "templates/kvm_network.json",
|
||||
},
|
||||
"collect_key_map": {
|
||||
"网络": "kvm.network",
|
||||
},
|
||||
},
|
||||
],
|
||||
ClOUD_MAP = {
|
||||
"aliyun": {
|
||||
"categories": ["云服务器 ECS"],
|
||||
"map": {
|
||||
"云服务器 ECS": "templates/aliyun_ecs.json",
|
||||
}
|
||||
},
|
||||
|
||||
"tencentcloud": {
|
||||
"categories": ["云服务器 CVM"],
|
||||
"map": {
|
||||
"云服务器 CVM": "templates/tencent_cvm.json",
|
||||
}
|
||||
},
|
||||
|
||||
"huaweicloud": {
|
||||
"categories": ["云服务器 ECS"],
|
||||
"map": {
|
||||
"云服务器 ECS": "templates/huaweicloud_ecs.json",
|
||||
}
|
||||
},
|
||||
|
||||
"aws": {
|
||||
"categories": ["云服务器 EC2"],
|
||||
"map": {
|
||||
"云服务器 EC2": "templates/aws_ec2.json",
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,344 +1,427 @@
|
||||
[
|
||||
{
|
||||
"name": "amiLaunchIndex",
|
||||
"type": "Integer",
|
||||
"desc": "The AMI launch index, which can be used to find this instance in the launch group.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "architecture",
|
||||
"type": "String",
|
||||
"desc": "The architecture of the image.",
|
||||
"example": "i386"
|
||||
},
|
||||
{
|
||||
"name": "blockDeviceMapping",
|
||||
"type": "Array of InstanceBlockDeviceMapping objects",
|
||||
"desc": "Any block device mapping entries for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "bootMode",
|
||||
"type": "String",
|
||||
"desc": "The boot mode that was specified by the AMI. If the value is uefi-preferred, the AMI supports both UEFI and Legacy BIOS. The currentInstanceBootMode parameter is the boot mode that is used to boot the instance at launch or start. For more information, see Boot modes in the Amazon EC2 User Guide.",
|
||||
"example": "legacy-bios"
|
||||
},
|
||||
{
|
||||
"name": "capacityReservationId",
|
||||
"type": "String",
|
||||
"desc": "The ID of the Capacity Reservation.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "capacityReservationSpecification",
|
||||
"type": "CapacityReservationSpecificationResponse object",
|
||||
"desc": "Information about the Capacity Reservation targeting option.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "clientToken",
|
||||
"type": "String",
|
||||
"desc": "The idempotency token you provided when you launched the instance, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "cpuOptions",
|
||||
"type": "CpuOptions object",
|
||||
"desc": "The CPU options for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "currentInstanceBootMode",
|
||||
"type": "String",
|
||||
"desc": "The boot mode that is used to boot the instance at launch or start. For more information, see Boot modes in the Amazon EC2 User Guide.",
|
||||
"example": "legacy-bios"
|
||||
},
|
||||
{
|
||||
"name": "dnsName",
|
||||
"type": "String",
|
||||
"desc": "[IPv4 only] The public DNS name assigned to the instance. This name is not available until the instance enters the running state. This name is only available if you've enabled DNS hostnames for your VPC.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "ebsOptimized",
|
||||
"type": "Boolean",
|
||||
"desc": "Indicates whether the instance is optimized for Amazon EBS I/O. This optimization provides dedicated throughput to Amazon EBS and an optimized configuration stack to provide optimal I/O performance. This optimization isn't available with all instance types. Additional usage charges apply when using an EBS Optimized instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "elasticGpuAssociationSet",
|
||||
"type": "Array of ElasticGpuAssociation objects",
|
||||
"desc": "The Elastic GPU associated with the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "elasticInferenceAcceleratorAssociationSet",
|
||||
"type": "Array of ElasticInferenceAcceleratorAssociation objects",
|
||||
"desc": "The elastic inference accelerator associated with the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "enaSupport",
|
||||
"type": "Boolean",
|
||||
"desc": "Specifies whether enhanced networking with ENA is enabled.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "enclaveOptions",
|
||||
"type": "EnclaveOptions object",
|
||||
"desc": "Indicates whether the instance is enabled for AWS Nitro Enclaves.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "groupSet",
|
||||
"type": "Array of GroupIdentifier objects",
|
||||
"desc": "The security groups for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "hibernationOptions",
|
||||
"type": "HibernationOptions object",
|
||||
"desc": "Indicates whether the instance is enabled for hibernation.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "hypervisor",
|
||||
"type": "String",
|
||||
"desc": "The hypervisor type of the instance. The value xen is used for both Xen and Nitro hypervisors.",
|
||||
"example": "ovm"
|
||||
},
|
||||
{
|
||||
"name": "iamInstanceProfile",
|
||||
"type": "IamInstanceProfile object",
|
||||
"desc": "The IAM instance profile associated with the instance, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "imageId",
|
||||
"type": "String",
|
||||
"desc": "The ID of the AMI used to launch the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "instanceId",
|
||||
"type": "String",
|
||||
"desc": "The ID of the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "instanceLifecycle",
|
||||
"type": "String",
|
||||
"desc": "Indicates whether this is a Spot Instance or a Scheduled Instance.",
|
||||
"example": "spot"
|
||||
},
|
||||
{
|
||||
"name": "instanceState",
|
||||
"type": "InstanceState object",
|
||||
"desc": "The current state of the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "instanceType",
|
||||
"type": "String",
|
||||
"desc": "The instance type.",
|
||||
"example": "a1.medium"
|
||||
},
|
||||
{
|
||||
"name": "ipAddress",
|
||||
"type": "String",
|
||||
"desc": "The public IPv4 address, or the Carrier IP address assigned to the instance, if applicable. A Carrier IP address only applies to an instance launched in a subnet associated with a Wavelength Zone.",
|
||||
"example": "Required: No"
|
||||
},
|
||||
{
|
||||
"name": "ipv6Address",
|
||||
"type": "String",
|
||||
"desc": "The IPv6 address assigned to the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "kernelId",
|
||||
"type": "String",
|
||||
"desc": "The kernel associated with this instance, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "keyName",
|
||||
"type": "String",
|
||||
"desc": "The name of the key pair, if this instance was launched with an associated key pair.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "launchTime",
|
||||
"type": "Timestamp",
|
||||
"desc": "The time the instance was launched.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "licenseSet",
|
||||
"type": "Array of LicenseConfiguration objects",
|
||||
"desc": "The license configurations for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "maintenanceOptions",
|
||||
"type": "InstanceMaintenanceOptions object",
|
||||
"desc": "Provides information on the recovery and maintenance options of your instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "metadataOptions",
|
||||
"type": "InstanceMetadataOptionsResponse object",
|
||||
"desc": "The metadata options for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "monitoring",
|
||||
"type": "Monitoring object",
|
||||
"desc": "The monitoring for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "networkInterfaceSet",
|
||||
"type": "Array of InstanceNetworkInterface objects",
|
||||
"desc": "The network interfaces for the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "outpostArn",
|
||||
"type": "String",
|
||||
"desc": "The Amazon Resource Name (ARN) of the Outpost.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "placement",
|
||||
"type": "Placement object",
|
||||
"desc": "The location where the instance launched, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "platform",
|
||||
"type": "String",
|
||||
"desc": "The platform. This value is windows for Windows instances; otherwise, it is empty.",
|
||||
"example": "windows"
|
||||
},
|
||||
{
|
||||
"name": "platformDetails",
|
||||
"type": "String",
|
||||
"desc": "The platform details value for the instance. For more information, see AMI billing information fields in the Amazon EC2 User Guide.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "privateDnsName",
|
||||
"type": "String",
|
||||
"desc": "[IPv4 only] The private DNS hostname name assigned to the instance. This DNS hostname can only be used inside the Amazon EC2 network. This name is not available until the instance enters the running state. The Amazon-provided DNS server resolves Amazon-provided private DNS hostnames if you've enabled DNS resolution and DNS hostnames in your VPC. If you are not using the Amazon-provided DNS server in your VPC, your custom domain name servers must resolve the hostname as appropriate.",
|
||||
"example": "Required: No"
|
||||
},
|
||||
{
|
||||
"name": "privateDnsNameOptions",
|
||||
"type": "PrivateDnsNameOptionsResponse object",
|
||||
"desc": "The options for the instance hostname.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "privateIpAddress",
|
||||
"type": "String",
|
||||
"desc": "The private IPv4 address assigned to the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "productCodes",
|
||||
"type": "Array of ProductCode objects",
|
||||
"desc": "The product codes attached to this instance, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "ramdiskId",
|
||||
"type": "String",
|
||||
"desc": "The RAM disk associated with this instance, if applicable.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "reason",
|
||||
"type": "String",
|
||||
"desc": "The reason for the most recent state transition. This might be an empty string.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "rootDeviceName",
|
||||
"type": "String",
|
||||
"desc": "The device name of the root device volume (for example, /dev/sda1).",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "rootDeviceType",
|
||||
"type": "String",
|
||||
"desc": "The root device type used by the AMI. The AMI can use an EBS volume or an instance store volume.",
|
||||
"example": "ebs"
|
||||
},
|
||||
{
|
||||
"name": "sourceDestCheck",
|
||||
"type": "Boolean",
|
||||
"desc": "Indicates whether source/destination checking is enabled.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "spotInstanceRequestId",
|
||||
"type": "String",
|
||||
"desc": "If the request is a Spot Instance request, the ID of the request.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "sriovNetSupport",
|
||||
"type": "String",
|
||||
"desc": "Specifies whether enhanced networking with the Intel 82599 Virtual Function interface is enabled.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "stateReason",
|
||||
"type": "StateReason object",
|
||||
"desc": "The reason for the most recent state transition.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "subnetId",
|
||||
"type": "String",
|
||||
"desc": "The ID of the subnet in which the instance is running.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "tagSet",
|
||||
"type": "Array of Tag objects",
|
||||
"desc": "Any tags assigned to the instance.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "tpmSupport",
|
||||
"type": "String",
|
||||
"desc": "If the instance is configured for NitroTPM support, the value is v2.0. For more information, see NitroTPM in the Amazon EC2 User Guide.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "usageOperation",
|
||||
"type": "String",
|
||||
"desc": "The usage operation value for the instance. For more information, see AMI billing information fields in the Amazon EC2 User Guide.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "usageOperationUpdateTime",
|
||||
"type": "Timestamp",
|
||||
"desc": "The time that the usage operation was last updated.",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "virtualizationType",
|
||||
"type": "String",
|
||||
"desc": "The virtualization type of the instance.",
|
||||
"example": "hvm"
|
||||
},
|
||||
{
|
||||
"name": "vpcId",
|
||||
"type": "String",
|
||||
"desc": "The ID of the VPC in which the instance is running.",
|
||||
"example": ""
|
||||
}
|
||||
[
|
||||
{
|
||||
"name": "amiLaunchIndex",
|
||||
"type": "整数",
|
||||
"desc": "The AMI launch index, which can be used to find this instance in the launch group.",
|
||||
"example": "0"
|
||||
},
|
||||
{
|
||||
"name": "architecture",
|
||||
"type": "文本",
|
||||
"desc": "The architecture of the image.",
|
||||
"example": "x86_64"
|
||||
},
|
||||
{
|
||||
"name": "blockDeviceMapping",
|
||||
"type": "json",
|
||||
"desc": "Any block device mapping entries for the instance.",
|
||||
"example": {
|
||||
"item": {
|
||||
"deviceName": "/dev/xvda",
|
||||
"ebs": {
|
||||
"volumeId": "vol-1234567890abcdef0",
|
||||
"status": "attached",
|
||||
"attachTime": "2015-12-22T10:44:09.000Z",
|
||||
"deleteOnTermination": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "bootMode",
|
||||
"type": "文本",
|
||||
"desc": "The boot mode that was specified by the AMI. If the value is uefi-preferred, the AMI supports both UEFI and Legacy BIOS. The currentInstanceBootMode parameter is the boot mode that is used to boot the instance at launch or start.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "capacityReservationId",
|
||||
"type": "文本",
|
||||
"desc": "The ID of the Capacity Reservation.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "capacityReservationSpecification",
|
||||
"type": "json",
|
||||
"desc": "Information about the Capacity Reservation targeting option.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "clientToken",
|
||||
"type": "文本",
|
||||
"desc": "The idempotency token you provided when you launched the instance, if applicable.",
|
||||
"example": "xMcwG14507example"
|
||||
},
|
||||
{
|
||||
"name": "cpuOptions",
|
||||
"type": "json",
|
||||
"desc": "The CPU options for the instance.",
|
||||
"example": {
|
||||
"coreCount": "1",
|
||||
"threadsPerCore": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "currentInstanceBootMode",
|
||||
"type": "文本",
|
||||
"desc": "The boot mode that is used to boot the instance at launch or start. For more information, see Boot modes in the Amazon EC2 User Guide.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "dnsName",
|
||||
"type": "文本",
|
||||
"desc": "[IPv4 only] The public DNS name assigned to the instance. This name is not available until the instance enters the running state. This name is only available if you've enabled DNS hostnames for your VPC.",
|
||||
"example": "ec2-54-194-252-215.eu-west-1.compute.amazonaws.com"
|
||||
},
|
||||
{
|
||||
"name": "ebsOptimized",
|
||||
"type": "Boolean",
|
||||
"desc": "Indicates whether the instance is optimized for Amazon EBS I/O. This optimization provides dedicated throughput to Amazon EBS and an optimized configuration stack to provide optimal I/O performance. This optimization isn't available with all instance types. Additional usage charges apply when using an EBS Optimized instance.",
|
||||
"example": "false"
|
||||
},
|
||||
{
|
||||
"name": "elasticGpuAssociationSet",
|
||||
"type": "json",
|
||||
"desc": "The Elastic GPU associated with the instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "elasticInferenceAcceleratorAssociationSet",
|
||||
"type": "json",
|
||||
"desc": "The elastic inference accelerator associated with the instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "enaSupport",
|
||||
"type": "Boolean",
|
||||
"desc": "Specifies whether enhanced networking with ENA is enabled.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "enclaveOptions",
|
||||
"type": "json",
|
||||
"desc": "Indicates whether the instance is enabled for AWS Nitro Enclaves.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "groupSet",
|
||||
"type": "json",
|
||||
"desc": "The security groups for the instance.",
|
||||
"example": {
|
||||
"item": {
|
||||
"groupId": "sg-e4076980",
|
||||
"groupName": "SecurityGroup1"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "hibernationOptions",
|
||||
"type": "json",
|
||||
"desc": "Indicates whether the instance is enabled for hibernation.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "hypervisor",
|
||||
"type": "文本",
|
||||
"desc": "The hypervisor type of the instance. The value xen is used for both Xen and Nitro hypervisors.",
|
||||
"example": "xen"
|
||||
},
|
||||
{
|
||||
"name": "iamInstanceProfile",
|
||||
"type": "json",
|
||||
"desc": "The IAM instance profile associated with the instance, if applicable.",
|
||||
"example": {
|
||||
"arn": "arn:aws:iam::123456789012:instance-profile/AdminRole",
|
||||
"id": "ABCAJEDNCAA64SSD123AB"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "imageId",
|
||||
"type": "文本",
|
||||
"desc": "The ID of the AMI used to launch the instance.",
|
||||
"example": "ami-bff32ccc"
|
||||
},
|
||||
{
|
||||
"name": "instanceId",
|
||||
"type": "文本",
|
||||
"desc": "The ID of the instance.",
|
||||
"example": "i-1234567890abcdef0"
|
||||
},
|
||||
{
|
||||
"name": "instanceLifecycle",
|
||||
"type": "文本",
|
||||
"desc": "Indicates whether this is a Spot Instance or a Scheduled Instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "instanceState",
|
||||
"type": "json",
|
||||
"desc": "The current state of the instance.",
|
||||
"example": {
|
||||
"code": "16",
|
||||
"name": "running"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "instanceType",
|
||||
"type": "文本",
|
||||
"desc": "The instance type.",
|
||||
"example": "t2.micro"
|
||||
},
|
||||
{
|
||||
"name": "ipAddress",
|
||||
"type": "文本",
|
||||
"desc": "The public IPv4 address, or the Carrier IP address assigned to the instance, if applicable.",
|
||||
"example": "54.194.252.215"
|
||||
},
|
||||
{
|
||||
"name": "ipv6Address",
|
||||
"type": "文本",
|
||||
"desc": "The IPv6 address assigned to the instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "kernelId",
|
||||
"type": "文本",
|
||||
"desc": "The kernel associated with this instance, if applicable.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "keyName",
|
||||
"type": "文本",
|
||||
"desc": "The name of the key pair, if this instance was launched with an associated key pair.",
|
||||
"example": "my_keypair"
|
||||
},
|
||||
{
|
||||
"name": "launchTime",
|
||||
"type": "Time",
|
||||
"desc": "The time the instance was launched.",
|
||||
"example": "2018-05-08T16:46:19.000Z"
|
||||
},
|
||||
{
|
||||
"name": "licenseSet",
|
||||
"type": "json",
|
||||
"desc": "The license configurations for the instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "maintenanceOptions",
|
||||
"type": "json",
|
||||
"desc": "Provides information on the recovery and maintenance options of your instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "metadataOptions",
|
||||
"type": "json",
|
||||
"desc": "The metadata options for the instance.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "monitoring",
|
||||
"type": "json",
|
||||
"desc": "The monitoring for the instance.",
|
||||
"example": {
|
||||
"state": "disabled"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "networkInterfaceSet",
|
||||
"type": "json",
|
||||
"desc": "The network interfaces for the instance.",
|
||||
"example": {
|
||||
"item": {
|
||||
"networkInterfaceId": "eni-551ba033",
|
||||
"subnetId": "subnet-56f5f633",
|
||||
"vpcId": "vpc-11112222",
|
||||
"description": "Primary network interface",
|
||||
"ownerId": "123456789012",
|
||||
"status": "in-use",
|
||||
"macAddress": "02:dd:2c:5e:01:69",
|
||||
"privateIpAddress": "192.168.1.88",
|
||||
"privateDnsName": "ip-192-168-1-88.eu-west-1.compute.internal",
|
||||
"sourceDestCheck": "true",
|
||||
"groupSet": {
|
||||
"item": {
|
||||
"groupId": "sg-e4076980",
|
||||
"groupName": "SecurityGroup1"
|
||||
}
|
||||
},
|
||||
"attachment": {
|
||||
"attachmentId": "eni-attach-39697adc",
|
||||
"deviceIndex": "0",
|
||||
"status": "attached",
|
||||
"attachTime": "2018-05-08T16:46:19.000Z",
|
||||
"deleteOnTermination": "true"
|
||||
},
|
||||
"association": {
|
||||
"publicIp": "54.194.252.215",
|
||||
"publicDnsName": "ec2-54-194-252-215.eu-west-1.compute.amazonaws.com",
|
||||
"ipOwnerId": "amazon"
|
||||
},
|
||||
"privateIpAddressesSet": {
|
||||
"item": {
|
||||
"privateIpAddress": "192.168.1.88",
|
||||
"privateDnsName": "ip-192-168-1-88.eu-west-1.compute.internal",
|
||||
"primary": "true",
|
||||
"association": {
|
||||
"publicIp": "54.194.252.215",
|
||||
"publicDnsName": "ec2-54-194-252-215.eu-west-1.compute.amazonaws.com",
|
||||
"ipOwnerId": "amazon"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ipv6AddressesSet": {
|
||||
"item": {
|
||||
"ipv6Address": "2001:db8:1234:1a2b::123"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "outpostArn",
|
||||
"type": "文本",
|
||||
"desc": "The Amazon Resource Name (ARN) of the Outpost.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "placement",
|
||||
"type": "json",
|
||||
"desc": "The location where the instance launched, if applicable.",
|
||||
"example": {
|
||||
"availabilityZone": "eu-west-1c",
|
||||
"groupName": null,
|
||||
"tenancy": "default"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "platform",
|
||||
"type": "文本",
|
||||
"desc": "The value is Windows for Windows instances; otherwise blank.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "platformDetails",
|
||||
"type": "文本",
|
||||
"desc": "The platform details value for the instance. For more information, see AMI billing information fields in the Amazon EC2 User Guide.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "privateDnsName",
|
||||
"type": "文本",
|
||||
"desc": "[IPv4 only] The private DNS hostname name assigned to the instance. This DNS hostname can only be used inside the Amazon EC2 network. This name is not available until the instance enters the running state.",
|
||||
"example": "ip-192-168-1-88.eu-west-1.compute.internal"
|
||||
},
|
||||
{
|
||||
"name": "privateDnsNameOptions",
|
||||
"type": "json",
|
||||
"desc": "The options for the instance hostname.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "privateIpAddress",
|
||||
"type": "文本",
|
||||
"desc": "The private IPv4 address assigned to the instance.",
|
||||
"example": "192.168.1.88"
|
||||
},
|
||||
{
|
||||
"name": "productCodes",
|
||||
"type": "json",
|
||||
"desc": "The product codes attached to this instance, if applicable.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "ramdiskId",
|
||||
"type": "文本",
|
||||
"desc": "The RAM disk associated with this instance, if applicable.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "reason",
|
||||
"type": "文本",
|
||||
"desc": "The reason for the most recent state transition. This might be an empty string.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "rootDeviceName",
|
||||
"type": "文本",
|
||||
"desc": "The device name of the root device volume (for example, /dev/sda1).",
|
||||
"example": "/dev/xvda"
|
||||
},
|
||||
{
|
||||
"name": "rootDeviceType",
|
||||
"type": "文本",
|
||||
"desc": "The root device type used by the AMI. The AMI can use an EBS volume or an instance store volume.",
|
||||
"example": "ebs"
|
||||
},
|
||||
{
|
||||
"name": "sourceDestCheck",
|
||||
"type": "Boolean",
|
||||
"desc": "Indicates whether source/destination checking is enabled.",
|
||||
"example": "true"
|
||||
},
|
||||
{
|
||||
"name": "spotInstanceRequestId",
|
||||
"type": "文本",
|
||||
"desc": "If the request is a Spot Instance request, the ID of the request.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "sriovNetSupport",
|
||||
"type": "文本",
|
||||
"desc": "Specifies whether enhanced networking with the Intel 82599 Virtual Function interface is enabled.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "stateReason",
|
||||
"type": "json",
|
||||
"desc": "The reason for the most recent state transition.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "subnetId",
|
||||
"type": "文本",
|
||||
"desc": "The ID of the subnet in which the instance is running.",
|
||||
"example": "subnet-56f5f633"
|
||||
},
|
||||
{
|
||||
"name": "tagSet",
|
||||
"type": "json",
|
||||
"desc": "Any tags assigned to the instance.",
|
||||
"example": {
|
||||
"item": {
|
||||
"key": "Name",
|
||||
"value": "Server_1"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "tpmSupport",
|
||||
"type": "文本",
|
||||
"desc": "If the instance is configured for NitroTPM support, the value is v2.0. For more information, see NitroTPM in the Amazon EC2 User Guide.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "usageOperation",
|
||||
"type": "文本",
|
||||
"desc": "The usage operation value for the instance. For more information, see AMI billing information fields in the Amazon EC2 User Guide.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "usageOperationUpdateTime",
|
||||
"type": "Time",
|
||||
"desc": "The time that the usage operation was last updated.",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "virtualizationType",
|
||||
"type": "文本",
|
||||
"desc": "The virtualization type of the instance.",
|
||||
"example": "hvm"
|
||||
},
|
||||
{
|
||||
"name": "vpcId",
|
||||
"type": "文本",
|
||||
"desc": "The ID of the VPC in which the instance is running.",
|
||||
"example": "vpc-11112222"
|
||||
}
|
||||
]
|
||||
@@ -1,284 +1,292 @@
|
||||
[
|
||||
{
|
||||
"name": "status",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器状态。\n\n取值范围:\n\nACTIVE、BUILD、DELETED、ERROR、HARD_REBOOT、MIGRATING、PAUSED、REBOOT、REBUILD、RESIZE、REVERT_RESIZE、SHUTOFF、SHELVED、SHELVED_OFFLOADED、SOFT_DELETED、SUSPENDED、VERIFY_RESIZE\n\n弹性云服务器状态说明请参考[云服务器状态](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)",
|
||||
"example": "ACTIVE"
|
||||
},
|
||||
{
|
||||
"name": "updated",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器更新时间。\n\n时间格式例如:2019-05-22T03:30:52Z",
|
||||
"example": "2019-05-22T03:30:52Z"
|
||||
},
|
||||
{
|
||||
"name": "auto_terminate_time",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器定时删除时间。\n\n时间格式例如:2020-01-19T03:30:52Z",
|
||||
"example": "2020-01-19T03:30:52Z"
|
||||
},
|
||||
{
|
||||
"name": "hostId",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器所在主机的主机ID。",
|
||||
"example": "c7145889b2e3202cd295ceddb1742ff8941b827b586861fd0acedf64"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:host",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器所在主机的主机名称。",
|
||||
"example": "pod01.cn-north-1c"
|
||||
},
|
||||
{
|
||||
"name": "addresses",
|
||||
"type": "object",
|
||||
"desc": "弹性云服务器的网络属性。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "key_name",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器使用的密钥对名称。",
|
||||
"example": "KeyPair-test"
|
||||
},
|
||||
{
|
||||
"name": "image",
|
||||
"type": "",
|
||||
"desc": "弹性云服务器镜像信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:task_state",
|
||||
"type": "string",
|
||||
"desc": "扩展属性,弹性云服务器当前任务的状态。\n\n取值范围请参考[云服务器状态](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)表3。",
|
||||
"example": "rebooting"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:vm_state",
|
||||
"type": "string",
|
||||
"desc": "扩展属性,弹性云服务器当前状态。\n\n云服务器状态说明请参考[云服务器状态](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)。",
|
||||
"example": "active"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:instance_name",
|
||||
"type": "string",
|
||||
"desc": "扩展属性,弹性云服务器别名。",
|
||||
"example": "instance-0048a91b"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:hypervisor_hostname",
|
||||
"type": "string",
|
||||
"desc": "扩展属性,弹性云服务器所在虚拟化主机名。",
|
||||
"example": "nova022@36"
|
||||
},
|
||||
{
|
||||
"name": "flavor",
|
||||
"type": "",
|
||||
"desc": "弹性云服务器规格信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "id",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器ID,格式为UUID。",
|
||||
"example": "4f4b3dfa-eb70-47cf-a60a-998a53bd6666"
|
||||
},
|
||||
{
|
||||
"name": "security_groups",
|
||||
"type": "array",
|
||||
"desc": "弹性云服务器所属安全组列表。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-AZ:availability_zone",
|
||||
"type": "string",
|
||||
"desc": "扩展属性,弹性云服务器所在可用区名称。",
|
||||
"example": "cn-north-1c"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"type": "string",
|
||||
"desc": "创建弹性云服务器的用户ID,格式为UUID。",
|
||||
"example": "05498fe56b8010d41f7fc01e280b6666"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器名称。",
|
||||
"example": "ecs-test-server"
|
||||
},
|
||||
{
|
||||
"name": "created",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器创建时间。\n\n时间格式例如:2019-05-22T03:19:19Z",
|
||||
"example": "2017-07-15T11:30:52Z"
|
||||
},
|
||||
{
|
||||
"name": "tenant_id",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器所属租户ID,即项目id,和project_id表示相同的概念,格式为UUID。",
|
||||
"example": "743b4c0428d94531b9f2add666646666"
|
||||
},
|
||||
{
|
||||
"name": "OS-DCF:diskConfig",
|
||||
"type": "string",
|
||||
"desc": "扩展属性, diskConfig的类型。\n\n- MANUAL,镜像空间不会扩展。\n- AUTO,系统盘镜像空间会自动扩展为与flavor大小一致。",
|
||||
"example": "AUTO"
|
||||
},
|
||||
{
|
||||
"name": "accessIPv4",
|
||||
"type": "string",
|
||||
"desc": "预留属性。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "accessIPv6",
|
||||
"type": "string",
|
||||
"desc": "预留属性。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "fault",
|
||||
"type": "",
|
||||
"desc": "弹性云服务器故障信息。\n\n可选参数,在弹性云服务器状态为ERROR且存在异常的情况下返回。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "progress",
|
||||
"type": "integer",
|
||||
"desc": "弹性云服务器进度。",
|
||||
"example": 0
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:power_state",
|
||||
"type": "integer",
|
||||
"desc": "扩展属性,弹性云服务器电源状态。",
|
||||
"example": 4
|
||||
},
|
||||
{
|
||||
"name": "config_drive",
|
||||
"type": "string",
|
||||
"desc": "config drive信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "metadata",
|
||||
"type": "object",
|
||||
"desc": "弹性云服务器元数据。\n\n> 说明:\n> \n> 元数据包含系统默认添加字段和用户设置的字段。\n\n系统默认添加字段\n\n1. charging_mode\n云服务器的计费类型。\n\n- “0”:按需计费(即postPaid-后付费方式)。\n- “1”:按包年包月计费(即prePaid-预付费方式)。\"2\":竞价实例计费\n\n2. metering.order_id\n按“包年/包月”计费的云服务器对应的订单ID。\n\n3. metering.product_id\n按“包年/包月”计费的云服务器对应的产品ID。\n\n4. vpc_id\n云服务器所属的虚拟私有云ID。\n\n5. EcmResStatus\n云服务器的冻结状态。\n\n- normal:云服务器正常状态(未被冻结)。\n- freeze:云服务器被冻结。\n\n> 当云服务器被冻结或者解冻后,系统默认添加该字段,且该字段必选。\n\n6. metering.image_id\n云服务器操作系统对应的镜像ID\n\n7. metering.imagetype\n镜像类型,目前支持:\n\n- 公共镜像(gold)\n- 私有镜像(private)\n- 共享镜像(shared)\n\n8. metering.resourcespeccode\n云服务器对应的资源规格。\n\n9. image_name\n云服务器操作系统对应的镜像名称。\n\n10. os_bit\n操作系统位数,一般取值为“32”或者“64”。\n\n11. lockCheckEndpoint\n回调URL,用于检查弹性云服务器的加锁是否有效。\n\n- 如果有效,则云服务器保持锁定状态。\n- 如果无效,解除锁定状态,删除失效的锁。\n\n12. lockSource\n弹性云服务器来自哪个服务。订单加锁(ORDER)\n\n13. lockSourceId\n弹性云服务器的加锁来自哪个ID。lockSource为“ORDER”时,lockSourceId为订单ID。\n\n14. lockScene\n弹性云服务器的加锁类型。\n\n- 按需转包周期(TO_PERIOD_LOCK)\n\n15. virtual_env_type\n\n- IOS镜像创建虚拟机,\"virtual_env_type\": \"IsoImage\" 属性;\n- 非IOS镜像创建虚拟机,在19.5.0版本以后创建的虚拟机将不会添加virtual_env_type 属性,而在此之前的版本创建的虚拟机可能会返回\"virtual_env_type\": \"FusionCompute\"属性 。\n\n> virtual_env_type属性不允许用户增加、删除和修改。\n\n16. metering.resourcetype\n云服务器对应的资源类型。\n\n17. os_type\n操作系统类型,取值为:Linux、Windows。\n\n18. cascaded.instance_extrainfo\n系统内部虚拟机扩展信息。\n\n19. __support_agent_list\n云服务器是否支持企业主机安全、主机监控。\n\n- “hss”:企业主机安全\n- “ces”:主机监控\n\n20. agency_name\n委托的名称。\n\n委托是由租户管理员在统一身份认证服务(Identity and Access Management,IAM)上创建的,可以为弹性云服务器提供访问云服务的临时凭证。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-SRV-USG:launched_at",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器启动时间。时间格式例如:2019-05-22T03:23:59.000000",
|
||||
"example": "2018-08-15T14:21:22.000000"
|
||||
},
|
||||
{
|
||||
"name": "OS-SRV-USG:terminated_at",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器删除时间。\n\n时间格式例如:2019-05-22T03:23:59.000000",
|
||||
"example": "2019-05-22T03:23:59.000000"
|
||||
},
|
||||
{
|
||||
"name": "os-extended-volumes:volumes_attached",
|
||||
"type": "array",
|
||||
"desc": "挂载到弹性云服务器上的磁盘。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器的描述信息。",
|
||||
"example": "ecs description"
|
||||
},
|
||||
{
|
||||
"name": "host_status",
|
||||
"type": "string",
|
||||
"desc": "nova-compute状态。\n\n- UP:服务正常\n- UNKNOWN:状态未知\n- DOWN:服务异常\n- MAINTENANCE:维护状态\n- 空字符串:弹性云服务器无主机信息",
|
||||
"example": "UP"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:hostname",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器的主机名。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:reservation_id",
|
||||
"type": "string",
|
||||
"desc": "批量创建场景,弹性云服务器的预留ID。",
|
||||
"example": "r-f06p3js8"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:launch_index",
|
||||
"type": "integer",
|
||||
"desc": "批量创建场景,弹性云服务器的启动顺序。",
|
||||
"example": 0
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:kernel_id",
|
||||
"type": "string",
|
||||
"desc": "若使用AMI格式的镜像,则表示kernel image的UUID;否则,留空。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:ramdisk_id",
|
||||
"type": "string",
|
||||
"desc": "若使用AMI格式镜像,则表示ramdisk image的UUID;否则,留空。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:root_device_name",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器系统盘的设备名称。",
|
||||
"example": "/dev/vda"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:user_data",
|
||||
"type": "string",
|
||||
"desc": "创建弹性云服务器时指定的user_data。",
|
||||
"example": "IyEvYmluL2Jhc2gKZWNobyAncm9vdDokNiRjcGRkSjckWm5WZHNiR253Z0l0SGlxUjZxbWtLTlJaeU9lZUtKd3dPbG9XSFdUeGFzWjA1STYwdnJYRTdTUTZGbEpFbWlXZ21WNGNmZ1pac1laN1BkMTBLRndyeC8nIHwgY2hwYXNzd2Q6666"
|
||||
},
|
||||
{
|
||||
"name": "locked",
|
||||
"type": "boolean",
|
||||
"desc": "弹性云服务器是否为锁定状态。\n\n- true:锁定\n- false:未锁定",
|
||||
"example": false
|
||||
},
|
||||
{
|
||||
"name": "tags",
|
||||
"type": "array",
|
||||
"desc": "弹性云服务器标签。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "os:scheduler_hints",
|
||||
"type": "",
|
||||
"desc": "弹性云服务器调度信息",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "enterprise_project_id",
|
||||
"type": "string",
|
||||
"desc": "弹性云服务器所属的企业项目ID。",
|
||||
"example": "0"
|
||||
},
|
||||
{
|
||||
"name": "sys_tags",
|
||||
"type": "array",
|
||||
"desc": "弹性云服务器系统标签。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "cpu_options",
|
||||
"type": "",
|
||||
"desc": "自定义CPU选项。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "hypervisor",
|
||||
"type": "",
|
||||
"desc": "hypervisor信息。",
|
||||
"example": ""
|
||||
}
|
||||
[
|
||||
{
|
||||
"name": "status",
|
||||
"type": "文本",
|
||||
"example": "ACTIVE",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u72b6\u6001\u3002\n\n\u53d6\u503c\u8303\u56f4:\n\nACTIVE\u3001BUILD\u3001DELETED\u3001ERROR\u3001HARD_REBOOT\u3001MIGRATING\u3001PAUSED\u3001REBOOT\u3001REBUILD\u3001RESIZE\u3001REVERT_RESIZE\u3001SHUTOFF\u3001SHELVED\u3001SHELVED_OFFLOADED\u3001SOFT_DELETED\u3001SUSPENDED\u3001VERIFY_RESIZE\n\n\u5f39\u6027\u4e91\u670d\u52a1\u5668\u72b6\u6001\u8bf4\u660e\u8bf7\u53c2\u8003[\u4e91\u670d\u52a1\u5668\u72b6\u6001](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)"
|
||||
},
|
||||
{
|
||||
"name": "updated",
|
||||
"type": "文本",
|
||||
"example": "2019-05-22T03:30:52Z",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u66f4\u65b0\u65f6\u95f4\u3002\n\n\u65f6\u95f4\u683c\u5f0f\u4f8b\u5982:2019-05-22T03:30:52Z"
|
||||
},
|
||||
{
|
||||
"name": "auto_terminate_time",
|
||||
"type": "文本",
|
||||
"example": "2020-01-19T03:30:52Z",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u81ea\u52a8\u91ca\u653e\u65f6\u95f4\u3002\n\n\u65f6\u95f4\u683c\u5f0f\u4f8b\u5982:2020-01-19T03:30:52Z"
|
||||
},
|
||||
{
|
||||
"name": "hostId",
|
||||
"type": "文本",
|
||||
"example": "c7145889b2e3202cd295ceddb1742ff8941b827b586861fd0acedf64",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5728\u4e3b\u673a\u7684\u4e3b\u673aID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:host",
|
||||
"type": "文本",
|
||||
"example": "pod01.cn-north-1c",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5728\u4e3b\u673a\u7684\u4e3b\u673a\u540d\u79f0\u3002"
|
||||
},
|
||||
{
|
||||
"name": "addresses",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u7f51\u7edc\u5c5e\u6027\u3002"
|
||||
},
|
||||
{
|
||||
"name": "key_name",
|
||||
"type": "文本",
|
||||
"example": "KeyPair-test",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u4f7f\u7528\u7684\u5bc6\u94a5\u5bf9\u540d\u79f0\u3002"
|
||||
},
|
||||
{
|
||||
"name": "image",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u955c\u50cf\u4fe1\u606f\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:task_state",
|
||||
"type": "文本",
|
||||
"example": "rebooting",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u5f53\u524d\u4efb\u52a1\u7684\u72b6\u6001\u3002\n\n\u53d6\u503c\u8303\u56f4\u8bf7\u53c2\u8003[\u4e91\u670d\u52a1\u5668\u72b6\u6001](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)\u88683\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:vm_state",
|
||||
"type": "文本",
|
||||
"example": "active",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u5f53\u524d\u72b6\u6001\u3002\n\n\u4e91\u670d\u52a1\u5668\u72b6\u6001\u8bf4\u660e\u8bf7\u53c2\u8003[\u4e91\u670d\u52a1\u5668\u72b6\u6001](https://support.huaweicloud.com/api-ecs/ecs_08_0002.html)\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:instance_name",
|
||||
"type": "文本",
|
||||
"example": "instance-0048a91b",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u522b\u540d\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:hypervisor_hostname",
|
||||
"type": "文本",
|
||||
"example": "nova022@36",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5728\u865a\u62df\u5316\u4e3b\u673a\u540d\u3002"
|
||||
},
|
||||
{
|
||||
"name": "flavor",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u89c4\u683c\u4fe1\u606f\u3002"
|
||||
},
|
||||
{
|
||||
"name": "id",
|
||||
"type": "文本",
|
||||
"example": "4f4b3dfa-eb70-47cf-a60a-998a53bd6666",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668ID,\u683c\u5f0f\u4e3aUUID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "security_groups",
|
||||
"type": "json",
|
||||
"example": {
|
||||
"$ref": "#/definitions/ServerSecurityGroup"
|
||||
},
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5c5e\u5b89\u5168\u7ec4\u5217\u8868\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-AZ:availability_zone",
|
||||
"type": "文本",
|
||||
"example": "cn-north-1c",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5728\u53ef\u7528\u533a\u540d\u79f0\u3002"
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"type": "文本",
|
||||
"example": "05498fe56b8010d41f7fc01e280b6666",
|
||||
"desc": "\u521b\u5efa\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u7528\u6237ID,\u683c\u5f0f\u4e3aUUID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"type": "文本",
|
||||
"example": "ecs-test-server",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u540d\u79f0\u3002"
|
||||
},
|
||||
{
|
||||
"name": "created",
|
||||
"type": "文本",
|
||||
"example": "2017-07-15T11:30:52Z",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u521b\u5efa\u65f6\u95f4\u3002\n\n\u65f6\u95f4\u683c\u5f0f\u4f8b\u5982:2019-05-22T03:19:19Z"
|
||||
},
|
||||
{
|
||||
"name": "tenant_id",
|
||||
"type": "文本",
|
||||
"example": "743b4c0428d94531b9f2add666646666",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5c5e\u79df\u6237ID,\u5373\u9879\u76eeid,\u548cproject_id\u8868\u793a\u76f8\u540c\u7684\u6982\u5ff5,\u683c\u5f0f\u4e3aUUID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-DCF:diskConfig",
|
||||
"type": "文本",
|
||||
"example": "AUTO",
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027, diskConfig\u7684\u7c7b\u578b\u3002\n\n- MANUAL,\u955c\u50cf\u7a7a\u95f4\u4e0d\u4f1a\u6269\u5c55\u3002\n- AUTO,\u7cfb\u7edf\u76d8\u955c\u50cf\u7a7a\u95f4\u4f1a\u81ea\u52a8\u6269\u5c55\u4e3a\u4e0eflavor\u5927\u5c0f\u4e00\u81f4\u3002"
|
||||
},
|
||||
{
|
||||
"name": "accessIPv4",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u9884\u7559\u5c5e\u6027\u3002"
|
||||
},
|
||||
{
|
||||
"name": "accessIPv6",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u9884\u7559\u5c5e\u6027\u3002"
|
||||
},
|
||||
{
|
||||
"name": "fault",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6545\u969c\u4fe1\u606f\u3002\n\n\u53ef\u9009\u53c2\u6570,\u5728\u5f39\u6027\u4e91\u670d\u52a1\u5668\u72b6\u6001\u4e3aERROR\u4e14\u5b58\u5728\u5f02\u5e38\u7684\u60c5\u51b5\u4e0b\u8fd4\u56de\u3002"
|
||||
},
|
||||
{
|
||||
"name": "progress",
|
||||
"type": "整数",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u8fdb\u5ea6\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-STS:power_state",
|
||||
"type": "整数",
|
||||
"example": 4,
|
||||
"desc": "\u6269\u5c55\u5c5e\u6027,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7535\u6e90\u72b6\u6001\u3002"
|
||||
},
|
||||
{
|
||||
"name": "config_drive",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "config drive\u4fe1\u606f\u3002"
|
||||
},
|
||||
{
|
||||
"name": "metadata",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u5143\u6570\u636e\u3002\n\n> \u8bf4\u660e:\n> \n> \u5143\u6570\u636e\u5305\u542b\u7cfb\u7edf\u9ed8\u8ba4\u6dfb\u52a0\u5b57\u6bb5\u548c\u7528\u6237\u8bbe\u7f6e\u7684\u5b57\u6bb5\u3002\n\n\u7cfb\u7edf\u9ed8\u8ba4\u6dfb\u52a0\u5b57\u6bb5\n\n1. charging_mode\n\u4e91\u670d\u52a1\u5668\u7684\u8ba1\u8d39\u7c7b\u578b\u3002\n\n- \u201c0\u201d:\u6309\u9700\u8ba1\u8d39(\u5373postPaid-\u540e\u4ed8\u8d39\u65b9\u5f0f)\u3002\n- \u201c1\u201d:\u6309\u5305\u5e74\u5305\u6708\u8ba1\u8d39(\u5373prePaid-\u9884\u4ed8\u8d39\u65b9\u5f0f)\u3002\"2\":\u7ade\u4ef7\u5b9e\u4f8b\u8ba1\u8d39\n\n2. metering.order_id\n\u6309\u201c\u5305\u5e74/\u5305\u6708\u201d\u8ba1\u8d39\u7684\u4e91\u670d\u52a1\u5668\u5bf9\u5e94\u7684\u8ba2\u5355ID\u3002\n\n3. metering.product_id\n\u6309\u201c\u5305\u5e74/\u5305\u6708\u201d\u8ba1\u8d39\u7684\u4e91\u670d\u52a1\u5668\u5bf9\u5e94\u7684\u4ea7\u54c1ID\u3002\n\n4. vpc_id\n\u4e91\u670d\u52a1\u5668\u6240\u5c5e\u7684\u865a\u62df\u79c1\u6709\u4e91ID\u3002\n\n5. EcmResStatus\n\u4e91\u670d\u52a1\u5668\u7684\u51bb\u7ed3\u72b6\u6001\u3002\n\n- normal:\u4e91\u670d\u52a1\u5668\u6b63\u5e38\u72b6\u6001(\u672a\u88ab\u51bb\u7ed3)\u3002\n- freeze:\u4e91\u670d\u52a1\u5668\u88ab\u51bb\u7ed3\u3002\n\n> \u5f53\u4e91\u670d\u52a1\u5668\u88ab\u51bb\u7ed3\u6216\u8005\u89e3\u51bb\u540e,\u7cfb\u7edf\u9ed8\u8ba4\u6dfb\u52a0\u8be5\u5b57\u6bb5,\u4e14\u8be5\u5b57\u6bb5\u5fc5\u9009\u3002\n\n6. metering.image_id\n\u4e91\u670d\u52a1\u5668\u64cd\u4f5c\u7cfb\u7edf\u5bf9\u5e94\u7684\u955c\u50cfID\n\n7. metering.imagetype\n\u955c\u50cf\u7c7b\u578b,\u76ee\u524d\u652f\u6301:\n\n- \u516c\u5171\u955c\u50cf(gold)\n- \u79c1\u6709\u955c\u50cf(private)\n- \u5171\u4eab\u955c\u50cf(shared)\n\n8. metering.resourcespeccode\n\u4e91\u670d\u52a1\u5668\u5bf9\u5e94\u7684\u8d44\u6e90\u89c4\u683c\u3002\n\n9. image_name\n\u4e91\u670d\u52a1\u5668\u64cd\u4f5c\u7cfb\u7edf\u5bf9\u5e94\u7684\u955c\u50cf\u540d\u79f0\u3002\n\n10. 
os_bit\n\u64cd\u4f5c\u7cfb\u7edf\u4f4d\u6570,\u4e00\u822c\u53d6\u503c\u4e3a\u201c32\u201d\u6216\u8005\u201c64\u201d\u3002\n\n11. lockCheckEndpoint\n\u56de\u8c03URL,\u7528\u4e8e\u68c0\u67e5\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u52a0\u9501\u662f\u5426\u6709\u6548\u3002\n\n- \u5982\u679c\u6709\u6548,\u5219\u4e91\u670d\u52a1\u5668\u4fdd\u6301\u9501\u5b9a\u72b6\u6001\u3002\n- \u5982\u679c\u65e0\u6548,\u89e3\u9664\u9501\u5b9a\u72b6\u6001,\u5220\u9664\u5931\u6548\u7684\u9501\u3002\n\n12. lockSource\n\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6765\u81ea\u54ea\u4e2a\u670d\u52a1\u3002\u8ba2\u5355\u52a0\u9501(ORDER)\n\n13. lockSourceId\n\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u52a0\u9501\u6765\u81ea\u54ea\u4e2aID\u3002lockSource\u4e3a\u201cORDER\u201d\u65f6,lockSourceId\u4e3a\u8ba2\u5355ID\u3002\n\n14. lockScene\n\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u52a0\u9501\u7c7b\u578b\u3002\n\n- \u6309\u9700\u8f6c\u5305\u5468\u671f(TO_PERIOD_LOCK)\n\n15. virtual_env_type\n\n- IOS\u955c\u50cf\u521b\u5efa\u865a\u62df\u673a,\"virtual_env_type\": \"IsoImage\" \u5c5e\u6027;\n- \u975eIOS\u955c\u50cf\u521b\u5efa\u865a\u62df\u673a,\u572819.5.0\u7248\u672c\u4ee5\u540e\u521b\u5efa\u7684\u865a\u62df\u673a\u5c06\u4e0d\u4f1a\u6dfb\u52a0virtual_env_type \u5c5e\u6027,\u800c\u5728\u6b64\u4e4b\u524d\u7684\u7248\u672c\u521b\u5efa\u7684\u865a\u62df\u673a\u53ef\u80fd\u4f1a\u8fd4\u56de\"virtual_env_type\": \"FusionCompute\"\u5c5e\u6027 \u3002\n\n> virtual_env_type\u5c5e\u6027\u4e0d\u5141\u8bb8\u7528\u6237\u589e\u52a0\u3001\u5220\u9664\u548c\u4fee\u6539\u3002\n\n16. metering.resourcetype\n\u4e91\u670d\u52a1\u5668\u5bf9\u5e94\u7684\u8d44\u6e90\u7c7b\u578b\u3002\n\n17. os_type\n\u64cd\u4f5c\u7cfb\u7edf\u7c7b\u578b,\u53d6\u503c\u4e3a:Linux\u3001Windows\u3002\n\n18. cascaded.instance_extrainfo\n\u7cfb\u7edf\u5185\u90e8\u865a\u62df\u673a\u6269\u5c55\u4fe1\u606f\u3002\n\n19. 
__support_agent_list\n\u4e91\u670d\u52a1\u5668\u662f\u5426\u652f\u6301\u4f01\u4e1a\u4e3b\u673a\u5b89\u5168\u3001\u4e3b\u673a\u76d1\u63a7\u3002\n\n- \u201chss\u201d:\u4f01\u4e1a\u4e3b\u673a\u5b89\u5168\n- \u201cces\u201d:\u4e3b\u673a\u76d1\u63a7\n\n20. agency_name\n\u59d4\u6258\u7684\u540d\u79f0\u3002\n\n\u59d4\u6258\u662f\u7531\u79df\u6237\u7ba1\u7406\u5458\u5728\u7edf\u4e00\u8eab\u4efd\u8ba4\u8bc1\u670d\u52a1(Identity and Access Management,IAM)\u4e0a\u521b\u5efa\u7684,\u53ef\u4ee5\u4e3a\u5f39\u6027\u4e91\u670d\u52a1\u5668\u63d0\u4f9b\u8bbf\u95ee\u4e91\u670d\u52a1\u7684\u4e34\u65f6\u51ed\u8bc1\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-SRV-USG:launched_at",
|
||||
"type": "文本",
|
||||
"example": "2018-08-15T14:21:22.000000",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u542f\u52a8\u65f6\u95f4\u3002\u65f6\u95f4\u683c\u5f0f\u4f8b\u5982:2019-05-22T03:23:59.000000"
|
||||
},
|
||||
{
|
||||
"name": "OS-SRV-USG:terminated_at",
|
||||
"type": "文本",
|
||||
"example": "2019-05-22T03:23:59.000000",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u5220\u9664\u65f6\u95f4\u3002\n\n\u65f6\u95f4\u683c\u5f0f\u4f8b\u5982:2019-05-22T03:23:59.000000"
|
||||
},
|
||||
{
|
||||
"name": "os-extended-volumes:volumes_attached",
|
||||
"type": "json",
|
||||
"example": {
|
||||
"$ref": "#/definitions/ServerExtendVolumeAttachment"
|
||||
},
|
||||
"desc": "\u6302\u8f7d\u5230\u5f39\u6027\u4e91\u670d\u52a1\u5668\u4e0a\u7684\u78c1\u76d8\u3002"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"type": "文本",
|
||||
"example": "ecs description",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u63cf\u8ff0\u4fe1\u606f\u3002"
|
||||
},
|
||||
{
|
||||
"name": "host_status",
|
||||
"type": "文本",
|
||||
"example": "UP",
|
||||
"desc": "nova-compute\u72b6\u6001\u3002\n\n- UP:\u670d\u52a1\u6b63\u5e38\n- UNKNOWN:\u72b6\u6001\u672a\u77e5\n- DOWN:\u670d\u52a1\u5f02\u5e38\n- MAINTENANCE:\u7ef4\u62a4\u72b6\u6001\n- \u7a7a\u5b57\u7b26\u4e32:\u5f39\u6027\u4e91\u670d\u52a1\u5668\u65e0\u4e3b\u673a\u4fe1\u606f"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:hostname",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u4e3b\u673a\u540d\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:reservation_id",
|
||||
"type": "文本",
|
||||
"example": "r-f06p3js8",
|
||||
"desc": "\u6279\u91cf\u521b\u5efa\u573a\u666f,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u9884\u7559ID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:launch_index",
|
||||
"type": "整数",
|
||||
"example": null,
|
||||
"desc": "\u6279\u91cf\u521b\u5efa\u573a\u666f,\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7684\u542f\u52a8\u987a\u5e8f\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:kernel_id",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u82e5\u4f7f\u7528AMI\u683c\u5f0f\u7684\u955c\u50cf,\u5219\u8868\u793akernel image\u7684UUID;\u5426\u5219,\u7559\u7a7a\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:ramdisk_id",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "\u82e5\u4f7f\u7528AMI\u683c\u5f0f\u955c\u50cf,\u5219\u8868\u793aramdisk image\u7684UUID;\u5426\u5219,\u7559\u7a7a\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:root_device_name",
|
||||
"type": "文本",
|
||||
"example": "/dev/vda",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7cfb\u7edf\u76d8\u7684\u8bbe\u5907\u540d\u79f0\u3002"
|
||||
},
|
||||
{
|
||||
"name": "OS-EXT-SRV-ATTR:user_data",
|
||||
"type": "文本",
|
||||
"example": "IyEvYmluL2Jhc2gKZWNobyAncm9vdDokNiRjcGRkSjckWm5WZHNiR253Z0l0SGlxUjZxbWtLTlJaeU9lZUtKd3dPbG9XSFdUeGFzWjA1STYwdnJYRTdTUTZGbEpFbWlXZ21WNGNmZ1pac1laN1BkMTBLRndyeC8nIHwgY2hwYXNzd2Q6666",
|
||||
"desc": "\u521b\u5efa\u5f39\u6027\u4e91\u670d\u52a1\u5668\u65f6\u6307\u5b9a\u7684user_data\u3002"
|
||||
},
|
||||
{
|
||||
"name": "locked",
|
||||
"type": "boolean",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u662f\u5426\u4e3a\u9501\u5b9a\u72b6\u6001\u3002\n\n- true:\u9501\u5b9a\n- false:\u672a\u9501\u5b9a"
|
||||
},
|
||||
{
|
||||
"name": "tags",
|
||||
"type": "文本、多值",
|
||||
"example": {
|
||||
"type": "文本"
|
||||
},
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6807\u7b7e\u3002"
|
||||
},
|
||||
{
|
||||
"name": "os:scheduler_hints",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u8c03\u5ea6\u4fe1\u606f"
|
||||
},
|
||||
{
|
||||
"name": "enterprise_project_id",
|
||||
"type": "文本",
|
||||
"example": "0",
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u6240\u5c5e\u7684\u4f01\u4e1a\u9879\u76eeID\u3002"
|
||||
},
|
||||
{
|
||||
"name": "sys_tags",
|
||||
"type": "文本、多值",
|
||||
"example": {
|
||||
"$ref": "#/definitions/ServerSystemTag"
|
||||
},
|
||||
"desc": "\u5f39\u6027\u4e91\u670d\u52a1\u5668\u7cfb\u7edf\u6807\u7b7e\u3002"
|
||||
},
|
||||
{
|
||||
"name": "cpu_options",
|
||||
"type": "json",
|
||||
"example": null,
|
||||
"desc": "\u81ea\u5b9a\u4e49CPU\u9009\u9879\u3002"
|
||||
},
|
||||
{
|
||||
"name": "hypervisor",
|
||||
"type": "文本",
|
||||
"example": null,
|
||||
"desc": "hypervisor\u4fe1\u606f\u3002"
|
||||
}
|
||||
]
|
||||
@@ -1,248 +1,297 @@
|
||||
[
|
||||
{
|
||||
"name": "Placement",
|
||||
"type": "Placement",
|
||||
"desc": "实例所在的位置。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "InstanceId",
|
||||
"type": "String",
|
||||
"desc": "实例ID。",
|
||||
"example": "ins-9bxebleo"
|
||||
},
|
||||
{
|
||||
"name": "InstanceType",
|
||||
"type": "String",
|
||||
"desc": "实例机型。",
|
||||
"example": "S1.SMALL1"
|
||||
},
|
||||
{
|
||||
"name": "CPU",
|
||||
"type": "Integer",
|
||||
"desc": "实例的CPU核数,单位:核。",
|
||||
"example": "1"
|
||||
},
|
||||
{
|
||||
"name": "Memory",
|
||||
"type": "Integer",
|
||||
"desc": "实例内存容量,单位:GB。",
|
||||
"example": "1"
|
||||
},
|
||||
{
|
||||
"name": "RestrictState",
|
||||
"type": "String",
|
||||
"desc": "NORMAL:表示正常状态的实例\nEXPIRED:表示过期的实例\nPROTECTIVELY_ISOLATED:表示被安全隔离的实例。",
|
||||
"example": "NORMAL"
|
||||
},
|
||||
{
|
||||
"name": "InstanceName",
|
||||
"type": "String",
|
||||
"desc": "实例名称。",
|
||||
"example": "测试实例"
|
||||
},
|
||||
{
|
||||
"name": "InstanceChargeType",
|
||||
"type": "String",
|
||||
"desc": "PREPAID:表示预付费,即包年包月\nPOSTPAID_BY_HOUR:表示后付费,即按量计费\nCDHPAID:专用宿主机付费,即只对专用宿主机计费,不对专用宿主机上的实例计费。\nSPOTPAID:表示竞价实例付费。",
|
||||
"example": "PREPAID"
|
||||
},
|
||||
{
|
||||
"name": "SystemDisk",
|
||||
"type": "SystemDisk",
|
||||
"desc": "实例系统盘信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "DataDisks",
|
||||
"type": "Array of DataDisk",
|
||||
"desc": "实例数据盘信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "PrivateIpAddresses",
|
||||
"type": "Array of String",
|
||||
"desc": "实例主网卡的内网IP列表。",
|
||||
"example": "[\"172.16.32.78\"]"
|
||||
},
|
||||
{
|
||||
"name": "PublicIpAddresses",
|
||||
"type": "Array of String",
|
||||
"desc": "实例主网卡的公网IP列表。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "[\"123.207.11.190\"]"
|
||||
},
|
||||
{
|
||||
"name": "InternetAccessible",
|
||||
"type": "InternetAccessible",
|
||||
"desc": "实例带宽信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "VirtualPrivateCloud",
|
||||
"type": "VirtualPrivateCloud",
|
||||
"desc": "实例所属虚拟私有网络信息。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "ImageId",
|
||||
"type": "String",
|
||||
"desc": "生产实例所使用的镜像ID。",
|
||||
"example": "img-9qabwvbn"
|
||||
},
|
||||
{
|
||||
"name": "RenewFlag",
|
||||
"type": "String",
|
||||
"desc": "NOTIFY_AND_MANUAL_RENEW:表示通知即将过期,但不自动续费\nNOTIFY_AND_AUTO_RENEW:表示通知即将过期,而且自动续费\nDISABLE_NOTIFY_AND_MANUAL_RENEW:表示不通知即将过期,也不自动续费。\n注意:后付费模式本项为null",
|
||||
"example": "NOTIFY_AND_MANUAL_RENEW"
|
||||
},
|
||||
{
|
||||
"name": "CreatedTime",
|
||||
"type": "Timestamp ISO8601",
|
||||
"desc": "创建时间。按照ISO8601标准表示,并且使用UTC时间。格式为:YYYY-MM-DDThh:mm:ssZ。",
|
||||
"example": "2020-03-10T02:43:51Z"
|
||||
},
|
||||
{
|
||||
"name": "ExpiredTime",
|
||||
"type": "Timestamp ISO8601",
|
||||
"desc": "到期时间。按照ISO8601标准表示,并且使用UTC时间。格式为:YYYY-MM-DDThh:mm:ssZ。注意:后付费模式本项为null",
|
||||
"example": "2020-04-10T02:47:36Z"
|
||||
},
|
||||
{
|
||||
"name": "OsName",
|
||||
"type": "String",
|
||||
"desc": "操作系统名称。",
|
||||
"example": "CentOS 7.6 64bit"
|
||||
},
|
||||
{
|
||||
"name": "SecurityGroupIds",
|
||||
"type": "Array of String",
|
||||
"desc": "实例所属安全组。该参数可以通过调用 DescribeSecurityGroups 的返回值中的sgId字段来获取。",
|
||||
"example": "[\"sg-p1ezv4wz\"]"
|
||||
},
|
||||
{
|
||||
"name": "LoginSettings",
|
||||
"type": "LoginSettings",
|
||||
"desc": "实例登录设置。目前只返回实例所关联的密钥。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "InstanceState",
|
||||
"type": "String",
|
||||
"desc": "PENDING:表示创建中\nLAUNCH_FAILED:表示创建失败\nRUNNING:表示运行中\nSTOPPED:表示关机\nSTARTING:表示开机中\nSTOPPING:表示关机中\nREBOOTING:表示重启中\nSHUTDOWN:表示停止待销毁\nTERMINATING:表示销毁中。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "Tags",
|
||||
"type": "Array of Tag",
|
||||
"desc": "实例关联的标签列表。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "StopChargingMode",
|
||||
"type": "String",
|
||||
"desc": "KEEP_CHARGING:关机继续收费\nSTOP_CHARGING:关机停止收费\nNOT_APPLICABLE:实例处于非关机状态或者不适用关机停止计费的条件",
|
||||
"example": "NOT_APPLICABLE"
|
||||
},
|
||||
{
|
||||
"name": "Uuid",
|
||||
"type": "String",
|
||||
"desc": "实例全局唯一ID",
|
||||
"example": "68b510db-b4c1-4630-a62b-73d0c7c970f9"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperation",
|
||||
"type": "String",
|
||||
"desc": "实例的最新操作。例:StopInstances、ResetInstance。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "RenewInstances"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationState",
|
||||
"type": "String",
|
||||
"desc": "SUCCESS:表示操作成功\nOPERATING:表示操作执行中\nFAILED:表示操作失败注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "SUCCESS"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationRequestId",
|
||||
"type": "String",
|
||||
"desc": "实例最新操作的唯一请求 ID。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "3554eb5b-1cfa-471a-ae76-dc436c9d43e8"
|
||||
},
|
||||
{
|
||||
"name": "DisasterRecoverGroupId",
|
||||
"type": "String",
|
||||
"desc": "分散置放群组ID。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "null"
|
||||
},
|
||||
{
|
||||
"name": "IPv6Addresses",
|
||||
"type": "Array of String",
|
||||
"desc": "实例的IPv6地址。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "null"
|
||||
},
|
||||
{
|
||||
"name": "CamRoleName",
|
||||
"type": "String",
|
||||
"desc": "CAM角色名。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "null"
|
||||
},
|
||||
{
|
||||
"name": "HpcClusterId",
|
||||
"type": "String",
|
||||
"desc": "高性能计算集群ID。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "null"
|
||||
},
|
||||
{
|
||||
"name": "RdmaIpAddresses",
|
||||
"type": "Array of String",
|
||||
"desc": "高性能计算集群IP列表。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "null"
|
||||
},
|
||||
{
|
||||
"name": "DedicatedClusterId",
|
||||
"type": "String",
|
||||
"desc": "实例所在的专用集群ID。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "cluster-du3jken"
|
||||
},
|
||||
{
|
||||
"name": "IsolatedSource",
|
||||
"type": "String",
|
||||
"desc": "ARREAR:表示欠费隔离\nEXPIRE:表示到期隔离\nMANMADE:表示主动退还隔离\nNOTISOLATED:表示未隔离",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "GPUInfo",
|
||||
"type": "GPUInfo",
|
||||
"desc": "GPU信息。如果是gpu类型子机,该值会返回GPU信息,如果是其他类型子机则不返回。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "LicenseType",
|
||||
"type": "String",
|
||||
"desc": "实例的操作系统许可类型,默认为TencentCloud",
|
||||
"example": "TencentCloud"
|
||||
},
|
||||
{
|
||||
"name": "DisableApiTermination",
|
||||
"type": "Boolean",
|
||||
"desc": "TRUE:表示开启实例保护,不允许通过api接口删除实例\nFALSE:表示关闭实例保护,允许通过api接口删除实例默认取值:FALSE。",
|
||||
"example": "false"
|
||||
},
|
||||
{
|
||||
"name": "DefaultLoginUser",
|
||||
"type": "String",
|
||||
"desc": "默认登录用户。",
|
||||
"example": "root"
|
||||
},
|
||||
{
|
||||
"name": "DefaultLoginPort",
|
||||
"type": "Integer",
|
||||
"desc": "默认登录端口。",
|
||||
"example": "22"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationErrorMsg",
|
||||
"type": "String",
|
||||
"desc": "实例的最新操作错误信息。注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "None"
|
||||
}
|
||||
[
|
||||
{
|
||||
"name": "Placement",
|
||||
"type": "json",
|
||||
"desc": "实例所在的位置。",
|
||||
"example": {
|
||||
"HostId": "host-h3m57oik",
|
||||
"ProjectId": 1174660,
|
||||
"HostIds": [],
|
||||
"Zone": "ap-guangzhou-1",
|
||||
"HostIps": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "InstanceId",
|
||||
"type": "文本",
|
||||
"desc": "实例ID。",
|
||||
"example": "ins-xlsyru2j"
|
||||
},
|
||||
{
|
||||
"name": "InstanceType",
|
||||
"type": "文本",
|
||||
"desc": "实例机型。",
|
||||
"example": "S2.SMALL2"
|
||||
},
|
||||
{
|
||||
"name": "CPU",
|
||||
"type": "整数",
|
||||
"desc": "实例的CPU核数,单位:核。",
|
||||
"example": 1
|
||||
},
|
||||
{
|
||||
"name": "Memory",
|
||||
"type": "整数",
|
||||
"desc": "实例内存容量,单位:GB。",
|
||||
"example": 1
|
||||
},
|
||||
{
|
||||
"name": "RestrictState",
|
||||
"type": "文本",
|
||||
"desc": "实例业务状态。取值范围: NORMAL:表示正常状态的实例 EXPIRED:表示过期的实例 PROTECTIVELY_ISOLATED:表示被安全隔离的实例。",
|
||||
"example": "PROTECTIVELY_ISOLATED"
|
||||
},
|
||||
{
|
||||
"name": "InstanceName",
|
||||
"type": "文本",
|
||||
"desc": "实例名称。",
|
||||
"example": "test"
|
||||
},
|
||||
{
|
||||
"name": "InstanceChargeType",
|
||||
"type": "文本",
|
||||
"desc": "实例计费模式。取值范围: PREPAID:表示预付费,即包年包月 POSTPAID_BY_HOUR:表示后付费,即按量计费 CDHPAID:专用宿主机付费,即只对专用宿主机计费,不对专用宿主机上的实例计费。 SPOTPAID:表示竞价实例付费。",
|
||||
"example": "POSTPAID_BY_HOUR"
|
||||
},
|
||||
{
|
||||
"name": "SystemDisk",
|
||||
"type": "json",
|
||||
"desc": "实例系统盘信息。",
|
||||
"example": {
|
||||
"DiskSize": 50,
|
||||
"CdcId": null,
|
||||
"DiskId": "disk-czsodtl1",
|
||||
"DiskType": "CLOUD_SSD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "DataDisks",
|
||||
"type": "json",
|
||||
"desc": "实例数据盘信息。",
|
||||
"example": [
|
||||
{
|
||||
"DeleteWithInstance": true,
|
||||
"Encrypt": true,
|
||||
"CdcId": null,
|
||||
"DiskType": "CLOUD_SSD",
|
||||
"ThroughputPerformance": 0,
|
||||
"KmsKeyId": null,
|
||||
"DiskSize": 50,
|
||||
"SnapshotId": null,
|
||||
"DiskId": "disk-bzsodtn1"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "PrivateIpAddresses",
|
||||
"type": "文本、多值",
|
||||
"desc": "实例主网卡的内网IP列表。",
|
||||
"example": [
|
||||
"172.16.32.78"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "PublicIpAddresses",
|
||||
"type": "文本、多值",
|
||||
"desc": "实例主网卡的公网IP列表。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": [
|
||||
"123.207.11.190"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "InternetAccessible",
|
||||
"type": "json",
|
||||
"desc": "实例带宽信息。",
|
||||
"example": {
|
||||
"PublicIpAssigned": true,
|
||||
"InternetChargeType": "TRAFFIC_POSTPAID_BY_HOUR",
|
||||
"BandwidthPackageId": null,
|
||||
"InternetMaxBandwidthOut": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "VirtualPrivateCloud",
|
||||
"type": "json",
|
||||
"desc": "实例所属虚拟私有网络信息。",
|
||||
"example": {
|
||||
"SubnetId": "subnet-mv4sn55k",
|
||||
"AsVpcGateway": false,
|
||||
"Ipv6AddressCount": 1,
|
||||
"VpcId": "vpc-m0cnatxj",
|
||||
"PrivateIpAddresses": [
|
||||
"172.16.3.59"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "ImageId",
|
||||
"type": "文本",
|
||||
"desc": "生产实例所使用的镜像ID。",
|
||||
"example": "img-8toqc6s3"
|
||||
},
|
||||
{
|
||||
"name": "RenewFlag",
|
||||
"type": "文本",
|
||||
"desc": "自动续费标识。取值范围: NOTIFY_AND_MANUAL_RENEW:表示通知即将过期,但不自动续费 NOTIFY_AND_AUTO_RENEW:表示通知即将过期,而且自动续费 DISABLE_NOTIFY_AND_MANUAL_RENEW:表示不通知即将过期,也不自动续费。 注意:后付费模式本项为null",
|
||||
"example": "NOTIFY_AND_MANUAL_RENEW"
|
||||
},
|
||||
{
|
||||
"name": "CreatedTime",
|
||||
"type": "json",
|
||||
"desc": "创建时间。按照ISO8601标准表示,并且使用UTC时间。格式为:YYYY-MM-DDThh:mm:ssZ。",
|
||||
"example": "2020-09-22T00:00:00+00:00"
|
||||
},
|
||||
{
|
||||
"name": "ExpiredTime",
|
||||
"type": "json",
|
||||
"desc": "到期时间。按照ISO8601标准表示,并且使用UTC时间。格式为:YYYY-MM-DDThh:mm:ssZ。注意:后付费模式本项为null",
|
||||
"example": "2020-09-22T00:00:00+00:00"
|
||||
},
|
||||
{
|
||||
"name": "OsName",
|
||||
"type": "文本",
|
||||
"desc": "操作系统名称。",
|
||||
"example": "CentOS 7.4 64bit"
|
||||
},
|
||||
{
|
||||
"name": "SecurityGroupIds",
|
||||
"type": "文本、多值",
|
||||
"desc": "实例所属安全组。该参数可以通过调用 DescribeSecurityGroups 的返回值中的sgId字段来获取。",
|
||||
"example": [
|
||||
"sg-p1ezv4wz"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "LoginSettings",
|
||||
"type": "json",
|
||||
"desc": "实例登录设置。目前只返回实例所关联的密钥。",
|
||||
"example": {
|
||||
"Password": "123qwe!@#QWE",
|
||||
"KeepImageLogin": "False",
|
||||
"KeyIds": [
|
||||
"skey-b4vakk62"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "InstanceState",
|
||||
"type": "文本",
|
||||
"desc": "实例状态。取值范围: PENDING:表示创建中 LAUNCH_FAILED:表示创建失败 RUNNING:表示运行中 STOPPED:表示关机 STARTING:表示开机中 STOPPING:表示关机中 REBOOTING:表示重启中 SHUTDOWN:表示停止待销毁 TERMINATING:表示销毁中。",
|
||||
"example": "RUNNING"
|
||||
},
|
||||
{
|
||||
"name": "Tags",
|
||||
"type": "json",
|
||||
"desc": "实例关联的标签列表。",
|
||||
"example": [
|
||||
{
|
||||
"Value": "test",
|
||||
"Key": "test"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "StopChargingMode",
|
||||
"type": "文本",
|
||||
"desc": "实例的关机计费模式。 取值范围: KEEP_CHARGING:关机继续收费 STOP_CHARGING:关机停止收费NOT_APPLICABLE:实例处于非关机状态或者不适用关机停止计费的条件",
|
||||
"example": "NOT_APPLICABLE"
|
||||
},
|
||||
{
|
||||
"name": "Uuid",
|
||||
"type": "文本",
|
||||
"desc": "实例全局唯一ID",
|
||||
"example": "e85f1388-0422-410d-8e50-bef540e78c18"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperation",
|
||||
"type": "文本",
|
||||
"desc": "实例的最新操作。例:StopInstances、ResetInstance。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "ResetInstancesType"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationState",
|
||||
"type": "文本",
|
||||
"desc": "实例的最新操作状态。取值范围: SUCCESS:表示操作成功 OPERATING:表示操作执行中 FAILED:表示操作失败 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "SUCCESS"
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationRequestId",
|
||||
"type": "文本",
|
||||
"desc": "实例最新操作的唯一请求 ID。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "c7de1287-061d-4ace-8caf-6ad8e5a2f29a"
|
||||
},
|
||||
{
|
||||
"name": "DisasterRecoverGroupId",
|
||||
"type": "文本",
|
||||
"desc": "分散置放群组ID。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "IPv6Addresses",
|
||||
"type": "文本、多值",
|
||||
"desc": "实例的IPv6地址。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": [
|
||||
"2001:0db8:86a3:08d3:1319:8a2e:0370:7344"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "CamRoleName",
|
||||
"type": "文本",
|
||||
"desc": "CAM角色名。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "HpcClusterId",
|
||||
"type": "文本",
|
||||
"desc": "高性能计算集群ID。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": ""
|
||||
},
|
||||
{
|
||||
"name": "RdmaIpAddresses",
|
||||
"type": "文本、多值",
|
||||
"desc": "高性能计算集群IP列表。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": []
|
||||
},
|
||||
{
|
||||
"name": "IsolatedSource",
|
||||
"type": "文本",
|
||||
"desc": "实例隔离类型。取值范围: ARREAR:表示欠费隔离 EXPIRE:表示到期隔离 MANMADE:表示主动退还隔离 NOTISOLATED:表示未隔离 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": "NOTISOLATED"
|
||||
},
|
||||
{
|
||||
"name": "GPUInfo",
|
||||
"type": "json",
|
||||
"desc": "GPU信息。如果是gpu类型子机,该值会返回GPU信息,如果是其他类型子机则不返回。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "LicenseType",
|
||||
"type": "文本",
|
||||
"desc": "实例的操作系统许可类型,默认为TencentCloud",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "DisableApiTermination",
|
||||
"type": "Boolean",
|
||||
"desc": "实例销毁保护标志,表示是否允许通过api接口删除实例。取值范围: TRUE:表示开启实例保护,不允许通过api接口删除实例 FALSE:表示关闭实例保护,允许通过api接口删除实例 默认取值:FALSE。",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "DefaultLoginUser",
|
||||
"type": "文本",
|
||||
"desc": "默认登录用户。",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "DefaultLoginPort",
|
||||
"type": "整数",
|
||||
"desc": "默认登录端口。",
|
||||
"example": null
|
||||
},
|
||||
{
|
||||
"name": "LatestOperationErrorMsg",
|
||||
"type": "文本",
|
||||
"desc": "实例的最新操作错误信息。 注意:此字段可能返回 null,表示取不到有效值。",
|
||||
"example": null
|
||||
}
|
||||
]
|
||||
@@ -2,27 +2,16 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import yaml
|
||||
import requests
|
||||
from flask import current_app
|
||||
import json
|
||||
|
||||
from api.extensions import cache
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.custom_dashboard import CustomDashboardManager
|
||||
from api.models.cmdb import Attribute, AutoDiscoveryExecHistory
|
||||
from api.models.cmdb import AutoDiscoveryCI
|
||||
from api.models.cmdb import AutoDiscoveryCIType
|
||||
from api.models.cmdb import AutoDiscoveryCITypeRelation
|
||||
from api.models.cmdb import AutoDiscoveryCounter
|
||||
from api.models.cmdb import AutoDiscoveryRuleSyncHistory
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import CIType
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
from api.models.cmdb import PreferenceShowAttributes
|
||||
from api.models.cmdb import PreferenceTreeView
|
||||
from api.models.cmdb import RelationType
|
||||
|
||||
|
||||
@@ -45,7 +34,6 @@ class AttributeCache(object):
|
||||
attr = attr or Attribute.get_by(alias=key, first=True, to_dict=False)
|
||||
if attr is not None:
|
||||
cls.set(attr)
|
||||
|
||||
return attr
|
||||
|
||||
@classmethod
|
||||
@@ -79,7 +67,6 @@ class CITypeCache(object):
|
||||
ct = ct or CIType.get_by(alias=key, first=True, to_dict=False)
|
||||
if ct is not None:
|
||||
cls.set(ct)
|
||||
|
||||
return ct
|
||||
|
||||
@classmethod
|
||||
@@ -111,7 +98,6 @@ class RelationTypeCache(object):
|
||||
ct = RelationType.get_by(name=key, first=True, to_dict=False) or RelationType.get_by_id(key)
|
||||
if ct is not None:
|
||||
cls.set(ct)
|
||||
|
||||
return ct
|
||||
|
||||
@classmethod
|
||||
@@ -147,15 +133,12 @@ class CITypeAttributesCache(object):
|
||||
attrs = attrs or cache.get(cls.PREFIX_ID.format(key))
|
||||
if not attrs:
|
||||
attrs = CITypeAttribute.get_by(type_id=key, to_dict=False)
|
||||
|
||||
if not attrs:
|
||||
ci_type = CIType.get_by(name=key, first=True, to_dict=False)
|
||||
if ci_type is not None:
|
||||
attrs = CITypeAttribute.get_by(type_id=ci_type.id, to_dict=False)
|
||||
|
||||
if attrs is not None:
|
||||
cls.set(key, attrs)
|
||||
|
||||
return attrs
|
||||
|
||||
@classmethod
|
||||
@@ -172,16 +155,13 @@ class CITypeAttributesCache(object):
|
||||
attrs = attrs or cache.get(cls.PREFIX_ID2.format(key))
|
||||
if not attrs:
|
||||
attrs = CITypeAttribute.get_by(type_id=key, to_dict=False)
|
||||
|
||||
if not attrs:
|
||||
ci_type = CIType.get_by(name=key, first=True, to_dict=False)
|
||||
if ci_type is not None:
|
||||
attrs = CITypeAttribute.get_by(type_id=ci_type.id, to_dict=False)
|
||||
|
||||
if attrs is not None:
|
||||
attrs = [(i, AttributeCache.get(i.attr_id)) for i in attrs]
|
||||
cls.set2(key, attrs)
|
||||
|
||||
return attrs
|
||||
|
||||
@classmethod
|
||||
@@ -221,13 +201,13 @@ class CITypeAttributeCache(object):
|
||||
|
||||
@classmethod
|
||||
def get(cls, type_id, attr_id):
|
||||
|
||||
attr = cache.get(cls.PREFIX_ID.format(type_id, attr_id))
|
||||
attr = attr or cache.get(cls.PREFIX_ID.format(type_id, attr_id))
|
||||
attr = attr or CITypeAttribute.get_by(type_id=type_id, attr_id=attr_id, first=True, to_dict=False)
|
||||
|
||||
if attr is not None:
|
||||
cls.set(type_id, attr_id, attr)
|
||||
|
||||
if not attr:
|
||||
attr = CITypeAttribute.get_by(type_id=type_id, attr_id=attr_id, first=True, to_dict=False)
|
||||
if attr is not None:
|
||||
cls.set(type_id, attr_id, attr)
|
||||
return attr
|
||||
|
||||
@classmethod
|
||||
@@ -240,9 +220,7 @@ class CITypeAttributeCache(object):
|
||||
|
||||
|
||||
class CMDBCounterCache(object):
|
||||
KEY = 'CMDB::Counter::dashboard'
|
||||
KEY2 = 'CMDB::Counter::adc'
|
||||
KEY3 = 'CMDB::Counter::sub'
|
||||
KEY = 'CMDB::Counter'
|
||||
|
||||
@classmethod
|
||||
def get(cls):
|
||||
@@ -255,7 +233,7 @@ class CMDBCounterCache(object):
|
||||
|
||||
@classmethod
|
||||
def set(cls, result):
|
||||
cache.set(cls.KEY, json.loads(json.dumps(result)), timeout=0)
|
||||
cache.set(cls.KEY, result, timeout=0)
|
||||
|
||||
@classmethod
|
||||
def reset(cls):
|
||||
@@ -263,83 +241,53 @@ class CMDBCounterCache(object):
|
||||
result = {}
|
||||
for custom in customs:
|
||||
if custom['category'] == 0:
|
||||
res = cls.sum_counter(custom)
|
||||
result[custom['id']] = cls.summary_counter(custom['type_id'])
|
||||
elif custom['category'] == 1:
|
||||
res = cls.attribute_counter(custom)
|
||||
else:
|
||||
res = cls.relation_counter(custom.get('type_id'),
|
||||
custom.get('level'),
|
||||
custom.get('options', {}).get('filter', ''),
|
||||
custom.get('options', {}).get('type_ids', ''))
|
||||
|
||||
if res:
|
||||
result[custom['id']] = res
|
||||
result[custom['id']] = cls.attribute_counter(custom['type_id'], custom['attr_id'])
|
||||
elif custom['category'] == 2:
|
||||
result[custom['id']] = cls.relation_counter(custom['type_id'], custom['level'])
|
||||
|
||||
cls.set(result)
|
||||
|
||||
return json.loads(json.dumps(result))
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def update(cls, custom, flush=True):
|
||||
def update(cls, custom):
|
||||
result = cache.get(cls.KEY) or {}
|
||||
if not result:
|
||||
result = cls.reset()
|
||||
|
||||
if custom['category'] == 0:
|
||||
res = cls.sum_counter(custom)
|
||||
result[custom['id']] = cls.summary_counter(custom['type_id'])
|
||||
elif custom['category'] == 1:
|
||||
res = cls.attribute_counter(custom)
|
||||
else:
|
||||
res = cls.relation_counter(custom.get('type_id'),
|
||||
custom.get('level'),
|
||||
custom.get('options', {}).get('filter', ''),
|
||||
custom.get('options', {}).get('type_ids', ''))
|
||||
result[custom['id']] = cls.attribute_counter(custom['type_id'], custom['attr_id'])
|
||||
elif custom['category'] == 2:
|
||||
result[custom['id']] = cls.relation_counter(custom['type_id'], custom['level'])
|
||||
|
||||
if res and flush:
|
||||
result[custom['id']] = res
|
||||
cls.set(result)
|
||||
cls.set(result)
|
||||
|
||||
return json.loads(json.dumps(res))
|
||||
@staticmethod
|
||||
def summary_counter(type_id):
|
||||
return db.session.query(CI.id).filter(CI.deleted.is_(False)).filter(CI.type_id == type_id).count()
|
||||
|
||||
@classmethod
|
||||
def relation_counter(cls, type_id, level, other_filer, type_ids):
|
||||
from api.lib.cmdb.search.ci_relation.search import Search as RelSearch
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
from api.lib.cmdb.attribute import AttributeManager
|
||||
@staticmethod
|
||||
def relation_counter(type_id, level):
|
||||
|
||||
query = "_type:{}".format(type_id)
|
||||
if other_filer:
|
||||
query = "{},{}".format(query, other_filer)
|
||||
s = search(query, count=1000000)
|
||||
try:
|
||||
type_names, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
root_type = CITypeCache.get(type_id)
|
||||
show_attr_id = root_type and root_type.show_id
|
||||
show_attr = AttributeCache.get(show_attr_id)
|
||||
uri = current_app.config.get('CMDB_API')
|
||||
|
||||
type_id_names = []
|
||||
for i in type_names:
|
||||
attr_value = i.get(show_attr and show_attr.name) or i.get(i.get('unique'))
|
||||
enum_map = AttributeManager.get_enum_map(show_attr_id or i.get('unique'))
|
||||
type_names = requests.get("{}/ci/s?q=_type:{}&count=10000".format(uri, type_id)).json().get('result')
|
||||
type_id_names = [(str(i.get('_id')), i.get(i.get('unique'))) for i in type_names]
|
||||
|
||||
type_id_names.append((str(i.get('_id')), enum_map.get(attr_value, attr_value)))
|
||||
|
||||
s = RelSearch([i[0] for i in type_id_names], level)
|
||||
try:
|
||||
stats = s.statistics(type_ids, need_filter=False)
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
url = "{}/ci_relations/statistics?root_ids={}&level={}".format(
|
||||
uri, ','.join([i[0] for i in type_id_names]), level)
|
||||
stats = requests.get(url).json()
|
||||
|
||||
id2name = dict(type_id_names)
|
||||
type_ids = set()
|
||||
for i in (stats.get('detail') or []):
|
||||
for j in stats['detail'][i]:
|
||||
type_ids.add(j)
|
||||
|
||||
for type_id in type_ids:
|
||||
_type = CITypeCache.get(type_id)
|
||||
id2name[type_id] = _type and _type.alias
|
||||
@@ -358,241 +306,10 @@ class CMDBCounterCache(object):
|
||||
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def attribute_counter(cls, custom):
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.cmdb.attribute import AttributeManager
|
||||
|
||||
custom.setdefault('options', {})
|
||||
type_id = custom.get('type_id')
|
||||
attr_id = custom.get('attr_id')
|
||||
type_ids = custom['options'].get('type_ids') or (type_id and [type_id])
|
||||
attr_ids = list(map(str, custom['options'].get('attr_ids') or (attr_id and [attr_id])))
|
||||
try:
|
||||
attr2value_type = [AttributeCache.get(i).value_type for i in attr_ids]
|
||||
except AttributeError:
|
||||
return
|
||||
|
||||
other_filter = custom['options'].get('filter')
|
||||
other_filter = "{}".format(other_filter) if other_filter else ''
|
||||
|
||||
if custom['options'].get('ret') == 'cis':
|
||||
enum_map = {}
|
||||
for _attr_id in attr_ids:
|
||||
_attr = AttributeCache.get(_attr_id)
|
||||
if _attr:
|
||||
enum_map[_attr.alias] = AttributeManager.get_enum_map(_attr_id)
|
||||
|
||||
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
|
||||
s = search(query, fl=attr_ids, ret_key='alias', count=100)
|
||||
try:
|
||||
cis, _, _, _, _, _ = s.search()
|
||||
cis = [{k: (enum_map.get(k) or {}).get(v, v) for k, v in ci.items()} for ci in cis]
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
|
||||
return cis
|
||||
|
||||
origin_result = dict()
|
||||
result = dict()
|
||||
# level = 1
|
||||
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
|
||||
s = search(query, fl=attr_ids, facet=[attr_ids[0]], count=1)
|
||||
try:
|
||||
_, _, _, _, _, facet = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
|
||||
enum_map1 = AttributeManager.get_enum_map(attr_ids[0])
|
||||
for i in (list(facet.values()) or [[]])[0]:
|
||||
k = ValueTypeMap.serialize2[attr2value_type[0]](str(i[0]))
|
||||
result[enum_map1.get(k, k)] = i[1]
|
||||
origin_result[k] = i[1]
|
||||
if len(attr_ids) == 1:
|
||||
return result
|
||||
|
||||
# level = 2
|
||||
enum_map2 = AttributeManager.get_enum_map(attr_ids[1])
|
||||
for v in origin_result:
|
||||
query = "_type:({}),{},{}:{}".format(";".join(map(str, type_ids)), other_filter, attr_ids[0], v)
|
||||
s = search(query, fl=attr_ids, facet=[attr_ids[1]], count=1)
|
||||
try:
|
||||
_, _, _, _, _, facet = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
result[enum_map1.get(v, v)] = dict()
|
||||
origin_result[v] = dict()
|
||||
for i in (list(facet.values()) or [[]])[0]:
|
||||
k = ValueTypeMap.serialize2[attr2value_type[1]](str(i[0]))
|
||||
result[enum_map1.get(v, v)][enum_map2.get(k, k)] = i[1]
|
||||
origin_result[v][k] = i[1]
|
||||
|
||||
if len(attr_ids) == 2:
|
||||
return result
|
||||
|
||||
# level = 3
|
||||
enum_map3 = AttributeManager.get_enum_map(attr_ids[2])
|
||||
for v1 in origin_result:
|
||||
if not isinstance(result[enum_map1.get(v1, v1)], dict):
|
||||
continue
|
||||
for v2 in origin_result[v1]:
|
||||
query = "_type:({}),{},{}:{},{}:{}".format(";".join(map(str, type_ids)), other_filter,
|
||||
attr_ids[0], v1, attr_ids[1], v2)
|
||||
s = search(query, fl=attr_ids, facet=[attr_ids[2]], count=1)
|
||||
try:
|
||||
_, _, _, _, _, facet = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
result[enum_map1.get(v1, v1)][enum_map2.get(v2, v2)] = dict()
|
||||
for i in (list(facet.values()) or [[]])[0]:
|
||||
k = ValueTypeMap.serialize2[attr2value_type[2]](str(i[0]))
|
||||
result[enum_map1.get(v1, v1)][enum_map2.get(v2, v2)][enum_map3.get(k, k)] = i[1]
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def sum_counter(custom):
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
|
||||
custom.setdefault('options', {})
|
||||
type_id = custom.get('type_id')
|
||||
type_ids = custom['options'].get('type_ids') or (type_id and [type_id])
|
||||
other_filter = custom['options'].get('filter') or ''
|
||||
|
||||
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
|
||||
s = search(query, count=1)
|
||||
try:
|
||||
_, _, _, _, numfound, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error(e)
|
||||
return
|
||||
|
||||
return numfound
|
||||
|
||||
@classmethod
|
||||
def flush_adc_counter(cls):
|
||||
res = db.session.query(CI.type_id, CI.is_auto_discovery)
|
||||
result = dict()
|
||||
for i in res:
|
||||
result.setdefault(i.type_id, dict(total=0, auto_discovery=0))
|
||||
result[i.type_id]['total'] += 1
|
||||
if i.is_auto_discovery:
|
||||
result[i.type_id]['auto_discovery'] += 1
|
||||
|
||||
cache.set(cls.KEY2, result, timeout=0)
|
||||
|
||||
res = db.session.query(AutoDiscoveryCI.created_at,
|
||||
AutoDiscoveryCI.updated_at,
|
||||
AutoDiscoveryCI.adt_id,
|
||||
AutoDiscoveryCI.type_id,
|
||||
AutoDiscoveryCI.is_accept).filter(AutoDiscoveryCI.deleted.is_(False))
|
||||
|
||||
today = datetime.datetime.today()
|
||||
this_month = datetime.datetime(today.year, today.month, 1)
|
||||
last_month = this_month - datetime.timedelta(days=1)
|
||||
last_month = datetime.datetime(last_month.year, last_month.month, 1)
|
||||
this_week = today - datetime.timedelta(days=datetime.date.weekday(today))
|
||||
this_week = datetime.datetime(this_week.year, this_week.month, this_week.day)
|
||||
last_week = this_week - datetime.timedelta(days=7)
|
||||
last_week = datetime.datetime(last_week.year, last_week.month, last_week.day)
|
||||
result = dict()
|
||||
for i in res:
|
||||
if i.type_id not in result:
|
||||
result[i.type_id] = dict(instance_count=0, accept_count=0,
|
||||
this_month_count=0, this_week_count=0, last_month_count=0, last_week_count=0)
|
||||
|
||||
adts = AutoDiscoveryCIType.get_by(type_id=i.type_id, to_dict=False)
|
||||
result[i.type_id]['rule_count'] = len(adts) + AutoDiscoveryCITypeRelation.get_by(
|
||||
ad_type_id=i.type_id, only_query=True).count()
|
||||
result[i.type_id]['exec_target_count'] = len(
|
||||
set([i.oneagent_id for adt in adts for i in db.session.query(
|
||||
AutoDiscoveryRuleSyncHistory.oneagent_id).filter(
|
||||
AutoDiscoveryRuleSyncHistory.adt_id == adt.id)]))
|
||||
|
||||
result[i.type_id]['instance_count'] += 1
|
||||
if i.is_accept:
|
||||
result[i.type_id]['accept_count'] += 1
|
||||
|
||||
if last_month <= i.created_at < this_month:
|
||||
result[i.type_id]['last_month_count'] += 1
|
||||
elif i.created_at >= this_month:
|
||||
result[i.type_id]['this_month_count'] += 1
|
||||
|
||||
if last_week <= i.created_at < this_week:
|
||||
result[i.type_id]['last_week_count'] += 1
|
||||
elif i.created_at >= this_week:
|
||||
result[i.type_id]['this_week_count'] += 1
|
||||
|
||||
for type_id in result:
|
||||
existed = AutoDiscoveryCounter.get_by(type_id=type_id, first=True, to_dict=False)
|
||||
if existed is None:
|
||||
AutoDiscoveryCounter.create(type_id=type_id, **result[type_id])
|
||||
else:
|
||||
existed.update(**result[type_id])
|
||||
|
||||
for i in AutoDiscoveryCounter.get_by(to_dict=False):
|
||||
if i.type_id not in result:
|
||||
i.delete()
|
||||
|
||||
@classmethod
|
||||
def clear_ad_exec_history(cls):
|
||||
ci_types = CIType.get_by(to_dict=False)
|
||||
for ci_type in ci_types:
|
||||
for i in AutoDiscoveryExecHistory.get_by(type_id=ci_type.id, only_query=True).order_by(
|
||||
AutoDiscoveryExecHistory.id.desc()).offset(50000):
|
||||
i.delete(commit=False)
|
||||
db.session.commit()
|
||||
|
||||
@classmethod
|
||||
def get_adc_counter(cls):
|
||||
return cache.get(cls.KEY2) or cls.flush_adc_counter()
|
||||
|
||||
@classmethod
|
||||
def flush_sub_counter(cls):
|
||||
result = dict(type_id2users=defaultdict(list))
|
||||
|
||||
types = db.session.query(PreferenceShowAttributes.type_id,
|
||||
PreferenceShowAttributes.uid, PreferenceShowAttributes.created_at).filter(
|
||||
PreferenceShowAttributes.deleted.is_(False)).group_by(
|
||||
PreferenceShowAttributes.uid, PreferenceShowAttributes.type_id)
|
||||
for i in types:
|
||||
result['type_id2users'][i.type_id].append(i.uid)
|
||||
|
||||
types = PreferenceTreeView.get_by(to_dict=False)
|
||||
for i in types:
|
||||
|
||||
if i.uid not in result['type_id2users'][i.type_id]:
|
||||
result['type_id2users'][i.type_id].append(i.uid)
|
||||
|
||||
cache.set(cls.KEY3, result, timeout=0)
|
||||
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def get_sub_counter(cls):
|
||||
return cache.get(cls.KEY3) or cls.flush_sub_counter()
|
||||
|
||||
|
||||
class AutoDiscoveryMappingCache(object):
|
||||
PREFIX = 'CMDB::AutoDiscovery::Mapping::{}'
|
||||
|
||||
@classmethod
|
||||
def get(cls, name):
|
||||
res = cache.get(cls.PREFIX.format(name)) or {}
|
||||
if not res:
|
||||
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
|
||||
"auto_discovery/mapping/{}.yaml".format(name))
|
||||
if os.path.exists(path):
|
||||
with open(path, 'r') as f:
|
||||
mapping = yaml.safe_load(f)
|
||||
res = mapping.get('mapping') or {}
|
||||
res and cache.set(cls.PREFIX.format(name), res, timeout=0)
|
||||
|
||||
return res
|
||||
def attribute_counter(type_id, attr_id):
|
||||
uri = current_app.config.get('CMDB_API')
|
||||
url = "{}/ci/s?q=_type:{}&fl={}&facet={}".format(uri, type_id, attr_id, attr_id)
|
||||
res = requests.get(url).json()
|
||||
if res.get('facet'):
|
||||
return dict([i[:2] for i in list(res.get('facet').values())[0]])
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -12,10 +12,6 @@ class ValueTypeEnum(BaseEnum):
|
||||
DATE = "4"
|
||||
TIME = "5"
|
||||
JSON = "6"
|
||||
PASSWORD = TEXT
|
||||
LINK = TEXT
|
||||
BOOL = "7"
|
||||
REFERENCE = INT
|
||||
|
||||
|
||||
class ConstraintEnum(BaseEnum):
|
||||
@@ -43,23 +39,20 @@ class OperateType(BaseEnum):
|
||||
|
||||
|
||||
class CITypeOperateType(BaseEnum):
|
||||
ADD = "0" # add CIType
|
||||
UPDATE = "1" # update CIType
|
||||
DELETE = "2" # delete CIType
|
||||
ADD_ATTRIBUTE = "3"
|
||||
UPDATE_ATTRIBUTE = "4"
|
||||
DELETE_ATTRIBUTE = "5"
|
||||
ADD_TRIGGER = "6"
|
||||
UPDATE_TRIGGER = "7"
|
||||
DELETE_TRIGGER = "8"
|
||||
ADD_UNIQUE_CONSTRAINT = "9"
|
||||
UPDATE_UNIQUE_CONSTRAINT = "10"
|
||||
DELETE_UNIQUE_CONSTRAINT = "11"
|
||||
ADD_RELATION = "12"
|
||||
DELETE_RELATION = "13"
|
||||
ADD_RECONCILIATION = "14"
|
||||
UPDATE_RECONCILIATION = "15"
|
||||
DELETE_RECONCILIATION = "16"
|
||||
ADD = "0" # 新增模型
|
||||
UPDATE = "1" # 修改模型
|
||||
DELETE = "2" # 删除模型
|
||||
ADD_ATTRIBUTE = "3" # 新增属性
|
||||
UPDATE_ATTRIBUTE = "4" # 修改属性
|
||||
DELETE_ATTRIBUTE = "5" # 删除属性
|
||||
ADD_TRIGGER = "6" # 新增触发器
|
||||
UPDATE_TRIGGER = "7" # 修改触发器
|
||||
DELETE_TRIGGER = "8" # 删除触发器
|
||||
ADD_UNIQUE_CONSTRAINT = "9" # 新增联合唯一
|
||||
UPDATE_UNIQUE_CONSTRAINT = "10" # 修改联合唯一
|
||||
DELETE_UNIQUE_CONSTRAINT = "11" # 删除联合唯一
|
||||
ADD_RELATION = "12" # 新增关系
|
||||
DELETE_RELATION = "13" # 删除关系
|
||||
|
||||
|
||||
class RetKey(BaseEnum):
|
||||
@@ -74,8 +67,6 @@ class ResourceTypeEnum(BaseEnum):
|
||||
CI_TYPE_RELATION = "CITypeRelation" # create/delete/grant
|
||||
RELATION_VIEW = "RelationView" # read/update/delete/grant
|
||||
CI_FILTER = "CIFilter" # read
|
||||
PAGE = "page" # read
|
||||
TOPOLOGY_VIEW = "TopologyView" # read/update/delete/grant
|
||||
|
||||
|
||||
class PermEnum(BaseEnum):
|
||||
@@ -95,8 +86,7 @@ class RoleEnum(BaseEnum):
|
||||
class AutoDiscoveryType(BaseEnum):
|
||||
AGENT = "agent"
|
||||
SNMP = "snmp"
|
||||
HTTP = "http" # cloud
|
||||
COMPONENTS = "components"
|
||||
HTTP = "http"
|
||||
|
||||
|
||||
class AttributeDefaultValueEnum(BaseEnum):
|
||||
@@ -105,22 +95,9 @@ class AttributeDefaultValueEnum(BaseEnum):
|
||||
AUTO_INC_ID = "$auto_inc_id"
|
||||
|
||||
|
||||
class ExecuteStatusEnum(BaseEnum):
|
||||
COMPLETED = '0'
|
||||
FAILED = '1'
|
||||
RUNNING = '2'
|
||||
|
||||
class RelationSourceEnum(BaseEnum):
|
||||
ATTRIBUTE_VALUES = "0"
|
||||
AUTO_DISCOVERY = "1"
|
||||
|
||||
|
||||
CMDB_QUEUE = "one_cmdb_async"
|
||||
REDIS_PREFIX_CI = "ONE_CMDB"
|
||||
REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"
|
||||
REDIS_PREFIX_CI_RELATION2 = "CMDB_CI_RELATION2"
|
||||
|
||||
BUILTIN_KEYWORDS = {'id', '_id', 'ci_id', 'type', '_type', 'ci_type', 'ticket_id'}
|
||||
|
||||
L_TYPE = None
|
||||
L_CI = None
|
||||
|
||||
@@ -14,14 +14,6 @@ class CustomDashboardManager(object):
|
||||
def get():
|
||||
return sorted(CustomDashboard.get_by(to_dict=True), key=lambda x: (x["category"], x['order']))
|
||||
|
||||
@staticmethod
|
||||
def preview(**kwargs):
|
||||
from api.lib.cmdb.cache import CMDBCounterCache
|
||||
|
||||
res = CMDBCounterCache.update(kwargs, flush=False)
|
||||
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
from api.lib.cmdb.cache import CMDBCounterCache
|
||||
@@ -31,9 +23,9 @@ class CustomDashboardManager(object):
|
||||
|
||||
new = CustomDashboard.create(**kwargs)
|
||||
|
||||
res = CMDBCounterCache.update(new.to_dict())
|
||||
CMDBCounterCache.update(new.to_dict())
|
||||
|
||||
return new, res
|
||||
return new
|
||||
|
||||
@staticmethod
|
||||
def update(_id, **kwargs):
|
||||
@@ -43,9 +35,9 @@ class CustomDashboardManager(object):
|
||||
|
||||
new = existed.update(**kwargs)
|
||||
|
||||
res = CMDBCounterCache.update(new.to_dict())
|
||||
CMDBCounterCache.update(new.to_dict())
|
||||
|
||||
return new, res
|
||||
return new
|
||||
|
||||
@staticmethod
|
||||
def batch_update(id2options):
|
||||
|
||||
@@ -4,32 +4,28 @@
|
||||
import json
|
||||
|
||||
from flask import abort
|
||||
from flask_login import current_user
|
||||
from flask import g
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import RelationTypeCache
|
||||
from api.lib.cmdb.const import OperateType
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.perms import CIFilterPermsCRUD
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import AttributeHistory
|
||||
from api.models.cmdb import CIRelationHistory
|
||||
from api.models.cmdb import CITriggerHistory
|
||||
from api.models.cmdb import CITypeHistory
|
||||
from api.models.cmdb import CITypeTrigger
|
||||
from api.models.cmdb import CITypeUniqueConstraint
|
||||
from api.models.cmdb import OperationRecord
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
|
||||
|
||||
class AttributeHistoryManger(object):
|
||||
@staticmethod
|
||||
def get_records_for_attributes(start, end, username, page, page_size, operate_type, type_id,
|
||||
ci_id=None, attr_id=None, ci_ids=None, more=False):
|
||||
ci_id=None, attr_id=None):
|
||||
|
||||
records = db.session.query(OperationRecord, AttributeHistory).join(
|
||||
AttributeHistory, OperationRecord.id == AttributeHistory.record_id)
|
||||
@@ -51,9 +47,6 @@ class AttributeHistoryManger(object):
|
||||
if ci_id is not None:
|
||||
records = records.filter(AttributeHistory.ci_id == ci_id)
|
||||
|
||||
if ci_ids and isinstance(ci_ids, list):
|
||||
records = records.filter(AttributeHistory.ci_id.in_(ci_ids))
|
||||
|
||||
if attr_id is not None:
|
||||
records = records.filter(AttributeHistory.attr_id == attr_id)
|
||||
|
||||
@@ -61,39 +54,17 @@ class AttributeHistoryManger(object):
|
||||
total = len(records)
|
||||
|
||||
res = {}
|
||||
show_attr_set = {}
|
||||
show_attr_cache = {}
|
||||
for record in records:
|
||||
record_id = record.OperationRecord.id
|
||||
type_id = record.OperationRecord.type_id
|
||||
ci_id = record.AttributeHistory.ci_id
|
||||
show_attr_set[ci_id] = None
|
||||
show_attr = show_attr_cache.setdefault(
|
||||
type_id,
|
||||
AttributeCache.get(
|
||||
CITypeCache.get(type_id).show_id or CITypeCache.get(type_id).unique_id) if CITypeCache.get(type_id) else None
|
||||
)
|
||||
if show_attr:
|
||||
attr_table = TableMap(attr=show_attr).table
|
||||
attr_record = attr_table.get_by(attr_id=show_attr.id, ci_id=ci_id, first=True, to_dict=False)
|
||||
show_attr_set[ci_id] = attr_record.value if attr_record else None
|
||||
|
||||
attr_hist = record.AttributeHistory.to_dict()
|
||||
attr_hist['attr'] = AttributeCache.get(attr_hist['attr_id'])
|
||||
if attr_hist['attr']:
|
||||
attr_hist['attr_name'] = attr_hist['attr'].name
|
||||
attr_hist['attr_alias'] = attr_hist['attr'].alias
|
||||
if more:
|
||||
attr_hist['is_list'] = attr_hist['attr'].is_list
|
||||
attr_hist['is_computed'] = attr_hist['attr'].is_computed
|
||||
attr_hist['is_password'] = attr_hist['attr'].is_password
|
||||
attr_hist['default'] = attr_hist['attr'].default
|
||||
attr_hist['value_type'] = attr_hist['attr'].value_type
|
||||
attr_hist.pop("attr")
|
||||
|
||||
if record_id not in res:
|
||||
record_dict = record.OperationRecord.to_dict()
|
||||
record_dict['show_attr_value'] = show_attr_set.get(ci_id)
|
||||
record_dict["user"] = UserCache.get(record_dict.get("uid"))
|
||||
if record_dict["user"]:
|
||||
record_dict['user'] = record_dict['user'].nickname
|
||||
@@ -163,7 +134,7 @@ class AttributeHistoryManger(object):
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
cis = CIManager().get_cis_by_ids(list(ci_ids),
|
||||
unique_required=True)
|
||||
cis = {i['_id']: i for i in cis if i}
|
||||
cis = {i['_id']: i for i in cis}
|
||||
|
||||
return total, res, cis
|
||||
|
||||
@@ -189,14 +160,12 @@ class AttributeHistoryManger(object):
|
||||
record = i.OperationRecord
|
||||
item = dict(attr_name=attr.name,
|
||||
attr_alias=attr.alias,
|
||||
value_type=attr.value_type,
|
||||
operate_type=hist.operate_type,
|
||||
username=user and user.nickname,
|
||||
old=hist.old,
|
||||
new=hist.new,
|
||||
created_at=record.created_at.strftime('%Y-%m-%d %H:%M:%S'),
|
||||
record_id=record.id,
|
||||
ticket_id=record.ticket_id,
|
||||
hid=hist.id
|
||||
)
|
||||
result.append(item)
|
||||
@@ -207,8 +176,8 @@ class AttributeHistoryManger(object):
|
||||
def get_record_detail(record_id):
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
|
||||
record = (OperationRecord.get_by_id(record_id) or
|
||||
abort(404, ErrFormat.record_not_found.format("id={}".format(record_id))))
|
||||
record = OperationRecord.get_by_id(record_id) or \
|
||||
abort(404, ErrFormat.record_not_found.format("id={}".format(record_id)))
|
||||
|
||||
username = UserCache.get(record.uid).nickname or UserCache.get(record.uid).username
|
||||
timestamp = record.created_at.strftime("%Y-%m-%d %H:%M:%S")
|
||||
@@ -230,9 +199,9 @@ class AttributeHistoryManger(object):
|
||||
return username, timestamp, attr_dict, rel_dict
|
||||
|
||||
@staticmethod
|
||||
def add(record_id, ci_id, history_list, type_id=None, ticket_id=None, flush=False, commit=True):
|
||||
def add(record_id, ci_id, history_list, type_id=None, flush=False, commit=True):
|
||||
if record_id is None:
|
||||
record = OperationRecord.create(uid=current_user.uid, type_id=type_id, ticket_id=ticket_id)
|
||||
record = OperationRecord.create(uid=g.user.uid, type_id=type_id)
|
||||
record_id = record.id
|
||||
|
||||
for attr_id, operate_type, old, new in history_list or []:
|
||||
@@ -250,8 +219,8 @@ class AttributeHistoryManger(object):
|
||||
|
||||
class CIRelationHistoryManager(object):
|
||||
@staticmethod
|
||||
def add(rel_obj, operate_type=OperateType.ADD, uid=None):
|
||||
record = OperationRecord.create(uid=uid or current_user.uid)
|
||||
def add(rel_obj, operate_type=OperateType.ADD):
|
||||
record = OperationRecord.create(uid=g.user.uid)
|
||||
|
||||
CIRelationHistory.create(relation_id=rel_obj.id,
|
||||
record_id=record.id,
|
||||
@@ -300,7 +269,7 @@ class CITypeHistoryManager(object):
|
||||
return numfound, result
|
||||
|
||||
@staticmethod
|
||||
def add(operate_type, type_id, attr_id=None, trigger_id=None, unique_constraint_id=None, change=None, rc_id=None):
|
||||
def add(operate_type, type_id, attr_id=None, trigger_id=None, unique_constraint_id=None, change=None):
|
||||
if type_id is None and attr_id is not None:
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
type_ids = [i.type_id for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False)]
|
||||
@@ -310,76 +279,10 @@ class CITypeHistoryManager(object):
|
||||
for _type_id in type_ids:
|
||||
payload = dict(operate_type=operate_type,
|
||||
type_id=_type_id,
|
||||
uid=current_user.uid,
|
||||
uid=g.user.uid,
|
||||
attr_id=attr_id,
|
||||
trigger_id=trigger_id,
|
||||
rc_id=rc_id,
|
||||
unique_constraint_id=unique_constraint_id,
|
||||
change=change)
|
||||
|
||||
CITypeHistory.create(**payload)
|
||||
|
||||
|
||||
class CITriggerHistoryManager(object):
|
||||
@staticmethod
|
||||
def get(page, page_size, type_id=None, trigger_id=None, operate_type=None):
|
||||
query = CITriggerHistory.get_by(only_query=True)
|
||||
if type_id:
|
||||
query = query.join(CI, CI.id == CITriggerHistory.ci_id).filter(CI.type_id == type_id)
|
||||
|
||||
if trigger_id:
|
||||
query = query.filter(CITriggerHistory.trigger_id == trigger_id)
|
||||
|
||||
if operate_type:
|
||||
query = query.filter(CITriggerHistory.operate_type == operate_type)
|
||||
|
||||
numfound = query.count()
|
||||
|
||||
query = query.order_by(CITriggerHistory.id.desc())
|
||||
result = query.offset((page - 1) * page_size).limit(page_size)
|
||||
result = [i.to_dict() for i in result]
|
||||
for res in result:
|
||||
if res.get('trigger_id'):
|
||||
trigger = CITypeTrigger.get_by_id(res['trigger_id'])
|
||||
res['trigger'] = trigger and trigger.to_dict()
|
||||
|
||||
return numfound, result
|
||||
|
||||
@staticmethod
|
||||
def get_by_ci_id(ci_id):
|
||||
res = db.session.query(CITriggerHistory, CITypeTrigger).join(
|
||||
CITypeTrigger, CITypeTrigger.id == CITriggerHistory.trigger_id).filter(
|
||||
CITriggerHistory.ci_id == ci_id).order_by(CITriggerHistory.id.desc())
|
||||
|
||||
result = []
|
||||
id2trigger = dict()
|
||||
for i in res:
|
||||
hist = i.CITriggerHistory
|
||||
item = dict(is_ok=hist.is_ok,
|
||||
operate_type=hist.operate_type,
|
||||
notify=hist.notify,
|
||||
trigger_id=hist.trigger_id,
|
||||
trigger_name=hist.trigger_name,
|
||||
webhook=hist.webhook,
|
||||
created_at=hist.created_at.strftime('%Y-%m-%d %H:%M:%S'),
|
||||
record_id=hist.record_id,
|
||||
hid=hist.id
|
||||
)
|
||||
if i.CITypeTrigger.id not in id2trigger:
|
||||
id2trigger[i.CITypeTrigger.id] = i.CITypeTrigger.to_dict()
|
||||
|
||||
result.append(item)
|
||||
|
||||
return dict(items=result, id2trigger=id2trigger)
|
||||
|
||||
@staticmethod
|
||||
def add(operate_type, record_id, ci_id, trigger_id, trigger_name, is_ok=False, notify=None, webhook=None):
|
||||
|
||||
CITriggerHistory.create(operate_type=operate_type,
|
||||
record_id=record_id,
|
||||
ci_id=ci_id,
|
||||
trigger_id=trigger_id,
|
||||
trigger_name=trigger_name,
|
||||
is_ok=is_ok,
|
||||
notify=notify,
|
||||
webhook=webhook)
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import copy
|
||||
|
||||
import functools
|
||||
|
||||
import redis_lock
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
|
||||
from api.extensions import db
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.mixin import DBMixin
|
||||
@@ -43,11 +40,6 @@ class CIFilterPermsCRUD(DBMixin):
|
||||
result[i['rid']]['ci_filter'] = ""
|
||||
result[i['rid']]['ci_filter'] += (i['ci_filter'] or "")
|
||||
|
||||
if i['id_filter']:
|
||||
if not result[i['rid']]['id_filter']:
|
||||
result[i['rid']]['id_filter'] = {}
|
||||
result[i['rid']]['id_filter'].update(i['id_filter'] or {})
|
||||
|
||||
return result
|
||||
|
||||
def get_by_ids(self, _ids, type_id=None):
|
||||
@@ -78,16 +70,11 @@ class CIFilterPermsCRUD(DBMixin):
|
||||
result[i['type_id']]['ci_filter'] = ""
|
||||
result[i['type_id']]['ci_filter'] += (i['ci_filter'] or "")
|
||||
|
||||
if i['id_filter']:
|
||||
if not result[i['type_id']]['id_filter']:
|
||||
result[i['type_id']]['id_filter'] = {}
|
||||
result[i['type_id']]['id_filter'].update(i['id_filter'] or {})
|
||||
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def get_attr_filter(cls, type_id):
|
||||
if is_app_admin('cmdb') or current_user.username in ('worker', 'cmdb_agent'):
|
||||
if is_app_admin('cmdb') or g.user.username in ('worker', 'cmdb_agent'):
|
||||
return []
|
||||
|
||||
res2 = ACLManager('cmdb').get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
@@ -95,54 +82,6 @@ class CIFilterPermsCRUD(DBMixin):
|
||||
type2filter_perms = cls().get_by_ids(list(map(int, [i['name'] for i in res2])), type_id=type_id)
|
||||
return type2filter_perms.get(type_id, {}).get('attr_filter') or []
|
||||
|
||||
def _revoke_children(self, rid, id_filter, rebuild=True):
|
||||
items = self.cls.get_by(rid=rid, ci_filter=None, attr_filter=None, to_dict=False)
|
||||
for item in items:
|
||||
changed, item_id_filter = False, copy.deepcopy(item.id_filter)
|
||||
for prefix in id_filter:
|
||||
for k, v in copy.deepcopy((item.id_filter or {})).items():
|
||||
if k.startswith(prefix) and k != prefix:
|
||||
item_id_filter.pop(k)
|
||||
changed = True
|
||||
|
||||
if not item_id_filter and current_app.config.get('USE_ACL'):
|
||||
item.soft_delete(commit=False)
|
||||
ACLManager().del_resource(str(item.id), ResourceTypeEnum.CI_FILTER, rebuild=rebuild)
|
||||
elif changed:
|
||||
item.update(id_filter=item_id_filter, commit=False)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
def _revoke_parent(self, rid, parent_path, rebuild=True):
|
||||
parent_path = [i for i in parent_path.split(',') if i] or []
|
||||
revoke_nodes = [','.join(parent_path[:i]) for i in range(len(parent_path), 0, -1)]
|
||||
for node_path in revoke_nodes:
|
||||
delete_item, can_deleted = None, True
|
||||
items = self.cls.get_by(rid=rid, ci_filter=None, attr_filter=None, to_dict=False)
|
||||
for item in items:
|
||||
if node_path in item.id_filter:
|
||||
delete_item = item
|
||||
if any(filter(lambda x: x.startswith(node_path) and x != node_path, item.id_filter.keys())):
|
||||
can_deleted = False
|
||||
break
|
||||
|
||||
if can_deleted and delete_item:
|
||||
id_filter = copy.deepcopy(delete_item.id_filter)
|
||||
id_filter.pop(node_path)
|
||||
delete_item = delete_item.update(id_filter=id_filter, filter_none=False)
|
||||
|
||||
if current_app.config.get('USE_ACL') and not id_filter:
|
||||
ACLManager().del_resource(str(delete_item.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
|
||||
delete_item.soft_delete()
|
||||
items.remove(delete_item)
|
||||
|
||||
if rebuild:
|
||||
from api.tasks.acl import role_rebuild
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
|
||||
role_rebuild.apply_async(args=(rid, AppCache.get('cmdb').id), queue=ACL_QUEUE)
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
ci_filter = kwargs.get('ci_filter')
|
||||
attr_filter = kwargs.get('attr_filter') or ""
|
||||
@@ -163,67 +102,34 @@ class CIFilterPermsCRUD(DBMixin):
|
||||
|
||||
def add(self, **kwargs):
|
||||
kwargs = self._can_add(**kwargs) or kwargs
|
||||
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid'])):
|
||||
request_id_filter = {}
|
||||
if kwargs.get('id_filter'):
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
ci_filter=None,
|
||||
attr_filter=None,
|
||||
first=True, to_dict=False)
|
||||
|
||||
for _id, v in (kwargs.get('id_filter') or {}).items():
|
||||
key = ",".join(([v['parent_path']] if v.get('parent_path') else []) + [str(_id)])
|
||||
request_id_filter[key] = v['name']
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
first=True, to_dict=False)
|
||||
if obj is not None:
|
||||
obj = obj.update(filter_none=False, **kwargs)
|
||||
if not obj.attr_filter and not obj.ci_filter:
|
||||
if current_app.config.get('USE_ACL'):
|
||||
ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
|
||||
|
||||
else:
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
id_filter=None,
|
||||
first=True, to_dict=False)
|
||||
|
||||
is_recursive = kwargs.pop('is_recursive', 0)
|
||||
if obj is not None:
|
||||
if obj.id_filter and isinstance(kwargs.get('id_filter'), dict):
|
||||
obj_id_filter = copy.deepcopy(obj.id_filter)
|
||||
|
||||
for k, v in request_id_filter.items():
|
||||
obj_id_filter[k] = v
|
||||
|
||||
kwargs['id_filter'] = obj_id_filter
|
||||
|
||||
obj = obj.update(filter_none=False, **kwargs)
|
||||
|
||||
if not obj.attr_filter and not obj.ci_filter and not obj.id_filter:
|
||||
if current_app.config.get('USE_ACL'):
|
||||
ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
|
||||
|
||||
obj.soft_delete()
|
||||
|
||||
if not is_recursive and request_id_filter:
|
||||
self._revoke_children(obj.rid, request_id_filter, rebuild=False)
|
||||
obj.soft_delete()
|
||||
|
||||
else:
|
||||
if not kwargs.get('ci_filter') and not kwargs.get('attr_filter'):
|
||||
return
|
||||
|
||||
else:
|
||||
if not kwargs.get('ci_filter') and not kwargs.get('attr_filter') and not kwargs.get('id_filter'):
|
||||
return
|
||||
obj = self.cls.create(**kwargs)
|
||||
|
||||
if request_id_filter:
|
||||
kwargs['id_filter'] = request_id_filter
|
||||
if current_app.config.get('USE_ACL'):
|
||||
try:
|
||||
ACLManager().add_resource(obj.id, ResourceTypeEnum.CI_FILTER)
|
||||
except:
|
||||
pass
|
||||
ACLManager().grant_resource_to_role_by_rid(obj.id,
|
||||
kwargs.get('rid'),
|
||||
ResourceTypeEnum.CI_FILTER)
|
||||
|
||||
obj = self.cls.create(**kwargs)
|
||||
|
||||
if current_app.config.get('USE_ACL'): # new resource
|
||||
try:
|
||||
ACLManager().add_resource(obj.id, ResourceTypeEnum.CI_FILTER)
|
||||
except:
|
||||
pass
|
||||
ACLManager().grant_resource_to_role_by_rid(obj.id,
|
||||
kwargs.get('rid'),
|
||||
ResourceTypeEnum.CI_FILTER)
|
||||
|
||||
return obj
|
||||
return obj
|
||||
|
||||
def _can_update(self, **kwargs):
|
||||
pass
|
||||
@@ -232,83 +138,15 @@ class CIFilterPermsCRUD(DBMixin):
|
||||
pass
|
||||
|
||||
def delete(self, **kwargs):
|
||||
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid'])):
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
id_filter=None,
|
||||
first=True, to_dict=False)
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
first=True, to_dict=False)
|
||||
|
||||
if obj is not None:
|
||||
resource = None
|
||||
if current_app.config.get('USE_ACL'):
|
||||
resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
|
||||
if obj is not None:
|
||||
if current_app.config.get('USE_ACL'):
|
||||
ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
|
||||
|
||||
obj.soft_delete()
|
||||
|
||||
return resource
|
||||
|
||||
def delete2(self, **kwargs):
|
||||
|
||||
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid'])):
|
||||
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
|
||||
rid=kwargs.get('rid'),
|
||||
ci_filter=None,
|
||||
attr_filter=None,
|
||||
first=True, to_dict=False)
|
||||
|
||||
request_id_filter = {}
|
||||
for _id, v in (kwargs.get('id_filter') or {}).items():
|
||||
key = ",".join(([v['parent_path']] if v.get('parent_path') else []) + [str(_id)])
|
||||
request_id_filter[key] = v['name']
|
||||
|
||||
resource = None
|
||||
if obj is not None:
|
||||
|
||||
id_filter = {}
|
||||
for k, v in copy.deepcopy(obj.id_filter or {}).items(): # important
|
||||
if k not in request_id_filter:
|
||||
id_filter[k] = v
|
||||
|
||||
if not id_filter and current_app.config.get('USE_ACL'):
|
||||
resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
|
||||
obj.soft_delete()
|
||||
db.session.commit()
|
||||
|
||||
else:
|
||||
obj.update(id_filter=id_filter)
|
||||
|
||||
self._revoke_children(kwargs.get('rid'), request_id_filter, rebuild=False)
|
||||
self._revoke_parent(kwargs.get('rid'), kwargs.get('parent_path'))
|
||||
|
||||
return resource
|
||||
|
||||
def delete_id_filter_by_ci_id(self, ci_id):
|
||||
items = self.cls.get_by(ci_filter=None, attr_filter=None, to_dict=False)
|
||||
|
||||
rebuild_roles = set()
|
||||
for item in items:
|
||||
id_filter = copy.deepcopy(item.id_filter)
|
||||
changed = False
|
||||
for node_path in item.id_filter:
|
||||
if str(ci_id) in node_path:
|
||||
id_filter.pop(node_path)
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
rebuild_roles.add(item.rid)
|
||||
if not id_filter:
|
||||
item.soft_delete(commit=False)
|
||||
else:
|
||||
item.update(id_filter=id_filter, commit=False)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
if rebuild_roles:
|
||||
from api.tasks.acl import role_rebuild
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
for rid in rebuild_roles:
|
||||
role_rebuild.apply_async(args=(rid, AppCache.get('cmdb').id), queue=ACL_QUEUE)
|
||||
obj.soft_delete()
|
||||
|
||||
|
||||
def has_perm_for_ci(arg_name, resource_type, perm, callback=None, app=None):
|
||||
@@ -322,7 +160,7 @@ def has_perm_for_ci(arg_name, resource_type, perm, callback=None, app=None):
|
||||
resource = callback(resource)
|
||||
|
||||
if current_app.config.get("USE_ACL") and resource:
|
||||
if current_user.username == "worker" or current_user.username == "cmdb_agent":
|
||||
if g.user.username == "worker" or g.user.username == "cmdb_agent":
|
||||
request.values['__is_admin'] = True
|
||||
return func(*args, **kwargs)
|
||||
|
||||
|
||||
@@ -1,35 +1,26 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import copy
|
||||
|
||||
import six
|
||||
import toposort
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from flask import g
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.attribute import AttributeManager
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.cache import CMDBCounterCache
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeManager
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.const import RoleEnum
|
||||
from api.lib.cmdb.const import PermEnum, ResourceTypeEnum, RoleEnum
|
||||
from api.lib.cmdb.perms import CIFilterPermsCRUD
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.exception import AbortException
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
from api.models.cmdb import CITypeGroup
|
||||
from api.models.cmdb import CITypeGroupItem
|
||||
from api.models.cmdb import CITypeRelation
|
||||
from api.models.cmdb import PreferenceCITypeOrder
|
||||
from api.models.cmdb import PreferenceRelationView
|
||||
from api.models.cmdb import PreferenceSearchOption
|
||||
from api.models.cmdb import PreferenceShowAttributes
|
||||
@@ -44,48 +35,13 @@ class PreferenceManager(object):
|
||||
|
||||
@staticmethod
|
||||
def get_types(instance=False, tree=False):
|
||||
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, to_dict=False), key=lambda x: x.order)
|
||||
|
||||
type2group = {}
|
||||
for i in db.session.query(CITypeGroupItem, CITypeGroup).join(
|
||||
CITypeGroup, CITypeGroup.id == CITypeGroupItem.group_id).filter(
|
||||
CITypeGroup.deleted.is_(False)).filter(CITypeGroupItem.deleted.is_(False)):
|
||||
type2group[i.CITypeGroupItem.type_id] = i.CITypeGroup.to_dict()
|
||||
|
||||
types = db.session.query(PreferenceShowAttributes.type_id).filter(
|
||||
PreferenceShowAttributes.uid == current_user.uid).filter(
|
||||
PreferenceShowAttributes.deleted.is_(False)).group_by(
|
||||
PreferenceShowAttributes.type_id).all() if instance else []
|
||||
types = sorted(types, key=lambda x: {i.type_id: idx for idx, i in enumerate(
|
||||
ci_type_order) if not i.is_tree}.get(x.type_id, 1))
|
||||
group_types = []
|
||||
other_types = []
|
||||
group2idx = {}
|
||||
type_ids = set()
|
||||
for ci_type in types:
|
||||
type_id = ci_type.type_id
|
||||
type_ids.add(type_id)
|
||||
type_dict = CITypeCache.get(type_id).to_dict()
|
||||
if type_id not in type2group:
|
||||
other_types.append(type_dict)
|
||||
else:
|
||||
group = type2group[type_id]
|
||||
if group['id'] not in group2idx:
|
||||
group_types.append(type2group[type_id])
|
||||
group2idx[group['id']] = len(group_types) - 1
|
||||
group_types[group2idx[group['id']]].setdefault('ci_types', []).append(type_dict)
|
||||
if other_types:
|
||||
group_types.append(dict(ci_types=other_types))
|
||||
|
||||
tree_types = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=False) if tree else []
|
||||
tree_types = sorted(tree_types, key=lambda x: {i.type_id: idx for idx, i in enumerate(
|
||||
ci_type_order) if i.is_tree}.get(x.type_id, 1))
|
||||
|
||||
tree_types = [CITypeCache.get(_type.type_id).to_dict() for _type in tree_types]
|
||||
for _type in tree_types:
|
||||
type_ids.add(_type['id'])
|
||||
|
||||
return dict(group_types=group_types, tree_types=tree_types, type_ids=list(type_ids))
|
||||
PreferenceShowAttributes.uid == g.user.uid).filter(
|
||||
PreferenceShowAttributes.deleted.is_(False)).group_by(PreferenceShowAttributes.type_id).all() \
|
||||
if instance else []
|
||||
tree_types = PreferenceTreeView.get_by(uid=g.user.uid, to_dict=False) if tree else []
|
||||
type_ids = list(set([i.type_id for i in types + tree_types]))
|
||||
return [CITypeCache.get(type_id).to_dict() for type_id in type_ids]
|
||||
|
||||
@staticmethod
|
||||
def get_types2(instance=False, tree=False):
|
||||
@@ -98,36 +54,32 @@ class PreferenceManager(object):
|
||||
:param tree:
|
||||
:return:
|
||||
"""
|
||||
result = dict(self=dict(instance=[], tree=[], type_id2subs_time=dict()))
|
||||
|
||||
result.update(CMDBCounterCache.get_sub_counter())
|
||||
|
||||
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, to_dict=False), key=lambda x: x.order)
|
||||
result = dict(self=dict(instance=[], tree=[], type_id2subs_time=dict()),
|
||||
type_id2users=dict())
|
||||
if instance:
|
||||
types = db.session.query(PreferenceShowAttributes.type_id,
|
||||
PreferenceShowAttributes.uid, PreferenceShowAttributes.created_at).filter(
|
||||
PreferenceShowAttributes.deleted.is_(False)).filter(
|
||||
PreferenceShowAttributes.uid == current_user.uid).group_by(
|
||||
PreferenceShowAttributes.deleted.is_(False)).group_by(
|
||||
PreferenceShowAttributes.uid, PreferenceShowAttributes.type_id)
|
||||
for i in types:
|
||||
result['self']['instance'].append(i.type_id)
|
||||
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
|
||||
result['self']['type_id2subs_time'][i.type_id] = i.created_at
|
||||
if i.uid == g.user.uid:
|
||||
result['self']['instance'].append(i.type_id)
|
||||
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
|
||||
result['self']['type_id2subs_time'][i.type_id] = i.created_at
|
||||
|
||||
instance_order = [i.type_id for i in ci_type_order if not i.is_tree]
|
||||
if len(instance_order) == len(result['self']['instance']):
|
||||
result['self']['instance'] = instance_order
|
||||
result['type_id2users'].setdefault(i.type_id, []).append(i.uid)
|
||||
|
||||
if tree:
|
||||
types = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=False)
|
||||
types = PreferenceTreeView.get_by(to_dict=False)
|
||||
for i in types:
|
||||
result['self']['tree'].append(i.type_id)
|
||||
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
|
||||
result['self']['type_id2subs_time'][i.type_id] = i.created_at
|
||||
if i.uid == g.user.uid:
|
||||
result['self']['tree'].append(i.type_id)
|
||||
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
|
||||
result['self']['type_id2subs_time'][i.type_id] = i.created_at
|
||||
|
||||
tree_order = [i.type_id for i in ci_type_order if i.is_tree]
|
||||
if len(tree_order) == len(result['self']['tree']):
|
||||
result['self']['tree'] = tree_order
|
||||
result['type_id2users'].setdefault(i.type_id, [])
|
||||
if i.uid not in result['type_id2users'][i.type_id]:
|
||||
result['type_id2users'][i.type_id].append(i.uid)
|
||||
|
||||
return result
|
||||
|
||||
@@ -139,10 +91,10 @@ class PreferenceManager(object):
|
||||
|
||||
attrs = db.session.query(PreferenceShowAttributes, CITypeAttribute.order).join(
|
||||
CITypeAttribute, CITypeAttribute.attr_id == PreferenceShowAttributes.attr_id).filter(
|
||||
PreferenceShowAttributes.uid == current_user.uid).filter(
|
||||
PreferenceShowAttributes.uid == g.user.uid).filter(
|
||||
PreferenceShowAttributes.type_id == type_id).filter(
|
||||
PreferenceShowAttributes.deleted.is_(False)).filter(CITypeAttribute.deleted.is_(False)).group_by(
|
||||
CITypeAttribute.attr_id).all()
|
||||
PreferenceShowAttributes.deleted.is_(False)).filter(CITypeAttribute.deleted.is_(False)).filter(
|
||||
CITypeAttribute.type_id == type_id).all()
|
||||
|
||||
result = []
|
||||
for i in sorted(attrs, key=lambda x: x.PreferenceShowAttributes.order):
|
||||
@@ -152,22 +104,23 @@ class PreferenceManager(object):
|
||||
|
||||
is_subscribed = True
|
||||
if not attrs:
|
||||
result = CITypeAttributeManager.get_attributes_by_type_id(type_id,
|
||||
choice_web_hook_parse=False,
|
||||
choice_other_parse=False)
|
||||
result = [i for i in result if i['default_show']]
|
||||
attrs = db.session.query(CITypeAttribute).filter(
|
||||
CITypeAttribute.type_id == type_id).filter(
|
||||
CITypeAttribute.deleted.is_(False)).filter(
|
||||
CITypeAttribute.default_show.is_(True)).order_by(CITypeAttribute.order)
|
||||
result = [i.attr.to_dict() for i in attrs]
|
||||
is_subscribed = False
|
||||
|
||||
for i in result:
|
||||
if i["is_choice"]:
|
||||
i.update(dict(choice_value=AttributeManager.get_choice_values(
|
||||
i["id"], i["value_type"], i.get("choice_web_hook"), i.get("choice_other"))))
|
||||
i["id"], i["value_type"], i["choice_web_hook"])))
|
||||
|
||||
return is_subscribed, result
|
||||
|
||||
@classmethod
|
||||
def create_or_update_show_attributes(cls, type_id, attr_order):
|
||||
existed_all = PreferenceShowAttributes.get_by(type_id=type_id, uid=current_user.uid, to_dict=False)
|
||||
existed_all = PreferenceShowAttributes.get_by(type_id=type_id, uid=g.user.uid, to_dict=False)
|
||||
for x, order in attr_order:
|
||||
if isinstance(x, list):
|
||||
_attr, is_fixed = x
|
||||
@@ -175,13 +128,13 @@ class PreferenceManager(object):
|
||||
_attr, is_fixed = x, False
|
||||
attr = AttributeCache.get(_attr) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_attr)))
|
||||
existed = PreferenceShowAttributes.get_by(type_id=type_id,
|
||||
uid=current_user.uid,
|
||||
uid=g.user.uid,
|
||||
attr_id=attr.id,
|
||||
first=True,
|
||||
to_dict=False)
|
||||
if existed is None:
|
||||
PreferenceShowAttributes.create(type_id=type_id,
|
||||
uid=current_user.uid,
|
||||
uid=g.user.uid,
|
||||
attr_id=attr.id,
|
||||
order=order,
|
||||
is_fixed=is_fixed)
|
||||
@@ -193,22 +146,9 @@ class PreferenceManager(object):
|
||||
if i.attr_id not in attr_dict:
|
||||
i.soft_delete()
|
||||
|
||||
if not existed_all and attr_order:
|
||||
cls.add_ci_type_order_item(type_id, is_tree=False)
|
||||
|
||||
elif not PreferenceShowAttributes.get_by(type_id=type_id, uid=current_user.uid, to_dict=False):
|
||||
cls.delete_ci_type_order_item(type_id, is_tree=False)
|
||||
|
||||
@staticmethod
|
||||
def get_tree_view():
|
||||
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, is_tree=True, to_dict=False),
|
||||
key=lambda x: x.order)
|
||||
|
||||
res = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=True)
|
||||
if ci_type_order:
|
||||
res = sorted(res, key=lambda x: {ii.type_id: idx for idx, ii in enumerate(
|
||||
ci_type_order)}.get(x['type_id'], 1))
|
||||
|
||||
res = PreferenceTreeView.get_by(uid=g.user.uid, to_dict=True)
|
||||
for item in res:
|
||||
if item["levels"]:
|
||||
ci_type = CITypeCache.get(item['type_id']).to_dict()
|
||||
@@ -227,8 +167,8 @@ class PreferenceManager(object):
|
||||
|
||||
return res
|
||||
|
||||
@classmethod
|
||||
def create_or_update_tree_view(cls, type_id, levels):
|
||||
@staticmethod
|
||||
def create_or_update_tree_view(type_id, levels):
|
||||
attrs = CITypeAttributesCache.get(type_id)
|
||||
for idx, i in enumerate(levels):
|
||||
for attr in attrs:
|
||||
@@ -236,17 +176,14 @@ class PreferenceManager(object):
|
||||
if i == attr.id or i == attr.name or i == attr.alias:
|
||||
levels[idx] = attr.id
|
||||
|
||||
existed = PreferenceTreeView.get_by(uid=current_user.uid, type_id=type_id, to_dict=False, first=True)
|
||||
existed = PreferenceTreeView.get_by(uid=g.user.uid, type_id=type_id, to_dict=False, first=True)
|
||||
if existed is not None:
|
||||
if not levels:
|
||||
existed.soft_delete()
|
||||
cls.delete_ci_type_order_item(type_id, is_tree=True)
|
||||
return existed
|
||||
return existed.update(levels=levels)
|
||||
elif levels:
|
||||
cls.add_ci_type_order_item(type_id, is_tree=True)
|
||||
|
||||
return PreferenceTreeView.create(levels=levels, type_id=type_id, uid=current_user.uid)
|
||||
return PreferenceTreeView.create(levels=levels, type_id=type_id, uid=g.user.uid)
|
||||
|
||||
@staticmethod
|
||||
def get_relation_view():
|
||||
@@ -264,14 +201,12 @@ class PreferenceManager(object):
|
||||
else:
|
||||
views = _views
|
||||
|
||||
view2cr_ids = defaultdict(list)
|
||||
name2view = dict()
|
||||
view2cr_ids = dict()
|
||||
result = dict()
|
||||
name2id = list()
|
||||
for view in views:
|
||||
view2cr_ids[view['name']].extend(view['cr_ids'])
|
||||
view2cr_ids.setdefault(view['name'], []).extend(view['cr_ids'])
|
||||
name2id.append([view['name'], view['id']])
|
||||
name2view[view['name']] = view
|
||||
|
||||
id2type = dict()
|
||||
for view_name in view2cr_ids:
|
||||
@@ -292,31 +227,15 @@ class PreferenceManager(object):
|
||||
if not parents:
|
||||
return
|
||||
|
||||
for _l in leaf:
|
||||
_find_parent(_l)
|
||||
for l in leaf:
|
||||
_find_parent(l)
|
||||
|
||||
for node_id in node2show_types:
|
||||
node2show_types[node_id] = [CITypeCache.get(i).to_dict() for i in set(node2show_types[node_id])]
|
||||
|
||||
topo_flatten = list(toposort.toposort_flatten(topo))
|
||||
level2constraint = {}
|
||||
for i, _ in enumerate(topo_flatten[1:]):
|
||||
ctr = CITypeRelation.get_by(
|
||||
parent_id=topo_flatten[i], child_id=topo_flatten[i + 1], first=True, to_dict=False)
|
||||
level2constraint[i + 1] = ctr and ctr.constraint
|
||||
|
||||
if leaf2show_types.get(topo_flatten[-1]):
|
||||
ctr = CITypeRelation.get_by(
|
||||
parent_id=topo_flatten[-1],
|
||||
child_id=leaf2show_types[topo_flatten[-1]][0], first=True, to_dict=False)
|
||||
level2constraint[len(topo_flatten)] = ctr and ctr.constraint
|
||||
|
||||
result[view_name] = dict(topo=list(map(list, toposort.toposort(topo))),
|
||||
topo_flatten=topo_flatten,
|
||||
level2constraint=level2constraint,
|
||||
topo_flatten=list(toposort.toposort_flatten(topo)),
|
||||
leaf=leaf,
|
||||
option=name2view[view_name]['option'],
|
||||
is_public=name2view[view_name]['is_public'],
|
||||
leaf2show_types=leaf2show_types,
|
||||
node2show_types=node2show_types,
|
||||
show_types=[CITypeCache.get(j).to_dict()
|
||||
@@ -324,26 +243,18 @@ class PreferenceManager(object):
|
||||
|
||||
for type_id in id2type:
|
||||
id2type[type_id] = CITypeCache.get(type_id).to_dict()
|
||||
id2type[type_id]['unique_name'] = AttributeCache.get(id2type[type_id]['unique_id']).name
|
||||
if id2type[type_id]['show_id']:
|
||||
show_attr = AttributeCache.get(id2type[type_id]['show_id'])
|
||||
id2type[type_id]['show_name'] = show_attr and show_attr.name
|
||||
|
||||
return result, id2type, sorted(name2id, key=lambda x: x[1])
|
||||
|
||||
@classmethod
|
||||
def create_or_update_relation_view(cls, name=None, cr_ids=None, _id=None, is_public=False, option=None):
|
||||
def create_or_update_relation_view(cls, name, cr_ids, is_public=False):
|
||||
if not cr_ids:
|
||||
return abort(400, ErrFormat.preference_relation_view_node_required)
|
||||
|
||||
if _id is None:
|
||||
existed = PreferenceRelationView.get_by(name=name, to_dict=False, first=True)
|
||||
else:
|
||||
existed = PreferenceRelationView.get_by_id(_id)
|
||||
existed = PreferenceRelationView.get_by(name=name, to_dict=False, first=True)
|
||||
current_app.logger.debug(existed)
|
||||
if existed is None:
|
||||
PreferenceRelationView.create(name=name, cr_ids=cr_ids, uid=current_user.uid,
|
||||
is_public=is_public, option=option)
|
||||
PreferenceRelationView.create(name=name, cr_ids=cr_ids, uid=g.user.uid, is_public=is_public)
|
||||
|
||||
if current_app.config.get("USE_ACL"):
|
||||
ACLManager().add_resource(name, ResourceTypeEnum.RELATION_VIEW)
|
||||
@@ -351,11 +262,6 @@ class PreferenceManager(object):
|
||||
RoleEnum.CMDB_READ_ALL,
|
||||
ResourceTypeEnum.RELATION_VIEW,
|
||||
permissions=[PermEnum.READ])
|
||||
else:
|
||||
if existed.name != name and current_app.config.get("USE_ACL"):
|
||||
ACLManager().update_resource(existed.name, name, ResourceTypeEnum.RELATION_VIEW)
|
||||
|
||||
existed.update(name=name, cr_ids=cr_ids, is_public=is_public, option=option)
|
||||
|
||||
return cls.get_relation_view()
|
||||
|
||||
@@ -372,7 +278,7 @@ class PreferenceManager(object):
|
||||
@staticmethod
|
||||
def get_search_option(**kwargs):
|
||||
query = PreferenceSearchOption.get_by(only_query=True)
|
||||
query = query.filter(PreferenceSearchOption.uid == current_user.uid)
|
||||
query = query.filter(PreferenceSearchOption.uid == g.user.uid)
|
||||
|
||||
for k in kwargs:
|
||||
if hasattr(PreferenceSearchOption, k) and kwargs[k]:
|
||||
@@ -382,24 +288,16 @@ class PreferenceManager(object):
|
||||
|
||||
@staticmethod
|
||||
def add_search_option(**kwargs):
|
||||
kwargs['uid'] = current_user.uid
|
||||
kwargs['uid'] = g.user.uid
|
||||
|
||||
if kwargs['name'] in ('__recent__', '__favor__'):
|
||||
if kwargs['name'] == '__recent__':
|
||||
for i in PreferenceSearchOption.get_by(
|
||||
only_query=True, name=kwargs['name'], uid=current_user.uid).order_by(
|
||||
PreferenceSearchOption.id.desc()).offset(20):
|
||||
i.delete()
|
||||
|
||||
else:
|
||||
existed = PreferenceSearchOption.get_by(uid=current_user.uid,
|
||||
name=kwargs.get('name'),
|
||||
prv_id=kwargs.get('prv_id'),
|
||||
ptv_id=kwargs.get('ptv_id'),
|
||||
type_id=kwargs.get('type_id'),
|
||||
)
|
||||
if existed:
|
||||
return abort(400, ErrFormat.preference_search_option_exists)
|
||||
existed = PreferenceSearchOption.get_by(uid=g.user.uid,
|
||||
name=kwargs.get('name'),
|
||||
prv_id=kwargs.get('prv_id'),
|
||||
ptv_id=kwargs.get('ptv_id'),
|
||||
type_id=kwargs.get('type_id'),
|
||||
)
|
||||
if existed:
|
||||
return abort(400, ErrFormat.preference_search_option_exists)
|
||||
|
||||
return PreferenceSearchOption.create(**kwargs)
|
||||
|
||||
@@ -408,10 +306,10 @@ class PreferenceManager(object):
|
||||
|
||||
existed = PreferenceSearchOption.get_by_id(_id) or abort(404, ErrFormat.preference_search_option_not_found)
|
||||
|
||||
if current_user.uid != existed.uid:
|
||||
if g.user.uid != existed.uid:
|
||||
return abort(400, ErrFormat.no_permission2)
|
||||
|
||||
other = PreferenceSearchOption.get_by(uid=current_user.uid,
|
||||
other = PreferenceSearchOption.get_by(uid=g.user.uid,
|
||||
name=kwargs.get('name'),
|
||||
prv_id=kwargs.get('prv_id'),
|
||||
ptv_id=kwargs.get('ptv_id'),
|
||||
@@ -426,7 +324,7 @@ class PreferenceManager(object):
|
||||
def delete_search_option(_id):
|
||||
existed = PreferenceSearchOption.get_by_id(_id) or abort(404, ErrFormat.preference_search_option_not_found)
|
||||
|
||||
if current_user.uid != existed.uid:
|
||||
if g.user.uid != existed.uid:
|
||||
return abort(400, ErrFormat.no_permission2)
|
||||
|
||||
existed.soft_delete()
|
||||
@@ -438,65 +336,3 @@ class PreferenceManager(object):
|
||||
|
||||
for i in PreferenceTreeView.get_by(type_id=type_id, uid=uid, to_dict=False):
|
||||
i.soft_delete()
|
||||
|
||||
for i in PreferenceCITypeOrder.get_by(type_id=type_id, uid=uid, to_dict=False):
|
||||
i.soft_delete()
|
||||
|
||||
@staticmethod
|
||||
def can_edit_relation(parent_id, child_id):
|
||||
views = PreferenceRelationView.get_by(to_dict=False)
|
||||
for view in views:
|
||||
has_m2m = False
|
||||
last_node_id = None
|
||||
for cr in view.cr_ids:
|
||||
_rel = CITypeRelation.get_by(parent_id=cr['parent_id'], child_id=cr['child_id'],
|
||||
first=True, to_dict=False)
|
||||
if _rel and _rel.constraint == ConstraintEnum.Many2Many:
|
||||
has_m2m = True
|
||||
|
||||
if parent_id == _rel.parent_id and child_id == _rel.child_id:
|
||||
return False
|
||||
|
||||
if _rel:
|
||||
last_node_id = _rel.child_id
|
||||
|
||||
if parent_id == last_node_id:
|
||||
rels = CITypeRelation.get_by(parent_id=last_node_id, to_dict=False)
|
||||
for rel in rels:
|
||||
if rel.child_id == child_id and has_m2m:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def add_ci_type_order_item(type_id, is_tree=False):
|
||||
max_order = PreferenceCITypeOrder.get_by(
|
||||
uid=current_user.uid, is_tree=is_tree, only_query=True).order_by(PreferenceCITypeOrder.order.desc()).first()
|
||||
order = (max_order and max_order.order + 1) or 1
|
||||
|
||||
PreferenceCITypeOrder.create(type_id=type_id, is_tree=is_tree, uid=current_user.uid, order=order)
|
||||
|
||||
@staticmethod
|
||||
def delete_ci_type_order_item(type_id, is_tree=False):
|
||||
existed = PreferenceCITypeOrder.get_by(uid=current_user.uid, type_id=type_id, is_tree=is_tree,
|
||||
first=True, to_dict=False)
|
||||
|
||||
existed and existed.soft_delete()
|
||||
|
||||
@staticmethod
|
||||
def upsert_ci_type_order(type_ids, is_tree=False):
|
||||
for idx, type_id in enumerate(type_ids):
|
||||
order = idx + 1
|
||||
existed = PreferenceCITypeOrder.get_by(uid=current_user.uid, type_id=type_id, is_tree=is_tree,
|
||||
to_dict=False, first=True)
|
||||
if existed is not None:
|
||||
existed.update(order=order, flush=True)
|
||||
else:
|
||||
PreferenceCITypeOrder.create(uid=current_user.uid, type_id=type_id, is_tree=is_tree, order=order,
|
||||
flush=True)
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error("upsert citype order failed: {}".format(e))
|
||||
return abort(400, ErrFormat.unknown_error)
|
||||
|
||||
@@ -42,7 +42,7 @@ FACET_QUERY1 = """
|
||||
|
||||
FACET_QUERY = """
|
||||
SELECT {0}.value,
|
||||
count(distinct({0}.ci_id))
|
||||
count({0}.ci_id)
|
||||
FROM {0}
|
||||
INNER JOIN ({1}) AS F ON F.ci_id={0}.ci_id
|
||||
WHERE {0}.attr_id={2:d}
|
||||
|
||||
@@ -24,21 +24,21 @@ class RelationTypeManager(object):
|
||||
|
||||
@staticmethod
|
||||
def add(name):
|
||||
RelationType.get_by(name=name, first=True, to_dict=False) and abort(
|
||||
400, ErrFormat.relation_type_exists.format(name))
|
||||
RelationType.get_by(name=name, first=True, to_dict=False) and \
|
||||
abort(400, ErrFormat.relation_type_exists.format(name))
|
||||
|
||||
return RelationType.create(name=name)
|
||||
|
||||
@staticmethod
|
||||
def update(rel_id, name):
|
||||
existed = RelationType.get_by_id(rel_id) or abort(
|
||||
404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
|
||||
existed = RelationType.get_by_id(rel_id) or \
|
||||
abort(404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
|
||||
|
||||
return existed.update(name=name)
|
||||
|
||||
@staticmethod
|
||||
def delete(rel_id):
|
||||
existed = RelationType.get_by_id(rel_id) or abort(
|
||||
404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
|
||||
existed = RelationType.get_by_id(rel_id) or \
|
||||
abort(404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
|
||||
|
||||
existed.soft_delete()
|
||||
|
||||
@@ -1,158 +1,94 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask_babel import lazy_gettext as _l
|
||||
|
||||
from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
class ErrFormat(CommonErrFormat):
|
||||
ci_type_config = _l("CI Model") # 模型配置
|
||||
invalid_relation_type = "无效的关系类型: {}"
|
||||
ci_type_not_found = "模型不存在!"
|
||||
argument_attributes_must_be_list = "参数 attributes 类型必须是列表"
|
||||
argument_file_not_found = "文件似乎并未上传"
|
||||
|
||||
invalid_relation_type = _l("Invalid relation type: {}") # 无效的关系类型: {}
|
||||
ci_type_not_found = _l("CIType is not found") # 模型不存在!
|
||||
attribute_not_found = "属性 {} 不存在!"
|
||||
attribute_is_unique_id = "该属性是模型的唯一标识,不能被删除!"
|
||||
attribute_value_type_cannot_change = "属性的值类型不允许修改!"
|
||||
attribute_list_value_cannot_change = "多值不被允许修改!"
|
||||
attribute_index_cannot_change = "修改索引 非管理员不被允许!"
|
||||
attribute_index_change_failed = "索引切换失败!"
|
||||
invalid_choice_values = "预定义值的类型不对!"
|
||||
attribute_name_duplicate = "重复的属性名 {}"
|
||||
add_attribute_failed = "创建属性 {} 失败!"
|
||||
update_attribute_failed = "修改属性 {} 失败!"
|
||||
cannot_edit_attribute = "您没有权限修改该属性!"
|
||||
cannot_delete_attribute = "您没有权限删除该属性!"
|
||||
attribute_name_cannot_be_builtin = "属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type"
|
||||
|
||||
# 参数 attributes 类型必须是列表
|
||||
argument_attributes_must_be_list = _l("The type of parameter attributes must be a list")
|
||||
argument_file_not_found = _l("The file doesn't seem to be uploaded") # 文件似乎并未上传
|
||||
ci_not_found = "CI {} 不存在"
|
||||
unique_constraint = "多属性联合唯一校验不通过: {}"
|
||||
unique_value_not_found = "模型的主键 {} 不存在!"
|
||||
unique_key_required = "主键字段 {} 缺失"
|
||||
ci_is_already_existed = "CI 已经存在!"
|
||||
relation_constraint = "关系约束: {}, 校验失败 "
|
||||
relation_not_found = "CI关系: {} 不存在"
|
||||
ci_search_Parentheses_invalid = "搜索表达式里小括号前不支持: 或、非"
|
||||
|
||||
attribute_not_found = _l("Attribute {} does not exist!") # 属性 {} 不存在!
|
||||
attribute_is_unique_id = _l(
|
||||
"This attribute is the unique identifier of the model and cannot be deleted!") # 该属性是模型的唯一标识,不能被删除!
|
||||
attribute_is_ref_by_type = _l(
|
||||
"This attribute is referenced by model {} and cannot be deleted!") # 该属性被模型 {} 引用, 不能删除!
|
||||
attribute_value_type_cannot_change = _l(
|
||||
"The value type of the attribute is not allowed to be modified!") # 属性的值类型不允许修改!
|
||||
attribute_list_value_cannot_change = _l("Multiple values are not allowed to be modified!") # 多值不被允许修改!
|
||||
# 修改索引 非管理员不被允许!
|
||||
attribute_index_cannot_change = _l("Modifying the index is not allowed for non-administrators!")
|
||||
attribute_index_change_failed = _l("Index switching failed!") # 索引切换失败!
|
||||
invalid_choice_values = _l("The predefined value is of the wrong type!") # 预定义值的类型不对!
|
||||
attribute_name_duplicate = _l("Duplicate attribute name {}") # 重复的属性名 {}
|
||||
add_attribute_failed = _l("Failed to create attribute {}!") # 创建属性 {} 失败!
|
||||
update_attribute_failed = _l("Modify attribute {} failed!") # 修改属性 {} 失败!
|
||||
cannot_edit_attribute = _l("You do not have permission to modify this attribute!") # 您没有权限修改该属性!
|
||||
cannot_delete_attribute = _l(
|
||||
"Only creators and administrators are allowed to delete attributes!") # 目前只允许 属性创建人、管理员 删除属性!
|
||||
# 属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type
|
||||
attribute_name_cannot_be_builtin = _l(
|
||||
"Attribute field names cannot be built-in fields: id, _id, ci_id, type, _type, ci_type, ticket_id")
|
||||
attribute_choice_other_invalid = _l(
|
||||
"Predefined value: Other model request parameters are illegal!") # 预定义值: 其他模型请求参数不合法!
|
||||
ci_type_not_found2 = "模型 {} 不存在"
|
||||
ci_type_is_already_existed = "模型 {} 已经存在"
|
||||
unique_key_not_define = "主键未定义或者已被删除"
|
||||
only_owner_can_delete = "只有创建人才能删除它!"
|
||||
ci_exists_and_cannot_delete_type = "因为CI已经存在,不能删除模型"
|
||||
ci_type_group_not_found = "模型分组 {} 不存在"
|
||||
ci_type_group_exists = "模型分组 {} 已经存在"
|
||||
ci_type_relation_not_found = "模型关系 {} 不存在"
|
||||
ci_type_attribute_group_duplicate = "属性分组 {} 已存在"
|
||||
ci_type_attribute_group_not_found = "属性分组 {} 不存在"
|
||||
ci_type_group_attribute_not_found = "属性组<{0}> - 属性<{1}> 不存在"
|
||||
unique_constraint_duplicate = "唯一约束已经存在!"
|
||||
unique_constraint_invalid = "唯一约束的属性不能是 JSON 和 多值"
|
||||
ci_type_trigger_duplicate = "重复的触发器"
|
||||
ci_type_trigger_not_found = "触发器 {} 不存在"
|
||||
|
||||
ci_not_found = _l("CI {} does not exist") # CI {} 不存在
|
||||
unique_constraint = _l("Multiple attribute joint unique verification failed: {}") # 多属性联合唯一校验不通过: {}
|
||||
unique_value_not_found = _l("The model's primary key {} does not exist!") # 模型的主键 {} 不存在!
|
||||
unique_key_required = _l("Primary key {} is missing") # 主键字段 {} 缺失
|
||||
ci_is_already_existed = _l("CI already exists!") # CI 已经存在!
|
||||
ci_reference_not_found = _l("{}: CI reference {} does not exist!") # {}: CI引用 {} 不存在!
|
||||
ci_reference_invalid = _l("{}: CI reference {} is illegal!") # {}, CI引用 {} 不合法!
|
||||
relation_constraint = _l("Relationship constraint: {}, verification failed") # 关系约束: {}, 校验失败
|
||||
# 多对多关系 限制: 模型 {} <-> {} 已经存在多对多关系!
|
||||
m2m_relation_constraint = _l(
|
||||
"Many-to-many relationship constraint: Model {} <-> {} already has a many-to-many relationship!")
|
||||
record_not_found = "操作记录 {} 不存在"
|
||||
cannot_delete_unique = "不能删除唯一标识"
|
||||
cannot_delete_default_order_attr = "不能删除默认排序的属性"
|
||||
|
||||
relation_not_found = _l("CI relationship: {} does not exist") # CI关系: {} 不存在
|
||||
preference_relation_view_node_required = "没有选择节点"
|
||||
preference_search_option_not_found = "该搜索选项不存在!"
|
||||
preference_search_option_exists = "该搜索选项命名重复!"
|
||||
|
||||
# 搜索表达式里小括号前不支持: 或、非
|
||||
ci_search_Parentheses_invalid = _l("In search expressions, not supported before parentheses: or, not")
|
||||
relation_type_exists = "关系类型 {} 已经存在"
|
||||
relation_type_not_found = "关系类型 {} 不存在"
|
||||
|
||||
ci_type_not_found2 = _l("Model {} does not exist") # 模型 {} 不存在
|
||||
ci_type_is_already_existed = _l("Model {} already exists") # 模型 {} 已经存在
|
||||
unique_key_not_define = _l("The primary key is undefined or has been deleted") # 主键未定义或者已被删除
|
||||
only_owner_can_delete = _l("Only the creator can delete it!") # 只有创建人才能删除它!
|
||||
ci_exists_and_cannot_delete_type = _l(
|
||||
"The model cannot be deleted because the CI already exists") # 因为CI已经存在,不能删除模型
|
||||
ci_exists_and_cannot_delete_inheritance = _l(
|
||||
"The inheritance cannot be deleted because the CI already exists") # 因为CI已经存在,不能删除继承关系
|
||||
ci_type_inheritance_cannot_delete = _l("The model is inherited and cannot be deleted") # 该模型被继承, 不能删除
|
||||
ci_type_referenced_cannot_delete = _l(
|
||||
"The model is referenced by attribute {} and cannot be deleted") # 该模型被属性 {} 引用, 不能删除
|
||||
attribute_value_invalid = "无效的属性值: {}"
|
||||
attribute_value_invalid2 = "{} 无效的值: {}"
|
||||
not_in_choice_values = "{} 不在预定义值里"
|
||||
attribute_value_unique_required = "属性 {} 的值必须是唯一的, 当前值 {} 已存在"
|
||||
attribute_value_required = "属性 {} 值必须存在"
|
||||
attribute_value_unknown_error = "新增或者修改属性值未知错误: {}"
|
||||
|
||||
# 因为关系视图 {} 引用了该模型,不能删除模型
|
||||
ci_relation_view_exists_and_cannot_delete_type = _l(
|
||||
"The model cannot be deleted because the model is referenced by the relational view {}")
|
||||
ci_type_group_not_found = _l("Model group {} does not exist") # 模型分组 {} 不存在
|
||||
ci_type_group_exists = _l("Model group {} already exists") # 模型分组 {} 已经存在
|
||||
ci_type_relation_not_found = _l("Model relationship {} does not exist") # 模型关系 {} 不存在
|
||||
ci_type_attribute_group_duplicate = _l("Attribute group {} already exists") # 属性分组 {} 已存在
|
||||
ci_type_attribute_group_not_found = _l("Attribute group {} does not exist") # 属性分组 {} 不存在
|
||||
# 属性组<{0}> - 属性<{1}> 不存在
|
||||
ci_type_group_attribute_not_found = _l("Attribute group <{0}> - attribute <{1}> does not exist")
|
||||
unique_constraint_duplicate = _l("The unique constraint already exists!") # 唯一约束已经存在!
|
||||
# 唯一约束的属性不能是 JSON 和 多值
|
||||
unique_constraint_invalid = _l("Uniquely constrained attributes cannot be JSON and multi-valued")
|
||||
ci_type_trigger_duplicate = _l("Duplicated trigger") # 重复的触发器
|
||||
ci_type_trigger_not_found = _l("Trigger {} does not exist") # 触发器 {} 不存在
|
||||
ci_type_reconciliation_duplicate = _l("Duplicated reconciliation rule") # 重复的校验规则
|
||||
ci_type_reconciliation_not_found = _l("Reconciliation rule {} does not exist") # 规则 {} 不存在
|
||||
custom_name_duplicate = "订制名重复"
|
||||
|
||||
record_not_found = _l("Operation record {} does not exist") # 操作记录 {} 不存在
|
||||
cannot_delete_unique = _l("Unique identifier cannot be deleted") # 不能删除唯一标识
|
||||
cannot_delete_default_order_attr = _l("Cannot delete default sorted attributes") # 不能删除默认排序的属性
|
||||
limit_ci_type = "模型数超过限制: {}"
|
||||
limit_ci = "CI数超过限制: {}"
|
||||
|
||||
preference_relation_view_node_required = _l("No node selected") # 没有选择节点
|
||||
preference_search_option_not_found = _l("This search option does not exist!") # 该搜索选项不存在!
|
||||
preference_search_option_exists = _l("This search option has a duplicate name!") # 该搜索选项命名重复!
|
||||
adr_duplicate = "自动发现规则: {} 已经存在!"
|
||||
adr_not_found = "自动发现规则: {} 不存在!"
|
||||
adr_referenced = "该自动发现规则被模型引用, 不能删除!"
|
||||
ad_duplicate = "自动发现规则的应用不能重复定义!"
|
||||
ad_not_found = "您要修改的自动发现: {} 不存在!"
|
||||
ad_not_unique_key = "属性字段没有包括唯一标识: {}"
|
||||
adc_not_found = "自动发现的实例不存在!"
|
||||
adt_not_found = "模型并未关联该自动发现!"
|
||||
adt_secret_no_permission = "只有创建人才能修改Secret!"
|
||||
cannot_delete_adt = "该规则已经有自动发现的实例, 不能被删除!"
|
||||
adr_default_ref_once = "该默认的自动发现规则 已经被模型 {} 引用!"
|
||||
adr_unique_key_required = "unique_key方法必须返回非空字符串!"
|
||||
adr_plugin_attributes_list_required = "attributes方法必须返回的是list"
|
||||
adr_plugin_attributes_list_no_empty = "attributes方法返回的list不能为空!"
|
||||
adt_target_all_no_permission = "只有管理员才可以定义执行机器为: 所有节点!"
|
||||
adt_target_expr_no_permission = "执行机器权限检查不通过: {}"
|
||||
|
||||
relation_type_exists = _l("Relationship type {} already exists") # 关系类型 {} 已经存在
|
||||
relation_type_not_found = _l("Relationship type {} does not exist") # 关系类型 {} 不存在
|
||||
|
||||
attribute_value_invalid = _l("Invalid attribute value: {}") # 无效的属性值: {}
|
||||
attribute_value_invalid2 = _l("{} Invalid value: {}") # {} 无效的值: {}
|
||||
not_in_choice_values = _l("{} is not in the predefined values") # {} 不在预定义值里
|
||||
# 属性 {} 的值必须是唯一的, 当前值 {} 已存在
|
||||
attribute_value_unique_required = _l("The value of attribute {} must be unique, {} already exists")
|
||||
attribute_value_required = _l("Attribute {} value must exist") # 属性 {} 值必须存在
|
||||
attribute_value_out_of_range = _l("Out of range value, the maximum value is 2147483647")
|
||||
# 新增或者修改属性值未知错误: {}
|
||||
attribute_value_unknown_error = _l("Unknown error when adding or modifying attribute value: {}")
|
||||
|
||||
custom_name_duplicate = _l("Duplicate custom name") # 订制名重复
|
||||
|
||||
limit_ci_type = _l("Number of models exceeds limit: {}") # 模型数超过限制: {}
|
||||
limit_ci = _l("The number of CIs exceeds the limit: {}") # CI数超过限制: {}
|
||||
|
||||
adr_duplicate = _l("Auto-discovery rule: {} already exists!") # 自动发现规则: {} 已经存在!
|
||||
adr_not_found = _l("Auto-discovery rule: {} does not exist!") # 自动发现规则: {} 不存在!
|
||||
# 该自动发现规则被模型引用, 不能删除!
|
||||
adr_referenced = _l("This auto-discovery rule is referenced by the model and cannot be deleted!")
|
||||
# 自动发现规则的应用不能重复定义!
|
||||
ad_duplicate = _l("The application of auto-discovery rules cannot be defined repeatedly!")
|
||||
ad_not_found = _l("The auto-discovery you want to modify: {} does not exist!") # 您要修改的自动发现: {} 不存在!
|
||||
ad_not_unique_key = _l("Attribute does not include unique identifier: {}") # 属性字段没有包括唯一标识: {}
|
||||
adc_not_found = _l("The auto-discovery instance does not exist!") # 自动发现的实例不存在!
|
||||
adt_not_found = _l("The model is not associated with this auto-discovery!") # 模型并未关联该自动发现!
|
||||
adt_secret_no_permission = _l("Only the creator can modify the Secret!") # 只有创建人才能修改Secret!
|
||||
# 该规则已经有自动发现的实例, 不能被删除!
|
||||
cannot_delete_adt = _l("This rule already has auto-discovery instances and cannot be deleted!")
|
||||
# 该默认的自动发现规则 已经被模型 {} 引用!
|
||||
adr_default_ref_once = _l("The default auto-discovery rule is already referenced by model {}!")
|
||||
# unique_key方法必须返回非空字符串!
|
||||
adr_unique_key_required = _l("The unique_key method must return a non-empty string!")
|
||||
adr_plugin_attributes_list_required = _l("The attributes method must return a list") # attributes方法必须返回的是list
|
||||
# attributes方法返回的list不能为空!
|
||||
adr_plugin_attributes_list_no_empty = _l("The list returned by the attributes method cannot be empty!")
|
||||
# 只有管理员才可以定义执行机器为: 所有节点!
|
||||
adt_target_all_no_permission = _l("Only administrators can define execution targets as: all nodes!")
|
||||
adt_target_expr_no_permission = _l("Execute targets permission check failed: {}") # 执行机器权限检查不通过: {}
|
||||
|
||||
ci_filter_name_cannot_be_empty = _l("CI filter authorization must be named!") # CI过滤授权 必须命名!
|
||||
ci_filter_perm_cannot_or_query = _l(
|
||||
"CI filter authorization is currently not supported or query") # CI过滤授权 暂时不支持 或 查询
|
||||
# 您没有属性 {} 的操作权限!
|
||||
ci_filter_perm_attr_no_permission = _l("You do not have permission to operate attribute {}!")
|
||||
ci_filter_perm_ci_no_permission = _l("You do not have permission to operate this CI!") # 您没有该CI的操作权限!
|
||||
|
||||
password_save_failed = _l("Failed to save password: {}") # 保存密码失败: {}
|
||||
password_load_failed = _l("Failed to get password: {}") # 获取密码失败: {}
|
||||
|
||||
cron_time_format_invalid = _l("Scheduling time format error") # 调度时间格式错误
|
||||
reconciliation_title = _l("CMDB data reconciliation results") # CMDB数据合规检查结果
|
||||
reconciliation_body = _l("Number of {} illegal: {}") # "{} 不合规数: {}"
|
||||
|
||||
topology_exists = _l("Topology view {} already exists") # 拓扑视图 {} 已经存在
|
||||
topology_group_exists = _l("Topology group {} already exists") # 拓扑视图分组 {} 已经存在
|
||||
# 因为该分组下定义了拓扑视图,不能删除
|
||||
topo_view_exists_cannot_delete_group = _l("The group cannot be deleted because the topology view already exists")
|
||||
|
||||
relation_path_search_src_target_required = _l("Both the source model and the target model must be selected")
|
||||
ci_filter_name_cannot_be_empty = "CI过滤授权 必须命名!"
|
||||
ci_filter_perm_cannot_or_query = "CI过滤授权 暂时不支持 或 查询"
|
||||
ci_filter_perm_attr_no_permission = "您没有属性 {} 的操作权限!"
|
||||
ci_filter_perm_ci_no_permission = "您没有该CI的操作权限!"
|
||||
|
||||
@@ -16,13 +16,10 @@ def search(query=None,
|
||||
ret_key=RetKey.NAME,
|
||||
count=1,
|
||||
sort=None,
|
||||
excludes=None,
|
||||
use_id_filter=False,
|
||||
use_ci_filter=True):
|
||||
excludes=None):
|
||||
if current_app.config.get("USE_ES"):
|
||||
s = SearchFromES(query, fl, facet, page, ret_key, count, sort)
|
||||
else:
|
||||
s = SearchFromDB(query, fl, facet, page, ret_key, count, sort, excludes=excludes,
|
||||
use_id_filter=use_id_filter, use_ci_filter=use_ci_filter)
|
||||
s = SearchFromDB(query, fl, facet, page, ret_key, count, sort, excludes=excludes)
|
||||
|
||||
return s
|
||||
|
||||
@@ -7,7 +7,6 @@ QUERY_CIS_BY_VALUE_TABLE = """
|
||||
attr.alias AS attr_alias,
|
||||
attr.value_type,
|
||||
attr.is_list,
|
||||
attr.is_password,
|
||||
c_cis.type_id,
|
||||
{0}.ci_id,
|
||||
{0}.attr_id,
|
||||
@@ -27,8 +26,7 @@ QUERY_CIS_BY_IDS = """
|
||||
A.attr_alias,
|
||||
A.value,
|
||||
A.value_type,
|
||||
A.is_list,
|
||||
A.is_password
|
||||
A.is_list
|
||||
FROM
|
||||
({1}) AS A {0}
|
||||
ORDER BY A.ci_id;
|
||||
@@ -45,7 +43,7 @@ FACET_QUERY1 = """
|
||||
|
||||
FACET_QUERY = """
|
||||
SELECT {0}.value,
|
||||
count(distinct {0}.ci_id)
|
||||
count({0}.ci_id)
|
||||
FROM {0}
|
||||
INNER JOIN ({1}) AS F ON F.ci_id={0}.ci_id
|
||||
WHERE {0}.attr_id={2:d}
|
||||
@@ -56,13 +54,13 @@ QUERY_CI_BY_ATTR_NAME = """
|
||||
SELECT {0}.ci_id
|
||||
FROM {0}
|
||||
WHERE {0}.attr_id={1:d}
|
||||
AND ({0}.value {2})
|
||||
AND {0}.value {2}
|
||||
"""
|
||||
|
||||
QUERY_CI_BY_ID = """
|
||||
SELECT c_cis.id as ci_id
|
||||
FROM c_cis
|
||||
WHERE c_cis.id {}
|
||||
WHERE c_cis.id={}
|
||||
"""
|
||||
|
||||
QUERY_CI_BY_TYPE = """
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
import six
|
||||
import time
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from jinja2 import Template
|
||||
from sqlalchemy import text
|
||||
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from jinja2 import Template
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
@@ -29,7 +27,6 @@ from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_NO_ATTR
|
||||
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_TYPE
|
||||
from api.lib.cmdb.search.ci.db.query_sql import QUERY_UNION_CI_ATTRIBUTE_IS_NULL
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.lib.utils import handle_arg_list
|
||||
@@ -44,11 +41,7 @@ class Search(object):
|
||||
count=1,
|
||||
sort=None,
|
||||
ci_ids=None,
|
||||
excludes=None,
|
||||
parent_node_perm_passed=False,
|
||||
use_id_filter=False,
|
||||
use_ci_filter=True,
|
||||
only_ids=False):
|
||||
excludes=None):
|
||||
self.orig_query = query
|
||||
self.fl = fl or []
|
||||
self.excludes = excludes or []
|
||||
@@ -58,20 +51,12 @@ class Search(object):
|
||||
self.count = count
|
||||
self.sort = sort
|
||||
self.ci_ids = ci_ids or []
|
||||
self.raw_ci_ids = copy.deepcopy(self.ci_ids)
|
||||
self.query_sql = ""
|
||||
self.type_id_list = []
|
||||
self.only_type_query = False
|
||||
self.parent_node_perm_passed = parent_node_perm_passed
|
||||
self.use_id_filter = use_id_filter
|
||||
self.use_ci_filter = use_ci_filter
|
||||
self.only_ids = only_ids
|
||||
self.multi_type_has_ci_filter = False
|
||||
|
||||
self.valid_type_names = []
|
||||
self.type2filter_perms = dict()
|
||||
self.is_app_admin = is_app_admin('cmdb') or current_user.username == "worker"
|
||||
self.is_app_admin = self.is_app_admin or (not self.use_ci_filter and not self.use_id_filter)
|
||||
|
||||
@staticmethod
|
||||
def _operator_proc(key):
|
||||
@@ -105,135 +90,81 @@ class Search(object):
|
||||
else:
|
||||
raise SearchError(ErrFormat.attribute_not_found.format(key))
|
||||
|
||||
def _type_query_handler(self, v, queries, is_sub=False):
|
||||
def _type_query_handler(self, v, queries):
|
||||
new_v = v[1:-1].split(";") if v.startswith("(") and v.endswith(")") else [v]
|
||||
type_num = len(new_v)
|
||||
type_id_list = []
|
||||
for _v in new_v:
|
||||
ci_type = CITypeCache.get(_v)
|
||||
|
||||
if type_num == 1 and not self.sort and ci_type and ci_type.default_order_attr:
|
||||
if len(new_v) == 1 and not self.sort and ci_type and ci_type.default_order_attr:
|
||||
self.sort = ci_type.default_order_attr
|
||||
|
||||
if ci_type is not None:
|
||||
if self.valid_type_names == "ALL" or ci_type.name in self.valid_type_names:
|
||||
if not is_sub:
|
||||
self.type_id_list.append(str(ci_type.id))
|
||||
type_id_list.append(str(ci_type.id))
|
||||
if ci_type.id in self.type2filter_perms and not is_sub:
|
||||
self.type_id_list.append(str(ci_type.id))
|
||||
if ci_type.id in self.type2filter_perms:
|
||||
ci_filter = self.type2filter_perms[ci_type.id].get('ci_filter')
|
||||
if ci_filter and self.use_ci_filter and not self.use_id_filter:
|
||||
if ci_filter:
|
||||
sub = []
|
||||
ci_filter = Template(ci_filter).render(user=current_user)
|
||||
ci_filter = Template(ci_filter).render(user=g.user)
|
||||
for i in ci_filter.split(','):
|
||||
if type_num == 1:
|
||||
if i.startswith("~") and not sub:
|
||||
queries.append(i)
|
||||
else:
|
||||
sub.append(i)
|
||||
if i.startswith("~") and not sub:
|
||||
queries.append(i)
|
||||
else:
|
||||
sub.append(i)
|
||||
if sub:
|
||||
if type_num == 1:
|
||||
queries.append(dict(operator="&", queries=sub))
|
||||
else:
|
||||
if str(ci_type.id) in self.type_id_list:
|
||||
self.type_id_list.remove(str(ci_type.id))
|
||||
type_id_list.remove(str(ci_type.id))
|
||||
sub.extend([i for i in queries[1:] if isinstance(i, six.string_types)])
|
||||
queries.append(dict(operator="&", queries=sub))
|
||||
|
||||
sub.insert(0, "_type:{}".format(ci_type.id))
|
||||
queries.append(dict(operator="|", queries=sub))
|
||||
self.multi_type_has_ci_filter = True
|
||||
if self.type2filter_perms[ci_type.id].get('attr_filter'):
|
||||
if type_num == 1:
|
||||
if not self.fl:
|
||||
self.fl = set(self.type2filter_perms[ci_type.id]['attr_filter'])
|
||||
else:
|
||||
self.fl = set(self.fl) & set(self.type2filter_perms[ci_type.id]['attr_filter'])
|
||||
if not self.fl:
|
||||
self.fl = set(self.type2filter_perms[ci_type.id]['attr_filter'])
|
||||
else:
|
||||
self.fl = self.fl or {}
|
||||
if not self.fl or isinstance(self.fl, dict):
|
||||
self.fl[ci_type.id] = set(self.type2filter_perms[ci_type.id]['attr_filter'])
|
||||
|
||||
if self.type2filter_perms[ci_type.id].get('id_filter') and self.use_id_filter:
|
||||
|
||||
if not self.raw_ci_ids:
|
||||
self.ci_ids = list(self.type2filter_perms[ci_type.id]['id_filter'].keys())
|
||||
|
||||
if self.use_id_filter and not self.ci_ids and not self.is_app_admin:
|
||||
self.raw_ci_ids = [0]
|
||||
self.fl = set(self.fl) & set(self.type2filter_perms[ci_type.id]['attr_filter'])
|
||||
else:
|
||||
raise SearchError(ErrFormat.no_permission.format(ci_type.alias, PermEnum.READ))
|
||||
else:
|
||||
raise SearchError(ErrFormat.ci_type_not_found2.format(_v))
|
||||
|
||||
if type_num != len(self.type_id_list) and queries and queries[0].startswith('_type') and not is_sub:
|
||||
queries[0] = "_type:({})".format(";".join(self.type_id_list))
|
||||
|
||||
if type_id_list:
|
||||
type_ids = ",".join(type_id_list)
|
||||
if self.type_id_list:
|
||||
type_ids = ",".join(self.type_id_list)
|
||||
_query_sql = QUERY_CI_BY_TYPE.format(type_ids)
|
||||
if self.only_type_query or self.multi_type_has_ci_filter:
|
||||
if self.only_type_query:
|
||||
return _query_sql
|
||||
elif type_num > 1: # there must be instance-level access control
|
||||
return "select c_cis.id as ci_id from c_cis where c_cis.id=0"
|
||||
|
||||
else:
|
||||
return ""
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def _id_query_handler(v):
|
||||
if ";" in v:
|
||||
return QUERY_CI_BY_ID.format("in {}".format(v.replace(';', ',')))
|
||||
else:
|
||||
return QUERY_CI_BY_ID.format("= {}".format(v))
|
||||
return QUERY_CI_BY_ID.format(v)
|
||||
|
||||
@staticmethod
|
||||
def _in_query_handler(attr, v, is_not):
|
||||
new_v = v[1:-1].split(";")
|
||||
|
||||
if attr.value_type == ValueTypeEnum.DATE:
|
||||
new_v = ["{} 00:00:00".format(i) for i in new_v if len(i) == 10]
|
||||
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
in_query = " OR {0}.value ".format(table_name).join(['{0} "{1}"'.format(
|
||||
"NOT LIKE" if is_not else "LIKE",
|
||||
_v.replace("*", "%")) for _v in new_v])
|
||||
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, in_query)
|
||||
|
||||
return _query_sql
|
||||
|
||||
@staticmethod
|
||||
def _range_query_handler(attr, v, is_not):
|
||||
start, end = [x.strip() for x in v[1:-1].split("_TO_")]
|
||||
|
||||
if attr.value_type == ValueTypeEnum.DATE:
|
||||
start = "{} 00:00:00".format(start) if len(start) == 10 else start
|
||||
end = "{} 00:00:00".format(end) if len(end) == 10 else end
|
||||
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
range_query = "{0} '{1}' AND '{2}'".format(
|
||||
"NOT BETWEEN" if is_not else "BETWEEN",
|
||||
start.replace("*", "%"), end.replace("*", "%"))
|
||||
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, range_query)
|
||||
|
||||
return _query_sql
|
||||
|
||||
@staticmethod
|
||||
def _comparison_query_handler(attr, v):
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
if v.startswith(">=") or v.startswith("<="):
|
||||
if attr.value_type == ValueTypeEnum.DATE and len(v[2:]) == 10:
|
||||
v = "{} 00:00:00".format(v)
|
||||
|
||||
comparison_query = "{0} '{1}'".format(v[:2], v[2:].replace("*", "%"))
|
||||
else:
|
||||
if attr.value_type == ValueTypeEnum.DATE and len(v[1:]) == 10:
|
||||
v = "{} 00:00:00".format(v)
|
||||
|
||||
comparison_query = "{0} '{1}'".format(v[0], v[1:].replace("*", "%"))
|
||||
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, comparison_query)
|
||||
|
||||
return _query_sql
|
||||
|
||||
@staticmethod
|
||||
@@ -245,7 +176,6 @@ class Search(object):
|
||||
elif field.startswith("-"):
|
||||
field = field[1:]
|
||||
sort_type = "DESC"
|
||||
|
||||
return field, sort_type
|
||||
|
||||
def __sort_by_id(self, sort_type, query_sql):
|
||||
@@ -255,7 +185,7 @@ class Search(object):
|
||||
return ret_sql.format(query_sql, "ORDER BY B.ci_id {1} LIMIT {0:d}, {2};".format(
|
||||
(self.page - 1) * self.count, sort_type, self.count))
|
||||
|
||||
elif self.type_id_list and not self.multi_type_has_ci_filter:
|
||||
elif self.type_id_list:
|
||||
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
|
||||
query_sql,
|
||||
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({0}) ".format(
|
||||
@@ -280,7 +210,7 @@ class Search(object):
|
||||
def __sort_by_type(self, sort_type, query_sql):
|
||||
ret_sql = "SELECT SQL_CALC_FOUND_ROWS DISTINCT B.ci_id FROM ({0}) AS B {1}"
|
||||
|
||||
if self.type_id_list and not self.multi_type_has_ci_filter:
|
||||
if self.type_id_list:
|
||||
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
|
||||
query_sql,
|
||||
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({0}) ".format(
|
||||
@@ -308,14 +238,16 @@ class Search(object):
|
||||
attr_id = attr.id
|
||||
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
_v_query_sql = """SELECT {0}.ci_id, {1}.value
|
||||
_v_query_sql = """SELECT {0}.ci_id, {1}.value
|
||||
FROM ({2}) AS {0} INNER JOIN {1} ON {1}.ci_id = {0}.ci_id
|
||||
WHERE {1}.attr_id = {3}""".format("ALIAS", table_name, query_sql, attr_id)
|
||||
new_table = _v_query_sql
|
||||
|
||||
if self.only_type_query or not self.type_id_list or self.multi_type_has_ci_filter:
|
||||
return ("SELECT SQL_CALC_FOUND_ROWS DISTINCT C.ci_id FROM ({0}) AS C ORDER BY C.value {2} "
|
||||
"LIMIT {1:d}, {3};".format(new_table, (self.page - 1) * self.count, sort_type, self.count))
|
||||
if self.only_type_query or not self.type_id_list:
|
||||
return "SELECT SQL_CALC_FOUND_ROWS DISTINCT C.ci_id " \
|
||||
"FROM ({0}) AS C " \
|
||||
"ORDER BY C.value {2} " \
|
||||
"LIMIT {1:d}, {3};".format(new_table, (self.page - 1) * self.count, sort_type, self.count)
|
||||
|
||||
elif self.type_id_list:
|
||||
self.query_sql = """SELECT C.ci_id
|
||||
@@ -351,12 +283,10 @@ class Search(object):
|
||||
INNER JOIN ({2}) as {3} USING(ci_id)""".format(query_sql, alias, _query_sql, alias + "A")
|
||||
|
||||
elif operator == "|" or operator == "|~":
|
||||
query_sql = "SELECT * FROM ({0}) as {1} UNION ALL SELECT * FROM ({2}) as {3}".format(query_sql, alias,
|
||||
_query_sql,
|
||||
alias + "A")
|
||||
query_sql = "SELECT * FROM ({0}) as {1} UNION ALL ({2})".format(query_sql, alias, _query_sql)
|
||||
|
||||
elif operator == "~":
|
||||
query_sql = """SELECT * FROM ({0}) as {1} LEFT JOIN ({2}) as {3} USING(ci_id)
|
||||
query_sql = """SELECT * FROM ({0}) as {1} LEFT JOIN ({2}) as {3} USING(ci_id)
|
||||
WHERE {3}.ci_id is NULL""".format(query_sql, alias, _query_sql, alias + "A")
|
||||
|
||||
return query_sql
|
||||
@@ -366,8 +296,8 @@ class Search(object):
|
||||
|
||||
start = time.time()
|
||||
execute = db.session.execute
|
||||
# current_app.logger.debug(v_query_sql)
|
||||
res = execute(text(v_query_sql)).fetchall()
|
||||
current_app.logger.debug(v_query_sql)
|
||||
res = execute(v_query_sql).fetchall()
|
||||
end_time = time.time()
|
||||
current_app.logger.debug("query ci ids time is: {0}".format(end_time - start))
|
||||
|
||||
@@ -376,11 +306,6 @@ class Search(object):
|
||||
|
||||
return numfound, res
|
||||
|
||||
def __get_type2filter_perms(self):
|
||||
res2 = ACLManager('cmdb').get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
if res2:
|
||||
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
|
||||
|
||||
def __get_types_has_read(self):
|
||||
"""
|
||||
:return: _type:(type1;type2)
|
||||
@@ -390,23 +315,14 @@ class Search(object):
|
||||
|
||||
self.valid_type_names = {i['name'] for i in res if PermEnum.READ in i['permissions']}
|
||||
|
||||
self.__get_type2filter_perms()
|
||||
|
||||
for type_id in self.type2filter_perms:
|
||||
ci_type = CITypeCache.get(type_id)
|
||||
if ci_type:
|
||||
if self.type2filter_perms[type_id].get('id_filter'):
|
||||
if self.use_id_filter:
|
||||
self.valid_type_names.add(ci_type.name)
|
||||
elif self.type2filter_perms[type_id].get('ci_filter'):
|
||||
if self.use_ci_filter:
|
||||
self.valid_type_names.add(ci_type.name)
|
||||
else:
|
||||
self.valid_type_names.add(ci_type.name)
|
||||
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
if res2:
|
||||
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
|
||||
|
||||
return "_type:({})".format(";".join(self.valid_type_names))
|
||||
|
||||
def __confirm_type_first(self, queries):
|
||||
|
||||
has_type = False
|
||||
|
||||
result = []
|
||||
@@ -439,10 +355,8 @@ class Search(object):
|
||||
else:
|
||||
result.append(q)
|
||||
|
||||
if self.parent_node_perm_passed:
|
||||
self.__get_type2filter_perms()
|
||||
self.valid_type_names = "ALL"
|
||||
elif result and not has_type and not self.is_app_admin:
|
||||
_is_app_admin = is_app_admin('cmdb') or g.user.username == "worker"
|
||||
if result and not has_type and not _is_app_admin:
|
||||
type_q = self.__get_types_has_read()
|
||||
if id_query:
|
||||
ci = CIManager.get_by_id(id_query)
|
||||
@@ -451,21 +365,23 @@ class Search(object):
|
||||
result.insert(0, "_type:{}".format(ci.type_id))
|
||||
else:
|
||||
result.insert(0, type_q)
|
||||
elif self.is_app_admin:
|
||||
elif _is_app_admin:
|
||||
self.valid_type_names = "ALL"
|
||||
else:
|
||||
self.__get_types_has_read()
|
||||
|
||||
current_app.logger.warning(result)
|
||||
|
||||
return result
|
||||
|
||||
def __query_by_attr(self, q, queries, alias, is_sub=False):
|
||||
def __query_by_attr(self, q, queries, alias):
|
||||
k = q.split(":")[0].strip()
|
||||
v = "\:".join(q.split(":")[1:]).strip()
|
||||
v = v.replace("'", "\\'")
|
||||
v = v.replace('"', '\\"')
|
||||
field, field_type, operator, attr = self._attr_name_proc(k)
|
||||
if field == "_type":
|
||||
_query_sql = self._type_query_handler(v, queries, is_sub)
|
||||
_query_sql = self._type_query_handler(v, queries)
|
||||
|
||||
elif field == "_id":
|
||||
_query_sql = self._id_query_handler(v)
|
||||
@@ -476,12 +392,6 @@ class Search(object):
|
||||
|
||||
is_not = True if operator == "|~" else False
|
||||
|
||||
if field_type == ValueTypeEnum.DATE and len(v) == 10:
|
||||
v = "{} 00:00:00".format(v)
|
||||
|
||||
if field_type == ValueTypeEnum.BOOL and "*" not in str(v):
|
||||
v = str(int(v in current_app.config.get('BOOL_TRUE')))
|
||||
|
||||
# in query
|
||||
if v.startswith("(") and v.endswith(")"):
|
||||
_query_sql = self._in_query_handler(attr, v, is_not)
|
||||
@@ -512,20 +422,19 @@ class Search(object):
|
||||
|
||||
return alias, _query_sql, operator
|
||||
|
||||
def __query_build_by_field(self, queries, is_first=True, only_type_query_special=True, alias='A', operator='&',
|
||||
is_sub=False):
|
||||
def __query_build_by_field(self, queries, is_first=True, only_type_query_special=True, alias='A', operator='&'):
|
||||
query_sql = ""
|
||||
|
||||
for q in queries:
|
||||
_query_sql = ""
|
||||
if isinstance(q, dict):
|
||||
alias, _query_sql, operator = self.__query_build_by_field(q['queries'], True, True, alias, is_sub=True)
|
||||
# current_app.logger.info(_query_sql)
|
||||
# current_app.logger.info((operator, is_first, alias))
|
||||
alias, _query_sql, operator = self.__query_build_by_field(q['queries'], True, True, alias)
|
||||
current_app.logger.info(_query_sql)
|
||||
current_app.logger.info((operator, is_first, alias))
|
||||
operator = q['operator']
|
||||
|
||||
elif ":" in q and not q.startswith("*"):
|
||||
alias, _query_sql, operator = self.__query_by_attr(q, queries, alias, is_sub)
|
||||
alias, _query_sql, operator = self.__query_by_attr(q, queries, alias)
|
||||
elif q == "*":
|
||||
continue
|
||||
elif q:
|
||||
@@ -551,7 +460,7 @@ class Search(object):
|
||||
def _filter_ids(self, query_sql):
|
||||
if self.ci_ids:
|
||||
return "SELECT * FROM ({0}) AS IN_QUERY WHERE IN_QUERY.ci_id IN ({1})".format(
|
||||
query_sql, ",".join(list(set(map(str, self.ci_ids)))))
|
||||
query_sql, ",".join(list(map(str, self.ci_ids))))
|
||||
|
||||
return query_sql
|
||||
|
||||
@@ -576,15 +485,13 @@ class Search(object):
|
||||
queries = handle_arg_list(self.orig_query)
|
||||
queries = self._extra_handle_query_expr(queries)
|
||||
queries = self.__confirm_type_first(queries)
|
||||
current_app.logger.debug(queries)
|
||||
|
||||
_, query_sql, _ = self.__query_build_by_field(queries)
|
||||
|
||||
s = time.time()
|
||||
if query_sql:
|
||||
query_sql = self._filter_ids(query_sql)
|
||||
if self.raw_ci_ids and not self.ci_ids:
|
||||
return 0, []
|
||||
|
||||
self.query_sql = query_sql
|
||||
# current_app.logger.debug(query_sql)
|
||||
numfound, res = self._execute_sql(query_sql)
|
||||
@@ -600,35 +507,30 @@ class Search(object):
|
||||
if k:
|
||||
table_name = TableMap(attr=attr).table_name
|
||||
query_sql = FACET_QUERY.format(table_name, self.query_sql, attr.id)
|
||||
result = db.session.execute(text(query_sql)).fetchall()
|
||||
# current_app.logger.debug(query_sql)
|
||||
result = db.session.execute(query_sql).fetchall()
|
||||
facet[k] = result
|
||||
|
||||
facet_result = dict()
|
||||
for k, v in facet.items():
|
||||
if not k.startswith('_'):
|
||||
attr = AttributeCache.get(k)
|
||||
a = getattr(attr, self.ret_key)
|
||||
facet_result[a] = [(ValueTypeMap.serialize[attr.value_type](f[0]), f[1], a) for f in v]
|
||||
a = getattr(AttributeCache.get(k), self.ret_key)
|
||||
facet_result[a] = [(f[0], f[1], a) for f in v]
|
||||
|
||||
return facet_result
|
||||
|
||||
def _fl_build(self):
|
||||
if isinstance(self.fl, list):
|
||||
_fl = list()
|
||||
for f in self.fl:
|
||||
k, _, _, _ = self._attr_name_proc(f)
|
||||
if k:
|
||||
_fl.append(k)
|
||||
_fl = list()
|
||||
for f in self.fl:
|
||||
k, _, _, _ = self._attr_name_proc(f)
|
||||
if k:
|
||||
_fl.append(k)
|
||||
|
||||
return _fl
|
||||
else:
|
||||
return self.fl
|
||||
return _fl
|
||||
|
||||
def search(self):
|
||||
numfound, ci_ids = self._query_build_raw()
|
||||
ci_ids = list(map(str, ci_ids))
|
||||
if self.only_ids:
|
||||
return ci_ids
|
||||
|
||||
_fl = self._fl_build()
|
||||
|
||||
@@ -641,8 +543,6 @@ class Search(object):
|
||||
if ci_ids:
|
||||
response = CIManager.get_cis_by_ids(ci_ids, ret_key=self.ret_key, fields=_fl, excludes=self.excludes)
|
||||
for res in response:
|
||||
if not res:
|
||||
continue
|
||||
ci_type = res.get("ci_type")
|
||||
if ci_type not in counter.keys():
|
||||
counter[ci_type] = 0
|
||||
@@ -650,8 +550,3 @@ class Search(object):
|
||||
total = len(response)
|
||||
|
||||
return response, counter, total, self.page, numfound, facet
|
||||
|
||||
def get_ci_ids(self):
|
||||
_, ci_ids = self._query_build_raw()
|
||||
|
||||
return ci_ids
|
||||
|
||||
@@ -297,8 +297,8 @@ class Search(object):
|
||||
if not attr:
|
||||
raise SearchError(ErrFormat.attribute_not_found.format(field))
|
||||
|
||||
sort_by = ("{0}.keyword".format(field)
|
||||
if attr.value_type not in (ValueTypeEnum.INT, ValueTypeEnum.FLOAT) else field)
|
||||
sort_by = "{0}.keyword".format(field) \
|
||||
if attr.value_type not in (ValueTypeEnum.INT, ValueTypeEnum.FLOAT) else field
|
||||
sorts.append({sort_by: {"order": sort_type}})
|
||||
|
||||
self.query.update(dict(sort=sorts))
|
||||
|
||||
@@ -1,40 +1,24 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from collections import Counter
|
||||
from collections import defaultdict
|
||||
|
||||
import copy
|
||||
|
||||
import json
|
||||
import networkx as nx
|
||||
import sys
|
||||
from collections import Counter
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeRelationManager
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import PermEnum
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.perms import CIFilterPermsCRUD
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
|
||||
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.models.cmdb import CI
|
||||
from api.models.cmdb import CITypeRelation
|
||||
from api.models.cmdb import RelationType
|
||||
|
||||
|
||||
class Search(object):
|
||||
def __init__(self, root_id=None,
|
||||
def __init__(self, root_id,
|
||||
level=None,
|
||||
query=None,
|
||||
fl=None,
|
||||
@@ -42,11 +26,7 @@ class Search(object):
|
||||
page=1,
|
||||
count=None,
|
||||
sort=None,
|
||||
reverse=False,
|
||||
ancestor_ids=None,
|
||||
descendant_ids=None,
|
||||
has_m2m=None,
|
||||
root_parent_path=None):
|
||||
reverse=False):
|
||||
self.orig_query = query
|
||||
self.fl = fl
|
||||
self.facet_field = facet_field
|
||||
@@ -55,116 +35,36 @@ class Search(object):
|
||||
self.sort = sort or ("ci_id" if current_app.config.get("USE_ES") else None)
|
||||
|
||||
self.root_id = root_id
|
||||
self.level = level or 0
|
||||
self.level = level
|
||||
self.reverse = reverse
|
||||
|
||||
self.level2constraint = CITypeRelationManager.get_level2constraint(
|
||||
root_id[0] if root_id and isinstance(root_id, list) else root_id,
|
||||
level[0] if isinstance(level, list) and level else level)
|
||||
|
||||
self.ancestor_ids = ancestor_ids
|
||||
self.descendant_ids = descendant_ids
|
||||
self.root_parent_path = root_parent_path
|
||||
self.has_m2m = has_m2m or False
|
||||
if not self.has_m2m:
|
||||
if self.ancestor_ids:
|
||||
self.has_m2m = True
|
||||
else:
|
||||
level = level[0] if isinstance(level, list) and level else level
|
||||
for _l, c in self.level2constraint.items():
|
||||
if _l < int(level) and c == ConstraintEnum.Many2Many:
|
||||
self.has_m2m = True
|
||||
|
||||
self.type2filter_perms = {}
|
||||
|
||||
self.is_app_admin = is_app_admin('cmdb') or current_user.username == "worker"
|
||||
|
||||
def _get_ids(self, ids):
|
||||
|
||||
def _get_ids(self):
|
||||
merge_ids = []
|
||||
key = []
|
||||
_tmp = []
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
if len(self.descendant_ids or []) >= level and self.type2filter_perms.get(self.descendant_ids[level - 1]):
|
||||
id_filter_limit, _ = self._get_ci_filter(self.type2filter_perms[self.descendant_ids[level - 1]])
|
||||
else:
|
||||
id_filter_limit = {}
|
||||
|
||||
if not self.has_m2m:
|
||||
key, prefix = list(map(str, ids)), REDIS_PREFIX_CI_RELATION
|
||||
|
||||
else:
|
||||
if not self.ancestor_ids:
|
||||
if level == 1:
|
||||
key, prefix = list(map(str, ids)), REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
if level == 1:
|
||||
key, prefix = ["{},{}".format(self.ancestor_ids, i) for i in ids], REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
if not key or id_filter_limit is None:
|
||||
return []
|
||||
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
|
||||
_tmp = [[i[0] for i in x if (not id_filter_limit or (
|
||||
key[idx] not in id_filter_limit or int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
|
||||
|
||||
_tmp = list(map(lambda x: list(json.loads(x).keys()),
|
||||
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or [])))
|
||||
ids = [j for i in _tmp for j in i]
|
||||
|
||||
if level in self.level:
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
def _get_reverse_ids(self, ids):
|
||||
def _get_reverse_ids(self):
|
||||
merge_ids = []
|
||||
level2ids = {}
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
for level in range(1, sorted(self.level)[-1] + 1):
|
||||
ids, _level2ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
|
||||
if _level2ids.get(2):
|
||||
level2ids[level + 1] = _level2ids[2]
|
||||
|
||||
ids = CIRelationManager.get_ancestor_ids(ids, 1)
|
||||
if level in self.level:
|
||||
if level in level2ids and level2ids[level]:
|
||||
merge_ids.extend(set(ids) & set(level2ids[level]))
|
||||
else:
|
||||
merge_ids.extend(ids)
|
||||
merge_ids.extend(ids)
|
||||
|
||||
return merge_ids
|
||||
|
||||
def _has_read_perm_from_parent_nodes(self):
|
||||
self.root_parent_path = list(map(str, self.root_parent_path))
|
||||
if str(self.root_id).isdigit() and str(self.root_id) not in self.root_parent_path:
|
||||
self.root_parent_path.append(str(self.root_id))
|
||||
self.root_parent_path = set(self.root_parent_path)
|
||||
|
||||
if self.is_app_admin:
|
||||
self.type2filter_perms = {}
|
||||
return True
|
||||
|
||||
res = ACLManager().get_resources(ResourceTypeEnum.CI_FILTER) or {}
|
||||
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res]))) or {}
|
||||
for _, filters in self.type2filter_perms.items():
|
||||
if set((filters.get('id_filter') or {}).keys()) & self.root_parent_path:
|
||||
return True
|
||||
|
||||
return True
|
||||
|
||||
def search(self, only_ids=False):
|
||||
use_ci_filter = len(self.descendant_ids or []) == self.level[0] - 1
|
||||
parent_node_perm_passed = not self.is_app_admin and self._has_read_perm_from_parent_nodes()
|
||||
|
||||
def search(self):
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
cis = [CI.get_by_id(_id) or abort(404, ErrFormat.ci_not_found.format("id={}".format(_id))) for _id in ids]
|
||||
|
||||
merge_ids = self._get_ids(ids) if not self.reverse else self._get_reverse_ids(ids)
|
||||
merge_ids = self._get_ids() if not self.reverse else self._get_reverse_ids()
|
||||
|
||||
if not self.orig_query or ("_type:" not in self.orig_query
|
||||
and "type_id:" not in self.orig_query
|
||||
@@ -176,11 +76,11 @@ class Search(object):
|
||||
type_ids.extend(CITypeRelationManager.get_child_type_ids(ci.type_id, level))
|
||||
else:
|
||||
type_ids.extend(CITypeRelationManager.get_parent_type_ids(ci.type_id, level))
|
||||
type_ids = set(type_ids)
|
||||
type_ids = list(set(type_ids))
|
||||
if self.orig_query:
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(map(str, type_ids)), self.orig_query)
|
||||
self.orig_query = "_type:({0}),{1}".format(";".join(list(map(str, type_ids))), self.orig_query)
|
||||
else:
|
||||
self.orig_query = "_type:({0})".format(";".join(map(str, type_ids)))
|
||||
self.orig_query = "_type:({0})".format(";".join(list(map(str, type_ids))))
|
||||
|
||||
if not merge_ids:
|
||||
# cis, counter, total, self.page, numfound, facet_
|
||||
@@ -201,142 +101,33 @@ class Search(object):
|
||||
page=self.page,
|
||||
count=self.count,
|
||||
sort=self.sort,
|
||||
ci_ids=merge_ids,
|
||||
parent_node_perm_passed=parent_node_perm_passed,
|
||||
use_ci_filter=use_ci_filter,
|
||||
only_ids=only_ids).search()
|
||||
|
||||
def _get_ci_filter(self, filter_perms, ci_filters=None):
|
||||
ci_filters = ci_filters or []
|
||||
if ci_filters:
|
||||
result = {}
|
||||
for item in ci_filters:
|
||||
res = SearchFromDB('_type:{},{}'.format(item['type_id'], item['ci_filter']),
|
||||
count=sys.maxsize, parent_node_perm_passed=True).get_ci_ids()
|
||||
if res:
|
||||
result[item['type_id']] = set(res)
|
||||
|
||||
return {}, result if result else None
|
||||
|
||||
result = dict()
|
||||
if filter_perms.get('id_filter'):
|
||||
for k in filter_perms['id_filter']:
|
||||
node_path = k.split(',')
|
||||
if len(node_path) == 1:
|
||||
result[int(node_path[0])] = 1
|
||||
elif not self.has_m2m:
|
||||
result.setdefault(node_path[-2], set()).add(int(node_path[-1]))
|
||||
else:
|
||||
result.setdefault(','.join(node_path[:-1]), set()).add(int(node_path[-1]))
|
||||
if result:
|
||||
return result, None
|
||||
else:
|
||||
return None, None
|
||||
|
||||
return {}, None
|
||||
|
||||
def statistics(self, type_ids, need_filter=True):
|
||||
self.level = int(self.level)
|
||||
|
||||
acl = ACLManager('cmdb')
|
||||
|
||||
type2filter_perms = dict()
|
||||
if not self.is_app_admin:
|
||||
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
if res2:
|
||||
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
|
||||
ci_ids=merge_ids).search()
|
||||
|
||||
def statistics(self, type_ids):
|
||||
_tmp = []
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
_tmp, tmp_res = [], []
|
||||
level2ids = {}
|
||||
for lv in range(1, self.level + 1):
|
||||
level2ids[lv] = []
|
||||
|
||||
if need_filter:
|
||||
id_filter_limit, ci_filter_limit = None, None
|
||||
if len(self.descendant_ids or []) >= lv and type2filter_perms.get(self.descendant_ids[lv - 1]):
|
||||
id_filter_limit, _ = self._get_ci_filter(type2filter_perms[self.descendant_ids[lv - 1]])
|
||||
elif type_ids and self.level == lv:
|
||||
ci_filters = [type2filter_perms[type_id] for type_id in type_ids if type_id in type2filter_perms]
|
||||
if ci_filters:
|
||||
id_filter_limit, ci_filter_limit = self._get_ci_filter({}, ci_filters=ci_filters)
|
||||
else:
|
||||
id_filter_limit = {}
|
||||
else:
|
||||
id_filter_limit = {}
|
||||
for l in range(0, int(self.level)):
|
||||
if not l:
|
||||
_tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
[i or '{}' for i in rd.get(ids, REDIS_PREFIX_CI_RELATION) or []]))
|
||||
else:
|
||||
id_filter_limit, ci_filter_limit = {}, {}
|
||||
|
||||
if lv == 1:
|
||||
if not self.has_m2m:
|
||||
key, prefix = [str(i) for i in ids], REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
key = ["{},{}".format(self.ancestor_ids, _id) for _id in ids]
|
||||
if not self.ancestor_ids:
|
||||
key, prefix = [str(i) for i in ids], REDIS_PREFIX_CI_RELATION
|
||||
else:
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
level2ids[lv] = [[i] for i in key]
|
||||
|
||||
if not key or id_filter_limit is None:
|
||||
_tmp = [[]] * len(ids)
|
||||
continue
|
||||
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
|
||||
_tmp = []
|
||||
if type_ids and lv == self.level:
|
||||
_tmp = [[i for i in x if i[1] in type_ids and
|
||||
(not id_filter_limit or (key[idx] not in id_filter_limit or
|
||||
int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
|
||||
else:
|
||||
_tmp = [[i for i in x if (not id_filter_limit or (key[idx] not in id_filter_limit or
|
||||
int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
|
||||
|
||||
if ci_filter_limit:
|
||||
_tmp = [[j for j in i if j[1] not in ci_filter_limit or int(j[0]) in ci_filter_limit[j[1]]]
|
||||
for i in _tmp]
|
||||
|
||||
else:
|
||||
|
||||
for idx, item in enumerate(_tmp):
|
||||
if item:
|
||||
if not self.has_m2m:
|
||||
key, prefix = [i[0] for i in item], REDIS_PREFIX_CI_RELATION
|
||||
if type_ids and l == self.level - 1:
|
||||
__tmp = list(
|
||||
map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
|
||||
if type_id in type_ids],
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
else:
|
||||
key = list(set(['{},{}'.format(j, i[0]) for i in item for j in level2ids[lv - 1][idx]]))
|
||||
prefix = REDIS_PREFIX_CI_RELATION2
|
||||
|
||||
level2ids[lv].append(key)
|
||||
__tmp = list(map(lambda x: list(json.loads(x).items()),
|
||||
filter(lambda x: x is not None,
|
||||
rd.get([i[0] for i in item], REDIS_PREFIX_CI_RELATION) or [])))
|
||||
|
||||
if key:
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
|
||||
if type_ids and lv == self.level:
|
||||
tmp_res = [[i for i in x if i[1] in type_ids and
|
||||
(not id_filter_limit or (
|
||||
key[idx] not in id_filter_limit or
|
||||
int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
|
||||
else:
|
||||
tmp_res = [[i for i in x if (not id_filter_limit or (
|
||||
key[idx] not in id_filter_limit or
|
||||
int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in
|
||||
enumerate(res)]
|
||||
|
||||
if ci_filter_limit:
|
||||
tmp_res = [[j for j in i if j[1] not in ci_filter_limit or
|
||||
int(j[0]) in ci_filter_limit[j[1]]] for i in tmp_res]
|
||||
else:
|
||||
tmp_res = []
|
||||
|
||||
if tmp_res:
|
||||
_tmp[idx] = [j for i in tmp_res for j in i]
|
||||
_tmp[idx] = [j for i in __tmp for j in i]
|
||||
else:
|
||||
_tmp[idx] = []
|
||||
level2ids[lv].append([])
|
||||
|
||||
result = {str(_id): len(_tmp[idx]) for idx, _id in enumerate(ids)}
|
||||
|
||||
@@ -344,250 +135,3 @@ class Search(object):
|
||||
detail={str(_id): dict(Counter([i[1] for i in _tmp[idx]]).items()) for idx, _id in enumerate(ids)})
|
||||
|
||||
return result
|
||||
|
||||
def search_full(self, type_ids):
|
||||
def _get_id2name(_type_id):
|
||||
ci_type = CITypeCache.get(_type_id)
|
||||
|
||||
attr = AttributeCache.get(ci_type.unique_id)
|
||||
value_table = TableMap(attr=attr).table
|
||||
serializer = ValueTypeMap.serialize[attr.value_type]
|
||||
unique_value = {i.ci_id: serializer(i.value) for i in value_table.get_by(attr_id=attr.id, to_dict=False)}
|
||||
|
||||
attr = AttributeCache.get(ci_type.show_id)
|
||||
if attr:
|
||||
value_table = TableMap(attr=attr).table
|
||||
serializer = ValueTypeMap.serialize[attr.value_type]
|
||||
show_value = {i.ci_id: serializer(i.value) for i in value_table.get_by(attr_id=attr.id, to_dict=False)}
|
||||
else:
|
||||
show_value = unique_value
|
||||
|
||||
return show_value, unique_value
|
||||
|
||||
self.level = int(self.level)
|
||||
|
||||
acl = ACLManager('cmdb')
|
||||
|
||||
type2filter_perms = dict()
|
||||
if not self.is_app_admin:
|
||||
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
if res2:
|
||||
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
|
||||
|
||||
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
|
||||
|
||||
level_ids = [str(i) for i in ids]
|
||||
result = []
|
||||
id2children = {}
|
||||
id2name = _get_id2name(type_ids[0])
|
||||
for i in level_ids:
|
||||
item = dict(id=int(i),
|
||||
type_id=type_ids[0],
|
||||
isLeaf=False,
|
||||
title=id2name[0].get(int(i)),
|
||||
uniqueValue=id2name[1].get(int(i)),
|
||||
children=[])
|
||||
result.append(item)
|
||||
id2children[str(i)] = item['children']
|
||||
|
||||
for lv in range(1, self.level):
|
||||
|
||||
if len(type_ids or []) >= lv and type2filter_perms.get(type_ids[lv]):
|
||||
id_filter_limit, _ = self._get_ci_filter(type2filter_perms[type_ids[lv]])
|
||||
else:
|
||||
id_filter_limit = {}
|
||||
|
||||
if self.has_m2m and lv != 1:
|
||||
key, prefix = [i for i in level_ids], REDIS_PREFIX_CI_RELATION2
|
||||
else:
|
||||
key, prefix = [i.split(',')[-1] for i in level_ids], REDIS_PREFIX_CI_RELATION
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
|
||||
res = [[i for i in x if (not id_filter_limit or (key[idx] not in id_filter_limit or
|
||||
int(i[0]) in id_filter_limit[key[idx]]) or
|
||||
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
|
||||
_level_ids = []
|
||||
type_id = type_ids[lv]
|
||||
id2name = _get_id2name(type_id)
|
||||
for idx, node_path in enumerate(level_ids):
|
||||
for child_id, _ in (res[idx] or []):
|
||||
item = dict(id=int(child_id),
|
||||
type_id=type_id,
|
||||
isLeaf=True if lv == self.level - 1 else False,
|
||||
title=id2name[0].get(int(child_id)),
|
||||
uniqueValue=id2name[1].get(int(child_id)),
|
||||
children=[])
|
||||
id2children[node_path].append(item)
|
||||
|
||||
_node_path = "{},{}".format(node_path, child_id)
|
||||
_level_ids.append(_node_path)
|
||||
id2children[_node_path] = item['children']
|
||||
|
||||
level_ids = _level_ids
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def _get_src_ids(src):
|
||||
q = src.get('q') or ''
|
||||
if not q.startswith('_type:'):
|
||||
q = "_type:{},{}".format(src['type_id'], q)
|
||||
|
||||
return SearchFromDB(q, use_ci_filter=True, only_ids=True, count=100000).search()
|
||||
|
||||
@staticmethod
|
||||
def _filter_target_ids(target_ids, type_ids, q):
|
||||
if not q.startswith('_type:'):
|
||||
q = "_type:({}),{}".format(";".join(map(str, type_ids)), q)
|
||||
|
||||
ci_ids = SearchFromDB(q, ci_ids=target_ids, use_ci_filter=True, only_ids=True, count=100000).search()
|
||||
cis = CI.get_by(fl=['id', 'type_id'], only_query=True).filter(CI.id.in_(ci_ids))
|
||||
|
||||
return [(str(i.id), i.type_id) for i in cis]
|
||||
|
||||
@staticmethod
|
||||
def _path2level(src_type_id, target_type_ids, path):
|
||||
if not src_type_id or not target_type_ids:
|
||||
return abort(400, ErrFormat.relation_path_search_src_target_required)
|
||||
|
||||
graph = nx.DiGraph()
|
||||
graph.add_edges_from([(n, _path[idx + 1]) for _path in path for idx, n in enumerate(_path[:-1])])
|
||||
relation_types = defaultdict(dict)
|
||||
level2type = defaultdict(set)
|
||||
type2show_key = dict()
|
||||
for _path in path:
|
||||
for idx, node in enumerate(_path[1:]):
|
||||
level2type[idx + 1].add(node)
|
||||
|
||||
src = CITypeCache.get(_path[idx])
|
||||
target = CITypeCache.get(node)
|
||||
relation_type = RelationType.get_by(only_query=True).join(
|
||||
CITypeRelation, CITypeRelation.relation_type_id == RelationType.id).filter(
|
||||
CITypeRelation.parent_id == src.id).filter(CITypeRelation.child_id == target.id).first()
|
||||
relation_types[src.alias].update({target.alias: relation_type.name})
|
||||
|
||||
if src.id not in type2show_key:
|
||||
type2show_key[src.id] = AttributeCache.get(src.show_id or src.unique_id).name
|
||||
if target.id not in type2show_key:
|
||||
type2show_key[target.id] = AttributeCache.get(target.show_id or target.unique_id).name
|
||||
|
||||
nodes = graph.nodes()
|
||||
|
||||
return level2type, list(nodes), relation_types, type2show_key
|
||||
|
||||
def _build_graph(self, source_ids, source_type_id, level2type, target_type_ids, acl):
|
||||
type2filter_perms = dict()
|
||||
if not self.is_app_admin:
|
||||
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
|
||||
if res2:
|
||||
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
|
||||
|
||||
target_type_ids = set(target_type_ids)
|
||||
graph = nx.DiGraph()
|
||||
target_ids = []
|
||||
key = [(str(i), source_type_id) for i in source_ids]
|
||||
graph.add_nodes_from(key)
|
||||
for level in level2type:
|
||||
filter_type_ids = level2type[level]
|
||||
id_filter_limit = dict()
|
||||
for _type_id in filter_type_ids:
|
||||
if type2filter_perms.get(_type_id):
|
||||
_id_filter_limit, _ = self._get_ci_filter(type2filter_perms[_type_id])
|
||||
id_filter_limit.update(_id_filter_limit)
|
||||
|
||||
has_target = filter_type_ids & target_type_ids
|
||||
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get([i[0] for i in key],
|
||||
REDIS_PREFIX_CI_RELATION) or []]]
|
||||
_key = []
|
||||
for idx, _id in enumerate(key):
|
||||
valid_targets = [i for i in res[idx] if i[1] in filter_type_ids and
|
||||
(not id_filter_limit or int(i[0]) in id_filter_limit)]
|
||||
_key.extend(valid_targets)
|
||||
graph.add_edges_from(zip([_id] * len(valid_targets), valid_targets))
|
||||
|
||||
if has_target:
|
||||
target_ids.extend([j[0] for i in res for j in i if j[1] in target_type_ids])
|
||||
|
||||
key = copy.deepcopy(_key)
|
||||
|
||||
return graph, target_ids
|
||||
|
||||
@staticmethod
|
||||
def _find_paths(graph, source_ids, source_type_id, target_ids, valid_path, max_depth=6):
|
||||
paths = []
|
||||
for source_id in source_ids:
|
||||
_paths = nx.all_simple_paths(graph,
|
||||
source=(source_id, source_type_id),
|
||||
target=target_ids,
|
||||
cutoff=max_depth)
|
||||
for __path in _paths:
|
||||
if tuple([i[1] for i in __path]) in valid_path:
|
||||
paths.append([i[0] for i in __path])
|
||||
|
||||
return paths
|
||||
|
||||
@staticmethod
|
||||
def _wrap_path_result(paths, types, valid_path, target_types, type2show_key):
|
||||
ci_ids = [j for i in paths for j in i]
|
||||
|
||||
response, _, _, _, _, _ = SearchFromDB("_type:({})".format(";".join(map(str, types))),
|
||||
use_ci_filter=False,
|
||||
ci_ids=list(map(int, ci_ids)),
|
||||
count=1000000).search()
|
||||
id2ci = {str(i.get('_id')): i if i['_type'] in target_types else {
|
||||
type2show_key[i['_type']]: i[type2show_key[i['_type']]],
|
||||
"ci_type_alias": i["ci_type_alias"],
|
||||
"_type": i["_type"],
|
||||
} for i in response}
|
||||
|
||||
result = defaultdict(list)
|
||||
counter = defaultdict(int)
|
||||
|
||||
for path in paths:
|
||||
key = "-".join([id2ci.get(i, {}).get('ci_type_alias') or '' for i in path])
|
||||
if tuple([id2ci.get(i, {}).get('_type') for i in path]) in valid_path:
|
||||
counter[key] += 1
|
||||
result[key].append(path)
|
||||
|
||||
return result, counter, id2ci
|
||||
|
||||
def search_by_path(self, source, target, path):
|
||||
"""
|
||||
|
||||
:param source: {type_id: id, q: expr}
|
||||
:param target: {type_ids: [id], q: expr}
|
||||
:param path: [source_type_id, ..., target_type_id], use type id
|
||||
:return:
|
||||
"""
|
||||
acl = ACLManager('cmdb')
|
||||
if not self.is_app_admin:
|
||||
res = {i['name'] for i in acl.get_resources(ResourceTypeEnum.CI_TYPE)}
|
||||
for type_id in (source.get('type_id') and [source['type_id']] or []) + (target.get('type_ids') or []):
|
||||
_type = CITypeCache.get(type_id)
|
||||
if _type and _type.name not in res:
|
||||
return abort(403, ErrFormat.no_permission.format(_type.alias, PermEnum.READ))
|
||||
|
||||
target['type_ids'] = [i[-1] for i in path]
|
||||
level2type, types, relation_types, type2show_key = self._path2level(
|
||||
source.get('type_id'), target.get('type_ids'), path)
|
||||
if not level2type:
|
||||
return [], {}, 0, self.page, 0, {}, {}
|
||||
|
||||
source_ids = self._get_src_ids(source)
|
||||
|
||||
graph, target_ids = self._build_graph(source_ids, source['type_id'], level2type, target['type_ids'], acl)
|
||||
target_ids = self._filter_target_ids(target_ids, target['type_ids'], target.get('q') or '')
|
||||
paths = self._find_paths(graph,
|
||||
source_ids,
|
||||
source['type_id'],
|
||||
set(target_ids),
|
||||
{tuple(i): 1 for i in path})
|
||||
|
||||
numfound = len(paths)
|
||||
paths = paths[(self.page - 1) * self.count:self.page * self.count]
|
||||
response, counter, id2ci = self._wrap_path_result(paths,
|
||||
types,
|
||||
{tuple(i): 1 for i in path},
|
||||
set(target.get('type_ids') or []),
|
||||
type2show_key)
|
||||
return response, counter, len(paths), self.page, numfound, id2ci, relation_types, type2show_key
|
||||
|
||||
@@ -1,251 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import json
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from werkzeug.exceptions import BadRequest
|
||||
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeCache
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeRelationManager
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import ResourceTypeEnum
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search as ci_search
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
from api.models.cmdb import TopologyView
|
||||
from api.models.cmdb import TopologyViewGroup
|
||||
|
||||
|
||||
class TopologyViewManager(object):
|
||||
group_cls = TopologyViewGroup
|
||||
cls = TopologyView
|
||||
|
||||
@classmethod
|
||||
def get_name_by_id(cls, _id):
|
||||
res = cls.cls.get_by_id(_id)
|
||||
return res and res.name
|
||||
|
||||
def get_view_by_id(self, _id):
|
||||
res = self.cls.get_by_id(_id)
|
||||
|
||||
return res and res.to_dict() or {}
|
||||
|
||||
@classmethod
|
||||
def add_group(cls, name, order):
|
||||
if order is None:
|
||||
cur_max_order = cls.group_cls.get_by(only_query=True).order_by(cls.group_cls.order.desc()).first()
|
||||
cur_max_order = cur_max_order and cur_max_order.order or 0
|
||||
order = cur_max_order + 1
|
||||
|
||||
cls.group_cls.get_by(name=name, first=True, to_dict=False) and abort(
|
||||
400, ErrFormat.topology_group_exists.format(name))
|
||||
|
||||
return cls.group_cls.create(name=name, order=order)
|
||||
|
||||
def update_group(self, group_id, name, view_ids):
|
||||
existed = self.group_cls.get_by_id(group_id) or abort(404, ErrFormat.not_found)
|
||||
if name is not None and name != existed.name:
|
||||
existed.update(name=name)
|
||||
|
||||
for idx, view_id in enumerate(view_ids):
|
||||
view = self.cls.get_by_id(view_id)
|
||||
if view is not None:
|
||||
view.update(group_id=group_id, order=idx)
|
||||
|
||||
return existed.to_dict()
|
||||
|
||||
@classmethod
|
||||
def delete_group(cls, _id):
|
||||
existed = cls.group_cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
|
||||
|
||||
if cls.cls.get_by(group_id=_id, first=True):
|
||||
return abort(400, ErrFormat.topo_view_exists_cannot_delete_group)
|
||||
|
||||
existed.soft_delete()
|
||||
|
||||
@classmethod
|
||||
def group_order(cls, group_ids):
|
||||
for idx, group_id in enumerate(group_ids):
|
||||
group = cls.group_cls.get_by_id(group_id)
|
||||
group.update(order=idx + 1)
|
||||
|
||||
@classmethod
|
||||
def add(cls, name, group_id, option, order=None, **kwargs):
|
||||
cls.cls.get_by(name=name, first=True) and abort(400, ErrFormat.topology_exists.format(name))
|
||||
if order is None:
|
||||
cur_max_order = cls.cls.get_by(group_id=group_id, only_query=True).order_by(
|
||||
cls.cls.order.desc()).first()
|
||||
cur_max_order = cur_max_order and cur_max_order.order or 0
|
||||
order = cur_max_order + 1
|
||||
|
||||
inst = cls.cls.create(name=name, group_id=group_id, option=option, order=order, **kwargs).to_dict()
|
||||
if current_app.config.get('USE_ACL'):
|
||||
try:
|
||||
ACLManager().add_resource(name, ResourceTypeEnum.TOPOLOGY_VIEW)
|
||||
except BadRequest:
|
||||
pass
|
||||
|
||||
ACLManager().grant_resource_to_role(name,
|
||||
current_user.username,
|
||||
ResourceTypeEnum.TOPOLOGY_VIEW)
|
||||
|
||||
return inst
|
||||
|
||||
@classmethod
|
||||
def update(cls, _id, **kwargs):
|
||||
existed = cls.cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
|
||||
existed_name = existed.name
|
||||
|
||||
inst = existed.update(filter_none=False, **kwargs).to_dict()
|
||||
if current_app.config.get('USE_ACL') and existed_name != kwargs.get('name') and kwargs.get('name'):
|
||||
try:
|
||||
ACLManager().update_resource(existed_name, kwargs['name'], ResourceTypeEnum.TOPOLOGY_VIEW)
|
||||
except BadRequest:
|
||||
pass
|
||||
|
||||
return inst
|
||||
|
||||
@classmethod
|
||||
def delete(cls, _id):
|
||||
existed = cls.cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
|
||||
|
||||
existed.soft_delete()
|
||||
if current_app.config.get("USE_ACL"):
|
||||
ACLManager().del_resource(existed.name, ResourceTypeEnum.TOPOLOGY_VIEW)
|
||||
|
||||
@classmethod
|
||||
def group_inner_order(cls, _ids):
|
||||
for idx, _id in enumerate(_ids):
|
||||
topology = cls.cls.get_by_id(_id)
|
||||
topology.update(order=idx + 1)
|
||||
|
||||
@classmethod
|
||||
def get_all(cls):
|
||||
resources = None
|
||||
if current_app.config.get('USE_ACL') and not is_app_admin('cmdb'):
|
||||
resources = set([i.get('name') for i in ACLManager().get_resources(ResourceTypeEnum.TOPOLOGY_VIEW)])
|
||||
|
||||
groups = cls.group_cls.get_by(to_dict=True)
|
||||
groups = sorted(groups, key=lambda x: x['order'])
|
||||
group2pos = {group['id']: idx for idx, group in enumerate(groups)}
|
||||
|
||||
topo_views = sorted(cls.cls.get_by(to_dict=True), key=lambda x: x['order'])
|
||||
other_group = dict(views=[])
|
||||
for view in topo_views:
|
||||
if resources is not None and view['name'] not in resources:
|
||||
continue
|
||||
|
||||
if view['group_id']:
|
||||
groups[group2pos[view['group_id']]].setdefault('views', []).append(view)
|
||||
else:
|
||||
other_group['views'].append(view)
|
||||
|
||||
if other_group['views']:
|
||||
groups.append(other_group)
|
||||
|
||||
return groups
|
||||
|
||||
@staticmethod
|
||||
def relation_from_ci_type(type_id):
|
||||
nodes, edges = CITypeRelationManager.get_relations_by_type_id(type_id)
|
||||
|
||||
return dict(nodes=nodes, edges=edges)
|
||||
|
||||
def topology_view(self, view_id=None, preview=None):
|
||||
if view_id is not None:
|
||||
view = self.cls.get_by_id(view_id) or abort(404, ErrFormat.not_found)
|
||||
central_node_type, central_node_instances, path = (view.central_node_type,
|
||||
view.central_node_instances, view.path)
|
||||
else:
|
||||
central_node_type = preview.get('central_node_type')
|
||||
central_node_instances = preview.get('central_node_instances')
|
||||
path = preview.get('path')
|
||||
|
||||
nodes, links = [], []
|
||||
_type = CITypeCache.get(central_node_type)
|
||||
if not _type:
|
||||
return dict(nodes=nodes, links=links)
|
||||
type2meta = {_type.id: _type.icon}
|
||||
root_ids = []
|
||||
show_key = AttributeCache.get(_type.show_id or _type.unique_id)
|
||||
|
||||
q = (central_node_instances[2:] if central_node_instances.startswith('q=') else
|
||||
central_node_instances)
|
||||
s = ci_search(q, fl=['_id', show_key.name], use_id_filter=False, use_ci_filter=False, count=1000000)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.info(e)
|
||||
return dict(nodes=nodes, links=links)
|
||||
for i in response:
|
||||
root_ids.append(i['_id'])
|
||||
nodes.append(dict(id=str(i['_id']), name=i[show_key.name], type_id=central_node_type))
|
||||
if not root_ids:
|
||||
return dict(nodes=nodes, links=links)
|
||||
|
||||
prefix = REDIS_PREFIX_CI_RELATION
|
||||
key = list(map(str, root_ids))
|
||||
id2node = {}
|
||||
for level in sorted([i for i in path.keys() if int(i) > 0]):
|
||||
type_ids = {int(i) for i in path[level]}
|
||||
|
||||
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
|
||||
new_key = []
|
||||
for idx, from_id in enumerate(key):
|
||||
for to_id, type_id in res[idx]:
|
||||
if type_id in type_ids:
|
||||
links.append({'from': from_id, 'to': to_id})
|
||||
id2node[to_id] = {'id': to_id, 'type_id': type_id}
|
||||
new_key.append(to_id)
|
||||
if type_id not in type2meta:
|
||||
type2meta[type_id] = CITypeCache.get(type_id).icon
|
||||
|
||||
key = new_key
|
||||
|
||||
ci_ids = list(map(int, root_ids))
|
||||
for level in sorted([i for i in path.keys() if int(i) < 0]):
|
||||
type_ids = {int(i) for i in path[level]}
|
||||
res = CIRelationManager.get_parent_ids(ci_ids)
|
||||
_ci_ids = []
|
||||
for to_id in res:
|
||||
for from_id, type_id in res[to_id]:
|
||||
if type_id in type_ids:
|
||||
from_id, to_id = str(from_id), str(to_id)
|
||||
links.append({'from': from_id, 'to': to_id})
|
||||
id2node[from_id] = {'id': str(from_id), 'type_id': type_id}
|
||||
_ci_ids.append(from_id)
|
||||
if type_id not in type2meta:
|
||||
type2meta[type_id] = CITypeCache.get(type_id).icon
|
||||
|
||||
ci_ids = _ci_ids
|
||||
|
||||
fl = set()
|
||||
type_ids = {t for lv in path if lv != '0' for t in path[lv]}
|
||||
type2show = {}
|
||||
for type_id in type_ids:
|
||||
ci_type = CITypeCache.get(type_id)
|
||||
if ci_type:
|
||||
attr = AttributeCache.get(ci_type.show_id or ci_type.unique_id)
|
||||
if attr:
|
||||
fl.add(attr.name)
|
||||
type2show[type_id] = attr.name
|
||||
|
||||
if id2node:
|
||||
s = ci_search("_id:({})".format(';'.join(id2node.keys())), fl=list(fl),
|
||||
use_id_filter=False, use_ci_filter=False, count=1000000)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError:
|
||||
return dict(nodes=nodes, links=links)
|
||||
for i in response:
|
||||
id2node[str(i['_id'])]['name'] = i[type2show[str(i['_type'])]]
|
||||
nodes.extend(id2node.values())
|
||||
|
||||
return dict(nodes=nodes, links=links, type2meta=type2meta)
|
||||
@@ -4,68 +4,37 @@ from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import re
|
||||
|
||||
import six
|
||||
from flask import current_app
|
||||
from markupsafe import escape
|
||||
|
||||
import api.models.cmdb as model
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
|
||||
TIME_RE = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')
|
||||
|
||||
|
||||
class ValueDeserializeError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def string2int(x):
|
||||
v = int(float(x))
|
||||
if v > 2147483647:
|
||||
raise ValueDeserializeError(ErrFormat.attribute_value_out_of_range)
|
||||
|
||||
return v
|
||||
|
||||
|
||||
def str2date(x):
|
||||
|
||||
try:
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d").date()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date()
|
||||
except ValueError:
|
||||
pass
|
||||
return int(float(x))
|
||||
|
||||
|
||||
def str2datetime(x):
|
||||
|
||||
x = x.replace('T', ' ')
|
||||
x = x.replace('Z', '')
|
||||
|
||||
try:
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d")
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M")
|
||||
|
||||
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
class ValueTypeMap(object):
|
||||
deserialize = {
|
||||
ValueTypeEnum.INT: string2int,
|
||||
ValueTypeEnum.FLOAT: float,
|
||||
ValueTypeEnum.TEXT: lambda x: x,
|
||||
ValueTypeEnum.TIME: lambda x: TIME_RE.findall(x)[0],
|
||||
ValueTypeEnum.TEXT: lambda x: escape(x).encode('utf-8').decode('utf-8'),
|
||||
ValueTypeEnum.TIME: lambda x: escape(x).encode('utf-8').decode('utf-8'),
|
||||
ValueTypeEnum.DATETIME: str2datetime,
|
||||
ValueTypeEnum.DATE: str2date,
|
||||
ValueTypeEnum.DATE: str2datetime,
|
||||
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
|
||||
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
|
||||
}
|
||||
|
||||
serialize = {
|
||||
@@ -73,10 +42,9 @@ class ValueTypeMap(object):
|
||||
ValueTypeEnum.FLOAT: float,
|
||||
ValueTypeEnum.TEXT: lambda x: x if isinstance(x, six.string_types) else str(x),
|
||||
ValueTypeEnum.TIME: lambda x: x if isinstance(x, six.string_types) else str(x),
|
||||
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d") if not isinstance(x, six.string_types) else x,
|
||||
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if not isinstance(x, six.string_types) else x,
|
||||
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d"),
|
||||
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
|
||||
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
|
||||
}
|
||||
|
||||
serialize2 = {
|
||||
@@ -87,20 +55,21 @@ class ValueTypeMap(object):
|
||||
ValueTypeEnum.DATE: lambda x: (x.decode() if not isinstance(x, six.string_types) else x).split()[0],
|
||||
ValueTypeEnum.DATETIME: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
|
||||
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
|
||||
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
|
||||
}
|
||||
|
||||
choice = {
|
||||
ValueTypeEnum.INT: model.IntegerChoice,
|
||||
ValueTypeEnum.FLOAT: model.FloatChoice,
|
||||
ValueTypeEnum.TEXT: model.TextChoice,
|
||||
ValueTypeEnum.TIME: model.TextChoice,
|
||||
ValueTypeEnum.DATE: model.TextChoice,
|
||||
ValueTypeEnum.DATETIME: model.TextChoice,
|
||||
}
|
||||
|
||||
table = {
|
||||
ValueTypeEnum.INT: model.CIValueInteger,
|
||||
ValueTypeEnum.TEXT: model.CIValueText,
|
||||
ValueTypeEnum.DATETIME: model.CIValueDateTime,
|
||||
ValueTypeEnum.DATE: model.CIValueDateTime,
|
||||
ValueTypeEnum.TIME: model.CIValueText,
|
||||
ValueTypeEnum.FLOAT: model.CIValueFloat,
|
||||
ValueTypeEnum.JSON: model.CIValueJson,
|
||||
'index_{0}'.format(ValueTypeEnum.INT): model.CIIndexValueInteger,
|
||||
'index_{0}'.format(ValueTypeEnum.TEXT): model.CIIndexValueText,
|
||||
@@ -109,11 +78,15 @@ class ValueTypeMap(object):
|
||||
'index_{0}'.format(ValueTypeEnum.TIME): model.CIIndexValueText,
|
||||
'index_{0}'.format(ValueTypeEnum.FLOAT): model.CIIndexValueFloat,
|
||||
'index_{0}'.format(ValueTypeEnum.JSON): model.CIValueJson,
|
||||
'index_{0}'.format(ValueTypeEnum.BOOL): model.CIIndexValueInteger,
|
||||
}
|
||||
|
||||
table_name = {
|
||||
ValueTypeEnum.INT: 'c_value_integers',
|
||||
ValueTypeEnum.TEXT: 'c_value_texts',
|
||||
ValueTypeEnum.DATETIME: 'c_value_datetime',
|
||||
ValueTypeEnum.DATE: 'c_value_datetime',
|
||||
ValueTypeEnum.TIME: 'c_value_texts',
|
||||
ValueTypeEnum.FLOAT: 'c_value_floats',
|
||||
ValueTypeEnum.JSON: 'c_value_json',
|
||||
'index_{0}'.format(ValueTypeEnum.INT): 'c_value_index_integers',
|
||||
'index_{0}'.format(ValueTypeEnum.TEXT): 'c_value_index_texts',
|
||||
@@ -122,7 +95,6 @@ class ValueTypeMap(object):
|
||||
'index_{0}'.format(ValueTypeEnum.TIME): 'c_value_index_texts',
|
||||
'index_{0}'.format(ValueTypeEnum.FLOAT): 'c_value_index_floats',
|
||||
'index_{0}'.format(ValueTypeEnum.JSON): 'c_value_json',
|
||||
'index_{0}'.format(ValueTypeEnum.BOOL): 'c_value_index_integers',
|
||||
}
|
||||
|
||||
es_type = {
|
||||
@@ -132,7 +104,7 @@ class ValueTypeMap(object):
|
||||
ValueTypeEnum.DATE: 'text',
|
||||
ValueTypeEnum.TIME: 'text',
|
||||
ValueTypeEnum.FLOAT: 'float',
|
||||
ValueTypeEnum.JSON: 'object',
|
||||
ValueTypeEnum.JSON: 'object'
|
||||
}
|
||||
|
||||
|
||||
@@ -145,13 +117,8 @@ class TableMap(object):
|
||||
@property
|
||||
def table(self):
|
||||
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
|
||||
if attr.is_password or attr.is_link:
|
||||
self.is_index = False
|
||||
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
|
||||
self.is_index = True
|
||||
elif self.is_index is None:
|
||||
if self.is_index is None:
|
||||
self.is_index = attr.is_index
|
||||
|
||||
i = "index_{0}".format(attr.value_type) if self.is_index else attr.value_type
|
||||
|
||||
return ValueTypeMap.table.get(i)
|
||||
@@ -159,13 +126,8 @@ class TableMap(object):
|
||||
@property
|
||||
def table_name(self):
|
||||
attr = AttributeCache.get(self.attr_name) if not self.attr else self.attr
|
||||
if attr.is_password or attr.is_link:
|
||||
self.is_index = False
|
||||
elif attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON}:
|
||||
self.is_index = True
|
||||
elif self.is_index is None:
|
||||
if self.is_index is None:
|
||||
self.is_index = attr.is_index
|
||||
|
||||
i = "index_{0}".format(attr.value_type) if self.is_index else attr.value_type
|
||||
|
||||
return ValueTypeMap.table_name.get(i)
|
||||
|
||||
@@ -3,29 +3,27 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import imp
|
||||
|
||||
import copy
|
||||
import jinja2
|
||||
import imp
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import jinja2
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from jinja2schema import infer
|
||||
from jinja2schema import to_json_schema
|
||||
from werkzeug.exceptions import BadRequest
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.cmdb.attribute import AttributeManager
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeAttributeCache
|
||||
from api.lib.cmdb.const import ExistPolicy
|
||||
from api.lib.cmdb.const import OperateType
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.cmdb.history import AttributeHistoryManger
|
||||
from api.lib.cmdb.resp_format import ErrFormat
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.cmdb.utils import ValueDeserializeError
|
||||
from api.lib.cmdb.utils import ValueTypeMap
|
||||
from api.lib.utils import handle_arg_list
|
||||
from api.models.cmdb import CI
|
||||
@@ -47,7 +45,7 @@ class AttributeValueManager(object):
|
||||
"""
|
||||
return AttributeCache.get(key)
|
||||
|
||||
def get_attr_values(self, fields, ci_id, ret_key="name", unique_key=None, use_master=False, enum_map=None):
|
||||
def get_attr_values(self, fields, ci_id, ret_key="name", unique_key=None, use_master=False):
|
||||
"""
|
||||
|
||||
:param fields:
|
||||
@@ -55,7 +53,6 @@ class AttributeValueManager(object):
|
||||
:param ret_key: It can be name or alias
|
||||
:param unique_key: primary attribute
|
||||
:param use_master: Only for master-slave read-write separation
|
||||
:param enum_map:
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
@@ -70,19 +67,12 @@ class AttributeValueManager(object):
|
||||
use_master=use_master,
|
||||
to_dict=False)
|
||||
field_name = getattr(attr, ret_key)
|
||||
|
||||
if attr.is_list:
|
||||
res[field_name] = [ValueTypeMap.serialize[attr.value_type](i.value) for i in rs]
|
||||
elif attr.is_password and rs:
|
||||
res[field_name] = '******' if rs[0].value else ''
|
||||
else:
|
||||
res[field_name] = ValueTypeMap.serialize[attr.value_type](rs[0].value) if rs else None
|
||||
|
||||
if enum_map and field_name in enum_map:
|
||||
if attr.is_list:
|
||||
res[field_name] = [enum_map[field_name].get(i, i) for i in res[field_name]]
|
||||
else:
|
||||
res[field_name] = enum_map[field_name].get(res[field_name], res[field_name])
|
||||
|
||||
if unique_key is not None and attr.id == unique_key.id and rs:
|
||||
res['unique'] = unique_key.name
|
||||
res['unique_alias'] = unique_key.alias
|
||||
@@ -90,34 +80,24 @@ class AttributeValueManager(object):
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def _deserialize_value(alias, value_type, value):
|
||||
def __deserialize_value(value_type, value):
|
||||
if not value:
|
||||
return value
|
||||
|
||||
deserialize = ValueTypeMap.deserialize[value_type]
|
||||
try:
|
||||
v = deserialize(value)
|
||||
if value_type in (ValueTypeEnum.DATE, ValueTypeEnum.DATETIME):
|
||||
return str(v)
|
||||
return v
|
||||
except ValueDeserializeError as e:
|
||||
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, e))
|
||||
except ValueError:
|
||||
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, value))
|
||||
return abort(400, ErrFormat.attribute_value_invalid.format(value))
|
||||
|
||||
@staticmethod
|
||||
def _check_is_choice(attr, value_type, value):
|
||||
choice_values = AttributeManager.get_choice_values(attr.id, value_type, attr.choice_web_hook, attr.choice_other)
|
||||
if value_type == ValueTypeEnum.FLOAT:
|
||||
if float(value) not in list(map(float, [i[0] for i in choice_values])):
|
||||
return abort(400, ErrFormat.not_in_choice_values.format(value))
|
||||
|
||||
else:
|
||||
if str(value) not in list(map(str, [i[0] for i in choice_values])):
|
||||
return abort(400, ErrFormat.not_in_choice_values.format(value))
|
||||
def __check_is_choice(attr, value_type, value):
|
||||
choice_values = AttributeManager.get_choice_values(attr.id, value_type, attr.choice_web_hook)
|
||||
if str(value) not in list(map(str, [i[0] for i in choice_values])):
|
||||
return abort(400, ErrFormat.not_in_choice_values.format(value))
|
||||
|
||||
@staticmethod
|
||||
def _check_is_unique(value_table, attr, ci_id, type_id, value):
|
||||
def __check_is_unique(value_table, attr, ci_id, type_id, value):
|
||||
existed = db.session.query(value_table.attr_id).join(CI, CI.id == value_table.ci_id).filter(
|
||||
CI.type_id == type_id).filter(
|
||||
value_table.attr_id == attr.id).filter(value_table.deleted.is_(False)).filter(
|
||||
@@ -126,38 +106,24 @@ class AttributeValueManager(object):
|
||||
existed and abort(400, ErrFormat.attribute_value_unique_required.format(attr.alias, value))
|
||||
|
||||
@staticmethod
|
||||
def _check_is_required(type_id, attr, value, type_attr=None):
|
||||
def __check_is_required(type_id, attr, value, type_attr=None):
|
||||
type_attr = type_attr or CITypeAttributeCache.get(type_id, attr.id)
|
||||
if type_attr and type_attr.is_required and not value and value != 0:
|
||||
return abort(400, ErrFormat.attribute_value_required.format(attr.alias))
|
||||
|
||||
@staticmethod
|
||||
def check_re(expr, alias, value):
|
||||
if not re.compile(expr).match(str(value)):
|
||||
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, value))
|
||||
|
||||
def _validate(self, attr, value, value_table, ci=None, type_id=None, ci_id=None, type_attr=None):
|
||||
if not attr.is_reference:
|
||||
ci = ci or {}
|
||||
v = self._deserialize_value(attr.alias, attr.value_type, value)
|
||||
ci = ci or {}
|
||||
v = self.__deserialize_value(attr.value_type, value)
|
||||
|
||||
attr.is_choice and value and self._check_is_choice(attr, attr.value_type, v)
|
||||
|
||||
else:
|
||||
v = value or None
|
||||
|
||||
attr.is_unique and self._check_is_unique(
|
||||
attr.is_choice and value and self.__check_is_choice(attr, attr.value_type, v)
|
||||
attr.is_unique and self.__check_is_unique(
|
||||
value_table, attr, ci and ci.id or ci_id, ci and ci.type_id or type_id, v)
|
||||
self._check_is_required(ci and ci.type_id or type_id, attr, v, type_attr=type_attr)
|
||||
if attr.is_reference:
|
||||
return v
|
||||
|
||||
self.__check_is_required(ci and ci.type_id or type_id, attr, v, type_attr=type_attr)
|
||||
|
||||
if v == "" and attr.value_type not in (ValueTypeEnum.TEXT,):
|
||||
v = None
|
||||
|
||||
if attr.re_check and value:
|
||||
self.check_re(attr.re_check, attr.alias, value)
|
||||
|
||||
return v
|
||||
|
||||
@staticmethod
|
||||
@@ -165,21 +131,20 @@ class AttributeValueManager(object):
|
||||
return AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id)
|
||||
|
||||
@staticmethod
|
||||
def write_change2(changed, record_id=None, ticket_id=None):
|
||||
def _write_change2(changed):
|
||||
record_id = None
|
||||
for ci_id, attr_id, operate_type, old, new, type_id in changed:
|
||||
record_id = AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id,
|
||||
ticket_id=ticket_id,
|
||||
commit=False, flush=False)
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.error("write change failed: {}".format(str(e)))
|
||||
|
||||
return record_id
|
||||
|
||||
@staticmethod
|
||||
def _compute_attr_value_from_expr(expr, ci_dict):
|
||||
def __compute_attr_value_from_expr(expr, ci_dict):
|
||||
t = jinja2.Template(expr).render(ci_dict)
|
||||
|
||||
try:
|
||||
@@ -189,7 +154,7 @@ class AttributeValueManager(object):
|
||||
return t
|
||||
|
||||
@staticmethod
|
||||
def _compute_attr_value_from_script(script, ci_dict):
|
||||
def __compute_attr_value_from_script(script, ci_dict):
|
||||
script = jinja2.Template(script).render(ci_dict)
|
||||
|
||||
script_f = tempfile.NamedTemporaryFile(delete=False, suffix=".py")
|
||||
@@ -218,22 +183,22 @@ class AttributeValueManager(object):
|
||||
|
||||
return [var for var in schema.get("properties")]
|
||||
|
||||
def _compute_attr_value(self, attr, payload, ci_id):
|
||||
attrs = (self._jinja2_parse(attr['compute_expr']) if attr.get('compute_expr')
|
||||
else self._jinja2_parse(attr['compute_script']))
|
||||
def _compute_attr_value(self, attr, payload, ci):
|
||||
attrs = self._jinja2_parse(attr['compute_expr']) if attr.get('compute_expr') else \
|
||||
self._jinja2_parse(attr['compute_script'])
|
||||
not_existed = [i for i in attrs if i not in payload]
|
||||
if ci_id is not None:
|
||||
payload.update(self.get_attr_values(not_existed, ci_id))
|
||||
if ci is not None:
|
||||
payload.update(self.get_attr_values(not_existed, ci.id))
|
||||
|
||||
if attr['compute_expr']:
|
||||
return self._compute_attr_value_from_expr(attr['compute_expr'], payload)
|
||||
return self.__compute_attr_value_from_expr(attr['compute_expr'], payload)
|
||||
elif attr['compute_script']:
|
||||
return self._compute_attr_value_from_script(attr['compute_script'], payload)
|
||||
return self.__compute_attr_value_from_script(attr['compute_script'], payload)
|
||||
|
||||
def handle_ci_compute_attributes(self, ci_dict, computed_attrs, ci):
|
||||
payload = copy.deepcopy(ci_dict)
|
||||
for attr in computed_attrs:
|
||||
computed_value = self._compute_attr_value(attr, payload, ci and ci.id)
|
||||
computed_value = self._compute_attr_value(attr, payload, ci)
|
||||
if computed_value is not None:
|
||||
ci_dict[attr['name']] = computed_value
|
||||
|
||||
@@ -250,28 +215,17 @@ class AttributeValueManager(object):
|
||||
|
||||
try:
|
||||
if attr.is_list:
|
||||
if isinstance(value, dict):
|
||||
if value.get('op') == "delete":
|
||||
value['v'] = [ValueTypeMap.serialize[attr.value_type](
|
||||
self._deserialize_value(attr.alias, attr.value_type, i))
|
||||
for i in handle_arg_list(value['v'])]
|
||||
continue
|
||||
_value = value.get('v') or []
|
||||
else:
|
||||
_value = value
|
||||
value_list = [self._validate(attr, i, value_table, ci=None, type_id=type_id, ci_id=ci_id,
|
||||
type_attr=ci_attr2type_attr.get(attr.id))
|
||||
for i in handle_arg_list(_value)]
|
||||
ci_dict[key] = value_list if not isinstance(value, dict) else dict(op=value.get('op'), v=value_list)
|
||||
for i in handle_arg_list(value)]
|
||||
ci_dict[key] = value_list
|
||||
if not value_list:
|
||||
self._check_is_required(type_id, attr, '')
|
||||
self.__check_is_required(type_id, attr, '')
|
||||
|
||||
else:
|
||||
value = self._validate(attr, value, value_table, ci=None, type_id=type_id, ci_id=ci_id,
|
||||
type_attr=ci_attr2type_attr.get(attr.id))
|
||||
ci_dict[key] = value
|
||||
except BadRequest as e:
|
||||
raise
|
||||
except Exception as e:
|
||||
current_app.logger.warning(str(e))
|
||||
|
||||
@@ -280,17 +234,15 @@ class AttributeValueManager(object):
|
||||
|
||||
return key2attr
|
||||
|
||||
def create_or_update_attr_value(self, ci, ci_dict, key2attr, ticket_id=None):
|
||||
def create_or_update_attr_value2(self, ci, ci_dict, key2attr):
|
||||
"""
|
||||
add or update attribute value, then write history
|
||||
:param ci: instance object
|
||||
:param ci_dict: attribute dict
|
||||
:param key2attr: attr key to attr
|
||||
:param ticket_id:
|
||||
:return:
|
||||
"""
|
||||
changed = []
|
||||
has_dynamic = False
|
||||
for key, value in ci_dict.items():
|
||||
attr = key2attr.get(key)
|
||||
if not attr:
|
||||
@@ -299,90 +251,106 @@ class AttributeValueManager(object):
|
||||
|
||||
if attr.is_list:
|
||||
existed_attrs = value_table.get_by(attr_id=attr.id, ci_id=ci.id, to_dict=False)
|
||||
existed_values = [(ValueTypeMap.serialize[attr.value_type](i.value) if
|
||||
i.value or i.value == 0 else i.value) for i in existed_attrs]
|
||||
existed_values = [i.value for i in existed_attrs]
|
||||
added = set(value) - set(existed_values)
|
||||
deleted = set(existed_values) - set(value)
|
||||
for v in added:
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=v, flush=False, commit=False)
|
||||
changed.append((ci.id, attr.id, OperateType.ADD, None, v, ci.type_id))
|
||||
|
||||
if isinstance(value, dict):
|
||||
if value.get('op') == "add":
|
||||
for v in (value.get('v') or []):
|
||||
if v not in existed_values:
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=v, flush=False, commit=False)
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.ADD, None, v, ci.type_id))
|
||||
else:
|
||||
has_dynamic = True
|
||||
|
||||
elif value.get('op') == "delete":
|
||||
for v in (value.get('v') or []):
|
||||
if v in existed_values:
|
||||
existed_attrs[existed_values.index(v)].delete(flush=False, commit=False)
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.DELETE, v, None, ci.type_id))
|
||||
else:
|
||||
has_dynamic = True
|
||||
else:
|
||||
# Comparison array starts from which position changes
|
||||
min_len = min(len(value), len(existed_values))
|
||||
index = 0
|
||||
while index < min_len:
|
||||
if value[index] != existed_values[index]:
|
||||
break
|
||||
index += 1
|
||||
|
||||
# Delete first and then add to ensure id sorting
|
||||
for idx in range(index, len(existed_attrs)):
|
||||
existed_attr = existed_attrs[idx]
|
||||
existed_attr.delete(flush=False, commit=False)
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.DELETE, existed_values[idx], None, ci.type_id))
|
||||
else:
|
||||
has_dynamic = True
|
||||
for idx in range(index, len(value)):
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=value[idx], flush=False, commit=False)
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.ADD, None, value[idx], ci.type_id))
|
||||
else:
|
||||
has_dynamic = True
|
||||
for v in deleted:
|
||||
existed_attr = existed_attrs[existed_values.index(v)]
|
||||
existed_attr.delete(flush=False, commit=False)
|
||||
changed.append((ci.id, attr.id, OperateType.DELETE, v, None, ci.type_id))
|
||||
else:
|
||||
existed_attr = value_table.get_by(attr_id=attr.id, ci_id=ci.id, first=True, to_dict=False)
|
||||
existed_value = existed_attr and existed_attr.value
|
||||
existed_value = (ValueTypeMap.serialize[attr.value_type](existed_value) if
|
||||
existed_value or existed_value == 0 else existed_value)
|
||||
if existed_value is None and value is not None:
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=value, flush=False, commit=False)
|
||||
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.ADD, None, value, ci.type_id))
|
||||
else:
|
||||
has_dynamic = True
|
||||
changed.append((ci.id, attr.id, OperateType.ADD, None, value, ci.type_id))
|
||||
else:
|
||||
if existed_value != value and existed_attr:
|
||||
if existed_value != value:
|
||||
if value is None:
|
||||
existed_attr.delete(flush=False, commit=False)
|
||||
else:
|
||||
existed_attr.update(value=value, flush=False, commit=False)
|
||||
|
||||
if not attr.is_dynamic:
|
||||
changed.append((ci.id, attr.id, OperateType.UPDATE, existed_value, value, ci.type_id))
|
||||
changed.append((ci.id, attr.id, OperateType.UPDATE, existed_value, value, ci.type_id))
|
||||
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.warning(str(e))
|
||||
return abort(400, ErrFormat.attribute_value_unknown_error.format(str(e)))
|
||||
|
||||
return self._write_change2(changed)
|
||||
|
||||
def create_or_update_attr_value(self, key, value, ci, _no_attribute_policy=ExistPolicy.IGNORE, record_id=None):
|
||||
"""
|
||||
add or update attribute value, then write history
|
||||
:param key: id, name or alias
|
||||
:param value:
|
||||
:param ci: instance object
|
||||
:param _no_attribute_policy: ignore or reject
|
||||
:param record_id: op record
|
||||
:return:
|
||||
"""
|
||||
attr = self._get_attr(key)
|
||||
if attr is None:
|
||||
if _no_attribute_policy == ExistPolicy.IGNORE:
|
||||
return
|
||||
if _no_attribute_policy == ExistPolicy.REJECT:
|
||||
return abort(400, ErrFormat.attribute_not_found.format(key))
|
||||
|
||||
value_table = TableMap(attr=attr).table
|
||||
|
||||
try:
|
||||
if attr.is_list:
|
||||
value_list = [self._validate(attr, i, value_table, ci) for i in handle_arg_list(value)]
|
||||
if not value_list:
|
||||
self.__check_is_required(ci.type_id, attr, '')
|
||||
|
||||
existed_attrs = value_table.get_by(attr_id=attr.id, ci_id=ci.id, to_dict=False)
|
||||
existed_values = [i.value for i in existed_attrs]
|
||||
added = set(value_list) - set(existed_values)
|
||||
deleted = set(existed_values) - set(value_list)
|
||||
for v in added:
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=v)
|
||||
record_id = self._write_change(ci.id, attr.id, OperateType.ADD, None, v, record_id, ci.type_id)
|
||||
|
||||
for v in deleted:
|
||||
existed_attr = existed_attrs[existed_values.index(v)]
|
||||
existed_attr.delete()
|
||||
record_id = self._write_change(ci.id, attr.id, OperateType.DELETE, v, None, record_id, ci.type_id)
|
||||
else:
|
||||
value = self._validate(attr, value, value_table, ci)
|
||||
existed_attr = value_table.get_by(attr_id=attr.id, ci_id=ci.id, first=True, to_dict=False)
|
||||
existed_value = existed_attr and existed_attr.value
|
||||
if existed_value is None and value is not None:
|
||||
value_table.create(ci_id=ci.id, attr_id=attr.id, value=value)
|
||||
|
||||
record_id = self._write_change(ci.id, attr.id, OperateType.ADD, None, value, record_id, ci.type_id)
|
||||
else:
|
||||
if existed_value != value:
|
||||
if value is None:
|
||||
existed_attr.delete()
|
||||
else:
|
||||
has_dynamic = True
|
||||
existed_attr.update(value=value)
|
||||
|
||||
if changed or has_dynamic:
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
current_app.logger.warning(str(e))
|
||||
return abort(400, ErrFormat.attribute_value_unknown_error.format(e.args[0]))
|
||||
record_id = self._write_change(ci.id, attr.id, OperateType.UPDATE,
|
||||
existed_value, value, record_id, ci.type_id)
|
||||
|
||||
return self.write_change2(changed, ticket_id=ticket_id), has_dynamic
|
||||
else:
|
||||
return None, has_dynamic
|
||||
return record_id
|
||||
except Exception as e:
|
||||
current_app.logger.warning(str(e))
|
||||
return abort(400, ErrFormat.attribute_value_invalid2.format("{}({})".format(attr.alias, attr.name), value))
|
||||
|
||||
@staticmethod
|
||||
def delete_attr_value(attr_id, ci_id, commit=True):
|
||||
def delete_attr_value(attr_id, ci_id):
|
||||
attr = AttributeCache.get(attr_id)
|
||||
if attr is not None:
|
||||
value_table = TableMap(attr=attr).table
|
||||
for item in value_table.get_by(attr_id=attr.id, ci_id=ci_id, to_dict=False):
|
||||
item.delete(commit=commit)
|
||||
item.delete()
|
||||
|
||||
@@ -1,20 +1,13 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.perm.acl.app import AppCRUD
|
||||
from api.lib.perm.acl.cache import RoleCache, AppCache
|
||||
from api.lib.perm.acl.permission import PermissionCRUD
|
||||
from api.lib.perm.acl.resource import ResourceTypeCRUD, ResourceCRUD
|
||||
from api.lib.perm.acl.role import RoleCRUD, RoleRelationCRUD
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
|
||||
def validate_app(app_id):
|
||||
app = AppCache.get(app_id)
|
||||
return app.id if app else None
|
||||
|
||||
|
||||
class ACLManager(object):
|
||||
def __init__(self, app_name='acl', uid=None):
|
||||
self.log = current_app.logger
|
||||
@@ -85,69 +78,19 @@ class ACLManager(object):
|
||||
return role.to_dict()
|
||||
|
||||
@staticmethod
|
||||
def delete_role(_id):
|
||||
def delete_role(_id, payload):
|
||||
RoleCRUD.delete_role(_id)
|
||||
return dict(rid=_id)
|
||||
|
||||
def get_user_info(self, username):
|
||||
from api.lib.perm.acl.acl import ACLManager as ACL
|
||||
user_info = ACL().get_user_info(username, self.app_name)
|
||||
result = dict(
|
||||
name=user_info.get('nickname') or username,
|
||||
username=user_info.get('username') or username,
|
||||
email=user_info.get('email'),
|
||||
uid=user_info.get('uid'),
|
||||
rid=user_info.get('rid'),
|
||||
role=dict(permissions=user_info.get('parents')),
|
||||
avatar=user_info.get('avatar')
|
||||
)
|
||||
result = dict(name=user_info.get('nickname') or username,
|
||||
username=user_info.get('username') or username,
|
||||
email=user_info.get('email'),
|
||||
uid=user_info.get('uid'),
|
||||
rid=user_info.get('rid'),
|
||||
role=dict(permissions=user_info.get('parents')),
|
||||
avatar=user_info.get('avatar'))
|
||||
|
||||
return result
|
||||
|
||||
def validate_app(self):
|
||||
return AppCache.get(self.app_name)
|
||||
|
||||
def get_all_resources_types(self, q=None, page=1, page_size=999999):
|
||||
app_id = self.validate_app().id
|
||||
numfound, res, id2perms = ResourceTypeCRUD.search(q, app_id, page, page_size)
|
||||
|
||||
return dict(
|
||||
numfound=numfound,
|
||||
groups=[i.to_dict() for i in res],
|
||||
id2perms=id2perms
|
||||
)
|
||||
|
||||
def create_resources_type(self, payload):
|
||||
payload['app_id'] = self.validate_app().id
|
||||
rt = ResourceTypeCRUD.add(**payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
||||
def update_resources_type(self, _id, payload):
|
||||
rt = ResourceTypeCRUD.update(_id, **payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
||||
def create_resource(self, payload):
|
||||
payload['app_id'] = self.validate_app().id
|
||||
resource = ResourceCRUD.add(**payload)
|
||||
|
||||
return resource.to_dict()
|
||||
|
||||
def get_resource_by_type(self, q, u, rt_id, page=1, page_size=999999):
|
||||
numfound, res = ResourceCRUD.search(q, u, self.validate_app().id, rt_id, page, page_size)
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def grant_resource(rid, resource_id, perms):
|
||||
PermissionCRUD.grant(rid, perms, resource_id=resource_id, group_id=None)
|
||||
|
||||
@staticmethod
|
||||
def create_app(payload):
|
||||
rt = AppCRUD.add(**payload)
|
||||
|
||||
return rt.to_dict()
|
||||
|
||||
def role_has_perms(self, rid, resource_name, resource_type_name, perm):
|
||||
app_id = validate_app(self.app_name)
|
||||
return RoleCRUD.has_permission(rid, resource_name, resource_type_name, app_id, perm)
|
||||
|
||||
@@ -1,282 +0,0 @@
|
||||
import copy
|
||||
import json
|
||||
|
||||
from flask import abort, current_app
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError, LDAPSocketOpenError
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import CommonData
|
||||
from api.lib.utils import AESCrypto
|
||||
from api.lib.common_setting.const import AuthCommonConfig, AuthenticateType, AuthCommonConfigAutoRedirect, TestType
|
||||
|
||||
|
||||
class CommonDataCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_data_by_type(data_type):
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
return CommonData.get_by(data_type=data_type)
|
||||
|
||||
@staticmethod
|
||||
def get_data_by_id(_id, to_dict=True):
|
||||
return CommonData.get_by(first=True, id=_id, to_dict=to_dict)
|
||||
|
||||
@staticmethod
|
||||
def create_new_data(data_type, **kwargs):
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(data_type)
|
||||
|
||||
return CommonData.create(data_type=data_type, **kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def update_data(_id, **kwargs):
|
||||
existed = CommonDataCRUD.get_data_by_id(_id, to_dict=False)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def delete(_id):
|
||||
existed = CommonDataCRUD.get_data_by_id(_id, to_dict=False)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
CommonDataCRUD.check_auth_type(existed.data_type)
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def check_auth_type(data_type):
|
||||
if data_type in list(AuthenticateType.all()) + [AuthCommonConfig]:
|
||||
abort(400, ErrFormat.common_data_not_support_auth_type.format(data_type))
|
||||
|
||||
@staticmethod
|
||||
def set_auth_auto_redirect_enable(_value: int):
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig, to_dict=False)
|
||||
if not existed:
|
||||
CommonDataCRUD.create_new_data(AuthCommonConfig, data={AuthCommonConfigAutoRedirect: _value})
|
||||
else:
|
||||
data = existed.data
|
||||
data = copy.deepcopy(existed.data) if data else {}
|
||||
data[AuthCommonConfigAutoRedirect] = _value
|
||||
CommonDataCRUD.update_data(existed.id, data=data)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def get_auth_auto_redirect_enable():
|
||||
existed = CommonData.get_by(first=True, data_type=AuthCommonConfig)
|
||||
if not existed:
|
||||
return 0
|
||||
data = existed.get('data', {})
|
||||
if not data:
|
||||
return 0
|
||||
return data.get(AuthCommonConfigAutoRedirect, 0)
|
||||
|
||||
|
||||
class AuthenticateDataCRUD(object):
|
||||
common_type_list = [AuthCommonConfig]
|
||||
|
||||
def __init__(self, _type):
|
||||
self._type = _type
|
||||
self.record = None
|
||||
self.decrypt_data = {}
|
||||
|
||||
def get_support_type_list(self):
|
||||
return list(AuthenticateType.all()) + self.common_type_list
|
||||
|
||||
def get(self):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data
|
||||
|
||||
def get_by_key(self, _key):
|
||||
if not self.decrypt_data:
|
||||
self.decrypt_data = self.get_decrypt_data()
|
||||
|
||||
return self.decrypt_data.get(_key, None)
|
||||
|
||||
def get_record(self, to_dict=False) -> CommonData:
|
||||
return CommonData.get_by(first=True, data_type=self._type, to_dict=to_dict)
|
||||
|
||||
def get_record_with_decrypt(self) -> dict:
|
||||
record = CommonData.get_by(first=True, data_type=self._type, to_dict=True)
|
||||
if not record:
|
||||
return {}
|
||||
data = self.get_decrypt_dict(record.get('data', ''))
|
||||
record['data'] = data
|
||||
return record
|
||||
|
||||
def get_decrypt_dict(self, data):
|
||||
decrypt_str = self.decrypt(data)
|
||||
try:
|
||||
return json.loads(decrypt_str)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
def get_decrypt_data(self) -> dict:
|
||||
self.record = self.get_record()
|
||||
if not self.record:
|
||||
return self.get_from_config()
|
||||
return self.get_decrypt_dict(self.record.data)
|
||||
|
||||
def get_from_config(self):
|
||||
return current_app.config.get(self._type, {})
|
||||
|
||||
def check_by_type(self) -> None:
|
||||
existed = self.get_record()
|
||||
if existed:
|
||||
abort(400, ErrFormat.common_data_already_existed.format(self._type))
|
||||
|
||||
def create(self, data) -> CommonData:
|
||||
self.check_by_type()
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return CommonData.create(data_type=self._type, data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return CommonData.create(data_type=self._type, data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update_by_record(self, record, data) -> CommonData:
|
||||
encrypt = data.pop('encrypt', None)
|
||||
if encrypt is False:
|
||||
return record.update(data=data)
|
||||
encrypted_data = self.encrypt(data)
|
||||
try:
|
||||
return record.update(data=encrypted_data)
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
def update(self, _id, data) -> CommonData:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
|
||||
return self.update_by_record(existed, data)
|
||||
|
||||
@staticmethod
|
||||
def delete(_id) -> None:
|
||||
existed = CommonData.get_by(first=True, to_dict=False, id=_id)
|
||||
if not existed:
|
||||
abort(404, ErrFormat.common_data_not_found.format(_id))
|
||||
try:
|
||||
existed.soft_delete()
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def encrypt(data) -> str:
|
||||
if type(data) is dict:
|
||||
try:
|
||||
data = json.dumps(data)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
return AESCrypto().encrypt(data)
|
||||
|
||||
@staticmethod
|
||||
def decrypt(data) -> str:
|
||||
return AESCrypto().decrypt(data)
|
||||
|
||||
@staticmethod
|
||||
def get_enable_list():
|
||||
all_records = CommonData.query.filter(
|
||||
CommonData.data_type.in_(AuthenticateType.all()),
|
||||
CommonData.deleted == 0
|
||||
).all()
|
||||
enable_list = []
|
||||
for auth_type in AuthenticateType.all():
|
||||
record = list(filter(lambda x: x.data_type == auth_type, all_records))
|
||||
if not record:
|
||||
config = current_app.config.get(auth_type, None)
|
||||
if not config:
|
||||
continue
|
||||
|
||||
if config.get('enable', False):
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
continue
|
||||
|
||||
try:
|
||||
decrypt_data = json.loads(AuthenticateDataCRUD.decrypt(record[0].data))
|
||||
except Exception as e:
|
||||
current_app.logger.error(e)
|
||||
continue
|
||||
|
||||
if decrypt_data.get('enable', 0) == 1:
|
||||
enable_list.append(dict(
|
||||
auth_type=auth_type,
|
||||
))
|
||||
|
||||
auth_auto_redirect = CommonDataCRUD.get_auth_auto_redirect_enable()
|
||||
|
||||
return dict(
|
||||
enable_list=enable_list,
|
||||
auth_auto_redirect=auth_auto_redirect,
|
||||
)
|
||||
|
||||
def test(self, test_type, data):
|
||||
type_lower = self._type.lower()
|
||||
func_name = f'test_{type_lower}'
|
||||
if hasattr(self, func_name):
|
||||
try:
|
||||
return getattr(self, f'test_{type_lower}')(test_type, data)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
abort(400, ErrFormat.not_support_test.format(self._type))
|
||||
|
||||
@staticmethod
|
||||
def test_ldap(test_type, data):
|
||||
ldap_server = data.get('ldap_server')
|
||||
ldap_user_dn = data.get('ldap_user_dn', '{}')
|
||||
|
||||
server = Server(ldap_server, connect_timeout=2)
|
||||
if not server.check_availability():
|
||||
raise Exception(ErrFormat.ldap_server_connect_not_available)
|
||||
else:
|
||||
if test_type == TestType.Connect:
|
||||
return True
|
||||
|
||||
username = data.get('username', None)
|
||||
if not username:
|
||||
raise Exception(ErrFormat.ldap_test_username_required)
|
||||
user = ldap_user_dn.format(username)
|
||||
password = data.get('password', None)
|
||||
|
||||
try:
|
||||
Connection(server, user=user, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
ldap_domain = data.get('ldap_domain')
|
||||
user_with_domain = f"{username}@{ldap_domain}"
|
||||
try:
|
||||
Connection(server, user=user_with_domain, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
except LDAPSocketOpenError:
|
||||
raise Exception(ErrFormat.ldap_server_connect_timeout)
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(ErrFormat.ldap_test_unknown_error.format(str(e)))
|
||||
|
||||
return True
|
||||
@@ -1,7 +1,5 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from api.extensions import cache
|
||||
from api.models.common_setting import CompanyInfo
|
||||
|
||||
|
||||
@@ -13,51 +11,14 @@ class CompanyInfoCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def create(**kwargs):
|
||||
CompanyInfoCRUD.check_data(**kwargs)
|
||||
res = CompanyInfo.create(**kwargs)
|
||||
CompanyInfoCache.refresh(res.info)
|
||||
return res
|
||||
return CompanyInfo.create(**kwargs)
|
||||
|
||||
@staticmethod
|
||||
def update(_id, **kwargs):
|
||||
kwargs.pop('id', None)
|
||||
existed = CompanyInfo.get_by_id(_id)
|
||||
if not existed:
|
||||
existed = CompanyInfoCRUD.create(**kwargs)
|
||||
return CompanyInfoCRUD.create(**kwargs)
|
||||
else:
|
||||
CompanyInfoCRUD.check_data(**kwargs)
|
||||
existed = existed.update(**kwargs)
|
||||
CompanyInfoCache.refresh(existed.info)
|
||||
return existed
|
||||
|
||||
@staticmethod
|
||||
def check_data(**kwargs):
|
||||
info = kwargs.get('info', {})
|
||||
info['messenger'] = CompanyInfoCRUD.check_messenger(info.get('messenger', None))
|
||||
|
||||
kwargs['info'] = info
|
||||
|
||||
@staticmethod
|
||||
def check_messenger(messenger):
|
||||
if not messenger:
|
||||
return messenger
|
||||
|
||||
parsed_url = urlparse(messenger)
|
||||
return f"{parsed_url.scheme}://{parsed_url.netloc}"
|
||||
|
||||
|
||||
class CompanyInfoCache(object):
|
||||
key = 'CompanyInfoCache::'
|
||||
|
||||
@classmethod
|
||||
def get(cls):
|
||||
info = cache.get(cls.key)
|
||||
if not info:
|
||||
res = CompanyInfo.get_by(first=True) or {}
|
||||
info = res.get('info', {})
|
||||
cache.set(cls.key, info)
|
||||
return info
|
||||
|
||||
@classmethod
|
||||
def refresh(cls, info):
|
||||
cache.set(cls.key, info)
|
||||
return existed
|
||||
|
||||
@@ -4,63 +4,11 @@ COMMON_SETTING_QUEUE = "common_setting_async"
|
||||
|
||||
|
||||
class OperatorType(BaseEnum):
|
||||
EQUAL = 1
|
||||
NOT_EQUAL = 2
|
||||
IN = 3
|
||||
NOT_IN = 4
|
||||
GREATER_THAN = 5
|
||||
LESS_THAN = 6
|
||||
IS_EMPTY = 7
|
||||
IS_NOT_EMPTY = 8
|
||||
|
||||
|
||||
BotNameMap = {
|
||||
'wechatApp': 'wechatBot',
|
||||
'feishuApp': 'feishuBot',
|
||||
'dingdingApp': 'dingdingBot',
|
||||
}
|
||||
|
||||
|
||||
class AuthenticateType(BaseEnum):
|
||||
CAS = 'CAS'
|
||||
OAUTH2 = 'OAUTH2'
|
||||
OIDC = 'OIDC'
|
||||
LDAP = 'LDAP'
|
||||
|
||||
|
||||
AuthCommonConfig = 'AuthCommonConfig'
|
||||
AuthCommonConfigAutoRedirect = 'auto_redirect'
|
||||
|
||||
|
||||
class TestType(BaseEnum):
|
||||
Connect = 'connect'
|
||||
Login = 'login'
|
||||
|
||||
|
||||
MIMEExtMap = {
|
||||
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
|
||||
'application/msword': '.doc',
|
||||
'application/vnd.ms-word.document.macroEnabled.12': '.docm',
|
||||
'application/vnd.ms-excel': '.xls',
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
|
||||
'application/vnd.ms-excel.sheet.macroEnabled.12': '.xlsm',
|
||||
'application/vnd.ms-powerpoint': '.ppt',
|
||||
'application/vnd.openxmlformats-officedocument.presentationml.presentation': '.pptx',
|
||||
'application/vnd.ms-powerpoint.presentation.macroEnabled.12': '.pptm',
|
||||
'application/zip': '.zip',
|
||||
'application/x-7z-compressed': '.7z',
|
||||
'application/json': '.json',
|
||||
'application/pdf': '.pdf',
|
||||
'image/png': '.png',
|
||||
'image/bmp': '.bmp',
|
||||
'image/prs.btif': '.btif',
|
||||
'image/gif': '.gif',
|
||||
'image/jpeg': '.jpg',
|
||||
'image/tiff': '.tif',
|
||||
'image/vnd.microsoft.icon': '.ico',
|
||||
'image/webp': '.webp',
|
||||
'image/svg+xml': '.svg',
|
||||
'image/vnd.adobe.photoshop': '.psd',
|
||||
'text/plain': '.txt',
|
||||
'text/csv': '.csv',
|
||||
}
|
||||
EQUAL = 1 # 等于
|
||||
NOT_EQUAL = 2 # 不等于
|
||||
IN = 3 # 包含
|
||||
NOT_IN = 4 # 不包含
|
||||
GREATER_THAN = 5 # 大于
|
||||
LESS_THAN = 6 # 小于
|
||||
IS_EMPTY = 7 # 为空
|
||||
IS_NOT_EMPTY = 8 # 不为空
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
import functools
|
||||
|
||||
from flask import abort, session
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.perm.acl.acl import is_app_admin
|
||||
|
||||
|
||||
def perms_role_required(app_name, resource_type_name, resource_name, perm, role_name=None):
|
||||
def decorator_perms_role_required(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper_required(*args, **kwargs):
|
||||
acl = ACLManager(app_name)
|
||||
has_perms = False
|
||||
try:
|
||||
has_perms = acl.role_has_perms(session["acl"]['rid'], resource_name, resource_type_name, perm)
|
||||
except Exception as e:
|
||||
# resource_type not exist, continue check role
|
||||
if role_name:
|
||||
if role_name not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin(app_name):
|
||||
abort(403, ErrFormat.role_required.format(role_name))
|
||||
|
||||
return func(*args, **kwargs)
|
||||
else:
|
||||
abort(403, ErrFormat.resource_no_permission.format(resource_name, perm))
|
||||
|
||||
if not has_perms:
|
||||
if role_name:
|
||||
if role_name not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin(app_name):
|
||||
abort(403, ErrFormat.role_required.format(role_name))
|
||||
else:
|
||||
abort(403, ErrFormat.resource_no_permission.format(resource_name, perm))
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper_required
|
||||
|
||||
return decorator_perms_role_required
|
||||
@@ -1,41 +1,47 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import abort, current_app
|
||||
from flask import abort
|
||||
from treelib import Tree
|
||||
from wtforms import Form
|
||||
from wtforms import IntegerField
|
||||
from wtforms import StringField
|
||||
from wtforms import validators
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.common_setting.utils import get_df_from_read_sql
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
from api.models.common_setting import Department, Employee
|
||||
|
||||
sub_departments_column_name = 'sub_departments'
|
||||
|
||||
|
||||
def get_all_department_list(to_dict=True):
|
||||
def drop_ts_column(df):
|
||||
columns = list(df.columns)
|
||||
remove_columns = []
|
||||
for column in ['created_at', 'updated_at', 'deleted_at', 'last_login']:
|
||||
targets = list(filter(lambda c: c.startswith(column), columns))
|
||||
if targets:
|
||||
remove_columns.extend(targets)
|
||||
|
||||
remove_columns = list(set(remove_columns))
|
||||
|
||||
return df.drop(remove_columns, axis=1) if len(remove_columns) > 0 else df
|
||||
|
||||
|
||||
def get_department_df():
|
||||
criterion = [
|
||||
Department.deleted == 0,
|
||||
]
|
||||
query = Department.query.filter(
|
||||
*criterion
|
||||
).order_by(Department.department_id.asc())
|
||||
results = query.all()
|
||||
if to_dict:
|
||||
datas = []
|
||||
for r in results:
|
||||
d = r.to_dict()
|
||||
if r.department_id == 0:
|
||||
d['department_name'] = ErrFormat.company_wide
|
||||
datas.append(d)
|
||||
return datas
|
||||
return results
|
||||
)
|
||||
df = get_df_from_read_sql(query)
|
||||
if df.empty:
|
||||
return
|
||||
return drop_ts_column(df)
|
||||
|
||||
|
||||
def get_all_employee_list(block=0, to_dict=True):
|
||||
def get_all_employee_df(block=0):
|
||||
criterion = [
|
||||
Employee.deleted == 0,
|
||||
]
|
||||
@@ -44,107 +50,112 @@ def get_all_employee_list(block=0, to_dict=True):
|
||||
Employee.block == block
|
||||
)
|
||||
|
||||
results = db.session.query(Employee).filter(*criterion).all()
|
||||
|
||||
DepartmentTreeEmployeeColumns = [
|
||||
'acl_rid',
|
||||
'employee_id',
|
||||
'username',
|
||||
'nickname',
|
||||
'email',
|
||||
'mobile',
|
||||
'direct_supervisor_id',
|
||||
'block',
|
||||
'department_id',
|
||||
]
|
||||
|
||||
def format_columns(e):
|
||||
return {column: getattr(e, column) for column in DepartmentTreeEmployeeColumns}
|
||||
|
||||
return [format_columns(r) for r in results] if to_dict else results
|
||||
entities = [getattr(Employee, c) for c in Employee.get_columns(
|
||||
).keys() if c not in ['deleted', 'deleted_at']]
|
||||
query = Employee.query.with_entities(
|
||||
*entities
|
||||
).filter(
|
||||
*criterion
|
||||
)
|
||||
df = get_df_from_read_sql(query)
|
||||
if df.empty:
|
||||
return df
|
||||
return drop_ts_column(df)
|
||||
|
||||
|
||||
class DepartmentTree(object):
|
||||
def __init__(self, append_employee=False, block=-1):
|
||||
self.append_employee = append_employee
|
||||
self.block = block
|
||||
self.all_department_list = get_all_department_list()
|
||||
self.all_employee_list = get_all_employee_list(
|
||||
self.d_df = get_department_df()
|
||||
self.employee_df = get_all_employee_df(
|
||||
block) if append_employee else None
|
||||
|
||||
def prepare(self):
|
||||
pass
|
||||
|
||||
def get_employees_by_d_id(self, d_id):
|
||||
block = self.block
|
||||
|
||||
def filter_department_id(e):
|
||||
if self.block != -1:
|
||||
return e['department_id'] == d_id and e['block'] == block
|
||||
return e.department_id == d_id
|
||||
|
||||
results = list(filter(lambda e: filter_department_id(e), self.all_employee_list))
|
||||
|
||||
return results
|
||||
|
||||
def get_department_by_parent_id(self, parent_id):
|
||||
results = list(filter(lambda d: d['department_parent_id'] == parent_id, self.all_department_list))
|
||||
if not results:
|
||||
_df = self.employee_df[
|
||||
self.employee_df['department_id'].eq(d_id)
|
||||
].sort_values(by=['direct_supervisor_id'], ascending=True)
|
||||
if _df.empty:
|
||||
return []
|
||||
return results
|
||||
|
||||
if self.block != -1:
|
||||
_df = _df[
|
||||
_df['block'].eq(self.block)
|
||||
]
|
||||
|
||||
return _df.to_dict('records')
|
||||
|
||||
def get_tree_departments(self):
|
||||
# 一级部门
|
||||
top_departments = self.get_department_by_parent_id(-1)
|
||||
if len(top_departments) == 0:
|
||||
top_df = self.d_df[self.d_df['department_parent_id'].eq(-1)]
|
||||
if top_df.empty:
|
||||
return []
|
||||
|
||||
d_list = []
|
||||
|
||||
for top_d in top_departments:
|
||||
for index in top_df.index:
|
||||
top_d = top_df.loc[index].to_dict()
|
||||
|
||||
department_id = top_d['department_id']
|
||||
sub_deps = self.get_department_by_parent_id(department_id)
|
||||
|
||||
# 检查 department_id 是否作为其他部门的 parent
|
||||
sub_df = self.d_df[
|
||||
self.d_df['department_parent_id'].eq(department_id)
|
||||
].sort_values(by=['sort_value'], ascending=True)
|
||||
|
||||
employees = []
|
||||
|
||||
if self.append_employee:
|
||||
# 要包含员工
|
||||
employees = self.get_employees_by_d_id(department_id)
|
||||
|
||||
top_d['employees'] = employees
|
||||
top_d['department_name'] = ErrFormat.company_wide
|
||||
if len(sub_deps) == 0:
|
||||
|
||||
if sub_df.empty:
|
||||
top_d[sub_departments_column_name] = []
|
||||
d_list.append(top_d)
|
||||
continue
|
||||
|
||||
self.parse_sub_department(sub_deps, top_d)
|
||||
self.parse_sub_department(sub_df, top_d)
|
||||
d_list.append(top_d)
|
||||
|
||||
return d_list
|
||||
|
||||
def get_all_departments(self, is_tree=1):
|
||||
if len(self.all_department_list) == 0:
|
||||
if self.d_df.empty:
|
||||
return []
|
||||
|
||||
if is_tree != 1:
|
||||
return self.all_department_list
|
||||
return self.d_df.to_dict('records')
|
||||
|
||||
return self.get_tree_departments()
|
||||
|
||||
def parse_sub_department(self, deps, top_d):
|
||||
def parse_sub_department(self, df, top_d):
|
||||
sub_departments = []
|
||||
for d in deps:
|
||||
sub_deps = self.get_department_by_parent_id(d['department_id'])
|
||||
for s_index in df.index:
|
||||
d = df.loc[s_index].to_dict()
|
||||
sub_df = self.d_df[
|
||||
self.d_df['department_parent_id'].eq(
|
||||
df.at[s_index, 'department_id'])
|
||||
].sort_values(by=['sort_value'], ascending=True)
|
||||
employees = []
|
||||
|
||||
if self.append_employee:
|
||||
employees = self.get_employees_by_d_id(d['department_id'])
|
||||
# 要包含员工
|
||||
employees = self.get_employees_by_d_id(
|
||||
df.at[s_index, 'department_id'])
|
||||
|
||||
d['employees'] = employees
|
||||
|
||||
if len(sub_deps) == 0:
|
||||
if sub_df.empty:
|
||||
d[sub_departments_column_name] = []
|
||||
sub_departments.append(d)
|
||||
continue
|
||||
|
||||
self.parse_sub_department(sub_deps, d)
|
||||
self.parse_sub_department(sub_df, d)
|
||||
sub_departments.append(d)
|
||||
|
||||
top_d[sub_departments_column_name] = sub_departments
|
||||
@@ -162,10 +173,6 @@ class DepartmentForm(Form):
|
||||
|
||||
class DepartmentCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_id(d_id, to_dict=True):
|
||||
return Department.get_by(first=True, department_id=d_id, to_dict=to_dict)
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
DepartmentCRUD.check_department_name_unique(kwargs['department_name'])
|
||||
@@ -195,16 +202,16 @@ class DepartmentCRUD(object):
|
||||
def check_department_parent_id_allow(d_id, department_parent_id):
|
||||
if department_parent_id == 0:
|
||||
return
|
||||
# 检查 department_parent_id 是否在许可范围内
|
||||
allow_p_d_id_list = DepartmentCRUD.get_allow_parent_d_id_by(d_id)
|
||||
target = list(
|
||||
filter(lambda d: d['department_id'] == department_parent_id, allow_p_d_id_list))
|
||||
if len(target) == 0:
|
||||
try:
|
||||
dep = Department.get_by(
|
||||
d = Department.get_by(
|
||||
first=True, to_dict=False, department_id=department_parent_id)
|
||||
name = dep.department_name if dep else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
name = d.department_name if d else ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
name = ErrFormat.department_id_not_found.format(department_parent_id)
|
||||
abort(400, ErrFormat.cannot_to_be_parent_department.format(name))
|
||||
|
||||
@@ -255,7 +262,7 @@ class DepartmentCRUD(object):
|
||||
return abort(400, ErrFormat.acl_update_role_failed.format(str(e)))
|
||||
|
||||
try:
|
||||
return existed.update(**kwargs)
|
||||
existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -268,12 +275,15 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
RoleCRUD.delete_role(existed.acl_rid)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
return existed.soft_delete()
|
||||
|
||||
@staticmethod
|
||||
def get_allow_parent_d_id_by(department_id):
|
||||
"""
|
||||
获取可以成为 department_id 的 department_parent_id 的 list
|
||||
"""
|
||||
tree_list = DepartmentCRUD.get_department_tree_list()
|
||||
|
||||
allow_d_id_list = []
|
||||
@@ -283,7 +293,7 @@ class DepartmentCRUD(object):
|
||||
try:
|
||||
tree.remove_subtree(department_id)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
[allow_d_id_list.append({'department_id': int(n.identifier), 'department_name': n.tag}) for n in
|
||||
tree.all_nodes()]
|
||||
@@ -311,58 +321,58 @@ class DepartmentCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_department_tree_list():
|
||||
all_deps = get_all_department_list()
|
||||
if len(all_deps) == 0:
|
||||
df = get_department_df()
|
||||
if df.empty:
|
||||
return []
|
||||
|
||||
top_deps = list(filter(lambda d: d['department_parent_id'] == -1, all_deps))
|
||||
if len(top_deps) == 0:
|
||||
# 一级部门
|
||||
top_df = df[df['department_parent_id'].eq(-1)]
|
||||
if top_df.empty:
|
||||
return []
|
||||
|
||||
tree_list = []
|
||||
|
||||
for top_d in top_deps:
|
||||
top_d['department_name'] = ErrFormat.company_wide
|
||||
for index in top_df.index:
|
||||
tree = Tree()
|
||||
identifier_root = top_d['department_id']
|
||||
identifier_root = top_df.at[index, 'department_id']
|
||||
tree.create_node(
|
||||
top_d['department_name'],
|
||||
top_df.at[index, 'department_name'],
|
||||
identifier_root
|
||||
)
|
||||
sub_ds = list(filter(lambda d: d['department_parent_id'] == identifier_root, all_deps))
|
||||
if len(sub_ds) == 0:
|
||||
|
||||
# 检查 department_id 是否作为其他部门的 parent
|
||||
sub_df = df[
|
||||
df['department_parent_id'].eq(identifier_root)
|
||||
]
|
||||
if sub_df.empty:
|
||||
tree_list.append(tree)
|
||||
continue
|
||||
|
||||
DepartmentCRUD.parse_sub_department_node(
|
||||
sub_ds, all_deps, tree, identifier_root)
|
||||
sub_df, df, tree, identifier_root)
|
||||
|
||||
tree_list.append(tree)
|
||||
|
||||
return tree_list
|
||||
|
||||
@staticmethod
|
||||
def parse_sub_department_node(sub_ds, all_ds, tree, parent_id):
|
||||
for d in sub_ds:
|
||||
def parse_sub_department_node(df, all_df, tree, parent_id):
|
||||
for s_index in df.index:
|
||||
tree.create_node(
|
||||
d['department_name'],
|
||||
d['department_id'],
|
||||
df.at[s_index, 'department_name'],
|
||||
df.at[s_index, 'department_id'],
|
||||
parent=parent_id
|
||||
)
|
||||
|
||||
next_sub_ds = list(filter(lambda item_d: item_d['department_parent_id'] == d['department_id'], all_ds))
|
||||
if len(next_sub_ds) == 0:
|
||||
sub_df = all_df[
|
||||
all_df['department_parent_id'].eq(
|
||||
df.at[s_index, 'department_id'])
|
||||
]
|
||||
if sub_df.empty:
|
||||
continue
|
||||
|
||||
DepartmentCRUD.parse_sub_department_node(
|
||||
next_sub_ds, all_ds, tree, d['department_id'])
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_query(query, to_dict=True):
|
||||
results = query.all()
|
||||
if not results:
|
||||
return []
|
||||
return results if not to_dict else [r.to_dict() for r in results]
|
||||
sub_df, all_df, tree, df.at[s_index, 'department_id'])
|
||||
|
||||
@staticmethod
|
||||
def get_departments_and_ids(department_parent_id, block):
|
||||
@@ -370,33 +380,44 @@ class DepartmentCRUD(object):
|
||||
Department.department_parent_id == department_parent_id,
|
||||
Department.deleted == 0,
|
||||
).order_by(Department.sort_value.asc())
|
||||
all_departments = DepartmentCRUD.get_department_by_query(query)
|
||||
if len(all_departments) == 0:
|
||||
df = get_df_from_read_sql(query)
|
||||
if df.empty:
|
||||
return [], []
|
||||
|
||||
tree_list = DepartmentCRUD.get_department_tree_list()
|
||||
all_employee_list = get_all_employee_list(block)
|
||||
employee_df = get_all_employee_df(block)
|
||||
|
||||
department_id_list = [d['department_id'] for d in all_departments]
|
||||
department_id_list = list(df['department_id'].values)
|
||||
query = Department.query.filter(
|
||||
Department.department_parent_id.in_(department_id_list),
|
||||
Department.deleted == 0,
|
||||
).order_by(Department.sort_value.asc()).group_by(Department.department_id)
|
||||
sub_deps = DepartmentCRUD.get_department_by_query(query)
|
||||
sub_df = get_df_from_read_sql(query)
|
||||
if sub_df.empty:
|
||||
df['has_sub'] = 0
|
||||
|
||||
sub_map = {d['department_parent_id']: 1 for d in sub_deps}
|
||||
def handle_row_employee_count(row):
|
||||
return len(employee_df[employee_df['department_id'] == row['department_id']])
|
||||
|
||||
for d in all_departments:
|
||||
d['has_sub'] = sub_map.get(d['department_id'], 0)
|
||||
df['employee_count'] = df.apply(
|
||||
lambda row: handle_row_employee_count(row), axis=1)
|
||||
|
||||
d_ids = DepartmentCRUD.get_department_id_list_by_root(d['department_id'], tree_list)
|
||||
else:
|
||||
sub_map = {d['department_parent_id']: 1 for d in sub_df.to_dict('records')}
|
||||
|
||||
d['employee_count'] = len(list(filter(lambda e: e['department_id'] in d_ids, all_employee_list)))
|
||||
def handle_row(row):
|
||||
d_ids = DepartmentCRUD.get_department_id_list_by_root(
|
||||
row['department_id'], tree_list)
|
||||
row['employee_count'] = len(
|
||||
employee_df[employee_df['department_id'].isin(d_ids)])
|
||||
|
||||
if int(department_parent_id) == -1:
|
||||
d['department_name'] = ErrFormat.company_wide
|
||||
row['has_sub'] = sub_map.get(row['department_id'], 0)
|
||||
|
||||
return all_departments, department_id_list
|
||||
return row
|
||||
|
||||
df = df.apply(lambda row: handle_row(row), axis=1)
|
||||
|
||||
return df.to_dict('records'), department_id_list
|
||||
|
||||
@staticmethod
|
||||
def get_department_id_list_by_root(root_department_id, tree_list=None):
|
||||
@@ -409,151 +430,6 @@ class DepartmentCRUD(object):
|
||||
[id_list.append(int(n.identifier))
|
||||
for n in tmp_tree.all_nodes()]
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
pass
|
||||
|
||||
return id_list
|
||||
|
||||
|
||||
class EditDepartmentInACL(object):
|
||||
|
||||
@staticmethod
|
||||
def add_department_to_acl(department_id, op_uid):
|
||||
db_department = DepartmentCRUD.get_department_by_id(department_id, to_dict=False)
|
||||
if not db_department:
|
||||
return
|
||||
|
||||
from api.models.acl import Role
|
||||
role = Role.get_by(first=True, name=db_department.department_name, app_id=None)
|
||||
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
if role is None:
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'name': db_department.department_name,
|
||||
}
|
||||
role = acl.create_role(payload)
|
||||
|
||||
acl_rid = role.get('id') if role else 0
|
||||
|
||||
db_department.update(
|
||||
acl_rid=acl_rid
|
||||
)
|
||||
info = f"add_department_to_acl, acl_rid: {acl_rid}"
|
||||
current_app.logger.info(info)
|
||||
return info
|
||||
|
||||
@staticmethod
|
||||
def delete_department_from_acl(department_rids, op_uid):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
|
||||
result = []
|
||||
|
||||
for rid in department_rids:
|
||||
try:
|
||||
acl.delete_role(rid)
|
||||
except Exception as e:
|
||||
result.append(f"delete_department_in_acl, rid: {rid}, error: {e}")
|
||||
continue
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def edit_department_name_in_acl(d_rid: int, d_name: str, op_uid: int):
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
payload = {
|
||||
'name': d_name
|
||||
}
|
||||
try:
|
||||
acl.edit_role(d_rid, payload)
|
||||
except Exception as e:
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, error: {e}"
|
||||
|
||||
return f"edit_department_name_in_acl, rid: {d_rid}, success"
|
||||
|
||||
@classmethod
|
||||
def remove_from_old_department_role(cls, e_list, acl):
|
||||
result = []
|
||||
for employee in e_list:
|
||||
employee_acl_rid = employee.get('e_acl_rid')
|
||||
if employee_acl_rid == 0:
|
||||
result.append(f"employee_acl_rid == 0")
|
||||
continue
|
||||
cls.remove_single_employee_from_old_department(acl, employee, result)
|
||||
|
||||
@staticmethod
|
||||
def remove_single_employee_from_old_department(acl, employee, result):
|
||||
from api.models.acl import Role
|
||||
old_department = DepartmentCRUD.get_department_by_id(employee.get('department_id'), False)
|
||||
if not old_department:
|
||||
return False
|
||||
|
||||
old_role = Role.get_by(first=True, name=old_department.department_name, app_id=None)
|
||||
old_d_rid_in_acl = old_role.get('id') if old_role else 0
|
||||
if old_d_rid_in_acl == 0:
|
||||
return False
|
||||
|
||||
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'parent_id': d_acl_rid,
|
||||
}
|
||||
try:
|
||||
acl.remove_user_from_role(employee.get('e_acl_rid'), payload)
|
||||
current_app.logger.info(f"remove {employee.get('e_acl_rid')} from {d_acl_rid}")
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"remove_user_from_role employee_acl_rid: {employee.get('e_acl_rid')}, parent_id: {d_acl_rid}, err: {e}")
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def add_employee_to_new_department(acl, employee_acl_rid, new_department_acl_rid, result):
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'child_ids': [employee_acl_rid],
|
||||
}
|
||||
try:
|
||||
acl.add_user_to_role(new_department_acl_rid, payload)
|
||||
current_app.logger.info(f"add {employee_acl_rid} to {new_department_acl_rid}")
|
||||
except Exception as e:
|
||||
result.append(
|
||||
f"add_user_to_role employee_acl_rid: {employee_acl_rid}, parent_id: {new_department_acl_rid}, \
|
||||
err: {e}")
|
||||
|
||||
@classmethod
|
||||
def edit_employee_department_in_acl(cls, e_list: list, new_d_id: int, op_uid: int):
|
||||
result = []
|
||||
new_department = DepartmentCRUD.get_department_by_id(new_d_id, False)
|
||||
if not new_department:
|
||||
result.append(f"{new_d_id} new_department is None")
|
||||
return result
|
||||
|
||||
from api.models.acl import Role
|
||||
new_role = Role.get_by(first=True, name=new_department.department_name, app_id=None)
|
||||
new_d_rid_in_acl = new_role.get('id') if new_role else 0
|
||||
acl = ACLManager('acl', str(op_uid))
|
||||
|
||||
if new_d_rid_in_acl == 0:
|
||||
# only remove from old department role
|
||||
cls.remove_from_old_department_role(e_list, acl)
|
||||
return
|
||||
|
||||
if new_d_rid_in_acl != new_department.acl_rid:
|
||||
new_department.update(
|
||||
acl_rid=new_d_rid_in_acl
|
||||
)
|
||||
new_department_acl_rid = new_department.acl_rid if new_d_rid_in_acl == new_department.acl_rid else \
|
||||
new_d_rid_in_acl
|
||||
|
||||
for employee in e_list:
|
||||
employee_acl_rid = employee.get('e_acl_rid')
|
||||
if employee_acl_rid == 0:
|
||||
result.append(f"employee_acl_rid == 0")
|
||||
continue
|
||||
|
||||
cls.remove_single_employee_from_old_department(acl, employee, result)
|
||||
|
||||
# 在新部门中添加员工
|
||||
cls.add_employee_to_new_department(acl, employee_acl_rid, new_department_acl_rid, result)
|
||||
|
||||
return result
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import copy
|
||||
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
import pandas as pd
|
||||
from flask import abort
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import or_, literal_column, func, not_, and_
|
||||
@@ -15,25 +15,11 @@ from wtforms import validators
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.common_setting.const import OperatorType
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.common_setting.const import COMMON_SETTING_QUEUE, OperatorType
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.lib.common_setting.utils import get_df_from_read_sql
|
||||
from api.models.common_setting import Employee, Department
|
||||
|
||||
from api.tasks.common_setting import refresh_employee_acl_info, edit_employee_department_in_acl
|
||||
|
||||
acl_user_columns = [
|
||||
'email',
|
||||
'mobile',
|
||||
'nickname',
|
||||
'username',
|
||||
'password',
|
||||
'block',
|
||||
'avatar',
|
||||
]
|
||||
employee_pop_columns = ['password']
|
||||
can_not_edit_columns = ['email']
|
||||
|
||||
|
||||
def edit_acl_user(uid, **kwargs):
|
||||
user_data = {column: kwargs.get(
|
||||
@@ -84,6 +70,9 @@ class EmployeeCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_employee_by_uid_with_create(_uid):
|
||||
"""
|
||||
根据 uid 获取员工信息,不存在则创建
|
||||
"""
|
||||
try:
|
||||
return EmployeeCRUD.get_employee_by_uid(_uid).to_dict()
|
||||
except Exception as e:
|
||||
@@ -113,6 +102,7 @@ class EmployeeCRUD(object):
|
||||
acl_uid=user_info['uid'],
|
||||
)
|
||||
return existed.to_dict()
|
||||
# 创建员工
|
||||
if not user_info.get('nickname', None):
|
||||
user_info['nickname'] = user_info['name']
|
||||
|
||||
@@ -124,25 +114,10 @@ class EmployeeCRUD(object):
|
||||
employee = CreateEmployee().create_single(**data)
|
||||
return employee.to_dict()
|
||||
|
||||
@staticmethod
|
||||
def add_employee_from_acl_created(**kwargs):
|
||||
try:
|
||||
kwargs['acl_uid'] = kwargs.pop('uid')
|
||||
kwargs['acl_rid'] = kwargs.pop('rid')
|
||||
kwargs['department_id'] = 0
|
||||
|
||||
Employee.create(
|
||||
**kwargs
|
||||
)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def add(**kwargs):
|
||||
try:
|
||||
res = CreateEmployee().create_single(**kwargs)
|
||||
refresh_employee_acl_info.apply_async(args=(res.employee_id,), queue=ACL_QUEUE)
|
||||
return res
|
||||
return CreateEmployee().create_single(**kwargs)
|
||||
except Exception as e:
|
||||
abort(400, str(e))
|
||||
|
||||
@@ -169,9 +144,13 @@ class EmployeeCRUD(object):
|
||||
existed.update(**kwargs)
|
||||
|
||||
if len(e_list) > 0:
|
||||
from api.tasks.common_setting import edit_employee_department_in_acl
|
||||
# fixme: comment next line
|
||||
# edit_employee_department_in_acl(e_list, new_department_id, current_user.uid)
|
||||
|
||||
edit_employee_department_in_acl.apply_async(
|
||||
args=(e_list, new_department_id, current_user.uid),
|
||||
queue=ACL_QUEUE
|
||||
queue=COMMON_SETTING_QUEUE
|
||||
)
|
||||
|
||||
return existed
|
||||
@@ -182,7 +161,7 @@ class EmployeeCRUD(object):
|
||||
def edit_employee_by_uid(_uid, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
edit_acl_user(_uid, **kwargs)
|
||||
user = edit_acl_user(_uid, **kwargs)
|
||||
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
@@ -194,9 +173,9 @@ class EmployeeCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def change_password_by_uid(_uid, password):
|
||||
EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
try:
|
||||
edit_acl_user(_uid, password=password)
|
||||
user = edit_acl_user(_uid, password=password)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@@ -230,6 +209,173 @@ class EmployeeCRUD(object):
|
||||
*criterion
|
||||
).count()
|
||||
|
||||
@staticmethod
|
||||
def import_employee(employee_list):
|
||||
return CreateEmployee().batch_create(employee_list)
|
||||
|
||||
@staticmethod
|
||||
def get_export_employee_df(block_status):
|
||||
criterion = [
|
||||
Employee.deleted == 0
|
||||
]
|
||||
if block_status >= 0:
|
||||
criterion.append(
|
||||
Employee.block == block_status
|
||||
)
|
||||
|
||||
query = Employee.query.with_entities(
|
||||
Employee.employee_id,
|
||||
Employee.nickname,
|
||||
Employee.email,
|
||||
Employee.sex,
|
||||
Employee.mobile,
|
||||
Employee.position_name,
|
||||
Employee.last_login,
|
||||
Employee.department_id,
|
||||
Employee.direct_supervisor_id,
|
||||
).filter(*criterion)
|
||||
df = get_df_from_read_sql(query)
|
||||
if df.empty:
|
||||
return df
|
||||
|
||||
query = Department.query.filter(
|
||||
*criterion
|
||||
)
|
||||
department_df = get_df_from_read_sql(query)
|
||||
|
||||
def find_name(row):
|
||||
department_id = row['department_id']
|
||||
_df = department_df[department_df['department_id']
|
||||
== department_id]
|
||||
row['department_name'] = '' if _df.empty else _df.iloc[0]['department_name']
|
||||
|
||||
direct_supervisor_id = row['direct_supervisor_id']
|
||||
_df = df[df['employee_id'] == direct_supervisor_id]
|
||||
row['nickname_direct_supervisor'] = '' if _df.empty else _df.iloc[0]['nickname']
|
||||
|
||||
if isinstance(row['last_login'], pd.Timestamp):
|
||||
try:
|
||||
row['last_login'] = str(row['last_login'])
|
||||
except:
|
||||
row['last_login'] = ''
|
||||
else:
|
||||
row['last_login'] = ''
|
||||
|
||||
return row
|
||||
|
||||
df = df.apply(find_name, axis=1)
|
||||
df.drop(['department_id', 'direct_supervisor_id',
|
||||
'employee_id'], axis=1, inplace=True)
|
||||
return df
|
||||
|
||||
@staticmethod
|
||||
def batch_employee(column_name, column_value, employee_id_list):
|
||||
if not column_value:
|
||||
abort(400, ErrFormat.value_is_required)
|
||||
if column_name in ['password', 'block']:
|
||||
return EmployeeCRUD.batch_edit_password_or_block_column(column_name, employee_id_list, column_value, True)
|
||||
|
||||
elif column_name in ['department_id']:
|
||||
return EmployeeCRUD.batch_edit_employee_department(employee_id_list, column_value)
|
||||
|
||||
elif column_name in [
|
||||
'direct_supervisor_id', 'position_name'
|
||||
]:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, False)
|
||||
|
||||
else:
|
||||
abort(400, ErrFormat.column_name_not_support)
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_employee_department(employee_id_list, column_value):
|
||||
err_list = []
|
||||
employee_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
employee = dict(
|
||||
e_acl_rid=existed.acl_rid,
|
||||
department_id=existed.department_id
|
||||
)
|
||||
employee_list.append(employee)
|
||||
existed.update(department_id=column_value)
|
||||
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
from api.tasks.common_setting import edit_employee_department_in_acl
|
||||
edit_employee_department_in_acl.apply_async(
|
||||
args=(employee_list, column_value, current_user.uid),
|
||||
queue=COMMON_SETTING_QUEUE
|
||||
)
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_password_or_block_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
if column_name == 'block':
|
||||
err_list = []
|
||||
success_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
employee = EmployeeCRUD.edit_employee_block_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
success_list.append(employee)
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
return err_list
|
||||
else:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, is_acl)
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
err_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
EmployeeCRUD.edit_employee_single_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_single_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
|
||||
if is_acl:
|
||||
return edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
try:
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
kwargs.pop(column)
|
||||
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_block_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
value = get_block_value(kwargs.get('block'))
|
||||
if value is True:
|
||||
# 判断该用户是否为 部门负责人,或者员工的直接上级
|
||||
check_department_director_id_or_direct_supervisor_id(_id)
|
||||
|
||||
if is_acl:
|
||||
kwargs['block'] = value
|
||||
edit_acl_user(existed.acl_uid, **kwargs)
|
||||
data = existed.to_dict()
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def check_email_unique(email, _id=0):
|
||||
criterion = [
|
||||
@@ -249,7 +395,7 @@ class EmployeeCRUD(object):
|
||||
raise Exception(err)
|
||||
|
||||
@staticmethod
|
||||
def get_employee_list_by_body(department_id, block_status, search='', order='', conditions=None, page=1,
|
||||
def get_employee_list_by_body(department_id, block_status, search='', order='', conditions=[], page=1,
|
||||
page_size=10):
|
||||
criterion = [
|
||||
Employee.deleted == 0
|
||||
@@ -295,9 +441,7 @@ class EmployeeCRUD(object):
|
||||
employees = []
|
||||
for r in pagination.items:
|
||||
d = r.Employee.to_dict()
|
||||
d['department_name'] = r.Department.department_name if r.Department else ''
|
||||
if r.Employee.department_id == 0:
|
||||
d['department_name'] = ErrFormat.company_wide
|
||||
d['department_name'] = r.Department.department_name
|
||||
employees.append(d)
|
||||
|
||||
return {
|
||||
@@ -317,7 +461,7 @@ class EmployeeCRUD(object):
|
||||
@staticmethod
|
||||
def get_expr_by_condition(column, operator, value, relation):
|
||||
"""
|
||||
get expr: (and_list, or_list)
|
||||
根据conditions返回expr: (and_list, or_list)
|
||||
"""
|
||||
attr = EmployeeCRUD.get_attr_by_column(column)
|
||||
# 根据operator生成条件表达式
|
||||
@@ -337,7 +481,7 @@ class EmployeeCRUD(object):
|
||||
if value:
|
||||
abort(400, ErrFormat.query_column_none_keep_value_empty.format(column))
|
||||
expr = [attr.is_(None)]
|
||||
if column not in ["last_login"]:
|
||||
if column not in ["entry_date", "leave_date", "dfc_entry_date", "last_login"]:
|
||||
expr += [attr == '']
|
||||
expr = [or_(*expr)]
|
||||
elif operator == OperatorType.IS_NOT_EMPTY:
|
||||
@@ -351,6 +495,7 @@ class EmployeeCRUD(object):
|
||||
else:
|
||||
abort(400, ErrFormat.not_support_operator.format(operator))
|
||||
|
||||
# 根据relation生成复合条件
|
||||
if relation == "&":
|
||||
return expr, []
|
||||
elif relation == "|":
|
||||
@@ -360,16 +505,15 @@ class EmployeeCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def check_condition(column, operator, value, relation):
|
||||
# 对于condition中column为空的,报错
|
||||
if column is None or operator is None or relation is None:
|
||||
return abort(400, ErrFormat.conditions_field_missing)
|
||||
|
||||
if value and column == "last_login":
|
||||
try:
|
||||
return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
value = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
except Exception as e:
|
||||
err = f"{ErrFormat.datetime_format_error.format(column)}: {str(e)}"
|
||||
abort(400, err)
|
||||
return value
|
||||
abort(400, ErrFormat.datetime_format_error.format(column))
|
||||
|
||||
@staticmethod
|
||||
def get_attr_by_column(column):
|
||||
@@ -390,7 +534,7 @@ class EmployeeCRUD(object):
|
||||
relation = condition.get("relation", None)
|
||||
value = condition.get("value", None)
|
||||
|
||||
value = EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
EmployeeCRUD.check_condition(column, operator, value, relation)
|
||||
a, o = EmployeeCRUD.get_expr_by_condition(
|
||||
column, operator, value, relation)
|
||||
and_list += a
|
||||
@@ -443,7 +587,7 @@ class EmployeeCRUD(object):
|
||||
employees = []
|
||||
for r in pagination.items:
|
||||
d = r.Employee.to_dict()
|
||||
d['department_name'] = r.Department.department_name if r.Department else ''
|
||||
d['department_name'] = r.Department.department_name
|
||||
employees.append(d)
|
||||
|
||||
return {
|
||||
@@ -496,224 +640,6 @@ class EmployeeCRUD(object):
|
||||
|
||||
return [r.to_dict() for r in results]
|
||||
|
||||
@staticmethod
|
||||
def remove_bind_notice_by_uid(_platform, _uid):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
employee_data = existed.to_dict()
|
||||
|
||||
notice_info = employee_data.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
|
||||
notice_info[_platform] = ''
|
||||
|
||||
existed.update(
|
||||
notice_info=notice_info
|
||||
)
|
||||
return ErrFormat.notice_remove_bind_success
|
||||
|
||||
@staticmethod
|
||||
def bind_notice_by_uid(_platform, _uid):
|
||||
existed = EmployeeCRUD.get_employee_by_uid(_uid)
|
||||
mobile = existed.mobile
|
||||
if not mobile or len(mobile) == 0:
|
||||
abort(400, ErrFormat.notice_bind_err_with_empty_mobile)
|
||||
|
||||
from api.lib.common_setting.notice_config import NoticeConfigCRUD
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
abort(400, ErrFormat.notice_please_config_messenger_first)
|
||||
|
||||
url = f"{messenger}/v1/uid/getbyphone"
|
||||
try:
|
||||
payload = dict(
|
||||
phone=mobile,
|
||||
sender=_platform
|
||||
)
|
||||
res = requests.post(url, json=payload)
|
||||
result = res.json()
|
||||
if res.status_code != 200:
|
||||
raise Exception(result.get('msg', ''))
|
||||
target_id = result.get('uid', '')
|
||||
|
||||
employee_data = existed.to_dict()
|
||||
|
||||
notice_info = employee_data.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
|
||||
notice_info[_platform] = '' if not target_id else target_id
|
||||
|
||||
existed.update(
|
||||
notice_info=notice_info
|
||||
)
|
||||
return ErrFormat.notice_bind_success
|
||||
|
||||
except Exception as e:
|
||||
return abort(400, ErrFormat.notice_bind_failed.format(str(e)))
|
||||
|
||||
@staticmethod
|
||||
def get_employee_notice_by_ids(employee_ids):
|
||||
criterion = [
|
||||
Employee.employee_id.in_(employee_ids),
|
||||
Employee.deleted == 0,
|
||||
]
|
||||
direct_columns = ['email', 'mobile']
|
||||
employees = Employee.query.filter(
|
||||
*criterion
|
||||
).all()
|
||||
results = []
|
||||
for employee in employees:
|
||||
d = employee.to_dict()
|
||||
tmp = dict(
|
||||
employee_id=employee.employee_id,
|
||||
)
|
||||
for column in direct_columns:
|
||||
tmp[column] = d.get(column, '')
|
||||
notice_info = d.get('notice_info', {})
|
||||
notice_info = copy.deepcopy(notice_info) if notice_info else {}
|
||||
tmp.update(**notice_info)
|
||||
results.append(tmp)
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def import_employee(employee_list):
|
||||
res = CreateEmployee().batch_create(employee_list)
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_employee_department(employee_id_list, column_value):
|
||||
err_list = []
|
||||
employee_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
employee = dict(
|
||||
e_acl_rid=existed.acl_rid,
|
||||
department_id=existed.department_id
|
||||
)
|
||||
employee_list.append(employee)
|
||||
existed.update(department_id=column_value)
|
||||
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
from api.lib.common_setting.department import EditDepartmentInACL
|
||||
EditDepartmentInACL.edit_employee_department_in_acl(
|
||||
employee_list, column_value, current_user.uid
|
||||
)
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_password_or_block_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
if column_name == 'block':
|
||||
err_list = []
|
||||
success_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
employee = EmployeeCRUD.edit_employee_block_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
success_list.append(employee)
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
return err_list
|
||||
else:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, is_acl)
|
||||
|
||||
@staticmethod
|
||||
def batch_edit_column(column_name, employee_id_list, column_value, is_acl=False):
|
||||
err_list = []
|
||||
for _id in employee_id_list:
|
||||
try:
|
||||
EmployeeCRUD.edit_employee_single_column(
|
||||
_id, is_acl, **{column_name: column_value})
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'employee_id': _id,
|
||||
'err': str(e),
|
||||
})
|
||||
|
||||
return err_list
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_single_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
if 'direct_supervisor_id' in kwargs.keys():
|
||||
if kwargs['direct_supervisor_id'] == existed.direct_supervisor_id:
|
||||
raise Exception(ErrFormat.direct_supervisor_is_not_self)
|
||||
|
||||
if is_acl:
|
||||
return edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
try:
|
||||
for column in employee_pop_columns:
|
||||
if kwargs.get(column):
|
||||
kwargs.pop(column)
|
||||
|
||||
return existed.update(**kwargs)
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def edit_employee_block_column(_id, is_acl=False, **kwargs):
|
||||
existed = EmployeeCRUD.get_employee_by_id(_id)
|
||||
value = get_block_value(kwargs.get('block'))
|
||||
if value is True:
|
||||
check_department_director_id_or_direct_supervisor_id(_id)
|
||||
value = 1
|
||||
else:
|
||||
value = 0
|
||||
|
||||
if is_acl:
|
||||
kwargs['block'] = value
|
||||
edit_acl_user(existed.acl_uid, **kwargs)
|
||||
|
||||
existed.update(block=value)
|
||||
data = existed.to_dict()
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def batch_employee(column_name, column_value, employee_id_list):
|
||||
if column_value is None:
|
||||
abort(400, ErrFormat.value_is_required)
|
||||
if column_name in ['password', 'block']:
|
||||
return EmployeeCRUD.batch_edit_password_or_block_column(column_name, employee_id_list, column_value, True)
|
||||
|
||||
elif column_name in ['department_id']:
|
||||
return EmployeeCRUD.batch_edit_employee_department(employee_id_list, column_value)
|
||||
|
||||
elif column_name in [
|
||||
'direct_supervisor_id', 'position_name'
|
||||
]:
|
||||
return EmployeeCRUD.batch_edit_column(column_name, employee_id_list, column_value, False)
|
||||
|
||||
else:
|
||||
abort(400, ErrFormat.column_name_not_support)
|
||||
|
||||
@staticmethod
|
||||
def update_last_login_by_uid(uid, last_login=None):
|
||||
employee = Employee.get_by(acl_uid=uid, first=True, to_dict=False)
|
||||
if not employee:
|
||||
return
|
||||
if last_login:
|
||||
try:
|
||||
last_login = datetime.strptime(last_login, '%Y-%m-%d %H:%M:%S')
|
||||
except Exception as e:
|
||||
last_login = datetime.now()
|
||||
else:
|
||||
last_login = datetime.now()
|
||||
|
||||
try:
|
||||
employee.update(
|
||||
last_login=last_login
|
||||
)
|
||||
return last_login
|
||||
except Exception as e:
|
||||
return
|
||||
|
||||
|
||||
def get_user_map(key='uid', acl=None):
|
||||
"""
|
||||
@@ -728,6 +654,19 @@ def get_user_map(key='uid', acl=None):
|
||||
return data
|
||||
|
||||
|
||||
acl_user_columns = [
|
||||
'email',
|
||||
'mobile',
|
||||
'nickname',
|
||||
'username',
|
||||
'password',
|
||||
'block',
|
||||
'avatar',
|
||||
]
|
||||
employee_pop_columns = ['password']
|
||||
can_not_edit_columns = ['email']
|
||||
|
||||
|
||||
def format_params(params):
|
||||
for k in ['_key', '_secret']:
|
||||
params.pop(k, None)
|
||||
@@ -737,24 +676,20 @@ def format_params(params):
|
||||
class CreateEmployee(object):
|
||||
def __init__(self):
|
||||
self.acl = ACLManager()
|
||||
self.all_acl_users = self.acl.get_all_users()
|
||||
self.useremail_map = {}
|
||||
|
||||
def check_acl_user(self, user_data):
|
||||
target_email = list(filter(lambda x: x['email'] == user_data['email'], self.all_acl_users))
|
||||
if target_email:
|
||||
return target_email[0]
|
||||
|
||||
target_username = list(filter(lambda x: x['username'] == user_data['username'], self.all_acl_users))
|
||||
if target_username:
|
||||
return target_username[0]
|
||||
def check_acl_user(self, email):
|
||||
user_info = self.useremail_map.get(email, None)
|
||||
if user_info:
|
||||
return user_info
|
||||
return None
|
||||
|
||||
def add_acl_user(self, **kwargs):
|
||||
user_data = {column: kwargs.get(
|
||||
column, '') for column in acl_user_columns if kwargs.get(column, '')}
|
||||
try:
|
||||
existed = self.check_acl_user(user_data)
|
||||
existed = self.check_acl_user(user_data['email'])
|
||||
if not existed:
|
||||
user_data['add_from'] = 'common'
|
||||
return self.acl.create_user(user_data)
|
||||
return existed
|
||||
except Exception as e:
|
||||
@@ -762,6 +697,8 @@ class CreateEmployee(object):
|
||||
|
||||
def create_single(self, **kwargs):
|
||||
EmployeeCRUD.check_email_unique(kwargs['email'])
|
||||
self.useremail_map = self.useremail_map if self.useremail_map else get_user_map(
|
||||
'email', self.acl)
|
||||
user = self.add_acl_user(**kwargs)
|
||||
kwargs['acl_uid'] = user['uid']
|
||||
kwargs['last_login'] = user['last_login']
|
||||
@@ -774,6 +711,8 @@ class CreateEmployee(object):
|
||||
)
|
||||
|
||||
def create_single_with_import(self, **kwargs):
|
||||
self.useremail_map = self.useremail_map if self.useremail_map else get_user_map(
|
||||
'email', self.acl)
|
||||
user = self.add_acl_user(**kwargs)
|
||||
kwargs['acl_uid'] = user['uid']
|
||||
kwargs['last_login'] = user['last_login']
|
||||
@@ -787,14 +726,11 @@ class CreateEmployee(object):
|
||||
if existed:
|
||||
return existed
|
||||
|
||||
res = Employee.create(
|
||||
return Employee.create(
|
||||
**kwargs
|
||||
)
|
||||
refresh_employee_acl_info.apply_async(args=(res.employee_id,), queue=ACL_QUEUE)
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def get_department_by_name(d_name):
|
||||
def get_department_by_name(self, d_name):
|
||||
return Department.get_by(first=True, department_name=d_name)
|
||||
|
||||
def get_end_department_id(self, department_name_list, department_name_map):
|
||||
@@ -819,6 +755,9 @@ class CreateEmployee(object):
|
||||
return end_d_id
|
||||
|
||||
def format_department_id(self, employee):
|
||||
"""
|
||||
部门名称转化为ID,不存在则创建
|
||||
"""
|
||||
department_name_map = {}
|
||||
try:
|
||||
department_name = employee.get('department_name', '')
|
||||
@@ -835,13 +774,16 @@ class CreateEmployee(object):
|
||||
|
||||
def batch_create(self, employee_list):
|
||||
err_list = []
|
||||
self.useremail_map = get_user_map('email', self.acl)
|
||||
|
||||
for employee in employee_list:
|
||||
try:
|
||||
# 获取username
|
||||
username = employee.get('username', None)
|
||||
if username is None:
|
||||
employee['username'] = employee['email']
|
||||
|
||||
# 校验通过后获取department_id
|
||||
employee = self.format_department_id(employee)
|
||||
err = employee.get('err', None)
|
||||
if err:
|
||||
@@ -853,7 +795,7 @@ class CreateEmployee(object):
|
||||
raise Exception(
|
||||
','.join(['{}: {}'.format(filed, ','.join(msg)) for filed, msg in form.errors.items()]))
|
||||
|
||||
self.create_single_with_import(**form.data)
|
||||
data = self.create_single_with_import(**form.data)
|
||||
except Exception as e:
|
||||
err_list.append({
|
||||
'email': employee.get('email', ''),
|
||||
@@ -867,12 +809,12 @@ class CreateEmployee(object):
|
||||
|
||||
class EmployeeAddForm(Form):
|
||||
username = StringField(validators=[
|
||||
validators.DataRequired(message=ErrFormat.username_is_required),
|
||||
validators.DataRequired(message="username不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
email = StringField(validators=[
|
||||
validators.DataRequired(message=ErrFormat.email_is_required),
|
||||
validators.Email(message=ErrFormat.email_format_error),
|
||||
validators.DataRequired(message="邮箱不能为空"),
|
||||
validators.Email(message="邮箱格式不正确"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
password = StringField(validators=[
|
||||
@@ -881,7 +823,7 @@ class EmployeeAddForm(Form):
|
||||
position_name = StringField(validators=[])
|
||||
|
||||
nickname = StringField(validators=[
|
||||
validators.DataRequired(message=ErrFormat.nickname_is_required),
|
||||
validators.DataRequired(message="用户名不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
sex = StringField(validators=[])
|
||||
@@ -892,81 +834,9 @@ class EmployeeAddForm(Form):
|
||||
|
||||
class EmployeeUpdateByUidForm(Form):
|
||||
nickname = StringField(validators=[
|
||||
validators.DataRequired(message=ErrFormat.nickname_is_required),
|
||||
validators.DataRequired(message="用户名不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
avatar = StringField(validators=[])
|
||||
sex = StringField(validators=[])
|
||||
mobile = StringField(validators=[])
|
||||
|
||||
|
||||
class GrantEmployeeACLPerm(object):
|
||||
"""
|
||||
Grant ACL Permission After Create New Employee
|
||||
"""
|
||||
|
||||
def __init__(self, acl=None):
|
||||
self.perms_by_create_resources_type = ['read', 'grant', 'delete', 'update']
|
||||
self.perms_by_common_grant = ['read']
|
||||
self.resource_name_list = ['公司信息', '公司架构', '通知设置']
|
||||
|
||||
self.acl = acl if acl else self.check_app('backend')
|
||||
self.resources_types = self.acl.get_all_resources_types()
|
||||
self.resources_type = self.get_resources_type()
|
||||
self.resource_list = self.acl.get_resource_by_type(None, None, self.resources_type['id'])
|
||||
|
||||
@staticmethod
|
||||
def check_app(app_name):
|
||||
acl = ACLManager(app_name)
|
||||
payload = dict(
|
||||
name=app_name,
|
||||
description=app_name
|
||||
)
|
||||
app = acl.validate_app()
|
||||
if not app:
|
||||
acl.create_app(payload)
|
||||
return acl
|
||||
|
||||
def get_resources_type(self):
|
||||
results = list(filter(lambda t: t['name'] == '操作权限', self.resources_types['groups']))
|
||||
if len(results) == 0:
|
||||
payload = dict(
|
||||
app_id=self.acl.app_name,
|
||||
name='操作权限',
|
||||
description='',
|
||||
perms=self.perms_by_create_resources_type
|
||||
)
|
||||
resource_type = self.acl.create_resources_type(payload)
|
||||
else:
|
||||
resource_type = results[0]
|
||||
resource_type_id = resource_type['id']
|
||||
existed_perms = self.resources_types.get('id2perms', {}).get(resource_type_id, [])
|
||||
existed_perms = [p['name'] for p in existed_perms]
|
||||
new_perms = []
|
||||
for perm in self.perms_by_create_resources_type:
|
||||
if perm not in existed_perms:
|
||||
new_perms.append(perm)
|
||||
if len(new_perms) > 0:
|
||||
resource_type['perms'] = existed_perms + new_perms
|
||||
self.acl.update_resources_type(resource_type_id, resource_type)
|
||||
|
||||
return resource_type
|
||||
|
||||
def grant(self, rid_list):
|
||||
[self.grant_by_rid(rid) for rid in rid_list if rid > 0]
|
||||
|
||||
def grant_by_rid(self, rid, is_admin=False):
|
||||
for name in self.resource_name_list:
|
||||
resource = list(filter(lambda r: r['name'] == name, self.resource_list))
|
||||
if len(resource) == 0:
|
||||
payload = dict(
|
||||
type_id=self.resources_type['id'],
|
||||
app_id=self.acl.app_name,
|
||||
name=name,
|
||||
)
|
||||
resource = self.acl.create_resource(payload)
|
||||
else:
|
||||
resource = resource[0]
|
||||
|
||||
perms = self.perms_by_create_resources_type if is_admin else self.perms_by_common_grant
|
||||
self.acl.grant_resource(rid, resource['id'], perms)
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
import requests
|
||||
|
||||
from api.lib.common_setting.const import BotNameMap
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import CompanyInfo, NoticeConfig
|
||||
from wtforms import Form
|
||||
from wtforms import StringField
|
||||
from wtforms import validators
|
||||
from flask import abort, current_app
|
||||
|
||||
|
||||
class NoticeConfigCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def add_notice_config(**kwargs):
|
||||
platform = kwargs.get('platform')
|
||||
NoticeConfigCRUD.check_platform(platform)
|
||||
info = kwargs.get('info', {})
|
||||
if 'name' not in info:
|
||||
info['name'] = platform
|
||||
kwargs['info'] = info
|
||||
try:
|
||||
NoticeConfigCRUD.update_messenger_config(**info)
|
||||
res = NoticeConfig.create(
|
||||
**kwargs
|
||||
)
|
||||
return res
|
||||
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def check_platform(platform):
|
||||
NoticeConfig.get_by(first=True, to_dict=False, platform=platform) and \
|
||||
abort(400, ErrFormat.notice_platform_existed.format(platform))
|
||||
|
||||
@staticmethod
|
||||
def edit_notice_config(_id, **kwargs):
|
||||
existed = NoticeConfigCRUD.get_notice_config_by_id(_id)
|
||||
try:
|
||||
info = kwargs.get('info', {})
|
||||
if 'name' not in info:
|
||||
info['name'] = existed.platform
|
||||
kwargs['info'] = info
|
||||
NoticeConfigCRUD.update_messenger_config(**info)
|
||||
|
||||
res = existed.update(**kwargs)
|
||||
return res
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def get_messenger_url():
|
||||
from api.lib.common_setting.company_info import CompanyInfoCache
|
||||
com_info = CompanyInfoCache.get()
|
||||
if not com_info:
|
||||
return
|
||||
messenger = com_info.get('messenger', '')
|
||||
if len(messenger) == 0:
|
||||
return
|
||||
if messenger[-1] == '/':
|
||||
messenger = messenger[:-1]
|
||||
return messenger
|
||||
|
||||
@staticmethod
|
||||
def update_messenger_config(**kwargs):
|
||||
try:
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
raise Exception(ErrFormat.notice_please_config_messenger_first)
|
||||
|
||||
url = f"{messenger}/v1/senders"
|
||||
name = kwargs.get('name')
|
||||
bot_list = kwargs.pop('bot', None)
|
||||
for k, v in kwargs.items():
|
||||
if isinstance(v, bool):
|
||||
kwargs[k] = 'true' if v else 'false'
|
||||
else:
|
||||
kwargs[k] = str(v)
|
||||
|
||||
payload = {name: [kwargs]}
|
||||
current_app.logger.info(f"update_messenger_config: {url}, {payload}")
|
||||
res = requests.put(url, json=payload, timeout=2)
|
||||
current_app.logger.info(f"update_messenger_config: {res.status_code}, {res.text}")
|
||||
|
||||
if not bot_list or len(bot_list) == 0:
|
||||
return
|
||||
bot_name = BotNameMap.get(name)
|
||||
payload = {bot_name: bot_list}
|
||||
current_app.logger.info(f"update_messenger_config: {url}, {payload}")
|
||||
bot_res = requests.put(url, json=payload, timeout=2)
|
||||
current_app.logger.info(f"update_messenger_config: {bot_res.status_code}, {bot_res.text}")
|
||||
|
||||
except Exception as e:
|
||||
return abort(400, str(e))
|
||||
|
||||
@staticmethod
|
||||
def get_notice_config_by_id(_id):
|
||||
return NoticeConfig.get_by(first=True, to_dict=False, id=_id) or \
|
||||
abort(400,
|
||||
ErrFormat.notice_not_existed.format(_id))
|
||||
|
||||
@staticmethod
|
||||
def get_all():
|
||||
return NoticeConfig.get_by(to_dict=True)
|
||||
|
||||
@staticmethod
|
||||
def test_send_email(receive_address, **kwargs):
|
||||
messenger = NoticeConfigCRUD.get_messenger_url()
|
||||
if not messenger or len(messenger) == 0:
|
||||
abort(400, ErrFormat.notice_please_config_messenger_first)
|
||||
url = f"{messenger}/v1/message"
|
||||
|
||||
recipient_email = receive_address
|
||||
|
||||
subject = 'Test Email'
|
||||
body = 'This is a test email'
|
||||
payload = {
|
||||
"sender": 'email',
|
||||
"msgtype": "text/plain",
|
||||
"title": subject,
|
||||
"content": body,
|
||||
"tos": [recipient_email],
|
||||
}
|
||||
current_app.logger.info(f"test_send_email: {url}, {payload}")
|
||||
response = requests.post(url, json=payload)
|
||||
if response.status_code != 200:
|
||||
abort(400, response.text)
|
||||
|
||||
return 1
|
||||
|
||||
@staticmethod
|
||||
def get_app_bot():
|
||||
result = []
|
||||
for notice_app in NoticeConfig.get_by(to_dict=False):
|
||||
if notice_app.platform in ['email']:
|
||||
continue
|
||||
info = notice_app.info
|
||||
name = info.get('name', '')
|
||||
if name not in BotNameMap:
|
||||
continue
|
||||
result.append(dict(
|
||||
name=info.get('name', ''),
|
||||
label=info.get('label', ''),
|
||||
bot=info.get('bot', []),
|
||||
))
|
||||
return result
|
||||
|
||||
|
||||
class NoticeConfigForm(Form):
|
||||
platform = StringField(validators=[
|
||||
validators.DataRequired(message="平台 不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
info = StringField(validators=[
|
||||
validators.DataRequired(message="信息 不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
|
||||
|
||||
class NoticeConfigUpdateForm(Form):
|
||||
info = StringField(validators=[
|
||||
validators.DataRequired(message="信息 不能为空"),
|
||||
validators.Length(max=255),
|
||||
])
|
||||
@@ -1,84 +1,51 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from flask_babel import lazy_gettext as _l
|
||||
|
||||
from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
class ErrFormat(CommonErrFormat):
|
||||
company_info_is_already_existed = _l("Company info already existed") # 公司信息已存在!无法创建
|
||||
company_info_is_already_existed = "公司信息已存在!无法创建"
|
||||
|
||||
no_file_part = _l("No file part") # 没有文件部分
|
||||
file_is_required = _l("File is required") # 文件是必须的
|
||||
file_not_found = _l("File not found") # 文件不存在
|
||||
file_type_not_allowed = _l("File type not allowed") # 文件类型不允许
|
||||
upload_failed = _l("Upload failed: {}") # 上传失败: {}
|
||||
no_file_part = "没有文件部分"
|
||||
file_is_required = "文件是必须的"
|
||||
|
||||
direct_supervisor_is_not_self = _l("Direct supervisor is not self") # 直属上级不能是自己
|
||||
parent_department_is_not_self = _l("Parent department is not self") # 上级部门不能是自己
|
||||
employee_list_is_empty = _l("Employee list is empty") # 员工列表为空
|
||||
direct_supervisor_is_not_self = "直属上级不能是自己"
|
||||
parent_department_is_not_self = "上级部门不能是自己"
|
||||
employee_list_is_empty = "员工列表为空"
|
||||
|
||||
column_name_not_support = _l("Column name not support") # 不支持的列名
|
||||
password_is_required = _l("Password is required") # 密码是必须的
|
||||
employee_acl_rid_is_zero = _l("Employee acl rid is zero") # 员工ACL角色ID不能为0
|
||||
column_name_not_support = "不支持的列名"
|
||||
password_is_required = "密码不能为空"
|
||||
employee_acl_rid_is_zero = "员工ACL角色ID不能为0"
|
||||
|
||||
generate_excel_failed = _l("Generate excel failed: {}") # 生成excel失败: {}
|
||||
rename_columns_failed = _l("Rename columns failed: {}") # 重命名字段失败: {}
|
||||
cannot_block_this_employee_is_other_direct_supervisor = _l(
|
||||
"Cannot block this employee is other direct supervisor") # 该员工是其他员工的直属上级, 不能禁用
|
||||
cannot_block_this_employee_is_department_manager = _l(
|
||||
"Cannot block this employee is department manager") # 该员工是部门负责人, 不能禁用
|
||||
employee_id_not_found = _l("Employee id [{}] not found") # 员工ID [{}] 不存在
|
||||
value_is_required = _l("Value is required") # 值是必须的
|
||||
email_already_exists = _l("Email already exists") # 邮箱已存在
|
||||
query_column_none_keep_value_empty = _l("Query {} none keep value empty") # 查询 {} 空值时请保持value为空"
|
||||
not_support_operator = _l("Not support operator: {}") # 不支持的操作符: {}
|
||||
not_support_relation = _l("Not support relation: {}") # 不支持的关系: {}
|
||||
conditions_field_missing = _l("Conditions field missing") # conditions内元素字段缺失,请检查!
|
||||
datetime_format_error = _l("Datetime format error: {}") # {} 格式错误,应该为:%Y-%m-%d %H:%M:%S
|
||||
department_level_relation_error = _l("Department level relation error") # 部门层级关系不正确
|
||||
delete_reserved_department_name = _l("Delete reserved department name") # 保留部门,无法删除!
|
||||
department_id_is_required = _l("Department id is required") # 部门ID是必须的
|
||||
department_list_is_required = _l("Department list is required") # 部门列表是必须的
|
||||
cannot_to_be_parent_department = _l("{} Cannot to be parent department") # 不能设置为上级部门
|
||||
department_id_not_found = _l("Department id [{}] not found") # 部门ID [{}] 不存在
|
||||
parent_department_id_must_more_than_zero = _l("Parent department id must more than zero") # 上级部门ID必须大于0
|
||||
department_name_already_exists = _l("Department name [{}] already exists") # 部门名称 [{}] 已存在
|
||||
new_department_is_none = _l("New department is none") # 新部门是空的
|
||||
generate_excel_failed = "生成excel失败: {}"
|
||||
rename_columns_failed = "字段转换为中文失败: {}"
|
||||
cannot_block_this_employee_is_other_direct_supervisor = "该员工是其他员工的直属上级, 不能禁用"
|
||||
cannot_block_this_employee_is_department_manager = "该员工是部门负责人, 不能禁用"
|
||||
employee_id_not_found = "员工ID [{}] 不存在"
|
||||
value_is_required = "值是必须的"
|
||||
email_already_exists = "邮箱 [{}] 已存在"
|
||||
query_column_none_keep_value_empty = "查询 {} 空值时请保持value为空"
|
||||
not_support_operator = "不支持的操作符: {}"
|
||||
not_support_relation = "不支持的关系: {}"
|
||||
conditions_field_missing = "conditions内元素字段缺失,请检查!"
|
||||
datetime_format_error = "{} 格式错误,应该为:%Y-%m-%d %H:%M:%S"
|
||||
department_level_relation_error = "部门层级关系不正确"
|
||||
delete_reserved_department_name = "保留部门,无法删除!"
|
||||
department_id_is_required = "部门ID是必须的"
|
||||
department_list_is_required = "部门列表是必须的"
|
||||
cannot_to_be_parent_department = "{} 不能设置为上级部门"
|
||||
department_id_not_found = "部门ID [{}] 不存在"
|
||||
parent_department_id_must_more_than_zero = "上级部门ID必须大于0"
|
||||
department_name_already_exists = "部门名称 [{}] 已存在"
|
||||
new_department_is_none = "新部门是空的"
|
||||
|
||||
acl_edit_user_failed = _l("ACL edit user failed: {}") # ACL 修改用户失败: {}
|
||||
acl_uid_not_found = _l("ACL uid not found: {}") # ACL 用户UID [{}] 不存在
|
||||
acl_add_user_failed = _l("ACL add user failed: {}") # ACL 添加用户失败: {}
|
||||
acl_add_role_failed = _l("ACL add role failed: {}") # ACL 添加角色失败: {}
|
||||
acl_update_role_failed = _l("ACL update role failed: {}") # ACL 更新角色失败: {}
|
||||
acl_get_all_users_failed = _l("ACL get all users failed: {}") # ACL 获取所有用户失败: {}
|
||||
acl_remove_user_from_role_failed = _l("ACL remove user from role failed: {}") # ACL 从角色中移除用户失败: {}
|
||||
acl_add_user_to_role_failed = _l("ACL add user to role failed: {}") # ACL 添加用户到角色失败: {}
|
||||
acl_import_user_failed = _l("ACL import user failed: {}") # ACL 导入用户失败: {}
|
||||
acl_edit_user_failed = "ACL 修改用户失败: {}"
|
||||
acl_uid_not_found = "ACL 用户UID [{}] 不存在"
|
||||
acl_add_user_failed = "ACL 添加用户失败: {}"
|
||||
acl_add_role_failed = "ACL 添加角色失败: {}"
|
||||
acl_update_role_failed = "ACL 更新角色失败: {}"
|
||||
acl_get_all_users_failed = "ACL 获取所有用户失败: {}"
|
||||
acl_remove_user_from_role_failed = "ACL 从角色中移除用户失败: {}"
|
||||
acl_add_user_to_role_failed = "ACL 添加用户到角色失败: {}"
|
||||
acl_import_user_failed = "ACL 导入用户[{}]失败: {}"
|
||||
|
||||
nickname_is_required = _l("Nickname is required") # 昵称不能为空
|
||||
username_is_required = _l("Username is required") # 用户名不能为空
|
||||
email_is_required = _l("Email is required") # 邮箱不能为空
|
||||
email_format_error = _l("Email format error") # 邮箱格式错误
|
||||
email_send_timeout = _l("Email send timeout") # 邮件发送超时
|
||||
|
||||
common_data_not_found = _l("Common data not found {} ") # ID {} 找不到记录
|
||||
common_data_already_existed = _l("Common data {} already existed") # {} 已存在
|
||||
notice_platform_existed = _l("Notice platform {} existed") # {} 已存在
|
||||
notice_not_existed = _l("Notice {} not existed") # {} 配置项不存在
|
||||
notice_please_config_messenger_first = _l("Notice please config messenger first") # 请先配置messenger URL
|
||||
notice_bind_err_with_empty_mobile = _l("Notice bind err with empty mobile") # 绑定错误,手机号为空
|
||||
notice_bind_failed = _l("Notice bind failed: {}") # 绑定失败: {}
|
||||
notice_bind_success = _l("Notice bind success") # 绑定成功
|
||||
notice_remove_bind_success = _l("Notice remove bind success") # 解绑成功
|
||||
|
||||
not_support_test = _l("Not support test type: {}") # 不支持的测试类型: {}
|
||||
not_support_auth_type = _l("Not support auth type: {}") # 不支持的认证类型: {}
|
||||
ldap_server_connect_timeout = _l("LDAP server connect timeout") # LDAP服务器连接超时
|
||||
ldap_server_connect_not_available = _l("LDAP server connect not available") # LDAP服务器连接不可用
|
||||
ldap_test_unknown_error = _l("LDAP test unknown error: {}") # LDAP测试未知错误: {}
|
||||
common_data_not_support_auth_type = _l("Common data not support auth type: {}") # 通用数据不支持auth类型: {}
|
||||
ldap_test_username_required = _l("LDAP test username required") # LDAP测试用户名必填
|
||||
|
||||
company_wide = _l("Company wide") # 全公司
|
||||
|
||||
resource_no_permission = _l("No permission to access resource {}, perm {} ") # 没有权限访问 {} 资源的 {} 权限"
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
class OperationPermission(object):
|
||||
|
||||
def __init__(self, resource_perms):
|
||||
for _r in resource_perms:
|
||||
setattr(self, _r['page'], _r['page'])
|
||||
for _p in _r['perms']:
|
||||
setattr(self, _p, _p)
|
||||
|
||||
|
||||
class BaseApp(object):
|
||||
resource_type_name = 'OperationPermission'
|
||||
all_resource_perms = []
|
||||
|
||||
def __init__(self):
|
||||
self.admin_name = None
|
||||
self.roles = []
|
||||
self.app_name = 'acl'
|
||||
self.require_create_resource_type = self.resource_type_name
|
||||
self.extra_create_resource_type_list = []
|
||||
|
||||
self.op = None
|
||||
|
||||
@staticmethod
|
||||
def format_role(role_name, role_type, acl_rid, resource_perms, description=''):
|
||||
return dict(
|
||||
role_name=role_name,
|
||||
role_type=role_type,
|
||||
acl_rid=acl_rid,
|
||||
description=description,
|
||||
resource_perms=resource_perms,
|
||||
)
|
||||
|
||||
|
||||
class CMDBApp(BaseApp):
|
||||
all_resource_perms = [
|
||||
{"page": "Big_Screen", "page_cn": "大屏", "perms": ["read"]},
|
||||
{"page": "Dashboard", "page_cn": "仪表盘", "perms": ["read"]},
|
||||
{"page": "Resource_Search", "page_cn": "资源搜索", "perms": ["read"]},
|
||||
{"page": "Auto_Discovery_Pool", "page_cn": "自动发现池", "perms": ["read"]},
|
||||
{"page": "My_Subscriptions", "page_cn": "我的订阅", "perms": ["read"]},
|
||||
{"page": "Bulk_Import", "page_cn": "批量导入", "perms": ["read"]},
|
||||
{"page": "Model_Configuration", "page_cn": "模型配置",
|
||||
"perms": ["read", "create_CIType", "create_CIType_group", "update_CIType_group",
|
||||
"delete_CIType_group", "download_CIType"]},
|
||||
{"page": "Backend_Management", "page_cn": "后台管理", "perms": ["read"]},
|
||||
{"page": "Customized_Dashboard", "page_cn": "定制仪表盘", "perms": ["read"]},
|
||||
{"page": "Service_Tree_Definition", "page_cn": "服务树定义", "perms": ["read"]},
|
||||
{"page": "Model_Relationships", "page_cn": "模型关系", "perms": ["read"]},
|
||||
{"page": "Operation_Audit", "page_cn": "操作审计", "perms": ["read"]},
|
||||
{"page": "Relationship_Types", "page_cn": "关系类型", "perms": ["read"]},
|
||||
{"page": "Auto_Discovery", "page_cn": "自动发现", "perms": ["read", "create_plugin", "update_plugin", "delete_plugin"]},
|
||||
{"page": "TopologyView", "page_cn": "拓扑视图",
|
||||
"perms": ["read", "create_topology_group", "update_topology_group", "delete_topology_group",
|
||||
"create_topology_view"],
|
||||
},
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
self.admin_name = 'cmdb_admin'
|
||||
self.app_name = 'cmdb'
|
||||
|
||||
self.op = OperationPermission(self.all_resource_perms)
|
||||
@@ -1,18 +1,11 @@
|
||||
import base64
|
||||
import uuid
|
||||
import os
|
||||
from io import BytesIO
|
||||
|
||||
from flask import abort, current_app
|
||||
import lz4.frame
|
||||
|
||||
from api.lib.common_setting.utils import get_cur_time_str
|
||||
from api.models.common_setting import CommonFile
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
|
||||
|
||||
def allowed_file(filename, allowed_extensions):
|
||||
return '.' in filename and filename.rsplit('.', 1)[1].lower() in allowed_extensions
|
||||
return '.' in filename and \
|
||||
filename.rsplit('.', 1)[1].lower() in allowed_extensions
|
||||
|
||||
|
||||
def generate_new_file_name(name):
|
||||
@@ -20,75 +13,4 @@ def generate_new_file_name(name):
|
||||
prev_name = ''.join(name.split(f".{ext}")[:-1])
|
||||
uid = str(uuid.uuid4())
|
||||
cur_str = get_cur_time_str('_')
|
||||
|
||||
return f"{prev_name}_{cur_str}_{uid}.{ext}"
|
||||
|
||||
|
||||
class CommonFileCRUD:
|
||||
@staticmethod
|
||||
def add_file(**kwargs):
|
||||
return CommonFile.create(**kwargs)
|
||||
|
||||
@staticmethod
|
||||
def get_file(file_name, to_str=False):
|
||||
existed = CommonFile.get_by(file_name=file_name, first=True, to_dict=False)
|
||||
if not existed:
|
||||
abort(400, ErrFormat.file_not_found)
|
||||
|
||||
uncompressed_data = lz4.frame.decompress(existed.binary)
|
||||
|
||||
return base64.b64encode(uncompressed_data).decode('utf-8') if to_str else BytesIO(uncompressed_data)
|
||||
|
||||
@staticmethod
|
||||
def sync_file_to_db():
|
||||
for p in ['UPLOAD_DIRECTORY_FULL']:
|
||||
upload_path = current_app.config.get(p, None)
|
||||
if not upload_path:
|
||||
continue
|
||||
for root, dirs, files in os.walk(upload_path):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
if not os.path.isfile(file_path):
|
||||
continue
|
||||
|
||||
existed = CommonFile.get_by(file_name=file, first=True, to_dict=False)
|
||||
if existed:
|
||||
continue
|
||||
with open(file_path, 'rb') as f:
|
||||
data = f.read()
|
||||
compressed_data = lz4.frame.compress(data)
|
||||
try:
|
||||
CommonFileCRUD.add_file(
|
||||
origin_name=file,
|
||||
file_name=file,
|
||||
binary=compressed_data
|
||||
)
|
||||
|
||||
current_app.logger.info(f'sync file {file} to db')
|
||||
except Exception as e:
|
||||
current_app.logger.error(f'sync file {file} to db error: {e}')
|
||||
|
||||
def get_file_binary_str(self, file_name):
|
||||
return self.get_file(file_name, True)
|
||||
|
||||
def save_str_to_file(self, file_name, str_data):
|
||||
try:
|
||||
self.get_file(file_name)
|
||||
current_app.logger.info(f'file {file_name} already exists')
|
||||
return
|
||||
except Exception as e:
|
||||
# file not found
|
||||
pass
|
||||
|
||||
bytes_data = base64.b64decode(str_data)
|
||||
compressed_data = lz4.frame.compress(bytes_data)
|
||||
|
||||
try:
|
||||
self.add_file(
|
||||
origin_name=file_name,
|
||||
file_name=file_name,
|
||||
binary=compressed_data
|
||||
)
|
||||
current_app.logger.info(f'save_str_to_file {file_name} success')
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"save_str_to_file error: {e}")
|
||||
|
||||
@@ -1,12 +1,24 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
from datetime import datetime
|
||||
from flask import current_app
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.dialects.mysql import ENUM
|
||||
|
||||
import pandas as pd
|
||||
from sqlalchemy import text
|
||||
|
||||
from api.extensions import db
|
||||
|
||||
|
||||
def get_df_from_read_sql(query, to_dict=False):
|
||||
bind = query.session.bind
|
||||
query = query.statement.compile(dialect=bind.dialect if bind else None,
|
||||
compile_kwargs={"literal_binds": True}).string
|
||||
a = db.engine
|
||||
df = pd.read_sql(sql=text(query), con=a.connect())
|
||||
|
||||
if to_dict:
|
||||
return df.to_dict('records')
|
||||
return df
|
||||
|
||||
|
||||
def get_cur_time_str(split_flag='-'):
|
||||
f = f"%Y{split_flag}%m{split_flag}%d{split_flag}%H{split_flag}%M{split_flag}%S{split_flag}%f"
|
||||
return datetime.now().strftime(f)[:-3]
|
||||
@@ -28,115 +40,3 @@ class BaseEnum(object):
|
||||
if not attr.startswith("_") and not callable(getattr(cls, attr))
|
||||
}
|
||||
return cls._ALL_
|
||||
|
||||
|
||||
class CheckNewColumn(object):
|
||||
|
||||
def __init__(self):
|
||||
self.engine = db.get_engine()
|
||||
self.inspector = inspect(self.engine)
|
||||
self.table_names = self.inspector.get_table_names()
|
||||
|
||||
@staticmethod
|
||||
def get_model_by_table_name(_table_name):
|
||||
registry = getattr(db.Model, 'registry', None)
|
||||
class_registry = getattr(registry, '_class_registry', None)
|
||||
for _model in class_registry.values():
|
||||
if hasattr(_model, '__tablename__') and _model.__tablename__ == _table_name:
|
||||
return _model
|
||||
return None
|
||||
|
||||
def run(self):
|
||||
for table_name in self.table_names:
|
||||
self.check_by_table(table_name)
|
||||
|
||||
def check_by_table(self, table_name):
|
||||
existed_columns = self.inspector.get_columns(table_name)
|
||||
enum_columns = []
|
||||
existed_column_name_list = []
|
||||
for c in existed_columns:
|
||||
if isinstance(c['type'], ENUM):
|
||||
enum_columns.append(c['name'])
|
||||
existed_column_name_list.append(c['name'])
|
||||
|
||||
model = self.get_model_by_table_name(table_name)
|
||||
if model is None:
|
||||
return
|
||||
model_columns = getattr(getattr(getattr(model, '__table__'), 'columns'), '_all_columns')
|
||||
for column in model_columns:
|
||||
if column.name not in existed_column_name_list:
|
||||
add_res = self.add_new_column(table_name, column)
|
||||
if not add_res:
|
||||
continue
|
||||
|
||||
current_app.logger.info(f"add new column [{column.name}] in table [{table_name}] success.")
|
||||
|
||||
if column.name in enum_columns:
|
||||
enum_columns.remove(column.name)
|
||||
|
||||
self.add_new_index(table_name, column)
|
||||
|
||||
if len(enum_columns) > 0:
|
||||
self.check_enum_column(enum_columns, existed_columns, model_columns, table_name)
|
||||
|
||||
def add_new_column(self, target_table_name, new_column):
|
||||
try:
|
||||
column_type = new_column.type.compile(self.engine.dialect)
|
||||
default_value = new_column.default.arg if new_column.default else None
|
||||
|
||||
sql = "ALTER TABLE " + target_table_name + " ADD COLUMN " + f"`{new_column.name}`" + " " + column_type
|
||||
if new_column.comment:
|
||||
sql += f" comment '{new_column.comment}'"
|
||||
|
||||
if column_type == 'JSON':
|
||||
pass
|
||||
elif default_value:
|
||||
if column_type.startswith('VAR') or column_type.startswith('Text'):
|
||||
if default_value is None or len(default_value) == 0:
|
||||
pass
|
||||
else:
|
||||
sql += f" DEFAULT {default_value}"
|
||||
|
||||
sql = text(sql)
|
||||
db.session.execute(sql)
|
||||
return True
|
||||
except Exception as e:
|
||||
err = f"add_new_column [{new_column.name}] to table [{target_table_name}] err: {e}"
|
||||
current_app.logger.error(err)
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def add_new_index(target_table_name, new_column):
|
||||
try:
|
||||
if new_column.index:
|
||||
index_name = f"{target_table_name}_{new_column.name}"
|
||||
sql = "CREATE INDEX " + f"{index_name}" + " ON " + target_table_name + " (" + new_column.name + ")"
|
||||
db.session.execute(sql)
|
||||
current_app.logger.info(f"add new index [{index_name}] in table [{target_table_name}] success.")
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
err = f"add_new_index [{new_column.name}] to table [{target_table_name}] err: {e}"
|
||||
current_app.logger.error(err)
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def check_enum_column(enum_columns, existed_columns, model_columns, table_name):
|
||||
for column_name in enum_columns:
|
||||
try:
|
||||
enum_column = list(filter(lambda x: x['name'] == column_name, existed_columns))[0]
|
||||
old_enum_value = enum_column.get('type', {}).enums
|
||||
target_column = list(filter(lambda x: x.name == column_name, model_columns))[0]
|
||||
new_enum_value = target_column.type.enums
|
||||
|
||||
if set(old_enum_value) == set(new_enum_value):
|
||||
continue
|
||||
|
||||
enum_values_str = ','.join(["'{}'".format(value) for value in new_enum_value])
|
||||
sql = f"ALTER TABLE {table_name} MODIFY COLUMN" + f"`{column_name}`" + f" enum({enum_values_str})"
|
||||
db.session.execute(sql)
|
||||
current_app.logger.info(
|
||||
f"modify column [{column_name}] ENUM: {new_enum_value} in table [{table_name}] success.")
|
||||
except Exception as e:
|
||||
current_app.logger.error(
|
||||
f"modify column ENUM [{column_name}] in table [{table_name}] err: {e}")
|
||||
|
||||
@@ -10,18 +10,14 @@ from api.lib.exception import CommitException
|
||||
|
||||
class FormatMixin(object):
|
||||
def to_dict(self):
|
||||
res = dict()
|
||||
for k in getattr(self, "__mapper__").c.keys():
|
||||
if k in {'password', '_password', 'secret', '_secret'}:
|
||||
continue
|
||||
res = dict([(k, getattr(self, k) if not isinstance(
|
||||
getattr(self, k), (datetime.datetime, datetime.date, datetime.time)) else str(
|
||||
getattr(self, k))) for k in getattr(self, "__mapper__").c.keys()])
|
||||
# FIXME: getattr(cls, "__table__").columns k.name
|
||||
|
||||
if k.startswith('_'):
|
||||
k = k[1:]
|
||||
|
||||
if not isinstance(getattr(self, k), (datetime.datetime, datetime.date, datetime.time)):
|
||||
res[k] = getattr(self, k)
|
||||
else:
|
||||
res[k] = str(getattr(self, k))
|
||||
res.pop('password', None)
|
||||
res.pop('_password', None)
|
||||
res.pop('secret', None)
|
||||
|
||||
return res
|
||||
|
||||
@@ -84,17 +80,17 @@ class CRUDMixin(FormatMixin):
|
||||
db.session.rollback()
|
||||
raise CommitException(str(e))
|
||||
|
||||
def soft_delete(self, flush=False, commit=True):
|
||||
def soft_delete(self, flush=False):
|
||||
setattr(self, "deleted", True)
|
||||
setattr(self, "deleted_at", datetime.datetime.now())
|
||||
self.save(flush=flush, commit=commit)
|
||||
self.save(flush=flush)
|
||||
|
||||
@classmethod
|
||||
def get_by_id(cls, _id):
|
||||
if any((isinstance(_id, six.string_types) and _id.isdigit(),
|
||||
isinstance(_id, (six.integer_types, float))), ):
|
||||
obj = getattr(cls, "query").get(int(_id))
|
||||
if obj and not getattr(obj, 'deleted', False):
|
||||
if obj and not obj.deleted:
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
@@ -142,11 +138,8 @@ class CRUDMixin(FormatMixin):
|
||||
return result[0] if first and result else (None if first else result)
|
||||
|
||||
@classmethod
|
||||
def get_by_like(cls, to_dict=True, deleted=False, **kwargs):
|
||||
def get_by_like(cls, to_dict=True, **kwargs):
|
||||
query = db.session.query(cls)
|
||||
if hasattr(cls, "deleted") and deleted is not None:
|
||||
query = query.filter(cls.deleted.is_(deleted))
|
||||
|
||||
for k, v in kwargs.items():
|
||||
query = query.filter(getattr(cls, k).ilike('%{0}%'.format(v)))
|
||||
return [i.to_dict() if to_dict else i for i in query]
|
||||
|
||||
@@ -4,14 +4,8 @@
|
||||
from functools import wraps
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import request
|
||||
from sqlalchemy.exc import InvalidRequestError
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlalchemy.exc import PendingRollbackError
|
||||
from sqlalchemy.exc import StatementError
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
@@ -61,8 +55,8 @@ def args_validate(model_cls, exclude_args=None):
|
||||
if exclude_args and arg in exclude_args:
|
||||
continue
|
||||
|
||||
if attr.type.python_type == str and attr.type.length and (
|
||||
len(request.values[arg] or '') > attr.type.length):
|
||||
if attr.type.python_type == str and attr.type.length and \
|
||||
len(request.values[arg] or '') > attr.type.length:
|
||||
|
||||
return abort(400, CommonErrFormat.argument_str_length_limit.format(arg, attr.type.length))
|
||||
elif attr.type.python_type in (int, float) and request.values[arg]:
|
||||
@@ -76,43 +70,3 @@ def args_validate(model_cls, exclude_args=None):
|
||||
return wrapper
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def reconnect_db(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except (StatementError, OperationalError, InvalidRequestError) as e:
|
||||
error_msg = str(e)
|
||||
if 'Lost connection' in error_msg or 'reconnect until invalid transaction' in error_msg or \
|
||||
'can be emitted within this transaction' in error_msg:
|
||||
current_app.logger.info('[reconnect_db] lost connect rollback then retry')
|
||||
db.session.rollback()
|
||||
return func(*args, **kwargs)
|
||||
else:
|
||||
raise e
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def _flush_db():
|
||||
try:
|
||||
db.session.commit()
|
||||
except (StatementError, OperationalError, InvalidRequestError, PendingRollbackError):
|
||||
db.session.rollback()
|
||||
|
||||
|
||||
def flush_db(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
_flush_db()
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def run_flush_db():
|
||||
_flush_db()
|
||||
|
||||
@@ -4,22 +4,21 @@
|
||||
import hashlib
|
||||
|
||||
import requests
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask_login import current_user
|
||||
from future.moves.urllib.parse import urlparse
|
||||
from flask import abort
|
||||
from flask import g
|
||||
from flask import current_app
|
||||
|
||||
|
||||
def build_api_key(path, params):
|
||||
current_user is not None or abort(403, u"您得登陆才能进行该操作")
|
||||
key = current_user.key
|
||||
secret = current_user.secret
|
||||
g.user is not None or abort(403, u"您得登陆才能进行该操作")
|
||||
key = g.user.key
|
||||
secret = g.user.secret
|
||||
values = "".join([str(params[k]) for k in sorted(params.keys())
|
||||
if params[k] is not None]) if params.keys() else ""
|
||||
_secret = "".join([path, secret, values]).encode("utf-8")
|
||||
params["_secret"] = hashlib.sha1(_secret).hexdigest()
|
||||
params["_key"] = key
|
||||
|
||||
return params
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
from flask import abort
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
@@ -12,23 +13,17 @@ class DBMixin(object):
|
||||
cls = None
|
||||
|
||||
@classmethod
|
||||
def search(cls, page, page_size, fl=None, only_query=False, reverse=False, count_query=False,
|
||||
last_size=None, **kwargs):
|
||||
def search(cls, page, page_size, fl=None, only_query=False, reverse=False, count_query=False, **kwargs):
|
||||
page = get_page(page)
|
||||
page_size = get_page_size(page_size)
|
||||
if fl is None:
|
||||
query = db.session.query(cls.cls)
|
||||
query = db.session.query(cls.cls).filter(cls.cls.deleted.is_(False))
|
||||
else:
|
||||
query = db.session.query(*[getattr(cls.cls, i) for i in fl])
|
||||
query = db.session.query(*[getattr(cls.cls, i) for i in fl]).filter(cls.cls.deleted.is_(False))
|
||||
|
||||
_query = None
|
||||
if count_query:
|
||||
_query = db.session.query(func.count(cls.cls.id))
|
||||
|
||||
if hasattr(cls.cls, 'deleted'):
|
||||
query = query.filter(cls.cls.deleted.is_(False))
|
||||
if _query:
|
||||
_query = _query.filter(cls.cls.deleted.is_(False))
|
||||
_query = db.session.query(func.count(cls.cls.id)).filter(cls.cls.deleted.is_(False))
|
||||
|
||||
for k in kwargs:
|
||||
if hasattr(cls.cls, k):
|
||||
@@ -45,15 +40,14 @@ class DBMixin(object):
|
||||
return _query, query
|
||||
|
||||
numfound = query.count()
|
||||
if not last_size:
|
||||
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
|
||||
for i in query.offset((page - 1) * page_size).limit(page_size)]
|
||||
else:
|
||||
offset = numfound - last_size
|
||||
if offset < 0:
|
||||
offset = 0
|
||||
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
|
||||
for i in query.offset(offset).limit(last_size)]
|
||||
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
|
||||
for i in query.offset((page - 1) * page_size).limit(page_size)]
|
||||
|
||||
def _must_be_required(self, _id):
|
||||
existed = self.cls.get_by_id(_id)
|
||||
existed or abort(404, "Factor [{}] does not exist".format(_id))
|
||||
|
||||
return existed
|
||||
|
||||
def _can_add(self, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import json
|
||||
|
||||
import requests
|
||||
import six
|
||||
from flask import current_app
|
||||
from jinja2 import Template
|
||||
from markdownify import markdownify as md
|
||||
|
||||
from api.lib.common_setting.notice_config import NoticeConfigCRUD
|
||||
from api.lib.mail import send_mail
|
||||
|
||||
|
||||
def _request_messenger(subject, body, tos, sender, payload):
|
||||
params = dict(sender=sender, title=subject,
|
||||
tos=[to[sender] for to in tos if to.get(sender)])
|
||||
|
||||
if not params['tos']:
|
||||
raise Exception("no receivers")
|
||||
|
||||
flat_tos = []
|
||||
for i in params['tos']:
|
||||
if i.strip():
|
||||
to = Template(i).render(payload)
|
||||
if isinstance(to, list):
|
||||
flat_tos.extend(to)
|
||||
elif isinstance(to, six.string_types):
|
||||
flat_tos.append(to)
|
||||
params['tos'] = flat_tos
|
||||
|
||||
if sender == "email":
|
||||
params['msgtype'] = 'text/html'
|
||||
params['content'] = body
|
||||
else:
|
||||
params['msgtype'] = 'markdown'
|
||||
try:
|
||||
content = md("{}\n{}".format(subject or '', body or ''))
|
||||
except Exception as e:
|
||||
current_app.logger.warning("html2markdown failed: {}".format(e))
|
||||
content = "{}\n{}".format(subject or '', body or '')
|
||||
|
||||
params['content'] = json.dumps(dict(content=content))
|
||||
|
||||
url = current_app.config.get('MESSENGER_URL') or NoticeConfigCRUD.get_messenger_url()
|
||||
if not url:
|
||||
raise Exception("no messenger url")
|
||||
|
||||
if not url.endswith("message"):
|
||||
url = "{}/v1/message".format(url)
|
||||
|
||||
resp = requests.post(url, json=params)
|
||||
if resp.status_code != 200:
|
||||
raise Exception(resp.text)
|
||||
|
||||
return resp.text
|
||||
|
||||
|
||||
def notify_send(subject, body, methods, tos, payload=None):
|
||||
payload = payload or {}
|
||||
payload = {k: '' if v is None else v for k, v in payload.items()}
|
||||
subject = Template(subject).render(payload)
|
||||
body = Template(body).render(payload)
|
||||
|
||||
res = ''
|
||||
for method in methods:
|
||||
if method == "email" and not current_app.config.get('USE_MESSENGER', True):
|
||||
send_mail(None, [Template(to.get('email')).render(payload) for to in tos], subject, body)
|
||||
|
||||
res += (_request_messenger(subject, body, tos, method, payload) + "\n")
|
||||
|
||||
return res
|
||||
@@ -5,11 +5,8 @@ import hashlib
|
||||
|
||||
import requests
|
||||
import six
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask_login import current_user
|
||||
from flask import current_app, g, request
|
||||
from flask import session, abort
|
||||
|
||||
from api.extensions import cache
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
@@ -87,8 +84,8 @@ class ACLManager(object):
|
||||
if user:
|
||||
return Role.get_by(name=name, uid=user.uid, first=True, to_dict=False)
|
||||
|
||||
return (Role.get_by(name=name, app_id=self.app_id, first=True, to_dict=False) or
|
||||
Role.get_by(name=name, first=True, to_dict=False))
|
||||
return Role.get_by(name=name, app_id=self.app_id, first=True, to_dict=False) or \
|
||||
Role.get_by(name=name, first=True, to_dict=False)
|
||||
|
||||
def add_resource(self, name, resource_type_name=None):
|
||||
resource_type = ResourceType.get_by(name=resource_type_name, first=True, to_dict=False)
|
||||
@@ -117,15 +114,15 @@ class ACLManager(object):
|
||||
if group:
|
||||
PermissionCRUD.grant(role.id, permissions, group_id=group.id)
|
||||
|
||||
def grant_resource_to_role_by_rid(self, name, rid, resource_type_name=None, permissions=None, rebuild=True):
|
||||
def grant_resource_to_role_by_rid(self, name, rid, resource_type_name=None, permissions=None):
|
||||
resource = self._get_resource(name, resource_type_name)
|
||||
|
||||
if resource:
|
||||
PermissionCRUD.grant(rid, permissions, resource_id=resource.id, rebuild=rebuild)
|
||||
PermissionCRUD.grant(rid, permissions, resource_id=resource.id)
|
||||
else:
|
||||
group = self._get_resource_group(name)
|
||||
if group:
|
||||
PermissionCRUD.grant(rid, permissions, group_id=group.id, rebuild=rebuild)
|
||||
PermissionCRUD.grant(rid, permissions, group_id=group.id)
|
||||
|
||||
def revoke_resource_from_role(self, name, role, resource_type_name=None, permissions=None):
|
||||
resource = self._get_resource(name, resource_type_name)
|
||||
@@ -138,28 +135,28 @@ class ACLManager(object):
|
||||
if group:
|
||||
PermissionCRUD.revoke(role.id, permissions, group_id=group.id)
|
||||
|
||||
def revoke_resource_from_role_by_rid(self, name, rid, resource_type_name=None, permissions=None, rebuild=True):
|
||||
def revoke_resource_from_role_by_rid(self, name, rid, resource_type_name=None, permissions=None):
|
||||
resource = self._get_resource(name, resource_type_name)
|
||||
|
||||
if resource:
|
||||
PermissionCRUD.revoke(rid, permissions, resource_id=resource.id, rebuild=rebuild)
|
||||
PermissionCRUD.revoke(rid, permissions, resource_id=resource.id)
|
||||
else:
|
||||
group = self._get_resource_group(name)
|
||||
if group:
|
||||
PermissionCRUD.revoke(rid, permissions, group_id=group.id, rebuild=rebuild)
|
||||
PermissionCRUD.revoke(rid, permissions, group_id=group.id)
|
||||
|
||||
def del_resource(self, name, resource_type_name=None, rebuild=True):
|
||||
def del_resource(self, name, resource_type_name=None):
|
||||
resource = self._get_resource(name, resource_type_name)
|
||||
if resource:
|
||||
return ResourceCRUD.delete(resource.id, rebuild=rebuild)
|
||||
ResourceCRUD.delete(resource.id)
|
||||
|
||||
def has_permission(self, resource_name, resource_type, perm, resource_id=None, rid=None):
|
||||
def has_permission(self, resource_name, resource_type, perm, resource_id=None):
|
||||
if is_app_admin(self.app_id):
|
||||
return True
|
||||
|
||||
role = self._get_role(current_user.username) if rid is None else RoleCache.get(rid)
|
||||
role = self._get_role(g.user.username)
|
||||
|
||||
role or abort(404, ErrFormat.role_not_found.format(current_user.username))
|
||||
role or abort(404, ErrFormat.role_not_found.format(g.user.username))
|
||||
|
||||
return RoleCRUD.has_permission(role.id, resource_name, resource_type, self.app_id, perm,
|
||||
resource_id=resource_id)
|
||||
@@ -196,9 +193,9 @@ class ACLManager(object):
|
||||
return user
|
||||
|
||||
def get_resources(self, resource_type_name=None):
|
||||
role = self._get_role(current_user.username)
|
||||
role = self._get_role(g.user.username)
|
||||
|
||||
role or abort(404, ErrFormat.role_not_found.format(current_user.username))
|
||||
role or abort(404, ErrFormat.role_not_found.format(g.user.username))
|
||||
rid = role.id
|
||||
|
||||
return RoleCRUD.recursive_resources(rid, self.app_id, resource_type_name).get('resources')
|
||||
@@ -218,7 +215,7 @@ def validate_permission(resources, resource_type, perm, app=None):
|
||||
return
|
||||
|
||||
if current_app.config.get("USE_ACL"):
|
||||
if current_user.username == "worker":
|
||||
if g.user.username == "worker":
|
||||
return
|
||||
|
||||
resources = [resources] if isinstance(resources, six.string_types) else resources
|
||||
@@ -316,7 +313,7 @@ def role_required(role_name, app=None):
|
||||
return
|
||||
|
||||
if current_app.config.get("USE_ACL"):
|
||||
if getattr(current_user, 'username', None) == "worker":
|
||||
if getattr(g.user, 'username', None) == "worker":
|
||||
return func(*args, **kwargs)
|
||||
|
||||
if role_name not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin(app):
|
||||
|
||||
@@ -8,9 +8,7 @@ from flask import abort
|
||||
from flask import current_app
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditScope
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from api.models.acl import App
|
||||
|
||||
|
||||
@@ -1,29 +1,16 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import itertools
|
||||
import json
|
||||
from enum import Enum
|
||||
from typing import List
|
||||
|
||||
from flask import has_request_context
|
||||
from flask import request
|
||||
from flask import g, has_request_context, request
|
||||
from flask_login import current_user
|
||||
from sqlalchemy import func
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl import AppCache
|
||||
from api.models.acl import AuditLoginLog
|
||||
from api.models.acl import AuditPermissionLog
|
||||
from api.models.acl import AuditResourceLog
|
||||
from api.models.acl import AuditRoleLog
|
||||
from api.models.acl import AuditTriggerLog
|
||||
from api.models.acl import Permission
|
||||
from api.models.acl import Resource
|
||||
from api.models.acl import ResourceGroup
|
||||
from api.models.acl import ResourceType
|
||||
from api.models.acl import Role
|
||||
from api.models.acl import RolePermission
|
||||
from api.models.acl import AuditRoleLog, AuditResourceLog, AuditPermissionLog, AuditTriggerLog, RolePermission, \
|
||||
Resource, ResourceGroup, Permission, Role, ResourceType
|
||||
|
||||
|
||||
class AuditScope(str, Enum):
|
||||
@@ -62,7 +49,9 @@ class AuditCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def get_current_operate_uid(uid=None):
|
||||
user_id = uid or (getattr(current_user, 'uid', None)) or getattr(current_user, 'user_id', None)
|
||||
|
||||
user_id = uid or (hasattr(g, 'user') and getattr(g.user, 'uid', None)) \
|
||||
or getattr(current_user, 'user_id', None)
|
||||
|
||||
if has_request_context() and request.headers.get('X-User-Id'):
|
||||
_user_id = request.headers['X-User-Id']
|
||||
@@ -104,8 +93,11 @@ class AuditCRUD(object):
|
||||
criterion.append(AuditPermissionLog.operate_type == v)
|
||||
|
||||
records = AuditPermissionLog.query.filter(
|
||||
AuditPermissionLog.deleted == 0, *criterion).order_by(
|
||||
AuditPermissionLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
AuditPermissionLog.deleted == 0,
|
||||
*criterion) \
|
||||
.order_by(AuditPermissionLog.id.desc()) \
|
||||
.offset((page - 1) * page_size) \
|
||||
.limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
@@ -168,8 +160,10 @@ class AuditCRUD(object):
|
||||
elif k == 'operate_type':
|
||||
criterion.append(AuditRoleLog.operate_type == v)
|
||||
|
||||
records = AuditRoleLog.query.filter(AuditRoleLog.deleted == 0, *criterion).order_by(
|
||||
AuditRoleLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
records = AuditRoleLog.query.filter(AuditRoleLog.deleted == 0, *criterion) \
|
||||
.order_by(AuditRoleLog.id.desc()) \
|
||||
.offset((page - 1) * page_size) \
|
||||
.limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
@@ -231,8 +225,11 @@ class AuditCRUD(object):
|
||||
criterion.append(AuditResourceLog.operate_type == v)
|
||||
|
||||
records = AuditResourceLog.query.filter(
|
||||
AuditResourceLog.deleted == 0, *criterion).order_by(
|
||||
AuditResourceLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
AuditResourceLog.deleted == 0,
|
||||
*criterion) \
|
||||
.order_by(AuditResourceLog.id.desc()) \
|
||||
.offset((page - 1) * page_size) \
|
||||
.limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
@@ -262,8 +259,11 @@ class AuditCRUD(object):
|
||||
criterion.append(AuditTriggerLog.operate_type == v)
|
||||
|
||||
records = AuditTriggerLog.query.filter(
|
||||
AuditTriggerLog.deleted == 0, *criterion).order_by(
|
||||
AuditTriggerLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
AuditTriggerLog.deleted == 0,
|
||||
*criterion) \
|
||||
.order_by(AuditTriggerLog.id.desc()) \
|
||||
.offset((page - 1) * page_size) \
|
||||
.limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
@@ -288,27 +288,6 @@ class AuditCRUD(object):
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def search_login(_, q=None, page=1, page_size=10, start=None, end=None):
|
||||
query = db.session.query(AuditLoginLog)
|
||||
|
||||
if start:
|
||||
query = query.filter(AuditLoginLog.login_at >= start)
|
||||
if end:
|
||||
query = query.filter(AuditLoginLog.login_at <= end)
|
||||
|
||||
if q:
|
||||
query = query.filter(AuditLoginLog.username == q)
|
||||
|
||||
records = query.order_by(
|
||||
AuditLoginLog.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
|
||||
|
||||
data = {
|
||||
'data': [r.to_dict() for r in records],
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def add_role_log(cls, app_id, operate_type: AuditOperateType,
|
||||
scope: AuditScope, link_id: int, origin: dict, current: dict, extra: dict,
|
||||
@@ -374,32 +353,3 @@ class AuditCRUD(object):
|
||||
AuditTriggerLog.create(app_id=app_id, trigger_id=trigger_id, operate_uid=user_id,
|
||||
operate_type=operate_type.value,
|
||||
origin=origin, current=current, extra=extra, source=source.value)
|
||||
|
||||
@classmethod
|
||||
def add_login_log(cls, username, is_ok, description, _id=None, logout_at=None, ip=None, browser=None):
|
||||
if _id is not None:
|
||||
existed = AuditLoginLog.get_by_id(_id)
|
||||
if existed is not None:
|
||||
existed.update(logout_at=logout_at)
|
||||
return
|
||||
|
||||
payload = dict(username=username,
|
||||
is_ok=is_ok,
|
||||
description=description,
|
||||
logout_at=logout_at,
|
||||
ip=(ip or request.headers.get('X-Forwarded-For') or
|
||||
request.headers.get('X-Real-IP') or request.remote_addr or '').split(',')[0],
|
||||
browser=browser or request.headers.get('User-Agent'),
|
||||
channel=request.values.get('channel', 'web'),
|
||||
)
|
||||
|
||||
if logout_at is None:
|
||||
payload['login_at'] = datetime.datetime.now()
|
||||
|
||||
try:
|
||||
from api.lib.common_setting.employee import EmployeeCRUD
|
||||
EmployeeCRUD.update_last_login_by_uid(current_user.uid)
|
||||
except:
|
||||
pass
|
||||
|
||||
return AuditLoginLog.create(**payload).id
|
||||
|
||||
@@ -2,12 +2,10 @@
|
||||
|
||||
|
||||
import msgpack
|
||||
import redis_lock
|
||||
|
||||
from api.extensions import cache
|
||||
from api.extensions import db
|
||||
from api.extensions import rd
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.utils import Lock
|
||||
from api.models.acl import App
|
||||
from api.models.acl import Permission
|
||||
from api.models.acl import Resource
|
||||
@@ -62,15 +60,15 @@ class UserCache(object):
|
||||
|
||||
@classmethod
|
||||
def get(cls, key):
|
||||
user = (cache.get(cls.PREFIX_ID.format(key)) or
|
||||
cache.get(cls.PREFIX_NAME.format(key)) or
|
||||
cache.get(cls.PREFIX_NICK.format(key)) or
|
||||
cache.get(cls.PREFIX_WXID.format(key)))
|
||||
user = cache.get(cls.PREFIX_ID.format(key)) or \
|
||||
cache.get(cls.PREFIX_NAME.format(key)) or \
|
||||
cache.get(cls.PREFIX_NICK.format(key)) or \
|
||||
cache.get(cls.PREFIX_WXID.format(key))
|
||||
if not user:
|
||||
user = (User.query.get(key) or
|
||||
User.query.get_by_username(key) or
|
||||
User.query.get_by_nickname(key) or
|
||||
User.query.get_by_wxid(key))
|
||||
user = User.query.get(key) or \
|
||||
User.query.get_by_username(key) or \
|
||||
User.query.get_by_nickname(key) or \
|
||||
User.query.get_by_wxid(key)
|
||||
if user:
|
||||
cls.set(user)
|
||||
|
||||
@@ -138,14 +136,14 @@ class HasResourceRoleCache(object):
|
||||
|
||||
@classmethod
|
||||
def add(cls, rid, app_id):
|
||||
with redis_lock.Lock(rd.r, 'HasResourceRoleCache'):
|
||||
with Lock('HasResourceRoleCache'):
|
||||
c = cls.get(app_id)
|
||||
c[rid] = 1
|
||||
cache.set(cls.PREFIX_KEY.format(app_id), c, timeout=0)
|
||||
|
||||
@classmethod
|
||||
def remove(cls, rid, app_id):
|
||||
with redis_lock.Lock(rd.r, 'HasResourceRoleCache'):
|
||||
with Lock('HasResourceRoleCache'):
|
||||
c = cls.get(app_id)
|
||||
c.pop(rid, None)
|
||||
cache.set(cls.PREFIX_KEY.format(app_id), c, timeout=0)
|
||||
@@ -158,10 +156,9 @@ class RoleRelationCache(object):
|
||||
PREFIX_RESOURCES2 = "RoleRelationResources2::id::{0}::AppId::{1}"
|
||||
|
||||
@classmethod
|
||||
def get_parent_ids(cls, rid, app_id, force=False):
|
||||
def get_parent_ids(cls, rid, app_id):
|
||||
parent_ids = cache.get(cls.PREFIX_PARENT.format(rid, app_id))
|
||||
if not parent_ids or force:
|
||||
db.session.commit()
|
||||
if not parent_ids:
|
||||
from api.lib.perm.acl.role import RoleRelationCRUD
|
||||
parent_ids = RoleRelationCRUD.get_parent_ids(rid, app_id)
|
||||
cache.set(cls.PREFIX_PARENT.format(rid, app_id), parent_ids, timeout=0)
|
||||
@@ -169,10 +166,9 @@ class RoleRelationCache(object):
|
||||
return parent_ids
|
||||
|
||||
@classmethod
|
||||
def get_child_ids(cls, rid, app_id, force=False):
|
||||
def get_child_ids(cls, rid, app_id):
|
||||
child_ids = cache.get(cls.PREFIX_CHILDREN.format(rid, app_id))
|
||||
if not child_ids or force:
|
||||
db.session.commit()
|
||||
if not child_ids:
|
||||
from api.lib.perm.acl.role import RoleRelationCRUD
|
||||
child_ids = RoleRelationCRUD.get_child_ids(rid, app_id)
|
||||
cache.set(cls.PREFIX_CHILDREN.format(rid, app_id), child_ids, timeout=0)
|
||||
@@ -180,16 +176,14 @@ class RoleRelationCache(object):
|
||||
return child_ids
|
||||
|
||||
@classmethod
|
||||
def get_resources(cls, rid, app_id, force=False):
|
||||
def get_resources(cls, rid, app_id):
|
||||
"""
|
||||
:param rid:
|
||||
:param app_id:
|
||||
:param force:
|
||||
:return: {id2perms: {resource_id: [perm,]}, group2perms: {group_id: [perm, ]}}
|
||||
"""
|
||||
resources = cache.get(cls.PREFIX_RESOURCES.format(rid, app_id))
|
||||
if not resources or force:
|
||||
db.session.commit()
|
||||
if not resources:
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
resources = RoleCRUD.get_resources(rid, app_id)
|
||||
if resources['id2perms'] or resources['group2perms']:
|
||||
@@ -198,10 +192,9 @@ class RoleRelationCache(object):
|
||||
return resources or {}
|
||||
|
||||
@classmethod
|
||||
def get_resources2(cls, rid, app_id, force=False):
|
||||
def get_resources2(cls, rid, app_id):
|
||||
r_g = cache.get(cls.PREFIX_RESOURCES2.format(rid, app_id))
|
||||
if not r_g or force:
|
||||
db.session.commit()
|
||||
if not r_g:
|
||||
res = cls.get_resources(rid, app_id)
|
||||
id2perms = res['id2perms']
|
||||
group2perms = res['group2perms']
|
||||
@@ -228,30 +221,24 @@ class RoleRelationCache(object):
|
||||
return msgpack.loads(r_g, raw=False)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild(cls, rid, app_id):
|
||||
if app_id is None:
|
||||
app_ids = [None] + [i.id for i in App.get_by(to_dict=False)]
|
||||
cls.clean(rid, app_id)
|
||||
db.session.remove()
|
||||
|
||||
cls.get_parent_ids(rid, app_id)
|
||||
cls.get_child_ids(rid, app_id)
|
||||
resources = cls.get_resources(rid, app_id)
|
||||
if resources.get('id2perms') or resources.get('group2perms'):
|
||||
HasResourceRoleCache.add(rid, app_id)
|
||||
else:
|
||||
app_ids = [app_id]
|
||||
|
||||
for _app_id in app_ids:
|
||||
cls.clean(rid, _app_id)
|
||||
|
||||
cls.get_parent_ids(rid, _app_id, force=True)
|
||||
cls.get_child_ids(rid, _app_id, force=True)
|
||||
resources = cls.get_resources(rid, _app_id, force=True)
|
||||
if resources.get('id2perms') or resources.get('group2perms'):
|
||||
HasResourceRoleCache.add(rid, _app_id)
|
||||
else:
|
||||
HasResourceRoleCache.remove(rid, _app_id)
|
||||
cls.get_resources2(rid, _app_id, force=True)
|
||||
HasResourceRoleCache.remove(rid, app_id)
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
@flush_db
|
||||
def rebuild2(cls, rid, app_id):
|
||||
cache.delete(cls.PREFIX_RESOURCES2.format(rid, app_id))
|
||||
cls.get_resources2(rid, app_id, force=True)
|
||||
db.session.remove()
|
||||
cls.get_resources2(rid, app_id)
|
||||
|
||||
@classmethod
|
||||
def clean(cls, rid, app_id):
|
||||
|
||||
@@ -4,9 +4,7 @@ import datetime
|
||||
from flask import abort
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateSource
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditOperateSource
|
||||
from api.lib.perm.acl.cache import PermissionCache
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
@@ -79,8 +77,7 @@ class PermissionCRUD(object):
|
||||
return r and cls.get_all(r.id)
|
||||
|
||||
@staticmethod
|
||||
def grant(rid, perms, resource_id=None, group_id=None, rebuild=True,
|
||||
source=AuditOperateSource.acl, force_update=False):
|
||||
def grant(rid, perms, resource_id=None, group_id=None, rebuild=True, source=AuditOperateSource.acl):
|
||||
app_id = None
|
||||
rt_id = None
|
||||
|
||||
@@ -100,30 +97,15 @@ class PermissionCRUD(object):
|
||||
elif group_id is not None:
|
||||
from api.models.acl import ResourceGroup
|
||||
|
||||
group = ResourceGroup.get_by_id(group_id) or abort(
|
||||
404, ErrFormat.resource_group_not_found.format("id={}".format(group_id)))
|
||||
group = ResourceGroup.get_by_id(group_id) or \
|
||||
abort(404, ErrFormat.resource_group_not_found.format("id={}".format(group_id)))
|
||||
app_id = group.app_id
|
||||
rt_id = group.resource_type_id
|
||||
if not perms:
|
||||
perms = [i.get('name') for i in ResourceTypeCRUD.get_perms(group.resource_type_id)]
|
||||
|
||||
if force_update:
|
||||
revoke_role_permissions = []
|
||||
existed_perms = RolePermission.get_by(rid=rid,
|
||||
app_id=app_id,
|
||||
group_id=group_id,
|
||||
resource_id=resource_id,
|
||||
to_dict=False)
|
||||
for role_perm in existed_perms:
|
||||
perm = PermissionCache.get(role_perm.perm_id, rt_id)
|
||||
if perm and perm.name not in perms:
|
||||
role_perm.soft_delete()
|
||||
revoke_role_permissions.append(role_perm)
|
||||
|
||||
AuditCRUD.add_permission_log(app_id, AuditOperateType.revoke, rid, rt_id,
|
||||
revoke_role_permissions, source=source)
|
||||
|
||||
_role_permissions = []
|
||||
|
||||
for _perm in set(perms):
|
||||
perm = PermissionCache.get(_perm, rt_id)
|
||||
if not perm:
|
||||
@@ -224,8 +206,8 @@ class PermissionCRUD(object):
|
||||
if resource_id is not None:
|
||||
from api.models.acl import Resource
|
||||
|
||||
resource = Resource.get_by_id(resource_id) or abort(
|
||||
404, ErrFormat.resource_not_found.format("id={}".format(resource_id)))
|
||||
resource = Resource.get_by_id(resource_id) or \
|
||||
abort(404, ErrFormat.resource_not_found.format("id={}".format(resource_id)))
|
||||
app_id = resource.app_id
|
||||
rt_id = resource.resource_type_id
|
||||
if not perms:
|
||||
@@ -234,8 +216,8 @@ class PermissionCRUD(object):
|
||||
elif group_id is not None:
|
||||
from api.models.acl import ResourceGroup
|
||||
|
||||
group = ResourceGroup.get_by_id(group_id) or abort(
|
||||
404, ErrFormat.resource_group_not_found.format("id={}".format(group_id)))
|
||||
group = ResourceGroup.get_by_id(group_id) or \
|
||||
abort(404, ErrFormat.resource_group_not_found.format("id={}".format(group_id)))
|
||||
app_id = group.app_id
|
||||
|
||||
rt_id = group.resource_type_id
|
||||
@@ -290,14 +272,12 @@ class PermissionCRUD(object):
|
||||
perm2resource.setdefault(_perm, []).append(resource_id)
|
||||
for _perm in perm2resource:
|
||||
perm = PermissionCache.get(_perm, resource_type_id)
|
||||
if perm is None:
|
||||
continue
|
||||
exists = RolePermission.get_by(rid=rid,
|
||||
app_id=app_id,
|
||||
perm_id=perm.id,
|
||||
__func_in___key_resource_id=perm2resource[_perm],
|
||||
to_dict=False)
|
||||
for existed in exists:
|
||||
existeds = RolePermission.get_by(rid=rid,
|
||||
app_id=app_id,
|
||||
perm_id=perm.id,
|
||||
__func_in___key_resource_id=perm2resource[_perm],
|
||||
to_dict=False)
|
||||
for existed in existeds:
|
||||
existed.deleted = True
|
||||
existed.deleted_at = datetime.datetime.now()
|
||||
db.session.add(existed)
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditScope
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
|
||||
from api.lib.perm.acl.cache import ResourceCache
|
||||
from api.lib.perm.acl.cache import ResourceGroupCache
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
@@ -103,8 +102,8 @@ class ResourceTypeCRUD(object):
|
||||
|
||||
@classmethod
|
||||
def delete(cls, rt_id):
|
||||
rt = ResourceType.get_by_id(rt_id) or abort(
|
||||
404, ErrFormat.resource_type_not_found.format("id={}".format(rt_id)))
|
||||
rt = ResourceType.get_by_id(rt_id) or \
|
||||
abort(404, ErrFormat.resource_type_not_found.format("id={}".format(rt_id)))
|
||||
|
||||
Resource.get_by(resource_type_id=rt_id) and abort(400, ErrFormat.resource_type_cannot_delete)
|
||||
|
||||
@@ -126,18 +125,11 @@ class ResourceTypeCRUD(object):
|
||||
existed_ids = [i.id for i in existed]
|
||||
current_ids = []
|
||||
|
||||
rebuild_rids = set()
|
||||
for i in existed:
|
||||
if i.name not in perms:
|
||||
i.soft_delete(commit=False)
|
||||
for rp in RolePermission.get_by(perm_id=i.id, to_dict=False):
|
||||
rp.soft_delete(commit=False)
|
||||
rebuild_rids.add((rp.app_id, rp.rid))
|
||||
i.soft_delete()
|
||||
else:
|
||||
current_ids.append(i.id)
|
||||
db.session.commit()
|
||||
for _app_id, _rid in rebuild_rids:
|
||||
role_rebuild.apply_async(args=(_rid, _app_id), queue=ACL_QUEUE)
|
||||
|
||||
for i in perms:
|
||||
if i not in existed_names:
|
||||
@@ -173,8 +165,8 @@ class ResourceGroupCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def add(name, type_id, app_id, uid=None):
|
||||
ResourceGroup.get_by(name=name, resource_type_id=type_id, app_id=app_id) and abort(
|
||||
400, ErrFormat.resource_group_exists.format(name))
|
||||
ResourceGroup.get_by(name=name, resource_type_id=type_id, app_id=app_id) and \
|
||||
abort(400, ErrFormat.resource_group_exists.format(name))
|
||||
rg = ResourceGroup.create(name=name, resource_type_id=type_id, app_id=app_id, uid=uid)
|
||||
|
||||
AuditCRUD.add_resource_log(app_id, AuditOperateType.create,
|
||||
@@ -183,8 +175,8 @@ class ResourceGroupCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def update(rg_id, items):
|
||||
rg = ResourceGroup.get_by_id(rg_id) or abort(
|
||||
404, ErrFormat.resource_group_not_found.format("id={}".format(rg_id)))
|
||||
rg = ResourceGroup.get_by_id(rg_id) or \
|
||||
abort(404, ErrFormat.resource_group_not_found.format("id={}".format(rg_id)))
|
||||
|
||||
existed = ResourceGroupItems.get_by(group_id=rg_id, to_dict=False)
|
||||
existed_ids = [i.resource_id for i in existed]
|
||||
@@ -204,8 +196,8 @@ class ResourceGroupCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def delete(rg_id):
|
||||
rg = ResourceGroup.get_by_id(rg_id) or abort(
|
||||
404, ErrFormat.resource_group_not_found.format("id={}".format(rg_id)))
|
||||
rg = ResourceGroup.get_by_id(rg_id) or \
|
||||
abort(404, ErrFormat.resource_group_not_found.format("id={}".format(rg_id)))
|
||||
|
||||
origin = rg.to_dict()
|
||||
rg.soft_delete()
|
||||
@@ -266,8 +258,7 @@ class ResourceCRUD(object):
|
||||
numfound = query.count()
|
||||
res = [i.to_dict() for i in query.offset((page - 1) * page_size).limit(page_size)]
|
||||
for i in res:
|
||||
user = UserCache.get(i['uid']) if i['uid'] else ''
|
||||
i['user'] = user and user.nickname
|
||||
i['user'] = UserCache.get(i['uid']).nickname if i['uid'] else ''
|
||||
|
||||
return numfound, res
|
||||
|
||||
@@ -275,13 +266,14 @@ class ResourceCRUD(object):
|
||||
def add(cls, name, type_id, app_id, uid=None):
|
||||
type_id = cls._parse_resource_type_id(type_id, app_id)
|
||||
|
||||
Resource.get_by(name=name, resource_type_id=type_id, app_id=app_id) and abort(
|
||||
400, ErrFormat.resource_exists.format(name))
|
||||
Resource.get_by(name=name, resource_type_id=type_id, app_id=app_id) and \
|
||||
abort(400, ErrFormat.resource_exists.format(name))
|
||||
|
||||
r = Resource.create(name=name, resource_type_id=type_id, app_id=app_id, uid=uid)
|
||||
|
||||
from api.tasks.acl import apply_trigger
|
||||
triggers = TriggerCRUD.match_triggers(app_id, r.name, r.resource_type_id, uid)
|
||||
current_app.logger.info(triggers)
|
||||
for trigger in triggers:
|
||||
# auto trigger should be no uid
|
||||
apply_trigger.apply_async(args=(trigger.id,),
|
||||
@@ -315,12 +307,9 @@ class ResourceCRUD(object):
|
||||
return resource
|
||||
|
||||
@staticmethod
|
||||
def delete(_id, rebuild=True, app_id=None):
|
||||
def delete(_id):
|
||||
resource = Resource.get_by_id(_id) or abort(404, ErrFormat.resource_not_found.format("id={}".format(_id)))
|
||||
|
||||
if app_id is not None and resource.app_id != app_id:
|
||||
return abort(404, ErrFormat.resource_not_found.format("id={}".format(_id)))
|
||||
|
||||
origin = resource.to_dict()
|
||||
resource.soft_delete()
|
||||
|
||||
@@ -331,15 +320,12 @@ class ResourceCRUD(object):
|
||||
i.soft_delete()
|
||||
rebuilds.append((i.rid, i.app_id))
|
||||
|
||||
if rebuild:
|
||||
for rid, app_id in set(rebuilds):
|
||||
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
|
||||
for rid, app_id in set(rebuilds):
|
||||
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
|
||||
|
||||
AuditCRUD.add_resource_log(resource.app_id, AuditOperateType.delete,
|
||||
AuditScope.resource, resource.id, origin, {}, {})
|
||||
|
||||
return rebuilds
|
||||
|
||||
@classmethod
|
||||
def delete_by_name(cls, name, type_id, app_id):
|
||||
resource = Resource.get_by(name=name, resource_type_id=type_id, app_id=app_id) or abort(
|
||||
|
||||
@@ -1,50 +1,42 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask_babel import lazy_gettext as _l
|
||||
|
||||
from api.lib.resp_format import CommonErrFormat
|
||||
|
||||
|
||||
class ErrFormat(CommonErrFormat):
|
||||
login_succeed = _l("login successful") # 登录成功
|
||||
ldap_connection_failed = _l("Failed to connect to LDAP service") # 连接LDAP服务失败
|
||||
invalid_password = _l("Password verification failed") # 密码验证失败
|
||||
auth_only_with_app_token_failed = _l("Application Token verification failed") # 应用 Token验证失败
|
||||
# 您不是应用管理员 或者 session失效(尝试一下退出重新登录)
|
||||
session_invalid = _l(
|
||||
"You are not the application administrator or the session has expired (try logging out and logging in again)")
|
||||
auth_only_with_app_token_failed = "应用 Token验证失败"
|
||||
session_invalid = "您不是应用管理员 或者 session失效(尝试一下退出重新登录)"
|
||||
|
||||
resource_type_not_found = _l("Resource type {} does not exist!") # 资源类型 {} 不存在!
|
||||
resource_type_exists = _l("Resource type {} already exists!") # 资源类型 {} 已经存在!
|
||||
# 因为该类型下有资源的存在, 不能删除!
|
||||
resource_type_cannot_delete = _l("Because there are resources under this type, they cannot be deleted!")
|
||||
resource_type_not_found = "资源类型 {} 不存在!"
|
||||
resource_type_exists = "资源类型 {} 已经存在!"
|
||||
resource_type_cannot_delete = "因为该类型下有资源的存在, 不能删除!"
|
||||
|
||||
user_not_found = _l("User {} does not exist!") # 用户 {} 不存在!
|
||||
user_exists = _l("User {} already exists!") # 用户 {} 已经存在!
|
||||
role_not_found = _l("Role {} does not exist!") # 角色 {} 不存在!
|
||||
role_exists = _l("Role {} already exists!") # 角色 {} 已经存在!
|
||||
global_role_not_found = _l("Global role {} does not exist!") # 全局角色 {} 不存在!
|
||||
global_role_exists = _l("Global role {} already exists!") # 全局角色 {} 已经存在!
|
||||
user_not_found = "用户 {} 不存在!"
|
||||
user_exists = "用户 {} 已经存在!"
|
||||
role_not_found = "角色 {} 不存在!"
|
||||
role_exists = "角色 {} 已经存在!"
|
||||
global_role_not_found = "全局角色 {} 不存在!"
|
||||
global_role_exists = "全局角色 {} 已经存在!"
|
||||
|
||||
resource_no_permission = _l("You do not have {} permission on resource: {}") # 您没有资源: {} 的 {} 权限
|
||||
admin_required = _l("Requires administrator permissions") # 需要管理员权限
|
||||
role_required = _l("Requires role: {}") # 需要角色: {}
|
||||
# 删除用户角色, 请在 用户管理 页面操作!
|
||||
user_role_delete_invalid = _l("To delete a user role, please operate on the User Management page!")
|
||||
resource_no_permission = "您没有资源: {} 的 {} 权限"
|
||||
admin_required = "需要管理员权限"
|
||||
role_required = "需要角色: {}"
|
||||
|
||||
app_is_ready_existed = _l("Application {} already exists") # 应用 {} 已经存在
|
||||
app_not_found = _l("Application {} does not exist!") # 应用 {} 不存在!
|
||||
app_secret_invalid = _l("The Secret is invalid") # 应用的Secret无效
|
||||
app_is_ready_existed = "应用 {} 已经存在"
|
||||
app_not_found = "应用 {} 不存在!"
|
||||
app_secret_invalid = "应用的Secret无效"
|
||||
|
||||
resource_not_found = _l("Resource {} does not exist!") # 资源 {} 不存在!
|
||||
resource_exists = _l("Resource {} already exists!") # 资源 {} 已经存在!
|
||||
resource_not_found = "资源 {} 不存在!"
|
||||
resource_exists = "资源 {} 已经存在!"
|
||||
|
||||
resource_group_not_found = _l("Resource group {} does not exist!") # 资源组 {} 不存在!
|
||||
resource_group_exists = _l("Resource group {} already exists!") # 资源组 {} 已经存在!
|
||||
resource_group_not_found = "资源组 {} 不存在!"
|
||||
resource_group_exists = "资源组 {} 已经存在!"
|
||||
|
||||
inheritance_dead_loop = _l("Inheritance detected infinite loop") # 继承检测到了死循环
|
||||
role_relation_not_found = _l("Role relationship {} does not exist!") # 角色关系 {} 不存在!
|
||||
inheritance_dead_loop = "继承检测到了死循环"
|
||||
role_relation_not_found = "角色关系 {} 不存在!"
|
||||
|
||||
trigger_not_found = _l("Trigger {} does not exist!") # 触发器 {} 不存在!
|
||||
trigger_exists = _l("Trigger {} already exists!") # 触发器 {} 已经存在!
|
||||
trigger_disabled = _l("Trigger {} has been disabled!") # Trigger {} has been disabled!
|
||||
trigger_not_found = "触发器 {} 不存在!"
|
||||
trigger_exists = "触发器 {} 已经存在!"
|
||||
trigger_disabled = "触发器 {} 已经被禁用!"
|
||||
|
||||
invalid_password = "密码不正确!"
|
||||
|
||||
@@ -3,14 +3,11 @@
|
||||
|
||||
import time
|
||||
|
||||
import redis_lock
|
||||
import six
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from sqlalchemy import or_
|
||||
|
||||
from api.extensions import db
|
||||
from api.extensions import rd
|
||||
from api.lib.perm.acl.app import AppCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
@@ -64,9 +61,7 @@ class RoleRelationCRUD(object):
|
||||
|
||||
id2parents = {}
|
||||
for i in res:
|
||||
parent = RoleCache.get(i.parent_id)
|
||||
if parent:
|
||||
id2parents.setdefault(rid2uid.get(i.child_id, i.child_id), []).append(parent.to_dict())
|
||||
id2parents.setdefault(rid2uid.get(i.child_id, i.child_id), []).append(RoleCache.get(i.parent_id).to_dict())
|
||||
|
||||
return id2parents
|
||||
|
||||
@@ -145,27 +140,24 @@ class RoleRelationCRUD(object):
|
||||
|
||||
@classmethod
|
||||
def add(cls, role, parent_id, child_ids, app_id):
|
||||
with redis_lock.Lock(rd.r, "ROLE_RELATION_ADD"):
|
||||
db.session.commit()
|
||||
result = []
|
||||
for child_id in child_ids:
|
||||
existed = RoleRelation.get_by(parent_id=parent_id, child_id=child_id, app_id=app_id)
|
||||
if existed:
|
||||
continue
|
||||
|
||||
result = []
|
||||
for child_id in child_ids:
|
||||
existed = RoleRelation.get_by(parent_id=parent_id, child_id=child_id, app_id=app_id)
|
||||
if existed:
|
||||
continue
|
||||
RoleRelationCache.clean(parent_id, app_id)
|
||||
RoleRelationCache.clean(child_id, app_id)
|
||||
|
||||
if parent_id in cls.recursive_child_ids(child_id, app_id):
|
||||
return abort(400, ErrFormat.inheritance_dead_loop)
|
||||
if parent_id in cls.recursive_child_ids(child_id, app_id):
|
||||
return abort(400, ErrFormat.inheritance_dead_loop)
|
||||
|
||||
result.append(RoleRelation.create(parent_id=parent_id, child_id=child_id, app_id=app_id).to_dict())
|
||||
if app_id is None:
|
||||
for app in AppCRUD.get_all():
|
||||
if app.name != "acl":
|
||||
RoleRelationCache.clean(child_id, app.id)
|
||||
|
||||
RoleRelationCache.clean(parent_id, app_id)
|
||||
RoleRelationCache.clean(child_id, app_id)
|
||||
|
||||
if app_id is None:
|
||||
for app in AppCRUD.get_all():
|
||||
if app.name != "acl":
|
||||
RoleRelationCache.clean(child_id, app.id)
|
||||
result.append(RoleRelation.create(parent_id=parent_id, child_id=child_id, app_id=app_id).to_dict())
|
||||
|
||||
AuditCRUD.add_role_log(app_id, AuditOperateType.role_relation_add,
|
||||
AuditScope.role_relation, role.id, {}, {},
|
||||
@@ -220,14 +212,17 @@ class RoleCRUD(object):
|
||||
|
||||
@staticmethod
|
||||
def search(q, app_id, page=1, page_size=None, user_role=True, is_all=False, user_only=False):
|
||||
if user_only: # only user role
|
||||
query = db.session.query(Role).filter(Role.deleted.is_(False)).filter(Role.uid.isnot(None))
|
||||
query = db.session.query(Role).filter(Role.deleted.is_(False))
|
||||
query1 = query.filter(Role.app_id == app_id).filter(Role.uid.is_(None))
|
||||
query2 = query.filter(Role.app_id.is_(None)).filter(Role.uid.is_(None))
|
||||
query = query1.union(query2)
|
||||
|
||||
else:
|
||||
query = db.session.query(Role).filter(Role.deleted.is_(False)).filter(
|
||||
or_(Role.app_id == app_id, Role.app_id.is_(None)))
|
||||
if not user_role: # only virtual role
|
||||
query = query.filter(Role.uid.is_(None))
|
||||
if user_role:
|
||||
query1 = db.session.query(Role).filter(Role.deleted.is_(False)).filter(Role.uid.isnot(None))
|
||||
query = query.union(query1)
|
||||
|
||||
if user_only:
|
||||
query = db.session.query(Role).filter(Role.deleted.is_(False)).filter(Role.uid.isnot(None))
|
||||
|
||||
if not is_all:
|
||||
role_ids = list(HasResourceRoleCache.get(app_id).keys())
|
||||
@@ -277,13 +272,6 @@ class RoleCRUD(object):
|
||||
RoleCache.clean(rid)
|
||||
|
||||
role = role.update(**kwargs)
|
||||
|
||||
if origin['uid'] and kwargs.get('name') and kwargs.get('name') != origin['name']:
|
||||
from api.models.acl import User
|
||||
user = User.get_by(uid=origin['uid'], first=True, to_dict=False)
|
||||
if user:
|
||||
user.update(username=kwargs['name'])
|
||||
|
||||
AuditCRUD.add_role_log(role.app_id, AuditOperateType.update,
|
||||
AuditScope.role, role.id, origin, role.to_dict(), {},
|
||||
)
|
||||
@@ -298,15 +286,14 @@ class RoleCRUD(object):
|
||||
return role
|
||||
|
||||
@classmethod
|
||||
def delete_role(cls, rid, force=False):
|
||||
def delete_role(cls, rid):
|
||||
from api.lib.perm.acl.acl import is_admin
|
||||
|
||||
role = Role.get_by_id(rid) or abort(404, ErrFormat.role_not_found.format("rid={}".format(rid)))
|
||||
|
||||
if not role.app_id and not is_admin():
|
||||
return abort(403, ErrFormat.admin_required)
|
||||
|
||||
not force and role.uid and abort(400, ErrFormat.user_role_delete_invalid)
|
||||
|
||||
origin = role.to_dict()
|
||||
|
||||
child_ids = []
|
||||
@@ -379,16 +366,16 @@ class RoleCRUD(object):
|
||||
resource_type_id = resource_type and resource_type.id
|
||||
|
||||
result = dict(resources=dict(), groups=dict())
|
||||
# s = time.time()
|
||||
s = time.time()
|
||||
parent_ids = RoleRelationCRUD.recursive_parent_ids(rid, app_id)
|
||||
# current_app.logger.info('parent ids {0}: {1}'.format(parent_ids, time.time() - s))
|
||||
current_app.logger.info('parent ids {0}: {1}'.format(parent_ids, time.time() - s))
|
||||
for parent_id in parent_ids:
|
||||
|
||||
_resources, _groups = cls._extend_resources(parent_id, resource_type_id, app_id)
|
||||
# current_app.logger.info('middle1: {0}'.format(time.time() - s))
|
||||
current_app.logger.info('middle1: {0}'.format(time.time() - s))
|
||||
_merge(result['resources'], _resources)
|
||||
# current_app.logger.info('middle2: {0}'.format(time.time() - s))
|
||||
# current_app.logger.info(len(_groups))
|
||||
current_app.logger.info('middle2: {0}'.format(time.time() - s))
|
||||
current_app.logger.info(len(_groups))
|
||||
if not group_flat:
|
||||
_merge(result['groups'], _groups)
|
||||
else:
|
||||
@@ -399,7 +386,7 @@ class RoleCRUD(object):
|
||||
item.setdefault('permissions', [])
|
||||
item['permissions'] = list(set(item['permissions'] + _groups[rg_id]['permissions']))
|
||||
result['resources'][item['id']] = item
|
||||
# current_app.logger.info('End: {0}'.format(time.time() - s))
|
||||
current_app.logger.info('End: {0}'.format(time.time() - s))
|
||||
|
||||
result['resources'] = list(result['resources'].values())
|
||||
result['groups'] = list(result['groups'].values())
|
||||
|
||||
@@ -6,10 +6,9 @@ import json
|
||||
import re
|
||||
from fnmatch import fnmatch
|
||||
|
||||
from flask import abort
|
||||
from flask import abort, current_app
|
||||
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
@@ -6,12 +6,10 @@ import string
|
||||
import uuid
|
||||
|
||||
from flask import abort
|
||||
from flask_login import current_user
|
||||
from flask import g
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.lib.perm.acl.audit import AuditScope
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from api.lib.perm.acl.role import RoleCRUD
|
||||
@@ -41,35 +39,27 @@ class UserCRUD(object):
|
||||
|
||||
@classmethod
|
||||
def add(cls, **kwargs):
|
||||
add_from = kwargs.pop('add_from', None)
|
||||
existed = User.get_by(username=kwargs['username'])
|
||||
existed = User.get_by(username=kwargs['username'], email=kwargs['email'])
|
||||
existed and abort(400, ErrFormat.user_exists.format(kwargs['username']))
|
||||
|
||||
existed = User.get_by(username=kwargs['email'])
|
||||
existed and abort(400, ErrFormat.user_exists.format(kwargs['email']))
|
||||
|
||||
kwargs['nickname'] = kwargs.get('nickname') or kwargs['username']
|
||||
kwargs['block'] = 0
|
||||
kwargs['key'], kwargs['secret'] = cls.gen_key_secret()
|
||||
|
||||
user_employee = db.session.query(User).filter(User.deleted.is_(False)).order_by(User.employee_id.desc()).first()
|
||||
user_employee = db.session.query(User).filter(User.deleted.is_(False)).order_by(
|
||||
User.employee_id.desc()).first()
|
||||
|
||||
biggest_employee_id = int(float(user_employee.employee_id)) if user_employee is not None else 0
|
||||
biggest_employee_id = int(float(user_employee.employee_id)) \
|
||||
if user_employee is not None else 0
|
||||
|
||||
kwargs['employee_id'] = '{0:04d}'.format(biggest_employee_id + 1)
|
||||
user = User.create(**kwargs)
|
||||
|
||||
role = RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
RoleCRUD.add_role(user.username, uid=user.uid)
|
||||
AuditCRUD.add_role_log(None, AuditOperateType.create,
|
||||
AuditScope.user, user.uid, {}, user.to_dict(), {}, {}
|
||||
)
|
||||
|
||||
if add_from != 'common':
|
||||
from api.lib.common_setting.employee import EmployeeCRUD
|
||||
payload = {column: getattr(user, column) for column in ['uid', 'username', 'nickname', 'email', 'block']}
|
||||
payload['rid'] = role.id
|
||||
EmployeeCRUD.add_employee_from_acl_created(**payload)
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@@ -100,9 +90,9 @@ class UserCRUD(object):
|
||||
@classmethod
|
||||
def reset_key_secret(cls):
|
||||
key, secret = cls.gen_key_secret()
|
||||
current_user.update(key=key, secret=secret)
|
||||
g.user.update(key=key, secret=secret)
|
||||
|
||||
UserCache.clean(current_user)
|
||||
UserCache.clean(g.user)
|
||||
|
||||
return key, secret
|
||||
|
||||
@@ -113,14 +103,10 @@ class UserCRUD(object):
|
||||
|
||||
origin = user.to_dict()
|
||||
|
||||
user.delete()
|
||||
user.soft_delete()
|
||||
|
||||
UserCache.clean(user)
|
||||
|
||||
role = RoleCRUD.get_by_name(user.username, app_id=None)
|
||||
if role:
|
||||
RoleCRUD.delete_role(role[0]['id'], force=True)
|
||||
|
||||
AuditCRUD.add_role_log(None, AuditOperateType.delete,
|
||||
AuditScope.user, user.uid, origin, {}, {}, {})
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ from functools import wraps
|
||||
import jwt
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask_login import login_user
|
||||
@@ -51,22 +52,24 @@ def _auth_with_key():
|
||||
user, authenticated = User.query.authenticate_with_key(key, secret, req_args, path)
|
||||
if user and authenticated:
|
||||
login_user(user)
|
||||
# reset_session(user)
|
||||
reset_session(user)
|
||||
return True
|
||||
|
||||
role, authenticated = Role.query.authenticate_with_key(key, secret, req_args, path)
|
||||
if role and authenticated:
|
||||
# reset_session(None, role=role.name)
|
||||
reset_session(None, role=role.name)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _auth_with_session():
|
||||
if isinstance(getattr(g, 'user', None), User):
|
||||
login_user(g.user)
|
||||
return True
|
||||
if "acl" in session and "userName" in (session["acl"] or {}):
|
||||
login_user(UserCache.get(session["acl"]["userName"]))
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@@ -93,9 +96,6 @@ def _auth_with_token():
|
||||
|
||||
|
||||
def _auth_with_ip_white_list():
|
||||
if request.url.endswith("acl/users/info"):
|
||||
return False
|
||||
|
||||
ip = request.headers.get('X-Real-IP') or request.remote_addr
|
||||
key = request.values.get('_key')
|
||||
secret = request.values.get('_secret')
|
||||
@@ -108,7 +108,7 @@ def _auth_with_ip_white_list():
|
||||
|
||||
|
||||
def _auth_with_app_token():
|
||||
if _auth_with_session() or _auth_with_token():
|
||||
if _auth_with_session():
|
||||
if not is_app_admin(request.values.get('app_id')) and request.method != "GET":
|
||||
return False
|
||||
elif is_app_admin(request.values.get('app_id')):
|
||||
@@ -157,7 +157,7 @@ def _auth_with_acl_token():
|
||||
|
||||
|
||||
def auth_required(func):
|
||||
if request.get_json(silent=True) is not None:
|
||||
if request.json is not None:
|
||||
setattr(request, 'values', request.json)
|
||||
else:
|
||||
setattr(request, 'values', request.values.to_dict())
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
@@ -1,67 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import uuid
|
||||
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from ldap3 import ALL
|
||||
from ldap3 import AUTO_BIND_NO_TLS
|
||||
from ldap3 import Connection
|
||||
from ldap3 import Server
|
||||
from ldap3.core.exceptions import LDAPBindError
|
||||
from ldap3.core.exceptions import LDAPCertificateError
|
||||
from ldap3.core.exceptions import LDAPSocketOpenError
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.common_setting.const import AuthenticateType
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
from api.models.acl import User
|
||||
|
||||
|
||||
def authenticate_with_ldap(username, password):
|
||||
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
|
||||
|
||||
server = Server(config.get('ldap_server'), get_info=ALL, connect_timeout=3)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = config.get('ldap_user_dn').format(username.split('@')[0])
|
||||
else:
|
||||
who = config.get('ldap_user_dn').format(username)
|
||||
email = "{}@{}".format(who, config.get('ldap_domain'))
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = User.query.get_by_username(username)
|
||||
try:
|
||||
if not password:
|
||||
raise LDAPCertificateError
|
||||
|
||||
try:
|
||||
conn = Connection(server, user=who, password=password, auto_bind=AUTO_BIND_NO_TLS)
|
||||
except LDAPBindError:
|
||||
conn = Connection(server,
|
||||
user=f"{username}@{config.get('ldap_domain')}",
|
||||
password=password,
|
||||
auto_bind=AUTO_BIND_NO_TLS)
|
||||
|
||||
if conn.result['result'] != 0:
|
||||
AuditCRUD.add_login_log(username, False, ErrFormat.invalid_password)
|
||||
raise LDAPBindError
|
||||
else:
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email, password=uuid.uuid4().hex)
|
||||
|
||||
return user, True
|
||||
|
||||
except LDAPBindError as e:
|
||||
current_app.logger.info(e)
|
||||
return user, False
|
||||
|
||||
except LDAPSocketOpenError as e:
|
||||
current_app.logger.info(e)
|
||||
return abort(403, ErrFormat.ldap_connection_failed)
|
||||
@@ -1,30 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from . import routing
|
||||
|
||||
|
||||
class OAuth2(object):
|
||||
def __init__(self, app=None, url_prefix=None):
|
||||
self._app = app
|
||||
if app is not None:
|
||||
self.init_app(app, url_prefix)
|
||||
|
||||
@staticmethod
|
||||
def init_app(app, url_prefix=None):
|
||||
# Configuration defaults
|
||||
app.config.setdefault('OAUTH2_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OAUTH2_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OAUTH2_AFTER_LOGIN', '/')
|
||||
|
||||
app.config.setdefault('OIDC_GRANT_TYPE', 'authorization_code')
|
||||
app.config.setdefault('OIDC_RESPONSE_TYPE', 'code')
|
||||
app.config.setdefault('OIDC_AFTER_LOGIN', '/')
|
||||
|
||||
# Register Blueprint
|
||||
app.register_blueprint(routing.blueprint, url_prefix=url_prefix)
|
||||
|
||||
@property
|
||||
def app(self):
|
||||
return self._app or current_app
|
||||
@@ -1,139 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import datetime
|
||||
import secrets
|
||||
import uuid
|
||||
|
||||
import requests
|
||||
from flask import Blueprint
|
||||
from flask import abort
|
||||
from flask import current_app
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask import session
|
||||
from flask import url_for
|
||||
from flask_login import login_user
|
||||
from flask_login import logout_user
|
||||
from six.moves.urllib.parse import urlencode
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
from api.lib.common_setting.common_data import AuthenticateDataCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
blueprint = Blueprint('oauth2', __name__)
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/login')
|
||||
def login(auth_type):
|
||||
config = AuthenticateDataCRUD(auth_type.upper()).get()
|
||||
|
||||
if request.values.get("next"):
|
||||
session["next"] = request.values.get("next")
|
||||
|
||||
session[f'{auth_type}_state'] = secrets.token_urlsafe(16)
|
||||
|
||||
auth_type = auth_type.upper()
|
||||
|
||||
redirect_uri = "{}://{}{}".format(urlparse(request.referrer).scheme,
|
||||
urlparse(request.referrer).netloc,
|
||||
url_for('oauth2.callback', auth_type=auth_type.lower()))
|
||||
qs = urlencode({
|
||||
'client_id': config['client_id'],
|
||||
'redirect_uri': redirect_uri,
|
||||
'response_type': current_app.config[f'{auth_type}_RESPONSE_TYPE'],
|
||||
'scope': ' '.join(config['scopes'] or []),
|
||||
'state': session[f'{auth_type.lower()}_state'],
|
||||
})
|
||||
|
||||
return redirect("{}?{}".format(config['authorize_url'].split('?')[0], qs))
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/callback')
|
||||
def callback(auth_type):
|
||||
auth_type = auth_type.upper()
|
||||
config = AuthenticateDataCRUD(auth_type).get()
|
||||
|
||||
redirect_url = session.get("next") or config.get('after_login') or '/'
|
||||
|
||||
if request.values['state'] != session.get(f'{auth_type.lower()}_state'):
|
||||
return abort(401, "state is invalid")
|
||||
|
||||
if 'code' not in request.values:
|
||||
return abort(401, 'code is invalid')
|
||||
|
||||
response = requests.post(config['token_url'], data={
|
||||
'client_id': config['client_id'],
|
||||
'client_secret': config['client_secret'],
|
||||
'code': request.values['code'],
|
||||
'grant_type': current_app.config[f'{auth_type}_GRANT_TYPE'],
|
||||
'redirect_uri': url_for('oauth2.callback', auth_type=auth_type.lower(), _external=True),
|
||||
}, headers={'Accept': 'application/json'})
|
||||
if response.status_code != 200:
|
||||
current_app.logger.error(response.text)
|
||||
return abort(401)
|
||||
access_token = response.json().get('access_token')
|
||||
if not access_token:
|
||||
return abort(401)
|
||||
|
||||
response = requests.get(config['user_info']['url'], headers={
|
||||
'Authorization': 'Bearer {}'.format(access_token),
|
||||
'Accept': 'application/json',
|
||||
})
|
||||
if response.status_code != 200:
|
||||
return abort(401)
|
||||
|
||||
res = response.json()
|
||||
email = res.get(config['user_info']['email'])
|
||||
username = res.get(config['user_info']['username'])
|
||||
avatar = res.get(config['user_info'].get('avatar'))
|
||||
user = UserCache.get(username)
|
||||
if user is None:
|
||||
current_app.logger.info("create user: {}".format(username))
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
|
||||
user_dict = dict(username=username, email=email, avatar=avatar)
|
||||
user_dict['password'] = uuid.uuid4().hex
|
||||
|
||||
user = UserCRUD.add(**user_dict)
|
||||
|
||||
# log the user in
|
||||
login_user(user)
|
||||
|
||||
from api.lib.perm.acl.acl import ACLManager
|
||||
user_info = ACLManager.get_user_info(username)
|
||||
|
||||
session["acl"] = dict(uid=user_info.get("uid"),
|
||||
avatar=user.avatar if user else user_info.get("avatar"),
|
||||
userId=user_info.get("uid"),
|
||||
rid=user_info.get("rid"),
|
||||
userName=user_info.get("username"),
|
||||
nickName=user_info.get("nickname") or user_info.get("username"),
|
||||
parentRoles=user_info.get("parents"),
|
||||
childRoles=user_info.get("children"),
|
||||
roleName=user_info.get("role"))
|
||||
session["uid"] = user_info.get("uid")
|
||||
|
||||
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
|
||||
session['LOGIN_ID'] = _id
|
||||
|
||||
return redirect(redirect_url)
|
||||
|
||||
|
||||
@blueprint.route('/api/<string:auth_type>/logout')
|
||||
def logout(auth_type):
|
||||
"acl" in session and session.pop("acl")
|
||||
"uid" in session and session.pop("uid")
|
||||
f'{auth_type}_state' in session and session.pop(f'{auth_type}_state')
|
||||
"next" in session and session.pop("next")
|
||||
|
||||
redirect_url = url_for('oauth2.login', auth_type=auth_type, _external=True, next=request.referrer)
|
||||
|
||||
logout_user()
|
||||
|
||||
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
|
||||
|
||||
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
|
||||
|
||||
return redirect(redirect_url)
|
||||
@@ -1,34 +1,27 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
from flask_babel import lazy_gettext as _l
|
||||
|
||||
|
||||
class CommonErrFormat(object):
|
||||
unauthorized = _l("unauthorized") # 未认证
|
||||
unknown_error = _l("unknown error") # 未知错误
|
||||
unauthorized = "未认证"
|
||||
unknown_error = "未知错误"
|
||||
|
||||
invalid_request = _l("Illegal request") # 不合法的请求
|
||||
invalid_operation = _l("Invalid operation") # 无效的操作
|
||||
invalid_request = "不合法的请求"
|
||||
invalid_operation = "无效的操作"
|
||||
|
||||
not_found = _l("does not exist") # 不存在
|
||||
not_found = "不存在"
|
||||
|
||||
circular_dependency_error = _l("There is a circular dependency!") # 存在循环依赖!
|
||||
unknown_search_error = "未知搜索错误"
|
||||
|
||||
unknown_search_error = _l("Unknown search error") # 未知搜索错误
|
||||
invalid_json = "json格式似乎不正确了, 请仔细确认一下!"
|
||||
|
||||
# json格式似乎不正确了, 请仔细确认一下!
|
||||
invalid_json = _l("The json format seems to be incorrect, please confirm carefully!")
|
||||
datetime_argument_invalid = "参数 {} 格式不正确, 格式必须是: yyyy-mm-dd HH:MM:SS"
|
||||
|
||||
# 参数 {} 格式不正确, 格式必须是: yyyy-mm-dd HH:MM:SS
|
||||
datetime_argument_invalid = _l("The format of parameter {} is incorrect, the format must be: yyyy-mm-dd HH:MM:SS")
|
||||
argument_value_required = "参数 {} 的值不能为空!"
|
||||
argument_required = "请求缺少参数 {}"
|
||||
argument_invalid = "参数 {} 的值无效"
|
||||
argument_str_length_limit = "参数 {} 的长度必须 <= {}"
|
||||
|
||||
argument_value_required = _l("The value of parameter {} cannot be empty!") # 参数 {} 的值不能为空!
|
||||
argument_required = _l("The request is missing parameters {}") # 请求缺少参数 {}
|
||||
argument_invalid = _l("Invalid value for parameter {}") # 参数 {} 的值无效
|
||||
argument_str_length_limit = _l("The length of parameter {} must be <= {}") # 参数 {} 的长度必须 <= {}
|
||||
|
||||
role_required = _l("Role {} can only operate!") # 角色 {} 才能操作!
|
||||
user_not_found = _l("User {} does not exist") # 用户 {} 不存在
|
||||
no_permission = _l("For resource: {}, you do not have {} permission!") # 您没有资源: {} 的{}权限!
|
||||
no_permission2 = _l("You do not have permission to operate!") # 您没有操作权限!
|
||||
no_permission_only_owner = _l("Only the creator or administrator has permission!") # 只有创建人或者管理员才有权限!
|
||||
role_required = "角色 {} 才能操作!"
|
||||
user_not_found = "用户 {} 不存在"
|
||||
no_permission = "您没有资源: {} 的{}权限!"
|
||||
no_permission2 = "您没有操作权限!"
|
||||
no_permission_only_owner = "只有创建人或者管理员才有权限!"
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
@@ -1,494 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
import sys
|
||||
import threading
|
||||
from base64 import b64decode, b64encode
|
||||
|
||||
from Cryptodome.Protocol.SecretSharing import Shamir
|
||||
from colorama import Back, Fore, Style, init as colorama_init
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from flask import current_app
|
||||
|
||||
global_iv_length = 16
|
||||
global_key_shares = 5 # Number of generated key shares
|
||||
global_key_threshold = 3 # Minimum number of shares required to rebuild the key
|
||||
|
||||
backend_root_key_name = "root_key"
|
||||
backend_encrypt_key_name = "encrypt_key"
|
||||
backend_root_key_salt_name = "root_key_salt"
|
||||
backend_encrypt_key_salt_name = "encrypt_key_salt"
|
||||
backend_seal_key = "seal_status"
|
||||
|
||||
success = "success"
|
||||
seal_status = True
|
||||
|
||||
secrets_encrypt_key = ""
|
||||
secrets_root_key = ""
|
||||
|
||||
|
||||
def string_to_bytes(value):
|
||||
if not value:
|
||||
return ""
|
||||
if isinstance(value, bytes):
|
||||
return value
|
||||
if sys.version_info.major == 2:
|
||||
byte_string = value
|
||||
else:
|
||||
byte_string = value.encode("utf-8")
|
||||
return byte_string
|
||||
|
||||
|
||||
class Backend:
|
||||
def __init__(self, backend=None):
|
||||
self.backend = backend
|
||||
# cache is a redis object
|
||||
self.cache = backend.cache
|
||||
|
||||
def get(self, key):
|
||||
return self.backend.get(key)
|
||||
|
||||
def add(self, key, value):
|
||||
return self.backend.add(key, value)
|
||||
|
||||
def update(self, key, value):
|
||||
return self.backend.update(key, value)
|
||||
|
||||
def get_shares(self, key):
|
||||
return self.backend.get_shares(key)
|
||||
|
||||
def set_shares(self, key, value):
|
||||
return self.backend.set_shares(key, value)
|
||||
|
||||
|
||||
class KeyManage:
    """Lifecycle manager for the secrets root key and data-encrypt key.

    The root key is split into Shamir shares ("unseal tokens"); once the
    threshold number of shares is collected, the encrypt key is decrypted,
    cached in the module-level globals, and the seal status is set to "open".
    """

    def __init__(self, trigger=None, backend=None):
        self.trigger = trigger
        self.backend = backend
        # redis key used both as storage and as a pub/sub channel for shares
        self.share_key = "cmdb::secret::secrets_share"
        if backend:
            self.backend = Backend(backend)

    def init_app(self, app, backend=None):
        """Wire the manager into the app: start the share watcher and try auto-unseal."""
        if (sys.argv[0].endswith("gunicorn") or
                (len(sys.argv) > 1 and sys.argv[1] in ("run", "cmdb-password-data-migrate"))):

            self.backend = backend
            # background thread: collect unseal shares published by other workers
            threading.Thread(target=self.watch_root_key, args=(app,), daemon=True).start()

            self.trigger = app.config.get("INNER_TRIGGER_TOKEN")
            if not self.trigger:
                return

            resp = self.auto_unseal()
            self.print_response(resp)

    def hash_root_key(self, value):
        """Return (base64 PBKDF2-SHA256 hash of *value*, True) or (error message, False)."""
        algorithm = hashes.SHA256()
        salt = self.backend.get(backend_root_key_salt_name)
        if not salt:
            salt = secrets.token_hex(16)
            msg, ok = self.backend.add(backend_root_key_salt_name, salt)
            if not ok:
                return msg, ok

        kdf = PBKDF2HMAC(
            algorithm=algorithm,
            length=32,
            salt=string_to_bytes(salt),
            iterations=100000,
        )
        key = kdf.derive(string_to_bytes(value))

        return b64encode(key).decode('utf-8'), True

    def generate_encrypt_key(self, key):
        """Derive the data-encrypt key from the root key; returns (b64 key, ok) or (error, ok)."""
        algorithm = hashes.SHA256()
        salt = self.backend.get(backend_encrypt_key_salt_name)
        if not salt:
            salt = secrets.token_hex(32)

        kdf = PBKDF2HMAC(
            algorithm=algorithm,
            length=32,
            salt=string_to_bytes(salt),
            iterations=100000,
            backend=default_backend()
        )
        key = kdf.derive(string_to_bytes(key))
        # NOTE(review): add() is attempted even when the salt already existed in
        # the backend — presumably a duplicate add is harmless there; confirm.
        msg, ok = self.backend.add(backend_encrypt_key_salt_name, salt)
        if ok:
            return b64encode(key).decode('utf-8'), ok
        else:
            return msg, ok

    @classmethod
    def generate_keys(cls, secret):
        """Split *secret* into base64 Shamir shares; each share's 2-digit index is appended."""
        shares = Shamir.split(global_key_threshold, global_key_shares, secret, False)
        new_shares = []
        for share in shares:
            # share bytes followed by the zero-padded share index characters
            t = [i for i in share[1]] + [ord(i) for i in "{:0>2}".format(share[0])]
            new_shares.append(b64encode(bytes(t)))

        return new_shares

    def is_valid_root_key(self, root_key):
        """Compare the hash of *root_key* with the stored hash; returns (message, ok)."""
        if not root_key:
            # bug fix: previously returned a bare False, but every caller
            # unpacks the result as (msg, ok)
            return "root key is empty", False
        root_key_hash, ok = self.hash_root_key(root_key)
        if not ok:
            return root_key_hash, ok
        backend_root_key_hash = self.backend.get(backend_root_key_name)
        if not backend_root_key_hash:
            return "should init firstly", False
        elif backend_root_key_hash != root_key_hash:
            return "invalid root key", False
        else:
            return "", True

    def auth_root_secret(self, root_key, app):
        """Validate the root key, decrypt the encrypt key and open the seal."""
        with app.app_context():
            msg, ok = self.is_valid_root_key(root_key)
            if not ok:
                return {
                    "message": msg,
                    "status": "failed"
                }

            encrypt_key_aes = self.backend.get(backend_encrypt_key_name)
            if not encrypt_key_aes:
                return {
                    "message": "encrypt key is empty",
                    "status": "failed"
                }

            secret_encrypt_key, ok = InnerCrypt.aes_decrypt(string_to_bytes(root_key), encrypt_key_aes)
            if ok:
                msg, ok = self.backend.update(backend_seal_key, "open")
                if ok:
                    # cache the unsealed keys process-wide and clear pending shares
                    global secrets_encrypt_key, secrets_root_key
                    secrets_encrypt_key = secret_encrypt_key
                    secrets_root_key = root_key
                    self.backend.cache.set(self.share_key, json.dumps([]))
                    return {"message": success, "status": success}
                return {"message": msg, "status": "failed"}
            else:
                return {
                    "message": secret_encrypt_key,
                    "status": "failed"
                }

    def parse_shares(self, shares, app):
        """Recombine collected shares into the root key once the threshold is met."""
        if len(shares) >= global_key_threshold:
            recovered_secret = Shamir.combine(shares[:global_key_threshold], False)
            return self.auth_root_secret(b64encode(recovered_secret), app)

    def unseal(self, key):
        """Register one unseal token; unseals once enough tokens were supplied."""
        if not self.is_seal():
            return {
                "message": "current status is unseal, skip",
                "status": "skip"
            }

        try:
            # tokens are base64(share_bytes + 2-digit index); split them apart
            t = [i for i in b64decode(key)]
            v = (int("".join([chr(i) for i in t[-2:]])), bytes(t[:-2]))
            shares = self.backend.get_shares(self.share_key)
            if v not in shares:
                shares.append(v)
                self.set_shares(shares)
            if len(shares) >= global_key_threshold:
                return self.parse_shares(shares, current_app)
            else:
                return {
                    "message": "waiting for inputting other unseal key {0}/{1}".format(len(shares),
                                                                                      global_key_threshold),
                    "status": "waiting"
                }
        except Exception as e:
            return {
                "message": "invalid token: " + str(e),
                "status": "failed"
            }

    def generate_unseal_keys(self):
        """Create a fresh root secret plus its shares; refuses if already initialised."""
        info = self.backend.get(backend_root_key_name)
        if info:
            return "already exist", [], False

        secret = AESGCM.generate_key(128)
        shares = self.generate_keys(secret)

        return b64encode(secret), shares, True

    def init(self):
        """
        init the master key, unseal key and store in backend
        :return: (response dict, ok flag)
        """
        root_key = self.backend.get(backend_root_key_name)
        if root_key:
            return {"message": "already init, skip", "status": "skip"}, False
        else:
            root_key, shares, status = self.generate_unseal_keys()
            if not status:
                return {"message": root_key, "status": "failed"}, False

            # hash root key and store in backend
            root_key_hash, ok = self.hash_root_key(root_key)
            if not ok:
                return {"message": root_key_hash, "status": "failed"}, False

            msg, ok = self.backend.add(backend_root_key_name, root_key_hash)
            if not ok:
                return {"message": msg, "status": "failed"}, False

            # generate encrypt key from root_key and store in backend
            encrypt_key, ok = self.generate_encrypt_key(root_key)
            if not ok:
                # bug fix: this failure path was missing the ok flag, which
                # broke `res, ok = km.init()` unpacking at the call sites
                return {"message": encrypt_key, "status": "failed"}, False

            encrypt_key_aes, status = InnerCrypt.aes_encrypt(root_key, encrypt_key)
            if not status:
                # bug fix: same missing ok flag as above
                return {"message": encrypt_key_aes, "status": "failed"}, False

            msg, ok = self.backend.add(backend_encrypt_key_name, encrypt_key_aes)
            if not ok:
                return {"message": msg, "status": "failed"}, False
            msg, ok = self.backend.add(backend_seal_key, "open")
            if not ok:
                return {"message": msg, "status": "failed"}, False

            global secrets_encrypt_key, secrets_root_key
            secrets_encrypt_key = encrypt_key
            secrets_root_key = root_key

            self.print_token(shares, root_token=root_key)

            return {"message": "OK",
                    "details": {
                        "root_token": root_key,
                        "seal_tokens": shares,
                    }}, True

    def auto_unseal(self):
        """Unseal without operator input when a trigger token is configured."""
        if not self.trigger:
            return {
                "message": "trigger config is empty, skip",
                "status": "skip"
            }

        if self.trigger.startswith("http"):
            # TODO: remote trigger endpoints are not supported yet
            return {
                "message": "todo in next step, skip",
                "status": "skip"
            }
        elif len(self.trigger.strip()) == 24:
            # a 24-character trigger is treated as the base64 root token itself
            res = self.auth_root_secret(self.trigger.encode(), current_app)
            if res.get("status") == success:
                return {
                    "message": success,
                    "status": success
                }
            else:
                return {
                    "message": res.get("message"),
                    "status": "failed"
                }
        else:
            return {
                "message": "trigger config is invalid, skip",
                "status": "skip"
            }

    def seal(self, root_key):
        """Seal the system: verify the root key, block the seal and wipe cached keys."""
        root_key = root_key.encode()
        msg, ok = self.is_valid_root_key(root_key)
        if not ok:
            return {
                "message": msg,
                "status": "failed"
            }
        else:
            msg, ok = self.backend.update(backend_seal_key, "block")
            if not ok:
                return {
                    "message": msg,
                    "status": "failed",
                }
            self.clear()
            # tell every worker process to drop its cached keys
            self.backend.cache.publish(self.share_key, "clear")

            return {
                "message": success,
                "status": success
            }

    @staticmethod
    def clear():
        """Drop the process-wide cached keys (returns this process to sealed state)."""
        global secrets_encrypt_key, secrets_root_key
        secrets_encrypt_key = ''
        secrets_root_key = ''

    def is_seal(self):
        """
        If there is no initialization or the root key is inconsistent,
        it is considered to be in a sealed state.
        :return: bool
        """
        if not secrets_root_key:
            return True
        msg, ok = self.is_valid_root_key(secrets_root_key)
        if not ok:
            return True
        status = self.backend.get(backend_seal_key)
        return status == "block"

    @classmethod
    def print_token(cls, shares, root_token):
        """Print the unseal tokens and root token to the console (init time only)."""
        colorama_init()
        print(Style.BRIGHT, "Please be sure to store the Unseal Key in a secure location and avoid losing it."
                            " The Unseal Key is required to unseal the system every time when it restarts."
                            " Successful unsealing is necessary to enable the password feature." + Style.RESET_ALL)

        for i, v in enumerate(shares):
            print(
                "unseal token " + str(i + 1) + ": " + Fore.RED + Back.BLACK + v.decode("utf-8") + Style.RESET_ALL)
            print()

        print(Fore.GREEN + "root token: " + root_token.decode("utf-8") + Style.RESET_ALL)

    @classmethod
    def print_response(cls, data):
        """Pretty-print a response dict with a colour keyed to its status."""
        status = data.get("status", "")
        message = data.get("message", "")
        status_colors = {
            "skip": Style.BRIGHT,
            "failed": Fore.RED,
            "waiting": Fore.YELLOW,
        }
        print(status_colors.get(status, Fore.GREEN), message, Style.RESET_ALL)

    def set_shares(self, values):
        """Persist and broadcast the share list as JSON-safe (index, b64) pairs."""
        new_value = list()
        for v in values:
            new_value.append((v[0], b64encode(v[1]).decode("utf-8")))
        self.backend.cache.publish(self.share_key, json.dumps(new_value))
        self.backend.cache.set(self.share_key, json.dumps(new_value))

    def watch_root_key(self, app):
        """Daemon loop: collect shares from pub/sub and unseal when enough arrive."""
        pubsub = self.backend.cache.pubsub()
        pubsub.subscribe(self.share_key)

        new_value = set()
        for message in pubsub.listen():
            if message["type"] == "message":
                if message["data"] == b"clear":
                    self.clear()
                    continue
                try:
                    value = json.loads(message["data"].decode("utf-8"))
                    for v in value:
                        new_value.add((v[0], b64decode(v[1])))
                except Exception:
                    # bug fix: a malformed message used to `return []`, which
                    # silently killed the watcher thread; skip the message instead
                    continue
                if len(new_value) >= global_key_threshold:
                    self.parse_shares(list(new_value), app)
                    new_value = set()
class InnerCrypt:
    """AES-CBC encrypt/decrypt using the process-wide cached encrypt key.

    All methods follow the (result_or_message, ok) convention used across this
    module.  Ciphertext layout is base64(iv + cbc_ciphertext) with PKCS7 padding.
    """

    def __init__(self):
        # the shared encrypt key is stored base64-encoded; empty means sealed
        self.encrypt_key = b64decode(secrets_encrypt_key)

    def encrypt(self, plaintext):
        """
        encrypt method contain aes currently
        """
        if not self.encrypt_key:
            # bug fix: previously returned a ValueError *instance* as the
            # message, breaking the (message, ok) string contract; the wording
            # also said "seal" where unsealing is what enables secrets
            return "secret is disabled, please unseal first", False
        return self.aes_encrypt(self.encrypt_key, plaintext)

    def decrypt(self, ciphertext):
        """
        decrypt method contain aes currently
        """
        if not self.encrypt_key:
            # bug fix: same (message, ok) contract fix as encrypt()
            return "secret is disabled, please unseal first", False
        return self.aes_decrypt(self.encrypt_key, ciphertext)

    @classmethod
    def aes_encrypt(cls, key, plaintext):
        """AES-CBC encrypt; returns (base64(iv + ciphertext), True) or (error, False)."""
        if isinstance(plaintext, str):
            plaintext = string_to_bytes(plaintext)
        iv = os.urandom(global_iv_length)
        try:
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
            encryptor = cipher.encryptor()
            # PKCS7-pad to the AES block size before encrypting
            v_padder = padding.PKCS7(algorithms.AES.block_size).padder()
            padded_plaintext = v_padder.update(plaintext) + v_padder.finalize()
            ciphertext = encryptor.update(padded_plaintext) + encryptor.finalize()

            return b64encode(iv + ciphertext).decode("utf-8"), True
        except Exception as e:
            return str(e), False

    @classmethod
    def aes_decrypt(cls, key, ciphertext):
        """Inverse of aes_encrypt; returns (plaintext str, True) or (error, False)."""
        try:
            s = b64decode(ciphertext.encode("utf-8"))
            # the IV is stored as the first global_iv_length bytes
            iv = s[:global_iv_length]
            ciphertext = s[global_iv_length:]
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
            decrypter = cipher.decryptor()
            decrypted_padded_plaintext = decrypter.update(ciphertext) + decrypter.finalize()
            unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
            plaintext = unpadder.update(decrypted_padded_plaintext) + unpadder.finalize()

            return plaintext.decode('utf-8'), True
        except Exception as e:
            return str(e), False
if __name__ == "__main__":
    # Developer smoke test: initialise the key store, then round-trip a value
    # through InnerCrypt.  NOTE(review): KeyManage() is constructed without a
    # backend here, so init() will fail when run standalone — this looks like
    # scratch/demo code; confirm before relying on it.
    km = KeyManage()
    # info, shares, status = km.generate_unseal_keys()
    # print(info, shares, status)
    # print("..................")
    # for i in shares:
    #     print(b64encode(i[1]).decode())

    res1, ok1 = km.init()
    if not ok1:
        print(res1)
    # for j in res["details"]["seal_tokens"]:
    #     r = km.unseal(j)
    #     if r["status"] != "waiting":
    #         if r["status"] != "success":
    #             print("r........", r)
    #         else:
    #             print(r)
    #         break

    t_plaintext = b"Hello, World!"  # The plaintext to encrypt
    c = InnerCrypt()
    t_ciphertext, status1 = c.encrypt(t_plaintext)
    print("Ciphertext:", t_ciphertext)
    decrypted_plaintext, status2 = c.decrypt(t_ciphertext)
    print("Decrypted plaintext:", decrypted_plaintext)
@@ -1,63 +0,0 @@
|
||||
import base64
|
||||
import json
|
||||
|
||||
from api.models.cmdb import InnerKV
|
||||
from api.extensions import rd
|
||||
|
||||
|
||||
class InnerKVManger(object):
    """Key/value backend for the secrets machinery.

    Persistent values live in the InnerKV table; the unseal-share list is
    cached in redis.  Mutating methods return (message, ok) pairs.
    """

    def __init__(self):
        # redis connection used by KeyManage for pub/sub of unseal shares
        self.cache = rd.r

    @classmethod
    def add(cls, key, value):
        """Insert a new row; returns (message, ok)."""
        data = {"key": key, "value": value}
        res = InnerKV.create(**data)
        if res.key == key:
            return "success", True

        return "add failed", False

    @classmethod
    def get(cls, key):
        """Return the stored value for *key*, or None when absent."""
        res = InnerKV.get_by(first=True, to_dict=False, key=key)
        if not res:
            return None

        return res.value

    @classmethod
    def update(cls, key, value):
        """Update (or insert) the value for *key*; returns (message, ok)."""
        res = InnerKV.get_by(first=True, to_dict=False, key=key)
        if not res:
            return cls.add(key, value)

        t = res.update(value=value)
        if t.key == key:
            return "success", True

        # bug fix: the failure branch previously reported ok=True
        return "update failed", False

    @classmethod
    def get_shares(cls, key):
        """Load the unseal shares from redis as (index, raw_bytes) tuples."""
        shares = list()
        raw = rd.get_str(key)
        if not raw:
            return shares
        try:
            # stored format: JSON list of (index, base64_share) pairs
            for item in json.loads(raw.decode("utf-8")):
                shares.append((item[0], base64.b64decode(item[1])))
        except Exception:
            return []
        return shares

    @classmethod
    def set_shares(cls, key, value):
        """Store the shares in redis as JSON-safe (index, base64) pairs."""
        serializable = list()
        for idx, share in value:
            serializable.append((idx, base64.b64encode(share).decode("utf-8")))
        rd.set_str(key, json.dumps(serializable))
|
||||
@@ -1,141 +0,0 @@
|
||||
from base64 import b64decode
|
||||
from base64 import b64encode
|
||||
|
||||
import hvac
|
||||
|
||||
|
||||
class VaultClient:
    """Thin wrapper around an hvac client: KV v2 storage plus transit encryption.

    Values written with encrypt=True are base64-encoded and then encrypted by
    Vault's transit engine before being stored in the KV store.

    NOTE(review): create_approle/delete_approle/get_approle/update_approle_role
    are only present on older hvac releases (newer hvac exposes AppRole under
    client.auth.approle) — confirm the pinned hvac version.
    """

    def __init__(self, base_url, token, mount_path='cmdb'):
        self.client = hvac.Client(url=base_url, token=token)
        self.mount_path = mount_path

    def create_app_role(self, role_name, policies):
        """Create an AppRole bound to *policies*; True when Vault answers 200."""
        resp = self.client.create_approle(role_name, policies=policies)

        return resp == 200

    def delete_app_role(self, role_name):
        """Delete an AppRole; True when Vault answers 204 (no content)."""
        resp = self.client.delete_approle(role_name)

        return resp == 204

    def update_app_role_policies(self, role_name, policies):
        """Replace an AppRole's policies; True when Vault answers 204."""
        resp = self.client.update_approle_role(role_name, policies=policies)

        return resp == 204

    def get_app_role(self, role_name):
        """Return the AppRole definition as a dict, or {} when the lookup fails."""
        resp = self.client.get_approle(role_name)
        if resp.status_code == 200:
            # bug fix: previously returned the bound method `resp.json`
            # instead of calling it
            return resp.json()
        else:
            return {}

    def enable_secrets_engine(self):
        """Enable the KV engine at mount_path and the transit engine."""
        resp = self.client.sys.enable_secrets_engine('kv', path=self.mount_path)
        resp_01 = self.client.sys.enable_secrets_engine('transit')

        if resp.status_code == 200 and resp_01.status_code == 200:
            # bug fix: call resp.json() rather than returning the method object
            return resp.json()
        else:
            return {}

    def encrypt(self, plaintext):
        """Encrypt *plaintext* (already base64-encoded) with the transit key."""
        response = self.client.secrets.transit.encrypt_data(name='transit-key', plaintext=plaintext)
        ciphertext = response['data']['ciphertext']

        return ciphertext

    # decrypt data
    def decrypt(self, ciphertext):
        """Decrypt transit ciphertext back to the base64 plaintext."""
        response = self.client.secrets.transit.decrypt_data(name='transit-key', ciphertext=ciphertext)
        plaintext = response['data']['plaintext']

        return plaintext

    def write(self, path, data, encrypt=None):
        """Create/overwrite a KV v2 secret; optionally transit-encrypt each value."""
        if encrypt:
            for k, v in data.items():
                data[k] = self.encrypt(self.encode_base64(v))
        response = self.client.secrets.kv.v2.create_or_update_secret(
            path=path,
            secret=data,
            mount_point=self.mount_path
        )

        return response

    # read data
    def read(self, path, decrypt=True):
        """Read a KV v2 secret; returns (data, ok).

        Falls back to the raw stored data when any value cannot be decrypted
        (e.g. it was written unencrypted).
        """
        try:
            response = self.client.secrets.kv.v2.read_secret_version(
                path=path, raise_on_deleted_version=False, mount_point=self.mount_path
            )
        except Exception as e:
            return str(e), False
        data = response['data']['data']
        if decrypt:
            try:
                for k, v in data.items():
                    data[k] = self.decode_base64(self.decrypt(v))
            except Exception:
                # deliberate best-effort: return whatever was stored
                return data, True

        return data, True

    # update data
    def update(self, path, data, overwrite=True, encrypt=True):
        """Overwrite (create_or_update) or patch a KV v2 secret."""
        if encrypt:
            for k, v in data.items():
                data[k] = self.encrypt(self.encode_base64(v))
        if overwrite:
            response = self.client.secrets.kv.v2.create_or_update_secret(
                path=path,
                secret=data,
                mount_point=self.mount_path
            )
        else:
            response = self.client.secrets.kv.v2.patch(path=path, secret=data, mount_point=self.mount_path)

        return response

    # delete data
    def delete(self, path):
        """Delete the secret's metadata and every stored version."""
        response = self.client.secrets.kv.v2.delete_metadata_and_all_versions(
            path=path,
            mount_point=self.mount_path
        )

        return response

    # Base64 encode
    @classmethod
    def encode_base64(cls, data):
        """Return the base64 string of a text value."""
        encoded_bytes = b64encode(data.encode())
        encoded_string = encoded_bytes.decode()

        return encoded_string

    # Base64 decode
    @classmethod
    def decode_base64(cls, encoded_string):
        """Inverse of encode_base64."""
        decoded_bytes = b64decode(encoded_string)
        decoded_string = decoded_bytes.decode()

        return decoded_string
|
||||
if __name__ == "__main__":
    # Manual exercise of the Vault SDK against a local dev server; the token
    # placeholder must be replaced before running.
    _base_url = "http://localhost:8200"
    _token = "your token"

    _path = "test001"
    # Example
    sdk = VaultClient(_base_url, _token)
    # sdk.enable_secrets_engine()
    _data = {"key1": "value1", "key2": "value2", "key3": "value3"}
    _data = sdk.update(_path, _data, overwrite=True, encrypt=True)
    print(_data)
    _data = sdk.read(_path, decrypt=True)
    print(_data)
||||
@@ -1,6 +1,9 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
from typing import Set
|
||||
|
||||
import elasticsearch
|
||||
@@ -10,9 +13,6 @@ from Crypto.Cipher import AES
|
||||
from elasticsearch import Elasticsearch
|
||||
from flask import current_app
|
||||
|
||||
from api.lib.secrets.inner import InnerCrypt
|
||||
from api.lib.secrets.inner import KeyManage
|
||||
|
||||
|
||||
class BaseEnum(object):
|
||||
_ALL_ = set() # type: Set[str]
|
||||
@@ -113,27 +113,10 @@ class RedisHandler(object):
|
||||
try:
|
||||
ret = self.r.hdel(prefix, key_id)
|
||||
if not ret:
|
||||
current_app.logger.warning("[{0}] is not in redis".format(key_id))
|
||||
current_app.logger.warn("[{0}] is not in redis".format(key_id))
|
||||
except Exception as e:
|
||||
current_app.logger.error("delete redis key error, {0}".format(str(e)))
|
||||
|
||||
    def set_str(self, key, value, expired=None):
        """Store a plain string value, optionally with a TTL of *expired* seconds.

        Failures are logged and swallowed (best-effort cache write).
        """
        try:
            if expired:
                # setex(key, ttl, value): store with an expiry
                self.r.setex(key, expired, value)
            else:
                self.r.set(key, value)
        except Exception as e:
            current_app.logger.error("set redis error, {0}".format(str(e)))
|
||||
    def get_str(self, key):
        """Return the raw value stored at *key*, or None when the read fails.

        Failures are logged and swallowed (best-effort cache read).
        """
        try:
            value = self.r.get(key)
        except Exception as e:
            current_app.logger.error("get redis error, {0}".format(str(e)))
            return
        return value
|
||||
|
||||
class ESHandler(object):
|
||||
def __init__(self, flask_app=None):
|
||||
@@ -221,17 +204,146 @@ class ESHandler(object):
|
||||
|
||||
res = self.es.search(index=self.index, body=query, filter_path=filter_path)
|
||||
if res['hits'].get('hits'):
|
||||
return (res['hits']['total']['value'],
|
||||
[i['_source'] for i in res['hits']['hits']],
|
||||
res.get("aggregations", {}))
|
||||
return res['hits']['total']['value'], \
|
||||
[i['_source'] for i in res['hits']['hits']], \
|
||||
res.get("aggregations", {})
|
||||
else:
|
||||
return 0, [], {}
|
||||
|
||||
|
||||
class Lock(object):
    """Simple redis-based distributed lock, usable as a context manager.

    The lock value is a unix timestamp after which the lock is considered
    expired, so a crashed holder cannot block other workers forever.
    """

    def __init__(self, name, timeout=10, app=None, need_lock=True):
        self.lock_key = name
        self.need_lock = need_lock
        self.timeout = timeout
        if not app:
            app = current_app
        self.app = app
        try:
            self.redis = redis.Redis(host=self.app.config.get('CACHE_REDIS_HOST'),
                                     port=self.app.config.get('CACHE_REDIS_PORT'),
                                     password=self.app.config.get('CACHE_REDIS_PASSWORD'))
        except Exception:
            self.app.logger.error("cannot connect redis")
            raise Exception("cannot connect redis")

    def lock(self, timeout=None):
        """Acquire the lock, retrying up to 100 times (~60s); raises on failure."""
        if not timeout:
            timeout = self.timeout
        retry = 0
        while retry < 100:
            timestamp = time.time() + timeout + 1
            _lock = self.redis.setnx(self.lock_key, timestamp)
            # acquire if the key was free, or if the stored timestamp shows the
            # previous holder expired (getset re-checks to avoid a race)
            if _lock == 1 or (
                    time.time() > float(self.redis.get(self.lock_key) or sys.maxsize) and
                    time.time() > float(self.redis.getset(self.lock_key, timestamp) or sys.maxsize)):
                break
            else:
                retry += 1
                time.sleep(0.6)
        if retry >= 100:
            raise Exception("get lock failed...")

    def release(self):
        """Release the lock unless it already expired (then it may belong to someone else)."""
        value = self.redis.get(self.lock_key)
        # bug fix: the key may have expired or been deleted, in which case
        # get() returns None and float(None) raised a TypeError
        if value is not None and time.time() < float(value):
            self.redis.delete(self.lock_key)

    def __enter__(self):
        if self.need_lock:
            self.lock()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.need_lock:
            self.release()
||||
|
||||
class Redis2Handler(object):
    """Redis accessor for the oneagent instance (separate from the cache redis).

    All reads/writes are best-effort: failures are swallowed (and in some
    methods logged) and a neutral default is returned.
    """

    def __init__(self, flask_app=None, prefix=None):
        self.flask_app = flask_app
        self.prefix = prefix
        self.r = None  # redis.Redis client, populated by init_app()

    def init_app(self, app):
        """Build the connection pool from the ONEAGENT_REDIS_* config values."""
        self.flask_app = app
        config = self.flask_app.config
        try:
            pool = redis.ConnectionPool(
                max_connections=config.get("REDIS_MAX_CONN"),
                host=config.get("ONEAGENT_REDIS_HOST"),
                port=config.get("ONEAGENT_REDIS_PORT"),
                db=config.get("ONEAGENT_REDIS_DB"),
                password=config.get("ONEAGENT_REDIS_PASSWORD")
            )
            self.r = redis.Redis(connection_pool=pool)
        except Exception as e:
            current_app.logger.warning(str(e))
            current_app.logger.error("init redis connection failed")

    def get(self, key):
        """Return the JSON-decoded value at *key*, or None on any failure."""
        try:
            value = json.loads(self.r.get(key))
        except:
            return

        return value

    def lrange(self, key, start=0, end=-1):
        """Return the list elements joined into one decoded string (None on failure)."""
        try:
            value = "".join(map(redis_decode, self.r.lrange(key, start, end) or []))
        except:
            return

        return value

    def lrange2(self, key, start=0, end=-1):
        """Return the list elements as a list of decoded strings ([] on failure)."""
        try:
            return list(map(redis_decode, self.r.lrange(key, start, end) or []))
        except:
            return []

    def llen(self, key):
        """Return the list length at *key* (0 on failure or when missing)."""
        try:
            return self.r.llen(key) or 0
        except:
            return 0

    def hget(self, key, field):
        """Hash-field read; logs and returns None on failure."""
        try:
            return self.r.hget(key, field)
        except Exception as e:
            current_app.logger.warning("hget redis failed, %s" % str(e))
            return

    def hset(self, key, field, value):
        """Hash-field write; logs and returns None on failure."""
        try:
            self.r.hset(key, field, value)
        except Exception as e:
            current_app.logger.warning("hset redis failed, %s" % str(e))
            return

    def expire(self, key, timeout):
        """Set a TTL on *key*; logs and returns None on failure."""
        try:
            self.r.expire(key, timeout)
        except Exception as e:
            current_app.logger.warning("expire redis failed, %s" % str(e))
            return
||||
|
||||
def redis_decode(x):
    """Best-effort decode of a redis byte string: UTF-8 first, then GB18030.

    Anything that cannot be decoded at all yields the literal "decode failed".
    """
    try:
        return x.decode()
    except Exception as err:
        # not valid UTF-8 (or not a bytes-like object at all); log and retry
        print(x, err)
    try:
        return x.decode("gb18030")
    except:
        return "decode failed"
|
||||
|
||||
class AESCrypto(object):
|
||||
BLOCK_SIZE = 16 # Bytes
|
||||
pad = lambda s: s + ((AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE) *
|
||||
chr(AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE))
|
||||
pad = lambda s: s + (AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE) * \
|
||||
chr(AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE)
|
||||
unpad = lambda s: s[:-ord(s[len(s) - 1:])]
|
||||
|
||||
iv = '0102030405060708'
|
||||
@@ -240,7 +352,7 @@ class AESCrypto(object):
|
||||
def key():
|
||||
key = current_app.config.get("SECRET_KEY")[:16]
|
||||
if len(key) < 16:
|
||||
key = "{}{}".format(key, (16 - len(key)) * "x")
|
||||
key = "{}{}".format(key, (16 - len(key) * "x"))
|
||||
|
||||
return key.encode('utf8')
|
||||
|
||||
@@ -258,33 +370,3 @@ class AESCrypto(object):
|
||||
text_decrypted = cipher.decrypt(encode_bytes)
|
||||
|
||||
return cls.unpad(text_decrypted).decode('utf8')
|
||||
|
||||
|
||||
class Crypto(AESCrypto):
    """Facade: prefer the seal-aware InnerCrypt, fall back to static AESCrypto."""

    @classmethod
    def encrypt(cls, data):
        from api.lib.secrets.secrets import InnerKVManger

        key_manager = KeyManage(backend=InnerKVManger())
        if not key_manager.is_seal():
            res, status = InnerCrypt().encrypt(data)
            if status:
                return res

        # sealed, or inner encryption failed: use the static-key fallback
        return AESCrypto().encrypt(data)

    @classmethod
    def decrypt(cls, data):
        from api.lib.secrets.secrets import InnerKVManger

        key_manager = KeyManage(backend=InnerKVManger())
        if not key_manager.is_seal():
            try:
                res, status = InnerCrypt().decrypt(data)
                if status:
                    return res
            except:
                pass

        # static-key fallback; undecryptable data is returned verbatim
        try:
            return AESCrypto().decrypt(data)
        except:
            return data
|
||||
@@ -1,109 +0,0 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import json
|
||||
from functools import partial
|
||||
|
||||
import requests
|
||||
from jinja2 import Template
|
||||
from requests.auth import HTTPBasicAuth
|
||||
from requests_oauthlib import OAuth2Session
|
||||
|
||||
|
||||
class BearerAuth(requests.auth.AuthBase):
    """requests auth hook that attaches an RFC 6750 bearer token."""

    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        request.headers["authorization"] = "Bearer {}".format(self.token)
        return request
|
||||
|
||||
def _wrap_auth(**kwargs):
    """Build a requests auth object (or OAuth2 session) from a webhook auth config.

    Supported types (case-insensitive): basicauth, bearer, oauth2.0, apikey.
    Returns None for an unknown/absent type, which requests treats as "no auth".
    """
    auth_type = (kwargs.get('type') or "").lower()
    if auth_type == "basicauth":
        return HTTPBasicAuth(kwargs.get('username'), kwargs.get('password'))

    elif auth_type == "bearer":
        return BearerAuth(kwargs.get('token'))

    elif auth_type == 'oauth2.0':
        client_id = kwargs.get('client_id')
        client_secret = kwargs.get('client_secret')
        authorization_base_url = kwargs.get('authorization_base_url')
        token_url = kwargs.get('token_url')
        redirect_url = kwargs.get('redirect_url')
        scope = kwargs.get('scope')

        oauth2_session = OAuth2Session(client_id, scope=scope or None)
        oauth2_session.authorization_url(authorization_base_url)

        # NOTE(review): the redirect URL is passed as the authorization
        # response, which assumes it already carries the auth code — confirm
        # against how callers populate the config.
        oauth2_session.fetch_token(token_url, client_secret=client_secret, authorization_response=redirect_url)

        return oauth2_session

    elif auth_type == "apikey":
        # API key pairs are transmitted as basic-auth credentials
        return HTTPBasicAuth(kwargs.get('key'), kwargs.get('value'))
|
||||
|
||||
def webhook_request(webhook, payload):
    """
    Render a webhook definition with *payload* (Jinja2) and issue the request.

    :param webhook:
        {
            "url": "https://veops.cn"
            "method": "GET|POST|PUT|DELETE"
            "body": {},
            "headers": {
                "Content-Type": "Application/json"
            },
            "parameters": {
                "key": "value"
            },
            "authorization": {
                "type": "BasicAuth|Bearer|OAuth2.0|APIKey",
                "password": "mmmm",  # BasicAuth
                "username": "bbb",  # BasicAuth

                "token": "xxx",  # Bearer

                "key": "xxx",  # APIKey
                "value": "xxx",  # APIKey

                "client_id": "xxx",  # OAuth2.0
                "client_secret": "xxx",  # OAuth2.0
                "authorization_base_url": "xxx",  # OAuth2.0
                "token_url": "xxx",  # OAuth2.0
                "redirect_url": "xxx",  # OAuth2.0
                "scope": "xxx"  # OAuth2.0
            }
        }
    :param payload: template variables used to render url/params/headers/body
    :return: the requests Response (or the OAuth2 session's response)
    """
    assert webhook.get('url') is not None

    # normalise None values so Jinja renders them as empty strings
    payload = {k: v or '' for k, v in payload.items()}

    url = Template(webhook['url']).render(payload)

    params = webhook.get('parameters') or None
    if isinstance(params, dict):
        # render template placeholders inside the parameter values
        params = json.loads(Template(json.dumps(params)).render(payload).encode('utf-8'))

    headers = json.loads(Template(json.dumps(webhook.get('headers') or {})).render(payload))

    data = Template(json.dumps(webhook.get('body', ''))).render(payload).encode('utf-8')
    auth = _wrap_auth(**webhook.get('authorization', {}))

    # OAuth2 sessions issue the request themselves; otherwise use plain requests
    if (webhook.get('authorization', {}).get("type") or '').lower() == 'oauth2.0':
        request = getattr(auth, webhook.get('method', 'GET').lower())
    else:
        request = partial(requests.request, webhook.get('method', 'GET'))

    return request(
        url,
        params=params,
        headers=headers or None,
        data=data,
        auth=auth
    )
||||
@@ -5,18 +5,16 @@ import copy
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
|
||||
import ldap
|
||||
from flask import current_app
|
||||
from flask import session
|
||||
from flask_sqlalchemy import BaseQuery
|
||||
|
||||
from api.extensions import db
|
||||
from api.lib.database import CRUDModel
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.database import SoftDeleteMixin
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.const import OperateType
|
||||
from api.lib.perm.acl.resp_format import ErrFormat
|
||||
|
||||
|
||||
class App(Model):
|
||||
@@ -29,26 +27,21 @@ class App(Model):
|
||||
|
||||
|
||||
class UserQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(UserQuery, self)._join(*args, **kwargs)
|
||||
|
||||
    def authenticate(self, login, password):
        """Password login by username or email.

        Returns (user_or_None, authenticated).  Every attempt is recorded in
        the audit login log; successful logins also enqueue an async op record.
        """
        from api.lib.perm.acl.audit import AuditCRUD

        # *login* may be either the username or the email address
        user = self.filter(db.or_(User.username == login,
                                  User.email == login)).filter(User.deleted.is_(False)).filter(User.block == 0).first()
        if user:
            current_app.logger.info(user)
            authenticated = user.check_password(password)
            if authenticated:
                _id = AuditCRUD.add_login_log(login, True, ErrFormat.login_succeed)
                # remembered so logout can close out the same audit record
                session['LOGIN_ID'] = _id
            else:
                AuditCRUD.add_login_log(login, False, ErrFormat.invalid_password)
            from api.tasks.acl import op_record
            op_record.apply_async(args=(None, login, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
        else:
            authenticated = False
            AuditCRUD.add_login_log(login, False, ErrFormat.user_not_found.format(login))

        current_app.logger.info(("login", login, user, authenticated))

        return user, authenticated
||||
|
||||
def authenticate_with_key(self, key, secret, args, path):
|
||||
@@ -63,6 +56,37 @@ class UserQuery(BaseQuery):
|
||||
|
||||
return user, authenticated
|
||||
|
||||
def authenticate_with_ldap(self, username, password):
|
||||
ldap_conn = ldap.initialize(current_app.config.get('LDAP_SERVER'))
|
||||
ldap_conn.protocol_version = 3
|
||||
ldap_conn.set_option(ldap.OPT_REFERRALS, 0)
|
||||
if '@' in username:
|
||||
email = username
|
||||
who = '{0}@{1}'.format(username.split('@')[0], current_app.config.get('LDAP_DOMAIN'))
|
||||
else:
|
||||
who = '{0}@{1}'.format(username, current_app.config.get('LDAP_DOMAIN'))
|
||||
email = who
|
||||
|
||||
username = username.split('@')[0]
|
||||
user = self.get_by_username(username)
|
||||
try:
|
||||
|
||||
if not password:
|
||||
raise ldap.INVALID_CREDENTIALS
|
||||
|
||||
ldap_conn.simple_bind_s(who, password)
|
||||
|
||||
if not user:
|
||||
from api.lib.perm.acl.user import UserCRUD
|
||||
user = UserCRUD.add(username=username, email=email)
|
||||
|
||||
from api.tasks.acl import op_record
|
||||
op_record.apply_async(args=(None, username, OperateType.LOGIN, ["ACL"]), queue=ACL_QUEUE)
|
||||
|
||||
return user, True
|
||||
except ldap.INVALID_CREDENTIALS:
|
||||
return user, False
|
||||
|
||||
def search(self, key):
|
||||
query = self.filter(db.or_(User.email == key,
|
||||
User.nickname.ilike('%' + key + '%'),
|
||||
@@ -112,7 +136,6 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
wx_id = db.Column(db.String(32))
|
||||
employee_id = db.Column(db.String(16), index=True)
|
||||
avatar = db.Column(db.String(128))
|
||||
|
||||
# apps = db.Column(db.JSON)
|
||||
|
||||
def __str__(self):
|
||||
@@ -143,9 +166,11 @@ class User(CRUDModel, SoftDeleteMixin):
|
||||
|
||||
|
||||
class RoleQuery(BaseQuery):
|
||||
def _join(self, *args, **kwargs):
|
||||
super(RoleQuery, self)._join(*args, **kwargs)
|
||||
|
||||
def authenticate(self, login, password):
|
||||
role = self.filter(Role.name == login).filter(Role.deleted.is_(False)).first()
|
||||
role = self.filter(Role.name == login).first()
|
||||
if role:
|
||||
authenticated = role.check_password(password)
|
||||
|
||||
@@ -350,16 +375,3 @@ class AuditTriggerLog(Model):
|
||||
current = db.Column(db.JSON, default=dict(), comment='当前数据')
|
||||
extra = db.Column(db.JSON, default=dict(), comment='权限名')
|
||||
source = db.Column(db.String(16), default='', comment='来源')
|
||||
|
||||
|
||||
class AuditLoginLog(Model2):
|
||||
__tablename__ = "acl_audit_login_logs"
|
||||
|
||||
username = db.Column(db.String(64), index=True)
|
||||
channel = db.Column(db.Enum('web', 'api', 'ssh'), default="web")
|
||||
ip = db.Column(db.String(15))
|
||||
browser = db.Column(db.String(256))
|
||||
description = db.Column(db.String(128))
|
||||
is_ok = db.Column(db.Boolean)
|
||||
login_at = db.Column(db.DateTime)
|
||||
logout_at = db.Column(db.DateTime)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
|
||||
import datetime
|
||||
|
||||
from sqlalchemy.dialects.mysql import DOUBLE
|
||||
|
||||
from api.extensions import db
|
||||
@@ -10,11 +11,8 @@ from api.lib.cmdb.const import CIStatusEnum
|
||||
from api.lib.cmdb.const import CITypeOperateType
|
||||
from api.lib.cmdb.const import ConstraintEnum
|
||||
from api.lib.cmdb.const import OperateType
|
||||
from api.lib.cmdb.const import RelationSourceEnum
|
||||
from api.lib.cmdb.const import ValueTypeEnum
|
||||
from api.lib.database import Model
|
||||
from api.lib.database import Model2
|
||||
from api.lib.utils import Crypto
|
||||
from api.lib.database import Model, Model2
|
||||
|
||||
|
||||
# template
|
||||
@@ -46,31 +44,17 @@ class CIType(Model):
|
||||
name = db.Column(db.String(32), nullable=False)
|
||||
alias = db.Column(db.String(32), nullable=False)
|
||||
unique_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
|
||||
show_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
|
||||
enabled = db.Column(db.Boolean, default=True, nullable=False)
|
||||
is_attached = db.Column(db.Boolean, default=False, nullable=False)
|
||||
icon = db.Column(db.Text)
|
||||
order = db.Column(db.SmallInteger, default=0, nullable=False)
|
||||
default_order_attr = db.Column(db.String(33))
|
||||
|
||||
unique_key = db.relationship("Attribute", backref="c_ci_types.unique_id",
|
||||
primaryjoin="Attribute.id==CIType.unique_id", foreign_keys=[unique_id])
|
||||
show_key = db.relationship("Attribute", backref="c_ci_types.show_id",
|
||||
primaryjoin="Attribute.id==CIType.show_id", foreign_keys=[show_id])
|
||||
unique_key = db.relationship("Attribute", backref="c_ci_types.unique_id")
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
|
||||
|
||||
class CITypeInheritance(Model):
|
||||
__tablename__ = "c_ci_type_inheritance"
|
||||
|
||||
parent_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
|
||||
child_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
|
||||
|
||||
parent = db.relationship("CIType", primaryjoin="CIType.id==CITypeInheritance.parent_id")
|
||||
child = db.relationship("CIType", primaryjoin="CIType.id==CITypeInheritance.child_id")
|
||||
|
||||
|
||||
class CITypeRelation(Model):
|
||||
__tablename__ = "c_ci_type_relations"
|
||||
|
||||
@@ -79,12 +63,6 @@ class CITypeRelation(Model):
|
||||
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
|
||||
constraint = db.Column(db.Enum(*ConstraintEnum.all()), default=ConstraintEnum.One2Many)
|
||||
|
||||
parent_attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id")) # CMDB > 2.4.5: deprecated
|
||||
child_attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id")) # CMDB > 2.4.5: deprecated
|
||||
|
||||
parent_attr_ids = db.Column(db.JSON) # [parent_attr_id, ]
|
||||
child_attr_ids = db.Column(db.JSON) # [child_attr_id, ]
|
||||
|
||||
parent = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.parent_id")
|
||||
child = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.child_id")
|
||||
relation_type = db.relationship("RelationType", backref="c_ci_type_relations.relation_type_id")
|
||||
@@ -104,11 +82,6 @@ class Attribute(Model):
|
||||
is_link = db.Column(db.Boolean, default=False)
|
||||
is_password = db.Column(db.Boolean, default=False)
|
||||
is_sortable = db.Column(db.Boolean, default=False)
|
||||
is_dynamic = db.Column(db.Boolean, default=False)
|
||||
is_bool = db.Column(db.Boolean, default=False)
|
||||
|
||||
is_reference = db.Column(db.Boolean, default=False)
|
||||
reference_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
|
||||
|
||||
default = db.Column(db.JSON) # {"default": None}
|
||||
|
||||
@@ -116,39 +89,12 @@ class Attribute(Model):
|
||||
compute_expr = db.Column(db.Text)
|
||||
compute_script = db.Column(db.Text)
|
||||
|
||||
_choice_web_hook = db.Column('choice_web_hook', db.JSON)
|
||||
choice_other = db.Column(db.JSON)
|
||||
|
||||
re_check = db.Column(db.Text)
|
||||
choice_web_hook = db.Column(db.JSON)
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
|
||||
option = db.Column(db.JSON)
|
||||
|
||||
def _get_webhook(self):
|
||||
if self._choice_web_hook:
|
||||
if self._choice_web_hook.get('headers') and "Cookie" in self._choice_web_hook['headers']:
|
||||
self._choice_web_hook['headers']['Cookie'] = Crypto.decrypt(self._choice_web_hook['headers']['Cookie'])
|
||||
|
||||
if self._choice_web_hook.get('authorization'):
|
||||
for k, v in self._choice_web_hook['authorization'].items():
|
||||
self._choice_web_hook['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._choice_web_hook
|
||||
|
||||
def _set_webhook(self, data):
|
||||
if data:
|
||||
if data.get('headers') and "Cookie" in data['headers']:
|
||||
data['headers']['Cookie'] = Crypto.encrypt(data['headers']['Cookie'])
|
||||
|
||||
if data.get('authorization'):
|
||||
for k, v in data['authorization'].items():
|
||||
data['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._choice_web_hook = data
|
||||
|
||||
choice_web_hook = db.synonym("_choice_web_hook", descriptor=property(_get_webhook, _set_webhook))
|
||||
|
||||
|
||||
class CITypeAttribute(Model):
|
||||
__tablename__ = "c_ci_type_attributes"
|
||||
@@ -179,65 +125,16 @@ class CITypeAttributeGroupItem(Model):
|
||||
|
||||
|
||||
class CITypeTrigger(Model):
|
||||
# __tablename__ = "c_ci_type_triggers"
|
||||
__tablename__ = "c_c_t_t"
|
||||
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
|
||||
_option = db.Column('notify', db.JSON)
|
||||
|
||||
def _get_option(self):
|
||||
if self._option and self._option.get('webhooks'):
|
||||
if self._option['webhooks'].get('authorization'):
|
||||
for k, v in self._option['webhooks']['authorization'].items():
|
||||
self._option['webhooks']['authorization'][k] = Crypto.decrypt(v)
|
||||
|
||||
return self._option
|
||||
|
||||
def _set_option(self, data):
|
||||
if data and data.get('webhooks'):
|
||||
if data['webhooks'].get('authorization'):
|
||||
for k, v in data['webhooks']['authorization'].items():
|
||||
data['webhooks']['authorization'][k] = Crypto.encrypt(v)
|
||||
|
||||
self._option = data
|
||||
|
||||
option = db.synonym("_option", descriptor=property(_get_option, _set_option))
|
||||
|
||||
|
||||
class CITriggerHistory(Model):
|
||||
__tablename__ = "c_ci_trigger_histories"
|
||||
|
||||
operate_type = db.Column(db.Enum(*OperateType.all(), name="operate_type"))
|
||||
record_id = db.Column(db.Integer, db.ForeignKey("c_records.id"))
|
||||
ci_id = db.Column(db.Integer, index=True, nullable=False)
|
||||
trigger_id = db.Column(db.Integer, db.ForeignKey("c_c_t_t.id"))
|
||||
trigger_name = db.Column(db.String(64))
|
||||
is_ok = db.Column(db.Boolean, default=False)
|
||||
notify = db.Column(db.Text)
|
||||
webhook = db.Column(db.Text)
|
||||
|
||||
|
||||
class TopologyViewGroup(Model):
|
||||
__tablename__ = 'c_topology_view_groups'
|
||||
|
||||
name = db.Column(db.String(64), index=True)
|
||||
order = db.Column(db.Integer, default=0)
|
||||
|
||||
|
||||
class TopologyView(Model):
|
||||
__tablename__ = 'c_topology_views'
|
||||
|
||||
name = db.Column(db.String(64), index=True)
|
||||
group_id = db.Column(db.Integer, db.ForeignKey('c_topology_view_groups.id'))
|
||||
category = db.Column(db.String(32))
|
||||
central_node_type = db.Column(db.Integer)
|
||||
central_node_instances = db.Column(db.Text)
|
||||
path = db.Column(db.JSON)
|
||||
order = db.Column(db.Integer, default=0)
|
||||
option = db.Column(db.JSON)
|
||||
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
|
||||
notify = db.Column(db.JSON) # {subject: x, body: x, wx_to: [], mail_to: [], before_days: 0, notify_at: 08:00}
|
||||
|
||||
|
||||
class CITypeUniqueConstraint(Model):
|
||||
# __tablename__ = "c_ci_type_unique_constraints"
|
||||
__tablename__ = "c_c_t_u_c"
|
||||
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
@@ -264,9 +161,6 @@ class CIRelation(Model):
|
||||
second_ci_id = db.Column(db.Integer, db.ForeignKey("c_cis.id"), nullable=False)
|
||||
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
|
||||
more = db.Column(db.Integer, db.ForeignKey("c_cis.id"))
|
||||
source = db.Column(db.Enum(*RelationSourceEnum.all()), name="source")
|
||||
|
||||
ancestor_ids = db.Column(db.String(128), index=True)
|
||||
|
||||
first_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.first_ci_id")
|
||||
second_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.second_ci_id")
|
||||
@@ -356,9 +250,6 @@ class CIIndexValueDateTime(Model):
|
||||
|
||||
|
||||
class CIValueInteger(Model):
|
||||
"""
|
||||
Deprecated in a future version
|
||||
"""
|
||||
__tablename__ = "c_value_integers"
|
||||
|
||||
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
|
||||
@@ -370,9 +261,6 @@ class CIValueInteger(Model):
|
||||
|
||||
|
||||
class CIValueFloat(Model):
|
||||
"""
|
||||
Deprecated in a future version
|
||||
"""
|
||||
__tablename__ = "c_value_floats"
|
||||
|
||||
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
|
||||
@@ -395,9 +283,6 @@ class CIValueText(Model):
|
||||
|
||||
|
||||
class CIValueDateTime(Model):
|
||||
"""
|
||||
Deprecated in a future version
|
||||
"""
|
||||
__tablename__ = "c_value_datetime"
|
||||
|
||||
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
|
||||
@@ -461,7 +346,6 @@ class CITypeHistory(Model):
|
||||
|
||||
attr_id = db.Column(db.Integer)
|
||||
trigger_id = db.Column(db.Integer)
|
||||
rc_id = db.Column(db.Integer)
|
||||
unique_constraint_id = db.Column(db.Integer)
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
@@ -470,6 +354,7 @@ class CITypeHistory(Model):
|
||||
|
||||
# preference
|
||||
class PreferenceShowAttributes(Model):
|
||||
# __tablename__ = "c_preference_show_attributes"
|
||||
__tablename__ = "c_psa"
|
||||
|
||||
uid = db.Column(db.Integer, index=True, nullable=False)
|
||||
@@ -483,6 +368,7 @@ class PreferenceShowAttributes(Model):
|
||||
|
||||
|
||||
class PreferenceTreeView(Model):
|
||||
# __tablename__ = "c_preference_tree_views"
|
||||
__tablename__ = "c_ptv"
|
||||
|
||||
uid = db.Column(db.Integer, index=True, nullable=False)
|
||||
@@ -491,13 +377,13 @@ class PreferenceTreeView(Model):
|
||||
|
||||
|
||||
class PreferenceRelationView(Model):
|
||||
# __tablename__ = "c_preference_relation_views"
|
||||
__tablename__ = "c_prv"
|
||||
|
||||
uid = db.Column(db.Integer, index=True, nullable=False)
|
||||
name = db.Column(db.String(64), index=True, nullable=False)
|
||||
cr_ids = db.Column(db.JSON) # [{parent_id: x, child_id: y}]
|
||||
is_public = db.Column(db.Boolean, default=False)
|
||||
option = db.Column(db.JSON)
|
||||
|
||||
|
||||
class PreferenceSearchOption(Model):
|
||||
@@ -514,15 +400,6 @@ class PreferenceSearchOption(Model):
|
||||
option = db.Column(db.JSON)
|
||||
|
||||
|
||||
class PreferenceCITypeOrder(Model):
|
||||
__tablename__ = "c_pcto"
|
||||
|
||||
uid = db.Column(db.Integer, index=True, nullable=False)
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
|
||||
order = db.Column(db.SmallInteger, default=0)
|
||||
is_tree = db.Column(db.Boolean, default=False) # True is tree view, False is resource view
|
||||
|
||||
|
||||
# custom
|
||||
class CustomDashboard(Model):
|
||||
__tablename__ = "c_c_d"
|
||||
@@ -571,28 +448,18 @@ class AutoDiscoveryCIType(Model):
|
||||
|
||||
attributes = db.Column(db.JSON) # {ad_key: cmdb_key}
|
||||
|
||||
relation = db.Column(db.JSON) # [{ad_key: {type_id: x, attr_id: x}}], CMDB > 2.4.5: deprecated
|
||||
relation = db.Column(db.JSON) # [{ad_key: {type_id: x, attr_id: x}}]
|
||||
|
||||
auto_accept = db.Column(db.Boolean, default=False)
|
||||
|
||||
agent_id = db.Column(db.String(8), index=True)
|
||||
query_expr = db.Column(db.Text)
|
||||
|
||||
interval = db.Column(db.Integer) # seconds, > 2.4.5: deprecated
|
||||
interval = db.Column(db.Integer) # seconds
|
||||
cron = db.Column(db.String(128))
|
||||
|
||||
extra_option = db.Column(db.JSON)
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
enabled = db.Column(db.Boolean, default=True)
|
||||
|
||||
|
||||
class AutoDiscoveryCITypeRelation(Model):
|
||||
__tablename__ = "c_ad_ci_type_relations"
|
||||
|
||||
ad_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
ad_key = db.Column(db.String(128))
|
||||
peer_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
|
||||
peer_attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
|
||||
|
||||
|
||||
class AutoDiscoveryCI(Model):
|
||||
@@ -610,45 +477,6 @@ class AutoDiscoveryCI(Model):
|
||||
accept_time = db.Column(db.DateTime)
|
||||
|
||||
|
||||
class AutoDiscoveryRuleSyncHistory(Model2):
|
||||
__tablename__ = "c_ad_rule_sync_histories"
|
||||
|
||||
adt_id = db.Column(db.Integer, db.ForeignKey('c_ad_ci_types.id'))
|
||||
oneagent_id = db.Column(db.String(8))
|
||||
oneagent_name = db.Column(db.String(64))
|
||||
sync_at = db.Column(db.DateTime, default=datetime.datetime.now())
|
||||
|
||||
|
||||
class AutoDiscoveryExecHistory(Model2):
|
||||
__tablename__ = "c_ad_exec_histories"
|
||||
|
||||
type_id = db.Column(db.Integer, index=True)
|
||||
stdout = db.Column(db.Text)
|
||||
|
||||
|
||||
class AutoDiscoveryCounter(Model2):
|
||||
__tablename__ = "c_ad_counter"
|
||||
|
||||
type_id = db.Column(db.Integer, index=True)
|
||||
rule_count = db.Column(db.Integer, default=0)
|
||||
exec_target_count = db.Column(db.Integer, default=0)
|
||||
instance_count = db.Column(db.Integer, default=0)
|
||||
accept_count = db.Column(db.Integer, default=0)
|
||||
this_month_count = db.Column(db.Integer, default=0)
|
||||
this_week_count = db.Column(db.Integer, default=0)
|
||||
last_month_count = db.Column(db.Integer, default=0)
|
||||
last_week_count = db.Column(db.Integer, default=0)
|
||||
|
||||
|
||||
class AutoDiscoveryAccount(Model):
|
||||
__tablename__ = "c_ad_accounts"
|
||||
|
||||
uid = db.Column(db.Integer, index=True)
|
||||
name = db.Column(db.String(64))
|
||||
adr_id = db.Column(db.Integer, db.ForeignKey('c_ad_rules.id'))
|
||||
config = db.Column(db.JSON)
|
||||
|
||||
|
||||
class CIFilterPerms(Model):
|
||||
__tablename__ = "c_ci_filter_perms"
|
||||
|
||||
@@ -656,13 +484,5 @@ class CIFilterPerms(Model):
|
||||
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
|
||||
ci_filter = db.Column(db.Text)
|
||||
attr_filter = db.Column(db.Text)
|
||||
id_filter = db.Column(db.JSON) # {node_path: unique_value}
|
||||
|
||||
rid = db.Column(db.Integer, index=True)
|
||||
|
||||
|
||||
class InnerKV(Model):
|
||||
__tablename__ = "c_kv"
|
||||
|
||||
key = db.Column(db.String(128), index=True)
|
||||
value = db.Column(db.Text)
|
||||
|
||||
@@ -13,41 +13,40 @@ class Department(ModelWithoutPK):
|
||||
__tablename__ = 'common_department'
|
||||
department_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
department_name = db.Column(db.VARCHAR(255), default='')
|
||||
department_name = db.Column(db.VARCHAR(255), default='', comment='部门名称')
|
||||
department_director_id = db.Column(
|
||||
db.Integer, default=0)
|
||||
department_parent_id = db.Column(db.Integer, default=1)
|
||||
db.Integer, default=0, comment='部门负责人ID')
|
||||
department_parent_id = db.Column(db.Integer, default=1, comment='上级部门ID')
|
||||
|
||||
sort_value = db.Column(db.Integer, default=0)
|
||||
sort_value = db.Column(db.Integer, default=0, comment='排序值')
|
||||
|
||||
acl_rid = db.Column(db.Integer, default=0)
|
||||
acl_rid = db.Column(db.Integer, comment='ACL中rid', default=0)
|
||||
|
||||
|
||||
class Employee(ModelWithoutPK):
|
||||
__tablename__ = 'common_employee'
|
||||
employee_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
email = db.Column(db.VARCHAR(255), default='')
|
||||
username = db.Column(db.VARCHAR(255), default='')
|
||||
nickname = db.Column(db.VARCHAR(255), default='')
|
||||
sex = db.Column(db.VARCHAR(64), default='')
|
||||
position_name = db.Column(db.VARCHAR(255), default='')
|
||||
mobile = db.Column(db.VARCHAR(255), default='')
|
||||
avatar = db.Column(db.VARCHAR(255), default='')
|
||||
email = db.Column(db.VARCHAR(255), default='', comment='邮箱')
|
||||
username = db.Column(db.VARCHAR(255), default='', comment='用户名')
|
||||
nickname = db.Column(db.VARCHAR(255), default='', comment='姓名')
|
||||
sex = db.Column(db.VARCHAR(64), default='', comment='性别')
|
||||
position_name = db.Column(db.VARCHAR(255), default='', comment='职位名称')
|
||||
mobile = db.Column(db.VARCHAR(255), default='', comment='电话号码')
|
||||
avatar = db.Column(db.VARCHAR(255), default='', comment='头像')
|
||||
|
||||
direct_supervisor_id = db.Column(db.Integer, default=0)
|
||||
direct_supervisor_id = db.Column(db.Integer, default=0, comment='直接上级ID')
|
||||
|
||||
department_id = db.Column(db.Integer,
|
||||
db.ForeignKey('common_department.department_id')
|
||||
db.ForeignKey('common_department.department_id'),
|
||||
comment='部门ID',
|
||||
)
|
||||
|
||||
acl_uid = db.Column(db.Integer, default=0)
|
||||
acl_rid = db.Column(db.Integer, default=0)
|
||||
acl_virtual_rid = db.Column(db.Integer, default=0)
|
||||
last_login = db.Column(db.TIMESTAMP, nullable=True)
|
||||
block = db.Column(db.Integer, default=0)
|
||||
|
||||
notice_info = db.Column(db.JSON, default={})
|
||||
acl_uid = db.Column(db.Integer, comment='ACL中uid', default=0)
|
||||
acl_rid = db.Column(db.Integer, comment='ACL中rid', default=0)
|
||||
acl_virtual_rid = db.Column(db.Integer, comment='ACL中虚拟角色rid', default=0)
|
||||
last_login = db.Column(db.TIMESTAMP, nullable=True, comment='上次登录时间')
|
||||
block = db.Column(db.Integer, comment='锁定状态', default=0)
|
||||
|
||||
_department = db.relationship(
|
||||
'Department', backref='common_employee.department_id',
|
||||
@@ -56,11 +55,14 @@ class Employee(ModelWithoutPK):
|
||||
|
||||
|
||||
class EmployeeInfo(Model):
|
||||
"""
|
||||
员工信息
|
||||
"""
|
||||
__tablename__ = 'common_employee_info'
|
||||
|
||||
info = db.Column(db.JSON, default={})
|
||||
info = db.Column(db.JSON, default={}, comment='员工信息')
|
||||
employee_id = db.Column(db.Integer, db.ForeignKey(
|
||||
'common_employee.employee_id'))
|
||||
'common_employee.employee_id'), comment='员工ID')
|
||||
employee = db.relationship(
|
||||
'Employee', backref='common_employee.employee_id', lazy='joined')
|
||||
|
||||
@@ -72,35 +74,16 @@ class CompanyInfo(Model):
|
||||
|
||||
|
||||
class InternalMessage(Model):
|
||||
"""
|
||||
内部消息
|
||||
"""
|
||||
__tablename__ = "common_internal_message"
|
||||
|
||||
title = db.Column(db.VARCHAR(255), nullable=True)
|
||||
content = db.Column(db.TEXT, nullable=True)
|
||||
path = db.Column(db.VARCHAR(255), nullable=True)
|
||||
is_read = db.Column(db.Boolean, default=False)
|
||||
app_name = db.Column(db.VARCHAR(128), nullable=False)
|
||||
category = db.Column(db.VARCHAR(128), nullable=False)
|
||||
message_data = db.Column(db.JSON, nullable=True)
|
||||
title = db.Column(db.VARCHAR(255), nullable=True, comment='标题')
|
||||
content = db.Column(db.TEXT, nullable=True, comment='内容')
|
||||
path = db.Column(db.VARCHAR(255), nullable=True, comment='跳转路径')
|
||||
is_read = db.Column(db.Boolean, default=False, comment='是否已读')
|
||||
app_name = db.Column(db.VARCHAR(128), nullable=False, comment='应用名称')
|
||||
category = db.Column(db.VARCHAR(128), nullable=False, comment='分类')
|
||||
message_data = db.Column(db.JSON, nullable=True, comment='数据')
|
||||
employee_id = db.Column(db.Integer, db.ForeignKey('common_employee.employee_id'), comment='ID')
|
||||
|
||||
|
||||
class CommonData(Model):
|
||||
__table_name__ = 'common_data'
|
||||
|
||||
data_type = db.Column(db.VARCHAR(255), default='')
|
||||
data = db.Column(db.JSON)
|
||||
|
||||
|
||||
class NoticeConfig(Model):
|
||||
__tablename__ = "common_notice_config"
|
||||
|
||||
platform = db.Column(db.VARCHAR(255), nullable=False)
|
||||
info = db.Column(db.JSON)
|
||||
|
||||
|
||||
class CommonFile(Model):
|
||||
__tablename__ = 'common_file'
|
||||
|
||||
file_name = db.Column(db.VARCHAR(512), nullable=False, index=True)
|
||||
origin_name = db.Column(db.VARCHAR(512), nullable=False)
|
||||
binary = db.Column(db.LargeBinary(16777216), nullable=False)
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
from inspect import getmembers
|
||||
from inspect import isclass
|
||||
from inspect import getmembers, isclass
|
||||
|
||||
import six
|
||||
from flask import jsonify
|
||||
@@ -28,15 +27,16 @@ class APIView(Resource):
|
||||
return send_file(*args, **kwargs)
|
||||
|
||||
|
||||
API_PACKAGE = os.path.abspath(os.path.dirname(__file__))
|
||||
API_PACKAGE = "api"
|
||||
|
||||
|
||||
def register_resources(resource_path, rest_api):
|
||||
for root, _, files in os.walk(os.path.join(resource_path)):
|
||||
for filename in files:
|
||||
if not filename.startswith("_") and filename.endswith("py"):
|
||||
if root not in sys.path:
|
||||
sys.path.insert(1, root)
|
||||
module_path = os.path.join(API_PACKAGE, root[root.index("views"):])
|
||||
if module_path not in sys.path:
|
||||
sys.path.insert(1, module_path)
|
||||
view = __import__(os.path.splitext(filename)[0])
|
||||
resource_list = [o[0] for o in getmembers(view) if isclass(o[1]) and issubclass(o[1], Resource)]
|
||||
resource_list = [i for i in resource_list if i != "APIView"]
|
||||
@@ -46,4 +46,5 @@ def register_resources(resource_path, rest_api):
|
||||
resource_cls.url_prefix = ("",)
|
||||
if isinstance(resource_cls.url_prefix, six.string_types):
|
||||
resource_cls.url_prefix = (resource_cls.url_prefix,)
|
||||
|
||||
rest_api.add_resource(resource_cls, *resource_cls.url_prefix)
|
||||
|
||||
@@ -3,43 +3,37 @@
|
||||
import json
|
||||
import re
|
||||
|
||||
import redis_lock
|
||||
from celery_once import QueueOnce
|
||||
from flask import current_app
|
||||
from werkzeug.exceptions import BadRequest
|
||||
from werkzeug.exceptions import NotFound
|
||||
from werkzeug.exceptions import BadRequest, NotFound
|
||||
|
||||
from api.extensions import celery
|
||||
from api.extensions import rd
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.decorator import reconnect_db
|
||||
from api.lib.perm.acl.audit import AuditCRUD
|
||||
from api.lib.perm.acl.audit import AuditOperateSource
|
||||
from api.lib.perm.acl.audit import AuditOperateType
|
||||
from api.extensions import db
|
||||
from api.lib.perm.acl.cache import AppCache
|
||||
from api.lib.perm.acl.cache import RoleCache
|
||||
from api.lib.perm.acl.cache import RoleRelationCache
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.perm.acl.record import OperateRecordCRUD
|
||||
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditOperateSource
|
||||
from api.models.acl import Resource
|
||||
from api.models.acl import Role
|
||||
from api.models.acl import Trigger
|
||||
|
||||
|
||||
@celery.task(name="acl.role_rebuild", queue=ACL_QUEUE, )
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
@celery.task(base=QueueOnce,
|
||||
name="acl.role_rebuild",
|
||||
queue=ACL_QUEUE,
|
||||
once={"graceful": True, "unlock_before_run": True})
|
||||
def role_rebuild(rids, app_id):
|
||||
rids = rids if isinstance(rids, list) else [rids]
|
||||
for rid in rids:
|
||||
with redis_lock.Lock(rd.r, "ROLE_REBUILD_{}_{}".format(rid, app_id)):
|
||||
RoleRelationCache.rebuild(rid, app_id)
|
||||
RoleRelationCache.rebuild(rid, app_id)
|
||||
|
||||
current_app.logger.info("Role {0} App {1} rebuild..........".format(rids, app_id))
|
||||
|
||||
|
||||
@celery.task(name="acl.update_resource_to_build_role", queue=ACL_QUEUE)
|
||||
@reconnect_db
|
||||
def update_resource_to_build_role(resource_id, app_id, group_id=None):
|
||||
rids = [i.id for i in Role.get_by(__func_isnot__key_uid=None, fl='id', to_dict=False)]
|
||||
rids += [i.id for i in Role.get_by(app_id=app_id, fl='id', to_dict=False)]
|
||||
@@ -55,9 +49,9 @@ def update_resource_to_build_role(resource_id, app_id, group_id=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.apply_trigger", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def apply_trigger(_id, resource_id=None, operator_uid=None):
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.perm.acl.permission import PermissionCRUD
|
||||
|
||||
trigger = Trigger.get_by_id(_id)
|
||||
@@ -121,9 +115,9 @@ def apply_trigger(_id, resource_id=None, operator_uid=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.cancel_trigger", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def cancel_trigger(_id, resource_id=None, operator_uid=None):
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.perm.acl.permission import PermissionCRUD
|
||||
|
||||
trigger = Trigger.get_by_id(_id)
|
||||
@@ -189,19 +183,18 @@ def cancel_trigger(_id, resource_id=None, operator_uid=None):
|
||||
|
||||
|
||||
@celery.task(name="acl.op_record", queue=ACL_QUEUE)
|
||||
@reconnect_db
|
||||
def op_record(app, role_name, operate_type, obj):
|
||||
def op_record(app, rolename, operate_type, obj):
|
||||
if isinstance(app, int):
|
||||
app = AppCache.get(app)
|
||||
app = app and app.name
|
||||
|
||||
if isinstance(role_name, int):
|
||||
u = UserCache.get(role_name)
|
||||
if isinstance(rolename, int):
|
||||
u = UserCache.get(rolename)
|
||||
if u:
|
||||
role_name = u.username
|
||||
rolename = u.username
|
||||
if not u:
|
||||
r = RoleCache.get(role_name)
|
||||
r = RoleCache.get(rolename)
|
||||
if r:
|
||||
role_name = r.name
|
||||
rolename = r.name
|
||||
|
||||
OperateRecordCRUD.add(app, role_name, operate_type, obj)
|
||||
OperateRecordCRUD.add(app, rolename, operate_type, obj)
|
||||
|
||||
@@ -1,46 +1,31 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import redis_lock
|
||||
import time
|
||||
|
||||
import jinja2
|
||||
import requests
|
||||
from flask import current_app
|
||||
from flask_login import login_user
|
||||
|
||||
import api.lib.cmdb.ci
|
||||
from api.extensions import celery
|
||||
from api.extensions import db
|
||||
from api.extensions import es
|
||||
from api.extensions import rd
|
||||
from api.lib.cmdb.cache import AttributeCache
|
||||
from api.lib.cmdb.cache import CITypeAttributesCache
|
||||
from api.lib.cmdb.const import CMDB_QUEUE
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
|
||||
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
|
||||
from api.lib.cmdb.const import RelationSourceEnum
|
||||
from api.lib.cmdb.perms import CIFilterPermsCRUD
|
||||
from api.lib.cmdb.utils import TableMap
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.decorator import reconnect_db
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
from api.lib.utils import handle_arg_list
|
||||
from api.models.cmdb import Attribute
|
||||
from api.models.cmdb import AutoDiscoveryCI
|
||||
from api.models.cmdb import AutoDiscoveryCIType
|
||||
from api.models.cmdb import AutoDiscoveryCITypeRelation
|
||||
from api.models.cmdb import CI
|
||||
from api.lib.mail import send_mail
|
||||
from api.lib.utils import Lock
|
||||
from api.models.cmdb import CIRelation
|
||||
from api.models.cmdb import CITypeAttribute
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_cache(ci_id, operate_type, record_id):
|
||||
from api.lib.cmdb.ci import CITriggerManager
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeManager
|
||||
def ci_cache(ci_id):
|
||||
time.sleep(0.01)
|
||||
db.session.remove()
|
||||
|
||||
m = api.lib.cmdb.ci.CIManager()
|
||||
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
|
||||
@@ -52,37 +37,12 @@ def ci_cache(ci_id, operate_type, record_id):
|
||||
|
||||
current_app.logger.info("{0} flush..........".format(ci_id))
|
||||
|
||||
if operate_type:
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get('worker'))
|
||||
|
||||
_, enum_map = CITypeAttributeManager.get_attr_names_label_enum(ci_dict.get('_type'))
|
||||
payload = dict()
|
||||
for k, v in ci_dict.items():
|
||||
if k in enum_map:
|
||||
if isinstance(v, list):
|
||||
payload[k] = [enum_map[k].get(i, i) for i in v]
|
||||
else:
|
||||
payload[k] = enum_map[k].get(v, v)
|
||||
else:
|
||||
payload[k] = v
|
||||
CITriggerManager.fire(operate_type, payload, record_id)
|
||||
|
||||
ci_dict and CIRelationManager.build_by_attribute(ci_dict)
|
||||
|
||||
|
||||
@celery.task(name="cmdb.rebuild_relation_for_attribute_changed", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def rebuild_relation_for_attribute_changed(ci_type_relation, uid):
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
|
||||
CIRelationManager.rebuild_all_by_attribute(ci_type_relation, uid)
|
||||
|
||||
|
||||
@celery.task(name="cmdb.batch_ci_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def batch_ci_cache(ci_ids, ): # only for attribute change index
|
||||
def batch_ci_cache(ci_ids):
|
||||
time.sleep(1)
|
||||
db.session.remove()
|
||||
|
||||
for ci_id in ci_ids:
|
||||
m = api.lib.cmdb.ci.CIManager()
|
||||
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
|
||||
@@ -96,8 +56,7 @@ def batch_ci_cache(ci_ids, ): # only for attribute change index
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_delete(ci_id, type_id):
|
||||
def ci_delete(ci_id):
|
||||
current_app.logger.info(ci_id)
|
||||
|
||||
if current_app.config.get("USE_ES"):
|
||||
@@ -105,122 +64,29 @@ def ci_delete(ci_id, type_id):
|
||||
else:
|
||||
rd.delete(ci_id, REDIS_PREFIX_CI)
|
||||
|
||||
instance = AutoDiscoveryCI.get_by(ci_id=ci_id, to_dict=False, first=True)
|
||||
if instance is not None:
|
||||
adt = AutoDiscoveryCIType.get_by_id(instance.adt_id)
|
||||
if adt:
|
||||
adt.update(updated_at=datetime.datetime.now())
|
||||
instance.delete()
|
||||
|
||||
for attr in Attribute.get_by(reference_type_id=type_id, to_dict=False):
|
||||
table = TableMap(attr=attr).table
|
||||
for i in getattr(table, 'get_by')(attr_id=attr.id, value=ci_id, to_dict=False):
|
||||
i.delete()
|
||||
ci_cache(i.ci_id, None, None)
|
||||
|
||||
current_app.logger.info("{0} delete..........".format(ci_id))
|
||||
|
||||
|
||||
@celery.task(name="cmdb.delete_id_filter", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def delete_id_filter(ci_id):
|
||||
CIFilterPermsCRUD().delete_id_filter_by_ci_id(ci_id)
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_delete_trigger", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_delete_trigger(trigger, operate_type, ci_dict):
|
||||
current_app.logger.info('delete ci {} trigger'.format(ci_dict['_id']))
|
||||
from api.lib.cmdb.ci import CITriggerManager
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get('worker'))
|
||||
|
||||
CITriggerManager.fire_by_trigger(trigger, operate_type, ci_dict)
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_cache", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_relation_cache(parent_id, child_id, ancestor_ids):
|
||||
with redis_lock.Lock(rd.r, "CIRelation_{}".format(parent_id)):
|
||||
def ci_relation_cache(parent_id, child_id):
|
||||
db.session.remove()
|
||||
|
||||
with Lock("CIRelation_{}".format(parent_id)):
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
|
||||
first=True, to_dict=False)
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, first=True, to_dict=False)
|
||||
if str(child_id) not in children:
|
||||
children[str(child_id)] = cr.second_ci.type_id
|
||||
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
if ancestor_ids is not None:
|
||||
key = "{},{}".format(ancestor_ids, parent_id)
|
||||
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
|
||||
grandson = json.loads(grandson) if grandson is not None else {}
|
||||
|
||||
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
|
||||
first=True, to_dict=False)
|
||||
if cr and str(cr.second_ci_id) not in grandson:
|
||||
grandson[str(cr.second_ci_id)] = cr.second_ci.type_id
|
||||
|
||||
rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)
|
||||
|
||||
current_app.logger.info("ADD ci relation cache: {0} -> {1}".format(parent_id, child_id))
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_add", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_relation_add(parent_dict, child_id, uid):
|
||||
"""
|
||||
:param parent_dict: key is '$parent_model.attr_name'
|
||||
:param child_id:
|
||||
:param uid:
|
||||
:return:
|
||||
"""
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeManager
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
|
||||
for parent in parent_dict:
|
||||
parent_ci_type_name, _attr_name = parent.strip()[1:].split('.', 1)
|
||||
attr_name = CITypeAttributeManager.get_attr_name(parent_ci_type_name, _attr_name)
|
||||
if attr_name is None:
|
||||
current_app.logger.warning("attr name {} does not exist".format(_attr_name))
|
||||
continue
|
||||
|
||||
parent_dict[parent] = handle_arg_list(parent_dict[parent])
|
||||
for v in parent_dict[parent]:
|
||||
query = "_type:{},{}:{}".format(parent_ci_type_name, attr_name, v)
|
||||
s = search(query)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error('ci relation add failed: {}'.format(e))
|
||||
continue
|
||||
|
||||
for ci in response:
|
||||
try:
|
||||
CIRelationManager.add(ci['_id'], child_id)
|
||||
ci_relation_cache(ci['_id'], child_id, None)
|
||||
except Exception as e:
|
||||
current_app.logger.warning(e)
|
||||
finally:
|
||||
try:
|
||||
db.session.commit()
|
||||
except:
|
||||
db.session.rollback()
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def ci_relation_delete(parent_id, child_id, ancestor_ids):
|
||||
with redis_lock.Lock(rd.r, "CIRelation_{}".format(parent_id)):
|
||||
def ci_relation_delete(parent_id, child_id):
|
||||
with Lock("CIRelation_{}".format(parent_id)):
|
||||
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
|
||||
children = json.loads(children) if children is not None else {}
|
||||
|
||||
@@ -229,33 +95,19 @@ def ci_relation_delete(parent_id, child_id, ancestor_ids):
|
||||
|
||||
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
|
||||
|
||||
if ancestor_ids is not None:
|
||||
key = "{},{}".format(ancestor_ids, parent_id)
|
||||
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
|
||||
grandson = json.loads(grandson) if grandson is not None else {}
|
||||
|
||||
if str(child_id) in grandson:
|
||||
grandson.pop(str(child_id))
|
||||
|
||||
rd.create_or_update({key: json.dumps(grandson)}, REDIS_PREFIX_CI_RELATION2)
|
||||
|
||||
current_app.logger.info("DELETE ci relation cache: {0} -> {1}".format(parent_id, child_id))
|
||||
|
||||
|
||||
@celery.task(name="cmdb.ci_type_attribute_order_rebuild", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def ci_type_attribute_order_rebuild(type_id, uid):
|
||||
def ci_type_attribute_order_rebuild(type_id):
|
||||
current_app.logger.info('rebuild attribute order')
|
||||
db.session.remove()
|
||||
|
||||
from api.lib.cmdb.ci_type import CITypeAttributeGroupManager
|
||||
|
||||
attrs = CITypeAttributesCache.get(type_id)
|
||||
id2attr = {attr.attr_id: attr for attr in attrs}
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
|
||||
res = CITypeAttributeGroupManager.get_by_type_id(type_id, True)
|
||||
order = 0
|
||||
for group in res:
|
||||
@@ -266,107 +118,41 @@ def ci_type_attribute_order_rebuild(type_id, uid):
|
||||
order += 1
|
||||
|
||||
|
||||
@celery.task(name="cmdb.calc_computed_attribute", queue=CMDB_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def calc_computed_attribute(attr_id, uid):
|
||||
from api.lib.cmdb.ci import CIManager
|
||||
@celery.task(name='cmdb.trigger_notify', queue=CMDB_QUEUE)
|
||||
def trigger_notify(notify, ci_id):
|
||||
from api.lib.perm.acl.cache import UserCache
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get(uid))
|
||||
def _wrap_mail(mail_to):
|
||||
if "@" not in mail_to:
|
||||
user = UserCache.get(mail_to)
|
||||
if user:
|
||||
return user.email
|
||||
|
||||
cim = CIManager()
|
||||
for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False):
|
||||
cis = CI.get_by(type_id=i.type_id, to_dict=False)
|
||||
for ci in cis:
|
||||
cim.update(ci.id, {})
|
||||
return mail_to
|
||||
|
||||
db.session.remove()
|
||||
|
||||
@celery.task(name="cmdb.write_ad_rule_sync_history", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def write_ad_rule_sync_history(rules, oneagent_id, oneagent_name, sync_at):
|
||||
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleSyncHistoryCRUD
|
||||
m = api.lib.cmdb.ci.CIManager()
|
||||
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
|
||||
|
||||
for rule in rules:
|
||||
AutoDiscoveryRuleSyncHistoryCRUD().upsert(adt_id=rule['id'],
|
||||
oneagent_id=oneagent_id,
|
||||
oneagent_name=oneagent_name,
|
||||
sync_at=sync_at,
|
||||
commit=False)
|
||||
try:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
current_app.logger.error("write auto discovery rule sync history failed: {}".format(e))
|
||||
db.session.rollback()
|
||||
subject = jinja2.Template(notify.get('subject') or "").render(ci_dict)
|
||||
body = jinja2.Template(notify.get('body') or "").render(ci_dict)
|
||||
|
||||
if notify.get('wx_to'):
|
||||
to_user = jinja2.Template('|'.join(notify['wx_to'])).render(ci_dict)
|
||||
url = current_app.config.get("WX_URI")
|
||||
data = {"to_user": to_user, "content": subject}
|
||||
try:
|
||||
requests.post(url, data=data)
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
|
||||
@celery.task(name="cmdb.build_relations_for_ad_accept", queue=CMDB_QUEUE)
|
||||
@reconnect_db
|
||||
def build_relations_for_ad_accept(adc, ci_id, ad_key2attr):
|
||||
from api.lib.cmdb.ci import CIRelationManager
|
||||
from api.lib.cmdb.search import SearchError
|
||||
from api.lib.cmdb.search.ci import search as ci_search
|
||||
if notify.get('mail_to'):
|
||||
try:
|
||||
if len(subject) > 700:
|
||||
subject = subject[:600] + "..." + subject[-100:]
|
||||
|
||||
current_app.test_request_context().push()
|
||||
login_user(UserCache.get('worker'))
|
||||
|
||||
relation_ads = AutoDiscoveryCITypeRelation.get_by(ad_type_id=adc['type_id'], to_dict=False)
|
||||
for r_adt in relation_ads:
|
||||
ad_key = r_adt.ad_key
|
||||
if not adc['instance'].get(ad_key):
|
||||
continue
|
||||
|
||||
ad_key_values = [adc['instance'].get(ad_key)] if not isinstance(
|
||||
adc['instance'].get(ad_key), list) else adc['instance'].get(ad_key)
|
||||
for ad_key_value in ad_key_values:
|
||||
query = "_type:{},{}:{}".format(r_adt.peer_type_id, r_adt.peer_attr_id, ad_key_value)
|
||||
s = ci_search(query, use_ci_filter=False, count=1000000)
|
||||
try:
|
||||
response, _, _, _, _, _ = s.search()
|
||||
except SearchError as e:
|
||||
current_app.logger.error("build_relations_for_ad_accept failed: {}".format(e))
|
||||
return
|
||||
|
||||
for relation_ci in response:
|
||||
relation_ci_id = relation_ci['_id']
|
||||
try:
|
||||
CIRelationManager.add(ci_id, relation_ci_id,
|
||||
valid=False,
|
||||
source=RelationSourceEnum.AUTO_DISCOVERY)
|
||||
|
||||
except:
|
||||
try:
|
||||
CIRelationManager.add(relation_ci_id, ci_id,
|
||||
valid=False,
|
||||
source=RelationSourceEnum.AUTO_DISCOVERY)
|
||||
except:
|
||||
pass
|
||||
|
||||
# build relations in reverse
|
||||
relation_ads = AutoDiscoveryCITypeRelation.get_by(peer_type_id=adc['type_id'], to_dict=False)
|
||||
attr2ad_key = {v: k for k, v in ad_key2attr.items()}
|
||||
for r_adt in relation_ads:
|
||||
attr = AttributeCache.get(r_adt.peer_attr_id)
|
||||
ad_key = attr2ad_key.get(attr and attr.name)
|
||||
if not ad_key:
|
||||
continue
|
||||
|
||||
ad_value = adc['instance'].get(ad_key)
|
||||
peer_ad_key = r_adt.ad_key
|
||||
peer_instances = AutoDiscoveryCI.get_by(type_id=r_adt.ad_type_id, to_dict=False)
|
||||
for peer_instance in peer_instances:
|
||||
peer_ad_values = peer_instance.instance.get(peer_ad_key)
|
||||
peer_ad_values = [peer_ad_values] if not isinstance(peer_ad_values, list) else peer_ad_values
|
||||
if ad_value in peer_ad_values and peer_instance.ci_id:
|
||||
try:
|
||||
CIRelationManager.add(peer_instance.ci_id, ci_id,
|
||||
valid=False,
|
||||
source=RelationSourceEnum.AUTO_DISCOVERY)
|
||||
|
||||
except:
|
||||
try:
|
||||
CIRelationManager.add(ci_id, peer_instance.ci_id,
|
||||
valid=False,
|
||||
source=RelationSourceEnum.AUTO_DISCOVERY)
|
||||
except:
|
||||
pass
|
||||
send_mail("", [_wrap_mail(jinja2.Template(i).render(ci_dict))
|
||||
for i in notify['mail_to'] if i], subject, body)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Send mail failed: {0}".format(str(e)))
|
||||
|
||||
@@ -1,24 +1,28 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
import requests
|
||||
from flask import current_app
|
||||
|
||||
from api.extensions import celery
|
||||
from api.extensions import db
|
||||
from api.lib.common_setting.acl import ACLManager
|
||||
from api.lib.perm.acl.const import ACL_QUEUE
|
||||
from api.lib.common_setting.const import COMMON_SETTING_QUEUE
|
||||
from api.lib.common_setting.resp_format import ErrFormat
|
||||
from api.models.common_setting import Department, Employee
|
||||
from api.lib.decorator import flush_db
|
||||
from api.lib.decorator import reconnect_db
|
||||
from api.models.common_setting import Department
|
||||
|
||||
|
||||
@celery.task(name="common_setting.edit_employee_department_in_acl", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
@celery.task(name="common_setting.edit_employee_department_in_acl", queue=COMMON_SETTING_QUEUE)
|
||||
def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
|
||||
"""
|
||||
:param e_list:{acl_rid: 11, department_id: 22}
|
||||
:param new_d_id
|
||||
:param op_uid
|
||||
在 ACL 员工更换部门
|
||||
:param e_list: 员工列表 {acl_rid: 11, department_id: 22}
|
||||
:param new_d_id: 新部门 ID
|
||||
:param op_uid: 操作人 ID
|
||||
|
||||
在老部门中删除员工
|
||||
在新部门中添加员工
|
||||
"""
|
||||
db.session.remove()
|
||||
|
||||
result = []
|
||||
new_department = Department.get_by(
|
||||
first=True, department_id=new_d_id, to_dict=False)
|
||||
@@ -39,6 +43,7 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
|
||||
new_department_acl_rid = new_department.acl_rid if new_d_rid_in_acl == new_department.acl_rid else new_d_rid_in_acl
|
||||
|
||||
for employee in e_list:
|
||||
# 根据 部门ID获取部门 acl_rid
|
||||
old_department = Department.get_by(
|
||||
first=True, department_id=employee.get('department_id'), to_dict=False)
|
||||
if not old_department:
|
||||
@@ -49,21 +54,24 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
|
||||
continue
|
||||
|
||||
old_d_rid_in_acl = role_map.get(old_department.department_name, 0)
|
||||
if old_d_rid_in_acl > 0:
|
||||
if old_d_rid_in_acl != old_department.acl_rid:
|
||||
old_department.update(
|
||||
acl_rid=old_d_rid_in_acl
|
||||
)
|
||||
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'parent_id': d_acl_rid,
|
||||
}
|
||||
try:
|
||||
acl.remove_user_from_role(employee_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(ErrFormat.acl_remove_user_from_role_failed.format(str(e)))
|
||||
if old_d_rid_in_acl == 0:
|
||||
return
|
||||
if old_d_rid_in_acl != old_department.acl_rid:
|
||||
old_department.update(
|
||||
acl_rid=old_d_rid_in_acl
|
||||
)
|
||||
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
|
||||
# 在老部门中删除员工
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'parent_id': d_acl_rid,
|
||||
}
|
||||
try:
|
||||
acl.remove_user_from_role(employee_acl_rid, payload)
|
||||
except Exception as e:
|
||||
result.append(ErrFormat.acl_remove_user_from_role_failed.format(str(e)))
|
||||
|
||||
# 在新部门中添加员工
|
||||
payload = {
|
||||
'app_id': 'acl',
|
||||
'child_ids': [employee_acl_rid],
|
||||
@@ -74,57 +82,3 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
|
||||
result.append(ErrFormat.acl_add_user_to_role_failed.format(str(e)))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@celery.task(name="common_setting.refresh_employee_acl_info", queue=ACL_QUEUE)
|
||||
@flush_db
|
||||
@reconnect_db
|
||||
def refresh_employee_acl_info(current_employee_id=None):
|
||||
acl = ACLManager('acl')
|
||||
role_map = {role['name']: role for role in acl.get_all_roles()}
|
||||
|
||||
criterion = [
|
||||
Employee.deleted == 0
|
||||
]
|
||||
query = Employee.query.filter(*criterion).order_by(
|
||||
Employee.created_at.desc()
|
||||
)
|
||||
current_employee_rid = 0
|
||||
|
||||
for em in query.all():
|
||||
if current_employee_id and em.employee_id == current_employee_id:
|
||||
current_employee_rid = em.acl_rid if em.acl_rid else 0
|
||||
|
||||
if em.acl_uid and em.acl_rid:
|
||||
continue
|
||||
role = role_map.get(em.username, None)
|
||||
if not role:
|
||||
continue
|
||||
|
||||
params = dict()
|
||||
if not em.acl_uid:
|
||||
params['acl_uid'] = role.get('uid', 0)
|
||||
|
||||
if not em.acl_rid:
|
||||
params['acl_rid'] = role.get('id', 0)
|
||||
|
||||
if current_employee_id and em.employee_id == current_employee_id:
|
||||
current_employee_rid = params['acl_rid'] if params.get('acl_rid', 0) else 0
|
||||
|
||||
try:
|
||||
em.update(**params)
|
||||
current_app.logger.info(
|
||||
f"refresh_employee_acl_info success, employee_id: {em.employee_id}, uid: {em.acl_uid}, "
|
||||
f"rid: {em.acl_rid}")
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
continue
|
||||
|
||||
if current_employee_rid and current_employee_rid > 0:
|
||||
try:
|
||||
from api.lib.common_setting.employee import GrantEmployeeACLPerm
|
||||
|
||||
GrantEmployeeACLPerm().grant_by_rid(current_employee_rid, False)
|
||||
current_app.logger.info(f"GrantEmployeeACLPerm success, current_employee_rid: {current_employee_rid}")
|
||||
except Exception as e:
|
||||
current_app.logger.error(str(e))
|
||||
|
||||
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user