Compare commits


110 Commits
master ... 1.5

Author SHA1 Message Date
pycook 8ee7c6daf8 version 1.5: update docker file 2019-11-30 23:07:12 +08:00
pycook 882b158d18 cmdb.sql update 2019-11-29 22:21:41 +08:00
pycook 85222443c0 relation view [done] 2019-11-29 18:11:18 +08:00
pycook 1696ecf49d relation view [doing] 2019-11-28 21:17:06 +08:00
pycook 73b92ff533 relation view define [done] 2019-11-27 18:25:53 +08:00
pycook e977bb15a5 GPLv2 2019-11-25 20:35:05 +08:00
pycook 7c46d6cdbf change to GPLv2 2019-11-25 20:33:56 +08:00
pycook 4d11c1f7db License change to GPLv3 2019-11-25 19:42:37 +08:00
pycook 0a563deb11 UI: relation type define [done] 2019-11-25 19:23:51 +08:00
pycook ba80ec4403 /acl/resources add param resource_type_id 2019-11-24 22:33:57 +08:00
pycook 3b7cc4595b fix grant 2019-11-24 22:29:51 +08:00
pycook 9fe47657a6 Merge pull request #20 from kdyq007/master
[Update] Add role, resource, and permission pages
2019-11-24 17:29:58 +08:00
kdyq007 5a4a6caa07 Merge branch 'master' of https://github.com/kdyq007/cmdb 2019-11-24 17:21:27 +08:00
kdyq007 9dadbe1599 Merge pull request #6 from pycook/master
fix acl api
2019-11-24 16:43:53 +08:00
pycook 40d016f513 fix acl api 2019-11-24 16:35:28 +08:00
kdyq007 655edaa7c8 [Update] Complete permission management 2019-11-24 15:40:38 +08:00
kdyq007 7fa5cff919 [Update] Complete permission management page 2019-11-24 15:22:18 +08:00
kdyq007 d19834ed5d Merge pull request #5 from pycook/master
Sync
2019-11-23 21:53:46 +08:00
pycook b6be430aa3 fix 2019-11-23 21:50:45 +08:00
kdyq007 63792c242f [Update] Complete resource type page 2019-11-23 20:16:31 +08:00
kdyq007 10f7029722 [Save] Complete resource type permission display 2019-11-23 18:08:52 +08:00
pycook ba176542dc fix acl resource 2019-11-23 17:42:33 +08:00
kdyq007 aae3b6e2ff Merge pull request #4 from pycook/master
fix acl resource_type
2019-11-23 17:36:42 +08:00
pycook b370c7d46e fix acl resource_type 2019-11-23 17:24:43 +08:00
kdyq007 efa5a8ea5d Merge pull request #3 from pycook/master
Sync
2019-11-23 14:52:41 +08:00
pycook fd532626ac relative view api [done] 2019-11-22 18:18:22 +08:00
pycook 617337c614 Realize /api/v0.1/ci_relations/s [done] 2019-11-21 18:21:03 +08:00
kdyq007 9a3d24ac81 [Update] Save work in progress 2019-11-20 19:02:36 +08:00
kdyq007 454dd4c56b Merge branch 'master' of https://github.com/kdyq007/cmdb 2019-11-19 21:52:33 +08:00
kdyq007 88ad72d4dc Merge pull request #2 from pycook/master
update
2019-11-19 21:52:02 +08:00
kdyq007 8d1517d550 [Update] Complete basic role and user management 2019-11-19 21:49:51 +08:00
pycook d3a8ef5966 fix get user by uid 2019-11-19 21:46:53 +08:00
pycook e5baa5012d acl: resource type api 2019-11-19 21:41:46 +08:00
pycook a1f63b00dd fix search 2019-11-19 18:32:35 +08:00
pycook 47ded84231 elastic search [done] 2019-11-19 18:16:31 +08:00
kdyq007 224a48a5f3 [Update] Remove app_id 2019-11-18 22:22:38 +08:00
pycook 0e7c52df71 es search update 2019-11-18 22:05:59 +08:00
pycook ff701cc770 search by elasticsearch [doing] 2019-11-18 20:02:25 +08:00
kdyq007 6a7bb725cc Merge pull request #1 from pycook/master
How does this work? A reverse pull request
2019-11-18 18:31:14 +08:00
pycook 0a13186c13 fix acl api 2019-11-18 12:02:02 +08:00
kdyq007 a0ffeb9950 [Update] Complete role management page 2019-11-17 21:08:04 +08:00
kdyq007 6c70ec6d53 [Update] Complete basic roles API 2019-11-17 17:09:24 +08:00
qiqi 4b5f82699a [Update] Complete user management page 2019-11-17 09:32:39 +08:00
pycook f78c3b928b pep8 2019-11-15 18:03:06 +08:00
pycook 332659c1d5 update acl 2019-11-15 16:54:56 +08:00
pycook 3beb2706dc Merge pull request #18 from kdyq007/master
[Update] Change image paths and compress images
2019-11-14 21:59:38 +08:00
qiqi a14111e1ce [Update] Improve formatting 2019-11-14 21:51:58 +08:00
qiqi c4320c14f9 [Update] Relocate and compress images 2019-11-14 21:48:36 +08:00
qiqi 4c5442748f [Update] Improve README formatting 2019-11-14 21:00:24 +08:00
qiqi a81750acba [Update] Add QQ group to README.md 2019-11-14 20:55:48 +08:00
pycook 0439e2462b update acl 2019-11-14 18:35:31 +08:00
pycook 3b62bd7ac9 update readme 2019-11-13 14:02:02 +08:00
pycook f6add52721 python3.7 timezone fix 2019-11-13 13:56:44 +08:00
pycook c85e535288 update acl 2019-11-13 13:25:42 +08:00
pycook c0c6d116b5 docker images use aliyun 2019-11-13 11:56:17 +08:00
pycook 39153e92d1 update Makefile and support for install by make 2019-11-12 11:55:04 +08:00
pycook 42bcc2e510 fix py3 2019-11-12 11:15:25 +08:00
pycook 398fbb25dc merge Dockerfile 2019-11-12 10:40:37 +08:00
pycook 4b312d4f99 delete docs/Dockerfile 2019-11-11 23:12:50 +08:00
pycook 10414155a5 fix timezone 2019-11-11 23:11:12 +08:00
pycook feda0c37e7 update README 2019-11-11 16:10:02 +08:00
pycook 173c120b64 flask init-cache 2019-11-11 15:46:57 +08:00
pycook 5f2a0d1a7b Remove package-lock.json and remove some compile warnings 2019-11-11 13:16:07 +08:00
pycook 50f894a01d add command init-cache 2019-11-11 11:27:43 +08:00
pycook 66e93e73af Merge branch 'master' of https://github.com/pycook/cmdb 2019-11-11 09:20:07 +08:00
pycook 58ad9d3f05 vue lint 2019-11-11 00:25:22 +08:00
pycook 08c96039e9 gunicorn==19.5.0 2019-11-10 19:10:23 +08:00
pycook ca0dd97626 Docker to production 2019-11-10 19:06:38 +08:00
pycook 7810ee3974 Partially completed backend development of permissions management 2019-11-08 17:42:13 +08:00
pycook 2cfea7ef08 Update README.md
Add notes on one-click docker installation
2019-11-08 15:26:22 +08:00
pycook 0cee6cea25 fix py2.7 unicode encoding error 2019-11-08 15:15:31 +08:00
pycook 5d13ba2f26 users drop is_admin 2019-11-08 14:58:21 +08:00
pycook a583433530 fix unicode encode error 2019-11-08 14:37:53 +08:00
fxiang21 733ac3b2b4 Remove redundant docker-start directory 2019-11-08 09:20:34 +08:00
fxiang21 ef6300255a Fix nginx forwarding issue 2019-11-08 09:20:27 +08:00
fxiang21 aad37dcf0b Add containerized deployment option 2019-11-08 09:20:09 +08:00
pycook cce10d39ea code format 2019-11-07 19:18:31 +08:00
pycook c521dd447e Update README.md
pipenv run flask run -h 0.0.0.0
2019-11-05 17:52:45 +08:00
pycook 4d0cd4ba56 Update README.md
For access from other machines, change the IP address in VUE_APP_API_BASE_URL in ui/.env
2019-11-05 17:44:48 +08:00
pycook 7291274cb1 Update README.md
For access from other machines, change the IP address in VUE_APP_API_BASE_URL in ui/.env
2019-11-05 17:43:40 +08:00
pycook 44f2e383c3 update overview jpeg url 2019-11-01 11:37:16 +08:00
pycook 1f8219b418 fix add integer list 2019-11-01 11:27:24 +08:00
pycook cb2f170ded mkdir logs, ignore *.log 2019-11-01 10:45:35 +08:00
pycook 1241a23ba8 Update README.md
add cmdb.sql
2019-10-28 21:48:46 +08:00
pycook 7d7744b7dc add docs/cmdb.sql 2019-10-28 21:46:41 +08:00
pycook 9c7d51127a fix date picker 2019-10-24 20:43:59 +08:00
pycook b5a987f6b4 choice value tip fix 2019-10-24 20:43:58 +08:00
pycook 7bbc68bfd5 fix delete ci type 2019-10-24 20:43:58 +08:00
pycook 99d11e11ce fix ci types show 2019-10-24 20:43:58 +08:00
pycook 7b96ac4638 attribute alias must be unique 2019-10-24 20:43:58 +08:00
pycook 0a36330852 fix attributes paginate 2019-10-24 20:43:54 +08:00
pycook 9105f92c82 update README 2019-10-24 20:43:51 +08:00
pycook 57541ab486 Update README.md
create tables fix
2019-10-24 20:43:51 +08:00
pycook a0fcbd220e attributes paginate and fix update value 2019-10-24 20:43:51 +08:00
pycook d54b404eb6 add docs 2019-10-24 20:43:51 +08:00
pycook 620c5bb5eb ci search return unique key 2019-10-24 20:43:51 +08:00
pycook 0fde1d699d invalid username or password -> 403 2019-10-24 20:43:51 +08:00
shaohaojiecoder 61f77cf311 add batch module 2019-10-24 20:43:51 +08:00
lilixiang 13476128d5 add attribute library and model modules 2019-10-24 20:43:51 +08:00
pycook 5cdb4ecd2a Revert "add attribute library and model modules" 2019-10-24 20:43:51 +08:00
lilixiang 64c3b9da3b add attribute library and model modules 2019-10-24 20:43:51 +08:00
pycook 55dad7a58c Force unicode encoding for cache 2019-08-30 09:46:24 +08:00
pycook 38dabc35e5 add .gitattributes 2019-08-28 21:08:28 +08:00
pycook 5b4f95a50e add ui 2019-08-28 20:51:51 +08:00
pycook f3046d3c91 remove ui 2019-08-28 20:48:23 +08:00
pycook 5faae9af67 remove ui 2019-08-28 20:48:04 +08:00
pycook c0b50642e0 update README 2019-08-28 20:45:59 +08:00
pycook 12ca296879 Upgrade the backend and open-source the UI 2019-08-28 20:34:10 +08:00
pycook 420c6cea2b delete... 2016-08-26 13:46:03 +08:00
pycook ccc4bb48fa pep8 2016-06-27 10:50:32 +08:00
1006 changed files with 24366 additions and 170037 deletions

.env (13 lines changed)

@@ -1,6 +1,7 @@
MYSQL_ROOT_PASSWORD='123456'
MYSQL_HOST='mysql'
MYSQL_PORT=3306
MYSQL_USER='cmdb'
MYSQL_DATABASE='cmdb'
MYSQL_PASSWORD='123456'
# Environment variable overrides for local development
FLASK_APP=autoapp.py
FLASK_DEBUG=1
FLASK_ENV=development
GUNICORN_WORKERS=2
LOG_LEVEL=debug
SECRET_KEY='YourSecretKey'
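
The block above is the root .env: MySQL connection settings plus, per its own comment, environment variable overrides for local development (FLASK_APP, FLASK_DEBUG, GUNICORN_WORKERS, SECRET_KEY). As a rough sketch only, assuming this file is also meant to be sourced for a non-Docker run and that pipenv is set up as in the README, the API could be started locally like this (the flask invocation matches the "pipenv run flask run -h 0.0.0.0" README commit in the log above):

# Sketch, not part of the diff: export the .env values and start the API locally.
set -a          # auto-export every variable defined while sourcing
source .env     # MYSQL_*, FLASK_*, GUNICORN_WORKERS, LOG_LEVEL, SECRET_KEY
set +a
pipenv run flask run -h 0.0.0.0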

.gitattributes (3 lines changed)

@@ -1 +1,2 @@
*.vue linguist-language=python
* linguist-language=python


@@ -1,60 +0,0 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["☢️ bug"]
assignees:
- Selina316
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: input
id: contact
attributes:
label: Contact Details
description: How can we get in touch with you if we need more info?
placeholder: ex. email@example.com
validations:
required: false
- type: dropdown
id: aspects
attributes:
label: This bug is related to UI or API?
multiple: true
options:
- UI
- API
- type: textarea
id: happened
attributes:
label: What happened?
description: Also tell us, what did you expect to happen?
placeholder: Tell us what you see!
value: "A bug happened!"
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: What version of our software are you running?
value: "newest"
validations:
required: true
- type: dropdown
id: browsers
attributes:
label: What browsers are you seeing the problem on?
multiple: true
options:
- Firefox
- Chrome
- Safari
- Microsoft Edge
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell


@@ -1,44 +0,0 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["✏️ feature"]
assignees:
- pycook
body:
- type: markdown
attributes:
value: |
Thank you for your feature suggestion; we will evaluate it carefully!
- type: input
id: contact
attributes:
label: Contact Details
description: How can we get in touch with you if we need more info?
placeholder: ex. email@example.com
validations:
required: false
- type: dropdown
id: aspects
attributes:
label: feature is related to UI or API aspects?
multiple: true
options:
- UI
- API
- type: textarea
id: feature
attributes:
label: What is your advice?
description: Also tell us, what did you expect to happen?
placeholder: Tell us what you want!
value: "everyone wants this feature!"
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: What version of our software are you running?
value: "newest"
validations:
required: true


@@ -1,36 +0,0 @@
name: Help wanted
description: I have a question
title: "[help wanted]: "
labels: ["help wanted"]
assignees:
- ivonGwy
body:
- type: markdown
attributes:
value: |
Please tell us what's you need!
- type: input
id: contact
attributes:
label: Contact Details
description: How can we get in touch with you if we need more info?
placeholder: ex. email@example.com
validations:
required: false
- type: textarea
id: question
attributes:
label: What is your question?
description: Also tell us, how can we help?
placeholder: Tell us what you need!
value: "i have a question!"
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: What version of our software are you running?
value: "newest"
validations:
required: true


@@ -1,60 +0,0 @@
name: Bug Report
description: File a bug report
title: "[Bug]: "
labels: ["bug"]
assignees:
- pycook
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: input
id: contact
attributes:
label: Contact Details
description: How can we get in touch with you if we need more info?
placeholder: ex. email@example.com
validations:
required: false
- type: dropdown
id: type
attributes:
label: bug is related to UI or API aspects?
multiple: true
options:
- UI
- API
- type: textarea
id: what-happened
attributes:
label: What happened?
description: Also tell us, what did you expect to happen?
placeholder: Tell us what you see!
value: "A bug happened!"
validations:
required: true
- type: textarea
id: version
attributes:
label: Version
description: What version of our software are you running?
default: 2.3.5
validations:
required: true
- type: dropdown
id: browsers
attributes:
label: What browsers are you seeing the problem on?
multiple: true
options:
- Firefox
- Chrome
- Safari
- Microsoft Edge
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell


@@ -1,6 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: veops official website
url: https://veops.cn/#hero
about: you can contact us here.


@@ -1,44 +0,0 @@
name: Feature wanted
description: A new feature would be good
title: "[Feature]: "
labels: ["feature"]
assignees:
- pycook
body:
- type: markdown
attributes:
value: |
Thank you for your feature suggestion; we will evaluate it carefully!
- type: input
id: contact
attributes:
label: Contact Details
description: How can we get in touch with you if we need more info?
placeholder: ex. email@example.com
validations:
required: false
- type: dropdown
id: type
attributes:
label: feature is related to UI or API aspects?
multiple: true
options:
- UI
- API
- type: textarea
id: describe the feature
attributes:
label: What is your advice?
description: Also tell us, what did you expect to happen?
placeholder: Tell us what you want!
value: "everyone wants this feature!"
validations:
required: true
- type: textarea
id: version
attributes:
label: Version
description: What version of our software are you running?
default: 2.3.5
validations:
required: true


@@ -1,67 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '20 3 * * 2'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
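
The workflow above runs CodeQL's Python analysis on pushes and pull requests against master and on a weekly cron. A rough local equivalent, assuming the standalone CodeQL CLI is installed (not part of this repository; the query suite name is an assumption and may differ between CLI releases):

# Sketch, not part of the diff: build a CodeQL database for this repo and analyze it locally.
codeql database create cmdb-codeql-db --language=python --source-root .
codeql database analyze cmdb-codeql-db python-security-and-quality.qls \
    --format=sarif-latest --output=codeql-results.sarif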


@@ -1,79 +0,0 @@
name: docker-images-build-and-release
on:
push:
branches:
- master
tags: ["v*"]
# pull_request:
# branches:
# - master
env:
# Use docker.io for Docker Hub if empty
REGISTRY_SERVER_ADDRESS: ghcr.io/veops
TAG: ${{ github.sha }}
jobs:
setup-environment:
timeout-minutes: 30
runs-on: ubuntu-latest
steps:
- name: Checkout Repo
uses: actions/checkout@v4
release-api-images:
runs-on: ubuntu-latest
needs: [setup-environment]
permissions:
contents: read
packages: write
timeout-minutes: 90
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Login to GitHub Package Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push CMDB-API Docker image
uses: docker/build-push-action@v6
with:
file: docker/Dockerfile-API
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-api:${{ env.TAG }}
# release-ui-images:
# runs-on: ubuntu-latest
# needs: [setup-environment]
# permissions:
# contents: read
# packages: write
# timeout-minutes: 90
# steps:
# - name: Checkout Repo
# uses: actions/checkout@v4
# - name: Login to GitHub Package Registry
# uses: docker/login-action@v2
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
# - name: Set up QEMU
# uses: docker/setup-qemu-action@v3
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@v3
# - name: Build and push CMDB-UI Docker image
# uses: docker/build-push-action@v6
# with:
# file: docker/Dockerfile-UI
# context: .
# platforms: linux/amd64,linux/arm64
# push: true
# tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-ui:${{ env.TAG }}
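
The release job above builds multi-arch (amd64/arm64) API images and pushes them to GitHub Container Registry under ghcr.io/veops, tagged with the commit SHA; the UI job is still commented out. Assuming the package is public, a pushed image could then be pulled by that tag, for example:

# Sketch, not part of the diff: <commit-sha> stands for the ${{ github.sha }} tag used above.
docker pull ghcr.io/veops/cmdb-api:<commit-sha>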

.gitignore (20 lines changed)

@@ -38,13 +38,9 @@ pip-log.txt
.tox
nosetests.xml
.pytest_cache
cmdb-api/test-output
cmdb-api/api/uploaded_files
cmdb-api/migrations/versions
# Translations
#*.mo
messages.pot
*.mo
# Mr Developer
.mr.developer.cfg
@@ -69,12 +65,12 @@ settings.py
*.db
# UI
cmdb-ui/node_modules
cmdb-ui/dist
cmdb-ui/yarn.lock
ui/node_modules
ui/dist
# Log files
cmdb-ui/npm-debug.log*
cmdb-ui/yarn-debug.log*
cmdb-ui/yarn-error.log*
cmdb-ui/package-lock.json
ui/npm-debug.log*
ui/yarn-debug.log*
ui/yarn-error.log*
ui/yarn.lock
ui/package-lock.json


@@ -1,13 +0,0 @@
# Contributor Code of Conduct
As contributors and maintainers of this project, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
We are committed to making participation in this project a harassment-free experience for everyone, regardless of the level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, age, or religion.
Examples of unacceptable behavior by participants include the use of sexual language or imagery, derogatory comments or personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed from the project team.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.
This Code of Conduct is adapted from the [Contributor Covenant](https://contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/)

Dockerfile (new file, 45 lines)

@@ -0,0 +1,45 @@
# ================================= UI ================================
FROM node:alpine AS builder
LABEL description="cmdb-ui"
COPY ui /data/apps/cmdb-ui
WORKDIR /data/apps/cmdb-ui
RUN sed -i "s#http://127.0.0.1:5000##g" .env && yarn install && yarn build
FROM nginx:alpine AS cmdb-ui
RUN mkdir /etc/nginx/html && rm -f /etc/nginx/conf.d/default.conf
COPY --from=builder /data/apps/cmdb-ui/dist /etc/nginx/html/
# ================================= API ================================
FROM python:3.7-alpine AS cmdb-api
LABEL description="Python3.7,cmdb"
COPY . /data/apps/cmdb
WORKDIR /data/apps/cmdb
RUN apk add --no-cache tzdata gcc musl-dev libffi-dev
ENV TZ=Asia/Shanghai
RUN pip install --no-cache-dir -r docs/requirements.txt \
&& cp ./api/settings.py.example ./api/settings.py \
&& sed -i "s#{user}:{password}@127.0.0.1:3306/{db}#cmdb:123456@mysql:3306/cmdb#g" api/settings.py \
&& sed -i "s#redis://127.0.0.1#redis://redis#g" api/settings.py \
&& sed -i 's#CACHE_REDIS_HOST = "127.0.0.1"#CACHE_REDIS_HOST = "redis"#g' api/settings.py
CMD ["bash", "-c", "flask run"]
# ================================= Search ================================
FROM docker.elastic.co/elasticsearch/elasticsearch:7.4.2 AS cmdb-search
RUN yes | ./bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v7.4.2/elasticsearch-analysis-ik-7.4.2.zip
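
The Dockerfile above is a multi-stage build with three targets: cmdb-ui (nginx serving the yarn-built front end), cmdb-api (Python 3.7 with api/settings.py rewritten so MySQL and Redis are expected at the hostnames mysql and redis), and cmdb-search (Elasticsearch 7.4.2 with the IK analyzer plugin). A minimal sketch of building each stage on its own, assuming the commands run from the repository root where this Dockerfile lives; the image names and the 1.5 tag are illustrative:

# Sketch, not part of the diff: build each stage via --target.
docker build --target cmdb-ui     -t cmdb-ui:1.5     .
docker build --target cmdb-api    -t cmdb-api:1.5    .
docker build --target cmdb-search -t cmdb-search:1.5 .
# The api image assumes MySQL and Redis are reachable as hosts "mysql" and "redis",
# e.g. via a shared Docker network or a compose file (not shown in this diff).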

LICENSE (778 lines changed)

@@ -1,661 +1,125 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.
When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.
Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and modification follow.
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.
c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.
In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.
3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.
5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.
7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.
This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.
10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.
one line to give the program's name and an idea of what it does.
Copyright (C) yyyy name of author
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
If the program is interactive, make it output a short notice like this when it starts in an interactive mode:
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
type `show w'. This is free software, and you are welcome
to redistribute it under certain conditions; type `show c'
for details.
The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright
interest in the program `Gnomovision'
(which makes passes at compilers) written
by James Hacker.
signature of Ty Coon, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License.

View File

@ -1,72 +1,37 @@
include ./Makefile.variable
.PHONY: env clean api ui worker
default: help
help: ## display this help
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
.PHONY: help
help:
@echo " env create a development environment using pipenv"
@echo " deps install dependencies using pip"
@echo " clean remove unwanted files like .pyc's"
@echo " lint check style with flake8"
@echo " api start api server"
@echo " ui start ui server"
@echo " worker start async tasks worker"
env: ## create a development environment using pipenv
env:
sudo easy_install pip && \
pip install pipenv -i https://repo.huaweicloud.com/repository/pypi/simple && \
pip install pipenv -i https://pypi.douban.com/simple && \
npm install yarn && \
make deps
.PHONY: env
docker-mysql: ## deploy MySQL using docker
@docker run --name mysql -p ${MYSQL_PORT}:3306 -e MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD} -d mysql:latest
.PHONY: docker-mysql
docker-redis: ## deploy Redis using docker
@docker run --name redis -p ${REDIS_PORT}:6379 -d redis:latest
.PHONY: docker-redis
deps: ## install dependencies using pip
cd cmdb-api && \
deps:
pipenv install --dev && \
pipenv run flask db-setup && \
pipenv run flask cmdb-init-cache && \
cd .. && \
cd cmdb-ui && yarn install && cd ..
.PHONY: deps
pipenv run flask init-cache && \
cd ui && yarn install && cd ..
api: ## start api server
cd cmdb-api && pipenv run flask run -h 0.0.0.0
.PHONY: api
api:
pipenv run flask run -h 0.0.0.0
worker: ## start async tasks worker
cd cmdb-api && pipenv run celery -A celery_worker.celery worker -E -Q one_cmdb_async --autoscale=5,2 --logfile=one_cmdb_async.log -D && pipenv run celery -A celery_worker.celery worker -E -Q acl_async --autoscale=2,1 --logfile=one_acl_async.log -D
.PHONY: worker
worker:
pipenv run celery worker -A celery_worker.celery -E -Q cmdb_async --concurrency=1
ui: ## start ui server
cd cmdb-ui && yarn run serve
.PHONY: ui
ui:
cd ui && yarn run serve
clean: ## remove unwanted files like .pyc's
clean:
pipenv run flask clean
.PHONY: clean
lint: ## check style with flake8
lint:
flake8 --exclude=env .
.PHONY: lint
api-docker-build:
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
docker buildx build \
--builder multi-platform-builder \
--platform=$(BUILD_ARCH) \
--tag $(REGISTRY)/cmdb-api:$(CMDB_DOCKER_VERSION) \
--tag $(REGISTRY)/cmdb-api:latest \
-f docker/Dockerfile-API \
.
ui-docker-build:
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
docker buildx build \
--builder multi-platform-builder \
--platform=$(BUILD_ARCH) \
--tag $(REGISTRY)/cmdb-ui:$(CMDB_DOCKER_VERSION) \
--tag $(REGISTRY)/cmdb-ui:latest \
-f docker/Dockerfile-UI \
.

View File

@ -1,21 +0,0 @@
SHELL := /bin/bash -o pipefail
MYSQL_ROOT_PASSWORD ?= root
MYSQL_PORT ?= 3306
REDIS_PORT ?= 6379
LATEST_TAG_DIFF:=$(shell git describe --tags --abbrev=8)
LATEST_COMMIT:=$(VERSION)-dev-$(shell git rev-parse --short=8 HEAD)
BUILD_ARCH ?= linux/amd64,linux/arm64
# Set your version by env or using latest tags from git
CMDB_VERSION?=$(LATEST_TAG_DIFF)
ifeq ($(CMDB_VERSION),)
#fall back to last commit
CMDB_VERSION=$(LATEST_COMMIT)
endif
COMMIT_VERSION:=$(LATEST_COMMIT)
CMDB_DOCKER_VERSION:=${CMDB_VERSION}
CMDB_CHART_VERSION:=$(shell echo ${CMDB_VERSION} | sed 's/^v//g' )
REGISTRY ?= local

60
Pipfile Normal file
View File

@ -0,0 +1,60 @@
[[source]]
url = "https://mirrors.aliyun.com/pypi/simple"
verify_ssl = true
name = "pypi"
[packages]
# Flask
Flask = "==1.0.3"
Werkzeug = "==0.15.4"
click = ">=5.0"
# Api
Flask-RESTful = "==0.3.7"
# Database
Flask-SQLAlchemy = "==2.4.0"
SQLAlchemy = "==1.3.5"
PyMySQL = "==0.9.3"
redis = "==3.2.1"
# Migrations
Flask-Migrate = "==2.5.2"
# Deployment
gevent = "==1.4.0"
gunicorn = "==19.5.0"
supervisor = "==4.0.3"
# Auth
Flask-Login = "==0.4.1"
Flask-Bcrypt = "==0.7.1"
Flask-Cors = ">=3.0.8"
# Caching
Flask-Caching = ">=1.0.0"
# Environment variable parsing
environs = "==4.2.0"
marshmallow = "==2.20.2"
# async tasks
celery = "==4.3.0"
more-itertools = "==5.0.0"
kombu = "==4.4.0"
# other
six = "==1.12.0"
bs4 = ">=0.0.1"
toposort = ">=1.5"
requests = ">=2.22.0"
PyJWT = ">=1.7.1"
elasticsearch = "==7.0.4"
[dev-packages]
# Testing
pytest = "==4.6.5"
WebTest = "==2.0.33"
factory-boy = "==2.12.*"
pdbpp = "==0.10.0"
# Lint and code style
flake8 = "==3.7.7"
flake8-blind-except = "==0.1.1"
flake8-debugger = "==3.1.0"
flake8-docstrings = "==1.3.0"
flake8-isort = "==2.7.0"
isort = "==4.3.21"
pep8-naming = "==0.8.2"
pydocstyle = "==3.0.0"

191
README.md
View File

@ -1,138 +1,97 @@
<h1 align="center">CMDB</h1>
<div align="center">
Aims to make the configuration and management of operations asset data as general-purpose as possible
</div>
<p align="center">
<a href="https://veops.cn">
<img src="https://github.com/user-attachments/assets/c5cfb272-899b-418d-9e69-8e1dd07db0f6" alt="维易CMDB"/>
</a>
</p>
<div align="center">
<h4 align="center">A simple, lightweight, and general-purpose operations configuration management database (CMDB)</h4>
[![License](https://img.shields.io/badge/License-GPLv2-brightgreen)](https://github.com/pycook/cmdb/blob/master/LICENSE)
[![UI](https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen)](https://github.com/sendya/ant-design-pro-vue)
[![API](https://img.shields.io/badge/API-Flask-brightgreen)](https://github.com/pallets/flask)
<p align="center">
<a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
<a href="https://github.com/veops/cmdb/releases"><img alt="the latest release version" src="https://img.shields.io/github/v/release/veops/cmdb?color=75C1C4&include_prereleases&label=Release&logo=github&logoColor=white"></a>
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-green" alt="UI"></a>
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-bright" alt="API"></a>
<a href="https://github.com/veops/cmdb/stargazers"><img src="https://img.shields.io/github/stars/veops/cmdb" alt="Stars Badge"/></a>
<a href="https://github.com/veops/cmdb"><img src="https://img.shields.io/github/forks/veops/cmdb" alt="Forks Badge"/></a>
</p>
<p align="center">
Chinese (Simplified) · <a href="docs/README_en.md">English</a>
</p>
</div>
## Introduction
Veops CMDB (维易CMDB) is a clean, lightweight, and highly customizable operations configuration management database (CMDB). It supports flexible model configuration and automatic resource discovery, aiming to give enterprises a convenient asset management solution and help operations teams manage IT infrastructure and services efficiently.
- Product documentation: [https://veops.cn/docs/](https://veops.cn/docs/)
- Online demo: [https://cmdb.veops.cn](https://cmdb.veops.cn)
- Username: demo or admin
- Password: 123456
- **Important**: the `master` branch may be in an **unstable state** during development. Please get the latest stable version from [releases](https://github.com/veops/cmdb/releases).
- 在线预览: [CMDB](http://121.42.12.46:8000)
- username: admin
- password: admin
> **Important**: the `master` branch may be in an *unstable state* during development.
Please get a stable version from [releases](https://github.com/pycook/cmdb/releases).
Overview
----
![Basic resource view](https://raw.githubusercontent.com/pycook/cmdb/master/ui/public/cmdb01.jpeg)
### Key features
![Model configuration](https://raw.githubusercontent.com/pycook/cmdb/master/ui/public/cmdb02.jpeg)
- **Custom models and model relationships**: model attributes can be customized, with advanced features such as drop-down lists, font colors, and computed attributes, to meet different business needs
- **Automatic resource discovery**: supports auto discovery of computers, network devices, storage devices, databases, middleware, public cloud resources, and more
- **Multi-dimensional views**: including resource views, hierarchical views, and relation views, helping operations staff manage resources comprehensively
- **Fine-grained access control**: precise access control and complete operation logs keep the system secure
- **Comprehensive resource search**: flexible search over resources and relations makes it quick to locate and operate on resources
- **Integrated IP address management (IPAM) and data center infrastructure management (DCIM)**: simplifies the management of network resources and data center equipment
For more details, please visit the [Veops website](https://veops.cn).
One-click quick build with Docker
----
- In the project root directory (install the Docker environment first)
```
docker-compose up -d
```
### Advantages
- Open in a browser: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- Flexibility
+ No fixed operations scenario needs to be specified; configuration is free-form and multiple templates are built in
+ IT assets can be discovered and registered automatically, so an asset management system can be built quickly
- Security
+ A fine-grained permission control mechanism ensures the security of resource management
+ Complete operation logs make auditing and troubleshooting easy
- Versatility
+ Multiple view types cover the needs of different scenarios
+ A powerful API supports deep integration
+ Attribute triggers and computed attributes can be defined to enhance data processing
### Tech stack
Local setup: environment and dependencies
----
- Storage: MySQL, Redis
- Python version: Python 2.7, or Python >= 3.6
+ Backend: Python [3.8-3.11]
+ Data storage: MySQL, Redis
+ Frontend: Vue.js
+ UI component library: Ant Design Vue
Install
----
- Start the MySQL and Redis services
### System overview
- Create the database cmdb
- Clone the code
```bash
git clone https://github.com/pycook/cmdb.git
cd cmdb
cp api/settings.py.example api/settings.py
```
**Configure the database settings in api/settings.py**
<table style="border-collapse: collapse;">
<tr>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/6d2df835-ae93-4d91-9bd9-213c270eca7a"/>
</td>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/cb8b598a-a1f9-4c74-adf1-6e59aea2c9b3"/>
</td>
</tr>
- Install dependencies
- Backend: ```pipenv run pipenv install```
- Frontend: ```cd ui && yarn install && cd ..```
- Create the database tables: ```pipenv run flask db-setup && pipenv run flask init-cache```
- Alternatively, import docs/cmdb.sql into the database; the login username and password are both admin
- Start the services
- Backend: ```pipenv run flask run -h 0.0.0.0```
- Frontend: ```cd ui && yarn run serve```
- worker: ```celery worker -A celery_worker.celery -E -Q cmdb_async --concurrency=1```
- Open in a browser: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- For access from a machine other than localhost, change the IP address in **VUE_APP_API_BASE_URL** in **ui/.env** to the IP address of the backend service
<tr>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/b440224f-53c3-4b7f-a9be-285d7a4b848f"/>
</td>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/f457d5a0-b60b-4949-b94e-020f4c61444b"/>
</td>
</tr>
</table>
## Follow us
Install by Makefile
----
- Start the MySQL and Redis services
Star and follow the project to get updates as soon as they land.
- Create the database cmdb
- Clone the code
```bash
git clone https://github.com/pycook/cmdb.git
cd cmdb
cp api/settings.py.example api/settings.py
```
**Configure the database settings in api/settings.py**
![star us](https://github.com/user-attachments/assets/f9056d5a-171c-4f53-9fec-d40c9e5ff94d)
- Run the following in order in the cmdb directory
- Environment: ```make env```
- Start the API: ```make api```
- Start the UI: ```make ui```
- Start the worker: ```make worker```
## Quick start
### 1. Setup
----
_**You are welcome to join the CMDB operations development QQ group: 336164978**_
+ Option 1: one-click build with Docker
- Step 1: install Docker and Docker Compose (v2)
- Step 2: clone the project code, `git clone https://github.com/veops/cmdb.git`
- Step 3: enter the project root and start the stack, `docker compose up -d`
+ Option 2: [local development setup](docs/local.md)
+ Option 3: [install with the Makefile](docs/makefile.md)
### 2. Access
- Open a browser and visit: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- Username: demo or admin
- Password: 123456
## Adopters
+ Companies and teams using this open-source CMDB are welcome to register in [#112](https://github.com/veops/cmdb/issues/112)
## Contributing
We welcome all developers to contribute code to improve and extend this project. Please read our [contributing guide](docs/CONTRIBUTING.md) first. You can also support Veops open source through social media, events, and sharing.
<a href="https://github.com/veops/cmdb/graphs/contributors">
<img src="https://contrib.rocks/image?repo=veops/cmdb" />
</a>
## More open source
- [OneTerm](https://github.com/veops/oneterm): a simple, lightweight, and flexible bastion host service
- [messenger](https://github.com/veops/messenger): a simple and lightweight message delivery service
- [ACL](https://github.com/veops/acl): the design and practice of a simple, general-purpose permission management system
## Related articles
- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">Design and practice of a general-purpose operations CMDB (part 1): overview</a>
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">Design and practice of a general-purpose operations CMDB (part 2): auto discovery</a>
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">CMDB API documentation</a>
More articles are available on the WeChat official account **维易科技OneOps**.
## Contact us
+ Email: <a href="mailto:bd@veops.cn">bd@veops.cn</a>
+ WeChat official account: **维易科技OneOps**. Follow it to join the WeChat group for product and technical discussions.
<img src="docs/images/wechat.png" alt="WeChat official account: 维易科技OneOps" />
![QQ group](ui/public/qr_code.jpg)

View File

@ -1,40 +1,40 @@
# -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
import datetime
import decimal
import logging
import os
import sys
from inspect import getmembers
from logging.handlers import RotatingFileHandler
from pathlib import Path
from flask import Flask
from flask import jsonify
from flask import make_response
from flask import request
from flask import make_response, jsonify
from flask.blueprints import Blueprint
from flask.cli import click
from flask.json.provider import DefaultJSONProvider
from flask_babel.speaklater import LazyString
import api.views.entry
from api.extensions import (bcrypt, babel, cache, celery, cors, db, es, login_manager, migrate, rd)
from api.extensions import inner_secrets
from api.lib.perm.authentication.cas import CAS
from api.lib.perm.authentication.oauth2 import OAuth2
from api.lib.secrets.secrets import InnerKVManger
import api.views
from api.extensions import (
bcrypt,
cors,
cache,
db,
login_manager,
migrate,
celery,
rd,
es
)
from api.flask_cas import CAS
from api.models.acl import User
HERE = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.join(HERE, os.pardir)
BASE_DIR = Path(__file__).resolve().parent.parent
API_PACKAGE = "api"
@login_manager.user_loader
def load_user(user_id):
"""Load user by ID."""
return User.get_by_id(int(user_id))
return User.get_by(uid=int(user_id), first=True, to_dict=False)
class ReverseProxy(object):
@ -72,79 +72,40 @@ class ReverseProxy(object):
return self.app(environ, start_response)
class MyJSONEncoder(DefaultJSONProvider):
def default(self, o):
if isinstance(o, (decimal.Decimal, datetime.date, datetime.time, LazyString)):
return str(o)
if isinstance(o, datetime.datetime):
return o.strftime('%Y-%m-%d %H:%M:%S')
return o
def create_app(config_object="settings"):
def create_app(config_object="{0}.settings".format(API_PACKAGE)):
"""Create application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split(".")[0])
app.config.from_object(config_object)
app.json = MyJSONEncoder(app)
configure_logger(app)
register_extensions(app)
register_blueprints(app)
register_error_handlers(app)
register_shell_context(app)
register_commands(app)
configure_logger(app)
CAS(app)
OAuth2(app)
app.wsgi_app = ReverseProxy(app.wsgi_app)
configure_upload_dir(app)
return app
def configure_upload_dir(app):
upload_dir = app.config.get('UPLOAD_DIRECTORY', 'uploaded_files')
common_setting_path = os.path.join(HERE, upload_dir)
for path in [common_setting_path]:
if not os.path.exists(path):
app.logger.warning(f"{path}, not exist, create...")
os.makedirs(path)
app.config['UPLOAD_DIRECTORY_FULL'] = common_setting_path
def register_extensions(app):
"""Register Flask extensions."""
def get_locale():
accept_languages = app.config.get('ACCEPT_LANGUAGES', ['en', 'zh'])
return request.accept_languages.best_match(accept_languages)
bcrypt.init_app(app)
babel.init_app(app, locale_selector=get_locale)
cache.init_app(app)
db.init_app(app)
cors.init_app(app)
login_manager.init_app(app)
migrate.init_app(app, db, directory=f"{BASE_DIR}/migrations")
migrate.init_app(app, db)
rd.init_app(app)
if app.config.get('USE_ES'):
if app.config.get("USE_ES"):
es.init_app(app)
app.config.update(app.config.get("CELERY"))
celery.conf.update(app.config)
if app.config.get('SECRETS_ENGINE') == 'inner':
with app.app_context():
inner_secrets.init_app(app, InnerKVManger())
def register_blueprints(app):
for item in getmembers(api.views.entry):
for item in getmembers(api.views):
if item[0].startswith("blueprint") and isinstance(item[1], Blueprint):
app.register_blueprint(item[1])
@ -157,14 +118,10 @@ def register_error_handlers(app):
import traceback
app.logger.error(traceback.format_exc())
error_code = getattr(error, "code", 500)
if not str(error_code).isdigit():
error_code = 400
return make_response(jsonify(message=str(error)), error_code)
for errcode in app.config.get("ERROR_CODES") or [400, 401, 403, 404, 405, 500, 502]:
app.errorhandler(errcode)(render_error)
app.handle_exception = render_error
@ -183,8 +140,9 @@ def register_commands(app):
for root, _, files in os.walk(os.path.join(HERE, "commands")):
for filename in files:
if not filename.startswith("_") and filename.endswith("py"):
if root not in sys.path:
sys.path.insert(1, root)
module_path = os.path.join(API_PACKAGE, root[root.index("commands"):])
if module_path not in sys.path:
sys.path.insert(1, module_path)
command = __import__(os.path.splitext(filename)[0])
func_list = [o[0] for o in getmembers(command) if isinstance(o[1], click.core.Command)]
for func_name in func_list:
@ -202,11 +160,10 @@ def configure_logger(app):
app.logger.addHandler(handler)
log_file = app.config['LOG_PATH']
if log_file and log_file != "/dev/stdout":
file_handler = RotatingFileHandler(log_file,
maxBytes=2 ** 30,
backupCount=7)
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
file_handler.setFormatter(formatter)
app.logger.addHandler(file_handler)
file_handler = RotatingFileHandler(log_file,
maxBytes=2 ** 30,
backupCount=7)
file_handler.setLevel(getattr(logging, app.config['LOG_LEVEL']))
file_handler.setFormatter(formatter)
app.logger.addHandler(file_handler)
app.logger.setLevel(getattr(logging, app.config['LOG_LEVEL']))
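The MyJSONEncoder provider defined above normalizes non-JSON-native values before responses are serialized. Below is a minimal standalone sketch of those serialization rules, added here for illustration only; it uses the stdlib json module rather than Flask's DefaultJSONProvider, and checks datetime before date so the '%Y-%m-%d %H:%M:%S' formatting is the one that takes effect.
```python
# Sketch of the serialization rules: Decimal/date/time become plain strings,
# datetimes are formatted as 'YYYY-MM-DD HH:MM:SS'.
import datetime
import decimal
import json


def to_jsonable(o):
    if isinstance(o, datetime.datetime):
        return o.strftime('%Y-%m-%d %H:%M:%S')
    if isinstance(o, (decimal.Decimal, datetime.date, datetime.time)):
        return str(o)
    raise TypeError("not JSON serializable: {!r}".format(o))


doc = {"price": decimal.Decimal("3.14"),
       "created_at": datetime.datetime(2019, 11, 30, 23, 7, 12)}
print(json.dumps(doc, default=to_jsonable))
# {"price": "3.14", "created_at": "2019-11-30 23:07:12"}
```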

View File

@ -0,0 +1,76 @@
# -*- coding:utf-8 -*-
import json
import click
from flask import current_app
from flask.cli import with_appcontext
import api.lib.cmdb.ci
from api.extensions import db
from api.extensions import rd
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import ValueTypeEnum
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
@click.command()
@with_appcontext
def init_cache():
db.session.remove()
if current_app.config.get("USE_ES"):
from api.extensions import es
from api.models.cmdb import Attribute
from api.lib.cmdb.utils import ValueTypeMap
attributes = Attribute.get_by(to_dict=False)
for attr in attributes:
other = dict()
other['index'] = True if attr.is_index else False
if attr.value_type == ValueTypeEnum.TEXT:
other['analyzer'] = 'ik_max_word'
other['search_analyzer'] = 'ik_smart'
if attr.is_index:
other["fields"] = {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
try:
es.update_mapping(attr.name, ValueTypeMap.es_type[attr.value_type], other)
except Exception as e:
print(e)
cis = CI.get_by(to_dict=False)
for ci in cis:
if current_app.config.get("USE_ES"):
res = es.get_index_id(ci.id)
if res:
continue
else:
res = rd.get([ci.id], REDIS_PREFIX_CI)
if res and list(filter(lambda x: x, res)):
continue
m = api.lib.cmdb.ci.CIManager()
ci_dict = m.get_ci_by_id_from_db(ci.id, need_children=False, use_master=False)
if current_app.config.get("USE_ES"):
es.create(ci_dict)
else:
rd.create_or_update({ci.id: json.dumps(ci_dict)}, REDIS_PREFIX_CI)
ci_relations = CIRelation.get_by(to_dict=False)
relations = dict()
for cr in ci_relations:
relations.setdefault(cr.first_ci_id, []).append(cr.second_ci_id)
for i in relations:
relations[i] = json.dumps(relations[i])
if relations:
rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
db.session.remove()
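Before writing to Redis, the init-cache command above folds the CIRelation rows into an adjacency map keyed by first_ci_id, with each value JSON-encoded. The following is a tiny standalone illustration of that grouping step; the (first, second) pairs are hypothetical stand-ins for CIRelation rows.
```python
# Fold (first_ci_id, second_ci_id) pairs into an adjacency map whose values
# are JSON-encoded lists, the shape handed to rd.create_or_update above.
import json

ci_relations = [(1, 2), (1, 3), (2, 4)]  # hypothetical CIRelation rows

relations = dict()
for first_ci_id, second_ci_id in ci_relations:
    relations.setdefault(first_ci_id, []).append(second_ci_id)
for i in relations:
    relations[i] = json.dumps(relations[i])

print(relations)  # {1: '[2, 3]', 2: '[4]'}
```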

View File

@ -5,7 +5,9 @@ from glob import glob
from subprocess import call
import click
from flask import current_app
from flask.cli import with_appcontext
from werkzeug.exceptions import MethodNotAllowed, NotFound
from api.extensions import db
@ -76,65 +78,75 @@ def clean():
"""
for dirpath, dirnames, filenames in os.walk("."):
for filename in filenames:
if filename.endswith(".pyc") or filename.endswith(".pyo") or filename.endswith(".c"):
if filename.endswith(".pyc") or filename.endswith(".pyo"):
full_pathname = os.path.join(dirpath, filename)
click.echo("Removing {}".format(full_pathname))
os.remove(full_pathname)
@click.command()
@click.option("--url", default=None, help="Url to test (ex. /static/image.png)")
@click.option(
"--order", default="rule", help="Property on Rule to order by (default: rule)"
)
@with_appcontext
def urls(url, order):
"""Display all of the url matching routes for the project.
Borrowed from Flask-Script, converted to use Click.
"""
rows = []
column_headers = ("Rule", "Endpoint", "Arguments")
if url:
try:
rule, arguments = current_app.url_map.bind("localhost").match(
url, return_rule=True
)
rows.append((rule.rule, rule.endpoint, arguments))
column_length = 3
except (NotFound, MethodNotAllowed) as e:
rows.append(("<{}>".format(e), None, None))
column_length = 1
else:
rules = sorted(
current_app.url_map.iter_rules(), key=lambda x: getattr(x, order)
)
for rule in rules:
rows.append((rule.rule, rule.endpoint, None))
column_length = 2
str_template = ""
table_width = 0
if column_length >= 1:
max_rule_length = max(len(r[0]) for r in rows)
max_rule_length = max_rule_length if max_rule_length > 4 else 4
str_template += "{:" + str(max_rule_length) + "}"
table_width += max_rule_length
if column_length >= 2:
max_endpoint_length = max(len(str(r[1])) for r in rows)
max_endpoint_length = max_endpoint_length if max_endpoint_length > 8 else 8
str_template += " {:" + str(max_endpoint_length) + "}"
table_width += 2 + max_endpoint_length
if column_length >= 3:
max_arguments_length = max(len(str(r[2])) for r in rows)
max_arguments_length = max_arguments_length if max_arguments_length > 9 else 9
str_template += " {:" + str(max_arguments_length) + "}"
table_width += 2 + max_arguments_length
click.echo(str_template.format(*column_headers[:column_length]))
click.echo("-" * table_width)
for row in rows:
click.echo(str_template.format(*row[:column_length]))
@click.command()
@with_appcontext
def db_setup():
"""create tables
"""
db.create_all()
try:
db.session.execute("set global sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,"
"ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'")
db.session.commit()
except:
pass
try:
db.session.execute("set global tidb_enable_noop_functions='ON'")
db.session.commit()
except:
pass
@click.group()
def translate():
"""Translation and localization commands."""
@translate.command()
@click.argument('lang')
def init(lang):
"""Initialize a new language."""
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
raise RuntimeError('extract command failed')
if os.system(
'pybabel init -i messages.pot -d api/translations -l ' + lang):
raise RuntimeError('init command failed')
os.remove('messages.pot')
@translate.command()
def update():
"""Update all languages."""
if os.system('pybabel extract -F babel.cfg -k _l -o messages.pot .'):
raise RuntimeError('extract command failed')
if os.system('pybabel update -i messages.pot -d api/translations'):
raise RuntimeError('update command failed')
os.remove('messages.pot')
@translate.command()
def compile():
"""Compile all languages."""
if os.system('pybabel compile -d api/translations'):
raise RuntimeError('compile command failed')
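The urls command earlier in this file sizes each column from its longest value and assembles a format string before printing the route table. Below is a self-contained sketch of that width-driven formatting, included for illustration; the routes listed are hypothetical.
```python
# Compute each column's width from its longest value (header included),
# build a format string, then print the header, a separator, and the rows.
rows = [
    ("/api/v0.1/ci/<int:ci_id>", "cmdb.ci", None),
    ("/api/v0.1/attributes", "cmdb.attributes", None),
]
headers = ("Rule", "Endpoint", "Arguments")

widths = [max(len(str(r[i])) for r in rows + [headers]) for i in range(len(headers))]
template = "  ".join("{:%d}" % w for w in widths)

print(template.format(*headers))
print("-" * (sum(widths) + 2 * (len(widths) - 1)))
for row in rows:
    print(template.format(*(str(c) for c in row)))
```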

View File

@ -2,7 +2,6 @@
from celery import Celery
from flask_babel import Babel
from flask_bcrypt import Bcrypt
from flask_caching import Cache
from flask_cors import CORS
@ -10,18 +9,15 @@ from flask_login import LoginManager
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from api.lib.secrets.inner import KeyManage
from api.lib.utils import ESHandler
from api.lib.utils import RedisHandler
bcrypt = Bcrypt()
babel = Babel()
login_manager = LoginManager()
db = SQLAlchemy(session_options={"autoflush": False})
db = SQLAlchemy()
migrate = Migrate()
cache = Cache()
celery = Celery()
cors = CORS(supports_credentials=True)
rd = RedisHandler()
es = ESHandler()
inner_secrets = KeyManage()

View File

@ -15,7 +15,7 @@ try:
except ImportError:
from flask import _request_ctx_stack as stack
from . import routing
from api.flask_cas import routing
class CAS(object):
@ -75,4 +75,4 @@ class CAS(object):
@property
def token(self):
return flask.session.get(
self.app.config['CAS_TOKEN_SESSION_KEY'], None)
self.app.config['CAS_TOKEN_SESSION_KEY'], None)

View File

@ -68,7 +68,7 @@ def create_cas_login_url(cas_url, cas_route, service,
('service', service),
('renew', renew),
('gateway', gateway),
)
)
def create_cas_logout_url(cas_url, cas_route, url=None):
@ -91,7 +91,7 @@ def create_cas_logout_url(cas_url, cas_route, url=None):
cas_url,
cas_route,
('service', url),
)
)
def create_cas_validate_url(cas_url, cas_route, service, ticket,
@ -119,4 +119,4 @@ def create_cas_validate_url(cas_url, cas_route, service, ticket,
('service', service),
('ticket', ticket),
('renew', renew),
)
)

View File

@ -1,24 +1,14 @@
# -*- coding:utf-8 -*-
import datetime
import uuid
import json
import bs4
from flask import Blueprint
from flask import current_app
from flask import redirect
from flask import request
from flask import session
from flask import url_for
from flask_login import login_user
from flask_login import logout_user
from six.moves.urllib.parse import urlparse
from flask import current_app, session, request, url_for, redirect
from flask_login import login_user, logout_user
from six.moves.urllib_request import urlopen
from api.lib.common_setting.common_data import AuthenticateDataCRUD
from api.lib.common_setting.const import AuthenticateType
from api.lib.perm.acl.audit import AuditCRUD
from api.lib.perm.acl.cache import UserCache
from api.lib.perm.acl.resp_format import ErrFormat
from .cas_urls import create_cas_login_url
from .cas_urls import create_cas_logout_url
from .cas_urls import create_cas_validate_url
@ -26,7 +16,6 @@ from .cas_urls import create_cas_validate_url
blueprint = Blueprint('cas', __name__)
@blueprint.route('/api/cas/login')
@blueprint.route('/api/sso/login')
def login():
"""
@ -40,20 +29,16 @@ def login():
If validation was successful the logged in username is saved in
the user's session under the key `CAS_USERNAME_SESSION_KEY`.
"""
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
if request.values.get("next"):
session["next"] = request.values.get("next")
# _service = url_for('cas.login', _external=True)
_service = "{}://{}{}".format(urlparse(request.referrer).scheme,
urlparse(request.referrer).netloc,
url_for('cas.login'))
_service = url_for('cas.login', _external=True, next=session["next"]) \
if session.get("next") else url_for('cas.login', _external=True)
redirect_url = create_cas_login_url(
config['cas_server'],
config['cas_login_route'],
current_app.config['CAS_SERVER'],
current_app.config['CAS_LOGIN_ROUTE'],
_service)
if 'ticket' in request.args:
@ -62,38 +47,30 @@ def login():
if request.args.get('ticket'):
if validate(request.args['ticket']):
redirect_url = session.get("next") or config.get("cas_after_login") or "/"
redirect_url = session.get("next") or \
current_app.config.get("CAS_AFTER_LOGIN")
username = session.get("CAS_USERNAME")
user = UserCache.get(username)
login_user(user)
session.permanent = True
_id = AuditCRUD.add_login_log(username, True, ErrFormat.login_succeed)
session['LOGIN_ID'] = _id
else:
del session[cas_token_session_key]
redirect_url = create_cas_login_url(
config['cas_server'],
config['cas_login_route'],
current_app.config['CAS_SERVER'],
current_app.config['CAS_LOGIN_ROUTE'],
url_for('cas.login', _external=True),
renew=True)
AuditCRUD.add_login_log(session.get("CAS_USERNAME"), False, ErrFormat.invalid_password)
current_app.logger.info("redirect to: {0}".format(redirect_url))
return redirect(redirect_url)
@blueprint.route('/api/cas/logout')
@blueprint.route('/api/sso/logout')
def logout():
"""
When the user accesses this route they are logged out.
"""
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
current_app.logger.info(config)
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
cas_token_session_key = current_app.config['CAS_TOKEN_SESSION_KEY']
@ -105,14 +82,12 @@ def logout():
"next" in session and session.pop("next")
redirect_url = create_cas_logout_url(
config['cas_server'],
config['cas_logout_route'],
current_app.config['CAS_SERVER'],
current_app.config['CAS_LOGOUT_ROUTE'],
url_for('cas.login', _external=True, next=request.referrer))
logout_user()
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
current_app.logger.debug('Redirecting to: {0}'.format(redirect_url))
return redirect(redirect_url)
@ -125,15 +100,14 @@ def validate(ticket):
and the validated username is saved in the session under the
key `CAS_USERNAME_SESSION_KEY`.
"""
config = AuthenticateDataCRUD(AuthenticateType.CAS).get()
cas_username_session_key = current_app.config['CAS_USERNAME_SESSION_KEY']
current_app.logger.debug("validating token {0}".format(ticket))
cas_validate_url = create_cas_validate_url(
config['cas_validate_server'],
config['cas_validate_route'],
current_app.config['CAS_VALIDATE_SERVER'],
current_app.config['CAS_VALIDATE_ROUTE'],
url_for('cas.login', _external=True),
ticket)
@ -141,49 +115,32 @@ def validate(ticket):
try:
response = urlopen(cas_validate_url).read()
ticket_id = _parse_tag(response, "cas:user")
strs = [s.strip() for s in ticket_id.split('|') if s.strip()]
ticketid = _parse_tag(response, "cas:user")
strs = [s.strip() for s in ticketid.split('|') if s.strip()]
username, is_valid = None, False
if len(strs) == 1:
username = strs[0]
is_valid = True
user_info = json.loads(_parse_tag(response, "cas:other"))
current_app.logger.info(user_info)
except ValueError:
current_app.logger.error("CAS returned unexpected result")
is_valid = False
return is_valid
if is_valid:
current_app.logger.debug("{}: {}".format(cas_username_session_key, username))
current_app.logger.debug("valid")
session[cas_username_session_key] = username
user = UserCache.get(username)
if user is None:
current_app.logger.info("create user: {}".format(username))
from api.lib.perm.acl.user import UserCRUD
soup = bs4.BeautifulSoup(response)
cas_user_map = config.get('cas_user_map')
user_dict = dict()
for k in cas_user_map:
v = soup.find(cas_user_map[k]['tag'], cas_user_map[k].get('attrs', {}))
user_dict[k] = v and v.text or None
user_dict['password'] = uuid.uuid4().hex
if "email" not in user_dict:
user_dict['email'] = username
UserCRUD.add(**user_dict)
from api.lib.perm.acl.acl import ACLManager
user_info = ACLManager.get_user_info(username)
session["acl"] = dict(uid=user_info.get("uid"),
session["acl"] = dict(uid=user_info.get("uuid"),
avatar=user.avatar if user else user_info.get("avatar"),
userId=user_info.get("uid"),
rid=user_info.get("rid"),
userName=user_info.get("username"),
userId=user_info.get("id"),
userName=user_info.get("name"),
nickName=user_info.get("nickname"),
parentRoles=user_info.get("parents"),
childRoles=user_info.get("children"),
roleName=user_info.get("role"))
session["uid"] = user_info.get("uid")
session["uid"] = user_info.get("uuid")
current_app.logger.debug(session)
current_app.logger.debug(request.url)
else:
@ -194,7 +151,7 @@ def validate(ticket):
def _parse_tag(string, tag):
"""
Used for parsing xml. Search string for the first occurrence of
Used for parsing xml. Search string for the first occurence of
<tag>.....</tag> and return text (stripped of leading and tailing
whitespace) between tags. Return "" if tag not found.
"""
@ -202,5 +159,4 @@ def _parse_tag(string, tag):
if soup.find(tag) is None:
return ''
return soup.find(tag).string.strip()
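_parse_tag drives the CAS ticket validation above by pulling the first matching element out of the XML response. The snippet below is a standalone sketch of the same extraction, assuming bs4 is installed; it passes the stdlib "html.parser" backend explicitly rather than relying on bs4's default parser choice.
```python
# Find the first <tag>...</tag> in the response and return its stripped text,
# or "" when the tag is missing.
import bs4


def parse_tag(string, tag):
    soup = bs4.BeautifulSoup(string, "html.parser")
    node = soup.find(tag)
    if node is None or node.string is None:
        return ''
    return node.string.strip()


response = "<cas:serviceResponse><cas:user> admin </cas:user></cas:serviceResponse>"
print(parse_tag(response, "cas:user"))  # admin
```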

190
api/lib/cmdb/attribute.py Normal file
View File

@ -0,0 +1,190 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app
from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.decorator import kwargs_required
from api.models.cmdb import Attribute
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import PreferenceShowAttributes
class AttributeManager(object):
"""
CI attributes manager
"""
def __init__(self):
pass
@staticmethod
def get_choice_values(attr_id, value_type):
choice_table = ValueTypeMap.choice.get(value_type)
choice_values = choice_table.get_by(fl=["value"], attr_id=attr_id)
return [choice_value["value"] for choice_value in choice_values]
@staticmethod
def _add_choice_values(_id, value_type, choice_values):
choice_table = ValueTypeMap.choice.get(value_type)
db.session.query(choice_table).filter(choice_table.attr_id == _id).delete()
db.session.flush()
choice_values = choice_values
for v in choice_values:
table = choice_table(attr_id=_id, value=v)
db.session.add(table)
db.session.flush()
@classmethod
def search_attributes(cls, name=None, alias=None, page=1, page_size=None):
"""
:param name:
:param alias:
:param page:
:param page_size:
:return: attribute, if name is None, then return all attributes
"""
if name is not None:
attrs = Attribute.get_by_like(name=name)
elif alias is not None:
attrs = Attribute.get_by_like(alias=alias)
else:
attrs = Attribute.get_by()
numfound = len(attrs)
attrs = attrs[(page - 1) * page_size:][:page_size]
res = list()
for attr in attrs:
attr["is_choice"] and attr.update(dict(choice_value=cls.get_choice_values(attr["id"], attr["value_type"])))
res.append(attr)
return numfound, res
def get_attribute_by_name(self, name):
attr = Attribute.get_by(name=name, first=True)
if attr and attr["is_choice"]:
attr.update(dict(choice_value=self.get_choice_values(attr["id"], attr["value_type"])))
return attr
def get_attribute_by_alias(self, alias):
attr = Attribute.get_by(alias=alias, first=True)
if attr and attr["is_choice"]:
attr.update(dict(choice_value=self.get_choice_values(attr["id"], attr["value_type"])))
return attr
def get_attribute_by_id(self, _id):
attr = Attribute.get_by_id(_id).to_dict()
if attr and attr["is_choice"]:
attr.update(dict(choice_value=self.get_choice_values(attr["id"], attr["value_type"])))
return attr
def get_attribute(self, key):
attr = AttributeCache.get(key).to_dict()
if attr and attr["is_choice"]:
attr.update(dict(choice_value=self.get_choice_values(attr["id"], attr["value_type"])))
return attr
@classmethod
@kwargs_required("name")
def add(cls, **kwargs):
choice_value = kwargs.pop("choice_value", [])
kwargs.pop("is_choice", None)
is_choice = True if choice_value else False
name = kwargs.pop("name")
alias = kwargs.pop("alias", "")
alias = name if not alias else alias
Attribute.get_by(name=name, first=True) and abort(400, "attribute name <{0}> is duplicated".format(name))
Attribute.get_by(alias=alias, first=True) and abort(400, "attribute alias <{0}> is duplicated".format(name))
attr = Attribute.create(flush=True,
name=name,
alias=alias,
is_choice=is_choice,
**kwargs)
if choice_value:
cls._add_choice_values(attr.id, attr.value_type, choice_value)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error("add attribute error, {0}".format(str(e)))
return abort(400, "add attribute <{0}> failed".format(name))
AttributeCache.clean(attr)
if current_app.config.get("USE_ES"):
from api.extensions import es
other = dict()
other['index'] = True if attr.is_index else False
if attr.value_type == ValueTypeEnum.TEXT:
other['analyzer'] = 'ik_max_word'
other['search_analyzer'] = 'ik_smart'
if attr.is_index:
other["fields"] = {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
es.update_mapping(name, ValueTypeMap.es_type[attr.value_type], other)
return attr.id
def update(self, _id, **kwargs):
attr = Attribute.get_by_id(_id) or abort(404, "Attribute <{0}> does not exist".format(_id))
if kwargs.get("name"):
other = Attribute.get_by(name=kwargs['name'], first=True, to_dict=False)
if other and other.id != attr.id:
return abort(400, "Attribute name <{0}> cannot be duplicate!".format(kwargs['name']))
if kwargs.get("alias"):
other = Attribute.get_by(alias=kwargs['alias'], first=True, to_dict=False)
if other and other.id != attr.id:
return abort(400, "Attribute alias <{0}> cannot be duplicate!".format(kwargs['alias']))
choice_value = kwargs.pop("choice_value", False)
is_choice = True if choice_value else False
attr.update(flush=True, **kwargs)
if is_choice:
self._add_choice_values(attr.id, attr.value_type, choice_value)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error("update attribute error, {0}".format(str(e)))
return abort(400, "update attribute <{0}> failed".format(_id))
AttributeCache.clean(attr)
return attr.id
@staticmethod
def delete(_id):
attr = Attribute.get_by_id(_id) or abort(404, "Attribute <{0}> does not exist".format(_id))
name = attr.name
if attr.is_choice:
choice_table = ValueTypeMap.choice.get(attr.value_type)
db.session.query(choice_table).filter(choice_table.attr_id == _id).delete() # FIXME: session conflict
db.session.flush()
AttributeCache.clean(attr)
attr.soft_delete()
for i in CITypeAttribute.get_by(attr_id=_id, to_dict=False):
i.soft_delete()
for i in PreferenceShowAttributes.get_by(attr_id=_id, to_dict=False):
i.soft_delete()
return name
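Both AttributeManager.add above and the init-cache command build the same Elasticsearch mapping options for an attribute: indexed attributes get a keyword subfield, and TEXT attributes get the ik_max_word/ik_smart analyzers. Here is a small illustrative helper capturing that logic; the TEXT constant is a hypothetical stand-in for ValueTypeEnum.TEXT.
```python
# Build the "other" mapping options dict for one attribute.
TEXT = "text"  # hypothetical stand-in for ValueTypeEnum.TEXT


def build_es_mapping_options(value_type, is_index):
    other = dict()
    other['index'] = True if is_index else False
    if value_type == TEXT:
        # Chinese full-text analysis via the elasticsearch-analysis-ik plugin
        other['analyzer'] = 'ik_max_word'
        other['search_analyzer'] = 'ik_smart'
    if is_index:
        # indexed attributes also get an exact-match keyword subfield
        other["fields"] = {"keyword": {"type": "keyword", "ignore_above": 256}}
    return other


print(build_es_mapping_options(TEXT, True))
```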

148
api/lib/cmdb/cache.py Normal file
View File

@ -0,0 +1,148 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from api.extensions import cache
from api.models.cmdb import Attribute
from api.models.cmdb import CIType
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import RelationType
class AttributeCache(object):
PREFIX_ID = 'Field::ID::{0}'
PREFIX_NAME = 'Field::Name::{0}'
PREFIX_ALIAS = 'Field::Alias::{0}'
@classmethod
def get(cls, key):
if key is None:
return
attr = cache.get(cls.PREFIX_NAME.format(key))
attr = attr or cache.get(cls.PREFIX_ID.format(key))
attr = attr or cache.get(cls.PREFIX_ALIAS.format(key))
if attr is None:
attr = Attribute.get_by(name=key, first=True, to_dict=False)
attr = attr or Attribute.get_by_id(key)
attr = attr or Attribute.get_by(alias=key, first=True, to_dict=False)
if attr is not None:
cls.set(attr)
return attr
@classmethod
def set(cls, attr):
cache.set(cls.PREFIX_ID.format(attr.id), attr)
cache.set(cls.PREFIX_NAME.format(attr.name), attr)
cache.set(cls.PREFIX_ALIAS.format(attr.alias), attr)
@classmethod
def clean(cls, attr):
cache.delete(cls.PREFIX_ID.format(attr.id))
cache.delete(cls.PREFIX_NAME.format(attr.name))
cache.delete(cls.PREFIX_ALIAS.format(attr.alias))
class CITypeCache(object):
PREFIX_ID = "CIType::ID::{0}"
PREFIX_NAME = "CIType::Name::{0}"
PREFIX_ALIAS = "CIType::Alias::{0}"
@classmethod
def get(cls, key):
if key is None:
return
ct = cache.get(cls.PREFIX_NAME.format(key))
ct = ct or cache.get(cls.PREFIX_ID.format(key))
ct = ct or cache.get(cls.PREFIX_ALIAS.format(key))
if ct is None:
ct = CIType.get_by(name=key, first=True, to_dict=False)
ct = ct or CIType.get_by_id(key)
ct = ct or CIType.get_by(alias=key, first=True, to_dict=False)
if ct is not None:
cls.set(ct)
return ct
@classmethod
def set(cls, ct):
cache.set(cls.PREFIX_NAME.format(ct.name), ct)
cache.set(cls.PREFIX_ID.format(ct.id), ct)
cache.set(cls.PREFIX_ALIAS.format(ct.alias), ct)
@classmethod
def clean(cls, key):
ct = cls.get(key)
if ct is not None:
cache.delete(cls.PREFIX_NAME.format(ct.name))
cache.delete(cls.PREFIX_ID.format(ct.id))
cache.delete(cls.PREFIX_ALIAS.format(ct.alias))
class RelationTypeCache(object):
PREFIX_ID = "RelationType::ID::{0}"
PREFIX_NAME = "RelationType::Name::{0}"
@classmethod
def get(cls, key):
if key is None:
return
ct = cache.get(cls.PREFIX_NAME.format(key))
ct = ct or cache.get(cls.PREFIX_ID.format(key))
if ct is None:
ct = RelationType.get_by(name=key, first=True, to_dict=False) or RelationType.get_by_id(key)
if ct is not None:
cls.set(ct)
return ct
@classmethod
def set(cls, ct):
cache.set(cls.PREFIX_NAME.format(ct.name), ct)
cache.set(cls.PREFIX_ID.format(ct.id), ct)
@classmethod
def clean(cls, key):
ct = cls.get(key)
if ct is not None:
cache.delete(cls.PREFIX_NAME.format(ct.name))
cache.delete(cls.PREFIX_ID.format(ct.id))
class CITypeAttributeCache(object):
"""
key is type_id or type_name
"""
PREFIX_ID = "CITypeAttribute::ID::{0}"
PREFIX_NAME = "CITypeAttribute::Name::{0}"
@classmethod
def get(cls, key):
if key is None:
return
attrs = cache.get(cls.PREFIX_NAME.format(key))
attrs = attrs or cache.get(cls.PREFIX_ID.format(key))
if not attrs:
attrs = CITypeAttribute.get_by(type_id=key, to_dict=False)
if not attrs:
ci_type = CIType.get_by(name=key, first=True, to_dict=False)
if ci_type is not None:
attrs = CITypeAttribute.get_by(type_id=ci_type.id, to_dict=False)
if attrs is not None:
cls.set(key, attrs)
return attrs
@classmethod
def set(cls, key, values):
ci_type = CITypeCache.get(key)
if ci_type is not None:
cache.set(cls.PREFIX_ID.format(ci_type.id), values)
cache.set(cls.PREFIX_NAME.format(ci_type.name), values)
@classmethod
def clean(cls, key):
ci_type = CITypeCache.get(key)
attrs = cls.get(key)
if attrs is not None and ci_type:
cache.delete(cls.PREFIX_ID.format(ci_type.id))
cache.delete(cls.PREFIX_NAME.format(ci_type.name))
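The cache classes above all share one lookup pattern: try the name key, then the id key, then the alias key, and on a miss load the row from the database and repopulate every key. Below is a dict-backed sketch of that pattern, for illustration only; the real classes use flask_caching and the SQLAlchemy models, and FakeAttr and db_lookup are hypothetical stand-ins.
```python
# Multi-key cache lookup with a database fallback.
from collections import namedtuple

FakeAttr = namedtuple("FakeAttr", ["id", "name", "alias"])  # stand-in for Attribute
_store = {}  # stand-in for the flask_caching cache


def db_lookup(key):
    # hypothetical database fallback, consulted only after all cache keys miss
    if key in (1, "hostname", "Host Name"):
        return FakeAttr(1, "hostname", "Host Name")


def cache_get(key):
    attr = _store.get('Field::Name::{0}'.format(key))
    attr = attr or _store.get('Field::ID::{0}'.format(key))
    attr = attr or _store.get('Field::Alias::{0}'.format(key))
    if attr is None:
        attr = db_lookup(key)
        if attr is not None:
            # repopulate every key so later lookups by id, name, or alias all hit
            _store['Field::ID::{0}'.format(attr.id)] = attr
            _store['Field::Name::{0}'.format(attr.name)] = attr
            _store['Field::Alias::{0}'.format(attr.alias)] = attr
    return attr


print(cache_get("hostname").alias)  # Host Name (a second call is served from the cache)
```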

569
api/lib/cmdb/ci.py Normal file
View File

@ -0,0 +1,569 @@
# -*- coding:utf-8 -*-
import datetime
import json
from flask import abort
from flask import current_app
from werkzeug.exceptions import BadRequest
from api.extensions import db
from api.extensions import rd
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.cache import RelationTypeCache
from api.lib.cmdb.ci_type import CITypeAttributeManager
from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.cmdb.history import CIRelationHistoryManager
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CIS_BY_IDS
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CIS_BY_VALUE_TABLE
from api.lib.cmdb.utils import TableMap
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.cmdb.value import AttributeValueManager
from api.lib.decorator import kwargs_required
from api.lib.utils import handle_arg_list
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import CITypeAttribute
from api.tasks.cmdb import ci_cache
from api.tasks.cmdb import ci_delete
from api.tasks.cmdb import ci_relation_cache
from api.tasks.cmdb import ci_relation_delete
class CIManager(object):
""" manage CI interface
"""
def __init__(self):
pass
@staticmethod
def get_type_name(ci_id):
ci = CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not existed".format(ci_id))
return CITypeCache.get(ci.type_id).name
@staticmethod
def confirm_ci_existed(ci_id):
CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not existed".format(ci_id))
@classmethod
def get_ci_by_id(cls, ci_id, ret_key=RetKey.NAME, fields=None, need_children=True):
"""
:param ci_id:
:param ret_key: name, id, or alias
:param fields: attribute list
:param need_children:
:return:
"""
ci = CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not existed".format(ci_id))
res = dict()
if need_children:
children = CIRelationManager.get_children(ci_id, ret_key=ret_key) # one floor
res.update(children)
ci_type = CITypeCache.get(ci.type_id)
res["ci_type"] = ci_type.name
res.update(cls.get_cis_by_ids([str(ci_id)], fields=fields, ret_key=ret_key))
res['_type'] = ci_type.id
res['_id'] = ci_id
return res
@staticmethod
def get_ci_by_id_from_db(ci_id, ret_key=RetKey.NAME, fields=None, need_children=True, use_master=False):
"""
:param ci_id:
:param ret_key: name, id or alias
:param fields: list
:param need_children:
:param use_master: whether to use master db
:return:
"""
ci = CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not existed".format(ci_id))
res = dict()
if need_children:
children = CIRelationManager.get_children(ci_id, ret_key=ret_key) # one floor
res.update(children)
ci_type = CITypeCache.get(ci.type_id)
res["ci_type"] = ci_type.name
fields = CITypeAttributeManager.get_attr_names_by_type_id(ci.type_id) if not fields else fields
unique_key = AttributeCache.get(ci_type.unique_id)
_res = AttributeValueManager().get_attr_values(fields,
ci_id,
ret_key=ret_key,
unique_key=unique_key,
use_master=use_master)
res.update(_res)
res['type_id'] = ci_type.id
res['ci_id'] = ci_id
return res
def get_ci_by_ids(self, ci_id_list, ret_key=RetKey.NAME, fields=None):
return [self.get_ci_by_id(ci_id, ret_key=ret_key, fields=fields) for ci_id in ci_id_list]
@classmethod
def get_cis_by_type(cls, type_id, ret_key=RetKey.NAME, fields="", page=1, per_page=None):
cis = db.session.query(CI.id).filter(CI.type_id == type_id).filter(CI.deleted.is_(False))
numfound = cis.count()
cis = cis.offset((page - 1) * per_page).limit(per_page)
ci_ids = [str(ci.id) for ci in cis]
res = cls.get_cis_by_ids(ci_ids, ret_key, fields)
return numfound, page, res
@staticmethod
def ci_is_exist(unique_key, unique_value):
"""
:param unique_key: an attribute object (the CIType's unique attribute)
:param unique_value:
:return:
"""
value_table = TableMap(attr_name=unique_key.name).table
unique = value_table.get_by(attr_id=unique_key.id,
value=unique_value,
to_dict=False,
first=True)
if unique:
return CI.get_by_id(unique.ci_id)
@staticmethod
def _delete_ci_by_id(ci_id):
ci = CI.get_by_id(ci_id)
ci.delete() # TODO: soft delete
@classmethod
def add(cls, ci_type_name, exist_policy=ExistPolicy.REPLACE, _no_attribute_policy=ExistPolicy.IGNORE, **ci_dict):
"""
:param ci_type_name:
:param exist_policy: replace or reject or need
:param _no_attribute_policy: ignore or reject
:param ci_dict:
:return:
"""
ci_type = CITypeManager.check_is_existed(ci_type_name)
unique_key = AttributeCache.get(ci_type.unique_id) or abort(400, 'illegal unique attribute')
unique_value = ci_dict.get(unique_key.name)
unique_value = unique_value or ci_dict.get(unique_key.alias)
unique_value = unique_value or ci_dict.get(unique_key.id)
unique_value = unique_value or abort(400, '{0} missing'.format(unique_key.name))
existed = cls.ci_is_exist(unique_key, unique_value)
if existed is not None:
if exist_policy == ExistPolicy.REJECT:
return abort(400, 'CI is already existed')
if existed.type_id != ci_type.id:
existed.update(type_id=ci_type.id)
ci = existed
else:
if exist_policy == ExistPolicy.NEED:
return abort(404, 'CI <{0}> does not exist'.format(unique_value))
ci = CI.create(type_id=ci_type.id)
value_manager = AttributeValueManager()
for p, v in ci_dict.items():
try:
value_manager.create_or_update_attr_value(p, v, ci.id, _no_attribute_policy)
except BadRequest as e:
if existed is None:
cls.delete(ci.id)
raise e
ci_cache.apply_async([ci.id], queue=CMDB_QUEUE)
return ci.id
def update(self, ci_id, **ci_dict):
self.confirm_ci_existed(ci_id)
value_manager = AttributeValueManager()
for p, v in ci_dict.items():
try:
value_manager.create_or_update_attr_value(p, v, ci_id)
except BadRequest as e:
raise e
ci_cache.apply_async([ci_id], queue=CMDB_QUEUE)
@staticmethod
def update_unique_value(ci_id, unique_name, unique_value):
CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not found".format(ci_id))
AttributeValueManager().create_or_update_attr_value(unique_name, unique_value, ci_id)
ci_cache.apply_async([ci_id], queue=CMDB_QUEUE)
@staticmethod
def delete(ci_id):
ci = CI.get_by_id(ci_id) or abort(404, "CI <{0}> is not found".format(ci_id))
attrs = CITypeAttribute.get_by(type_id=ci.type_id, to_dict=False)
attr_names = set([AttributeCache.get(attr.attr_id).name for attr in attrs])
for attr_name in attr_names:
value_table = TableMap(attr_name=attr_name).table
for item in value_table.get_by(ci_id=ci_id, to_dict=False):
item.delete()
for item in CIRelation.get_by(first_ci_id=ci_id, to_dict=False):
ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
item.delete()
for item in CIRelation.get_by(second_ci_id=ci_id, to_dict=False):
ci_relation_delete.apply_async(args=(item.first_ci_id, item.second_ci_id), queue=CMDB_QUEUE)
item.delete()
ci.delete() # TODO: soft delete
AttributeHistoryManger.add(ci_id, [(None, OperateType.DELETE, None, None)])
ci_delete.apply_async([ci.id], queue=CMDB_QUEUE)
return ci_id
@staticmethod
def add_heartbeat(ci_type, unique_value):
ci_type = CITypeManager().check_is_existed(ci_type)
unique_key = AttributeCache.get(ci_type.unique_id)
value_table = TableMap(attr_name=unique_key.name).table
v = value_table.get_by(attr_id=unique_key.id,
value=unique_value,
to_dict=False,
first=True) \
or abort(404, "not found")
ci = CI.get_by_id(v.ci_id) or abort(404, "CI <{0}> is not found".format(v.ci_id))
ci.update(heartbeat=datetime.datetime.now())
@classmethod
@kwargs_required("type_id", "page")
def get_heartbeat(cls, **kwargs):
query = db.session.query(CI.id, CI.heartbeat).filter(CI.deleted.is_(False))
expire = datetime.datetime.now() - datetime.timedelta(minutes=72)
type_ids = handle_arg_list(kwargs["type_id"])
query = query.filter(CI.type_id.in_(type_ids))
page = kwargs.get("page")
agent_status = kwargs.get("agent_status")
if agent_status == -1:
query = query.filter(CI.heartbeat.is_(None))
elif agent_status == 0:
query = query.filter(CI.heartbeat <= expire)
elif agent_status == 1:
query = query.filter(CI.heartbeat > expire)
numfound = query.count()
per_page_count = current_app.config.get("DEFAULT_PAGE_COUNT")
cis = query.offset((page - 1) * per_page_count).limit(per_page_count).all()
ci_ids = [ci.id for ci in cis]
heartbeat_dict = {}
for ci in cis:
if agent_status is not None:
heartbeat_dict[ci.id] = agent_status
else:
if ci.heartbeat is None:
heartbeat_dict[ci.id] = -1
elif ci.heartbeat <= expire:
heartbeat_dict[ci.id] = 0
else:
heartbeat_dict[ci.id] = 1
current_app.logger.debug(heartbeat_dict)
ci_ids = list(map(str, ci_ids))
res = cls.get_cis_by_ids(ci_ids, fields=["hostname", "private_ip"])
result = [(i.get("hostname"), i.get("private_ip")[0], i.get("ci_type"),
heartbeat_dict.get(i.get("_id"))) for i in res
if i.get("private_ip")]
return numfound, result
@staticmethod
def _get_cis_from_cache(ci_ids, ret_key=RetKey.NAME, fields=None):
res = rd.get(ci_ids, REDIS_PREFIX_CI)
if res is not None and None not in res and ret_key == RetKey.NAME:
res = list(map(json.loads, res))
if not fields:
return res
else:
_res = []
for d in res:
_d = dict()
_d["_id"], _d["_type"] = d.get("_id"), d.get("_type")
_d["ci_type"] = d.get("ci_type")
for field in fields:
_d[field] = d.get(field)
_res.append(_d)
return _res
@staticmethod
def _get_cis_from_db(ci_ids, ret_key=RetKey.NAME, fields=None, value_tables=None):
if not fields:
filter_fields_sql = ""
else:
_fields = list()
for field in fields:
attr = AttributeCache.get(field)
if attr is not None:
_fields.append(str(attr.id))
filter_fields_sql = "WHERE A.attr_id in ({0})".format(",".join(_fields))
ci_ids = ",".join(ci_ids)
if value_tables is None:
value_tables = ValueTypeMap.table_name.values()
value_sql = " UNION ".join([QUERY_CIS_BY_VALUE_TABLE.format(value_table, ci_ids)
for value_table in value_tables])
query_sql = QUERY_CIS_BY_IDS.format(filter_fields_sql, value_sql)
# current_app.logger.debug(query_sql)
cis = db.session.execute(query_sql).fetchall()
ci_set = set()
res = list()
ci_dict = dict()
for ci_id, type_id, attr_id, attr_name, attr_alias, value, value_type, is_list in cis:
if ci_id not in ci_set:
ci_dict = dict()
ci_type = CITypeCache.get(type_id)
ci_dict["_id"] = ci_id
ci_dict["_type"] = type_id
ci_dict["ci_type"] = ci_type.name
ci_dict["ci_type_alias"] = ci_type.alias
ci_set.add(ci_id)
res.append(ci_dict)
if ret_key == RetKey.NAME:
attr_key = attr_name
elif ret_key == RetKey.ALIAS:
attr_key = attr_alias
elif ret_key == RetKey.ID:
attr_key = attr_id
else:
return abort(400, "invalid ret key")
value = ValueTypeMap.serialize2[value_type](value)
if is_list:
ci_dict.setdefault(attr_key, []).append(value)
else:
ci_dict[attr_key] = value
return res
@classmethod
def get_cis_by_ids(cls, ci_ids, ret_key=RetKey.NAME, fields=None, value_tables=None):
"""
:param ci_ids: list of CI instance ID, eg. ['1', '2']
:param ret_key: name, id or alias
:param fields:
:param value_tables:
:return:
"""
if not ci_ids:
return []
fields = [] if fields is None or not isinstance(fields, list) else fields
ci_id_tuple = tuple(map(int, ci_ids))
res = cls._get_cis_from_cache(ci_id_tuple, ret_key, fields)
if res is not None:
return res
current_app.logger.warning("cache not hit...............")
return cls._get_cis_from_db(ci_ids, ret_key, fields, value_tables)
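CIManager.add upserts a CI keyed by its CIType's unique attribute; exist_policy decides whether an existing CI is replaced, rejected, or required. A hedged sketch of the typical calls, assuming a "server" type whose unique attribute is hostname, plus a running cmdb_async worker for the cache tasks:

# create or replace the CI whose hostname is "web-01"
ci_id = CIManager.add("server",
                      exist_policy=ExistPolicy.REPLACE,
                      hostname="web-01",
                      private_ip="10.0.0.1")
# read it back; ret_key controls whether keys are attribute names, ids or aliases
ci = CIManager.get_ci_by_id(ci_id, ret_key=RetKey.NAME, need_children=False)
# change one value, then remove the CI together with its values and relations
CIManager().update(ci_id, private_ip="10.0.0.2")
CIManager.delete(ci_id)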
class CIRelationManager(object):
"""
Manage relation between CIs
"""
def __init__(self):
pass
@staticmethod
def _get_default_relation_type():
return RelationTypeCache.get("contain").id # FIXME
@classmethod
def get_children(cls, ci_id, ret_key=RetKey.NAME):
second_cis = CIRelation.get_by(first_ci_id=ci_id, to_dict=False)
second_ci_ids = (second_ci.second_ci_id for second_ci in second_cis)
ci_type2ci_ids = dict()
for ci_id in second_ci_ids:
type_id = CI.get_by_id(ci_id).type_id
ci_type2ci_ids.setdefault(type_id, []).append(ci_id)
res = {}
for type_id in ci_type2ci_ids:
ci_type = CITypeCache.get(type_id)
children = CIManager.get_cis_by_ids(list(map(str, ci_type2ci_ids[type_id])), ret_key=ret_key)
res[ci_type.name] = children
return res
def get_second_cis(self, first_ci_id, relation_type_id=None, page=1, per_page=None, **kwargs):
second_cis = db.session.query(CI.id).filter(CI.deleted.is_(False)).join(
CIRelation, CIRelation.second_ci_id == CI.id).filter(
CIRelation.first_ci_id == first_ci_id)
if relation_type_id is not None:
second_cis = second_cis.filter(CIRelation.relation_type_id == relation_type_id)
if kwargs: # TODO: special for devices
second_cis = self._query_wrap_for_device(second_cis, **kwargs)
numfound = second_cis.count()
if per_page != "all":
second_cis = second_cis.offset((page - 1) * per_page).limit(per_page).all()
ci_ids = [str(son.id) for son in second_cis]
result = CIManager.get_cis_by_ids(ci_ids)
return numfound, len(ci_ids), result
@staticmethod
def _sort_handler(sort_by, query_sql):
if sort_by.startswith("+"):
sort_type = "asc"
sort_by = sort_by[1:]
elif sort_by.startswith("-"):
sort_type = "desc"
sort_by = sort_by[1:]
else:
sort_type = "asc"
attr = AttributeCache.get(sort_by)
if attr is None:
return query_sql
attr_id = attr.id
value_table = TableMap(attr_name=sort_by).table
ci_table = query_sql.subquery()
query_sql = db.session.query(ci_table.c.id, value_table.value).join(
value_table, value_table.ci_id == ci_table.c.id).filter(
value_table.attr_id == attr_id).filter(ci_table.deleted.is_(False)).order_by(
getattr(value_table.value, sort_type)())
return query_sql
def _query_wrap_for_device(self, query_sql, **kwargs):
_type = kwargs.pop("_type", False) or kwargs.pop("type", False) or kwargs.pop("ci_type", False)
if _type:
ci_type = CITypeCache.get(_type)
if ci_type is None:
return
query_sql = query_sql.filter(CI.type_id == ci_type.id)
for k, v in kwargs.items():
attr = AttributeCache.get(k)
if attr is None:
continue
value_table = TableMap(attr_name=k).table
ci_table = query_sql.subquery()
query_sql = db.session.query(ci_table.c.id).join(
value_table, value_table.ci_id == ci_table.c.id).filter(
value_table.attr_id == attr.id).filter(ci_table.deleted.is_(False)).filter(
value_table.value.ilike(v.replace("*", "%")))
# current_app.logger.debug(query_sql)
sort_by = kwargs.pop("sort", "")
if sort_by:
query_sql = self._sort_handler(sort_by, query_sql)
return query_sql
@classmethod
def get_first_cis(cls, second_ci, relation_type_id=None, page=1, per_page=None):
first_cis = db.session.query(CIRelation.first_ci_id).filter(
CIRelation.second_ci_id == second_ci).filter(CIRelation.deleted.is_(False))
if relation_type_id is not None:
first_cis = first_cis.filter(CIRelation.relation_type_id == relation_type_id)
numfound = first_cis.count()
if per_page != "all":
first_cis = first_cis.offset((page - 1) * per_page).limit(per_page).all()
first_ci_ids = [str(first_ci.first_ci_id) for first_ci in first_cis]
result = CIManager.get_cis_by_ids(first_ci_ids)
return numfound, len(first_ci_ids), result
@classmethod
def add(cls, first_ci_id, second_ci_id, more=None, relation_type_id=None):
relation_type_id = relation_type_id or cls._get_default_relation_type()
CIManager.confirm_ci_existed(first_ci_id)
CIManager.confirm_ci_existed(second_ci_id)
existed = CIRelation.get_by(first_ci_id=first_ci_id,
second_ci_id=second_ci_id,
to_dict=False,
first=True)
if existed is not None:
if existed.relation_type_id != relation_type_id:
existed.update(relation_type_id=relation_type_id)
CIRelationHistoryManager().add(existed, OperateType.UPDATE)
else:
existed = CIRelation.create(first_ci_id=first_ci_id,
second_ci_id=second_ci_id,
relation_type_id=relation_type_id)
CIRelationHistoryManager().add(existed, OperateType.ADD)
ci_relation_cache.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
if more is not None:
existed.update(more=more)
return existed.id
@staticmethod
def delete(cr_id):
cr = CIRelation.get_by_id(cr_id) or abort(404, "CIRelation <{0}> is not existed".format(cr_id))
cr.soft_delete()
his_manager = CIRelationHistoryManager()
his_manager.add(cr, operate_type=OperateType.DELETE)
ci_relation_delete.apply_async(args=(cr.first_ci_id, cr.second_ci_id), queue=CMDB_QUEUE)
return cr_id
@classmethod
def delete_2(cls, first_ci_id, second_ci_id):
cr = CIRelation.get_by(first_ci_id=first_ci_id,
second_ci_id=second_ci_id,
to_dict=False,
first=True)
ci_relation_delete.apply_async(args=(first_ci_id, second_ci_id), queue=CMDB_QUEUE)
return cls.delete(cr.cr_id)
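CIRelationManager links two existing CIs and falls back to the "contain" relation type when none is given; the redis relation cache is refreshed asynchronously. A small sketch with placeholder CI ids:

app_ci_id, server_ci_id = 1, 2  # assumed ids of existing CIs
# relate the application CI to the server CI with the default "contain" type
cr_id = CIRelationManager.add(app_ci_id, server_ci_id)
# children grouped by CIType name, e.g. {"server": [{...}, ...]}
children = CIRelationManager.get_children(app_ci_id, ret_key=RetKey.NAME)
# remove the relation by its own id (delete_2 does the same by the two CI ids)
CIRelationManager.delete(cr_id)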

api/lib/cmdb/ci_type.py   Normal file   (429 lines added)

@ -0,0 +1,429 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app
from api.extensions import db
from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.decorator import kwargs_required
from api.models.cmdb import CI
from api.models.cmdb import CIType
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import CITypeAttributeGroup
from api.models.cmdb import CITypeAttributeGroupItem
from api.models.cmdb import CITypeGroup
from api.models.cmdb import CITypeGroupItem
from api.models.cmdb import CITypeRelation
from api.models.cmdb import PreferenceTreeView
from api.models.cmdb import PreferenceShowAttributes
class CITypeManager(object):
"""
manage CIType
"""
def __init__(self):
pass
@staticmethod
def get_name_by_id(type_id):
return CITypeCache.get(type_id).name
@staticmethod
def check_is_existed(key):
return CITypeCache.get(key) or abort(404, "CIType <{0}> is not existed".format(key))
@staticmethod
def get_ci_types(type_name=None):
ci_types = CIType.get_by() if type_name is None else CIType.get_by_like(name=type_name)
res = list()
for type_dict in ci_types:
type_dict["unique_key"] = AttributeCache.get(type_dict["unique_id"]).name
res.append(type_dict)
return res
@staticmethod
def query(_type):
ci_type = CITypeCache.get(_type) or abort(404, "CIType <{0}> is not found".format(_type))
return ci_type.to_dict()
@classmethod
@kwargs_required("name")
def add(cls, **kwargs):
unique_key = kwargs.pop("unique_key", None)
unique_key = AttributeCache.get(unique_key) or abort(404, "Unique key is not defined")
CIType.get_by(name=kwargs['name']) and abort(404, "CIType <{0}> is already existed".format(kwargs.get("name")))
kwargs["alias"] = kwargs["name"] if not kwargs.get("alias") else kwargs["alias"]
kwargs["unique_id"] = unique_key.id
ci_type = CIType.create(**kwargs)
CITypeAttributeManager.add(ci_type.id, [unique_key.id], is_required=True)
CITypeCache.clean(ci_type.name)
return ci_type.id
@classmethod
def update(cls, type_id, **kwargs):
ci_type = cls.check_is_existed(type_id)
unique_key = kwargs.pop("unique_key", None)
unique_key = AttributeCache.get(unique_key)
if unique_key is not None:
kwargs["unique_id"] = unique_key.id
type_attr = CITypeAttribute.get_by(type_id=type_id,
attr_id=unique_key.id,
first=True,
to_dict=False)
if type_attr is None:
CITypeAttributeManager.add(type_id, [unique_key.id], is_required=True)
ci_type.update(**kwargs)
CITypeCache.clean(type_id)
return type_id
@classmethod
def set_enabled(cls, type_id, enabled=True):
ci_type = cls.check_is_existed(type_id)
ci_type.update(enabled=enabled)
return type_id
@classmethod
def delete(cls, type_id):
ci_type = cls.check_is_existed(type_id)
if CI.get_by(type_id=type_id, first=True, to_dict=False) is not None:
return abort(400, "cannot delete, because CI instance exists")
for item in CITypeRelation.get_by(parent_id=type_id, to_dict=False):
item.soft_delete()
for item in CITypeRelation.get_by(child_id=type_id, to_dict=False):
item.soft_delete()
for item in PreferenceTreeView.get_by(type_id=type_id, to_dict=False):
item.soft_delete()
for item in PreferenceShowAttributes.get_by(type_id=type_id, to_dict=False):
item.soft_delete()
ci_type.soft_delete()
CITypeCache.clean(type_id)
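CITypeManager.add needs a name plus a unique_key that must already exist as an Attribute; that attribute is then bound to the new type as required. A sketch under those assumptions, using a hypothetical "sn" attribute:

# "sn" must already be defined as an Attribute before it can be the unique key
type_id = CITypeManager.add(name="switch", alias="Switch", unique_key="sn")
# adjust the alias and disable the type without touching its attributes
CITypeManager.update(type_id, alias="Access Switch")
CITypeManager.set_enabled(type_id, enabled=False)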
class CITypeGroupManager(object):
@staticmethod
def get(need_other=None):
groups = CITypeGroup.get_by()
group_types = set()
for group in groups:
for t in sorted(CITypeGroupItem.get_by(group_id=group['id']), key=lambda x: x['order']):
group.setdefault("ci_types", []).append(CITypeCache.get(t['type_id']).to_dict())
group_types.add(t["type_id"])
if need_other:
ci_types = CITypeManager.get_ci_types()
other_types = dict(ci_types=[ci_type for ci_type in ci_types if ci_type["id"] not in group_types])
groups.append(other_types)
return groups
@staticmethod
def add(name):
CITypeGroup.get_by(name=name, first=True) and abort(400, "Group {0} already exists".format(name))
return CITypeGroup.create(name=name)
@staticmethod
def update(gid, name, type_ids):
"""
update all
:param gid:
:param name:
:param type_ids:
:return:
"""
existed = CITypeGroup.get_by_id(gid) or abort(404, "Group <{0}> does not exist".format(gid))
if name is not None:
existed.update(name=name)
for idx, type_id in enumerate(type_ids):
item = CITypeGroupItem.get_by(group_id=gid, type_id=type_id, first=True, to_dict=False)
if item is not None:
item.update(order=idx)
else:
CITypeGroupItem.create(group_id=gid, type_id=type_id, order=idx)
@staticmethod
def delete(gid):
existed = CITypeGroup.get_by_id(gid) or abort(404, "Group <{0}> does not exist".format(gid))
items = CITypeGroupItem.get_by(group_id=gid, to_dict=False)
for item in items:
item.soft_delete()
existed.soft_delete()
class CITypeAttributeManager(object):
"""
manage a CIType's attributes, including query, add, update and delete
"""
def __init__(self):
pass
@staticmethod
def get_attr_names_by_type_id(type_id):
return [AttributeCache.get(attr.attr_id).name for attr in CITypeAttributeCache.get(type_id)]
@staticmethod
def get_attributes_by_type_id(type_id):
attrs = CITypeAttributeCache.get(type_id)
result = list()
for attr in sorted(attrs, key=lambda x: (x.order, x.id)):
attr_dict = AttributeManager().get_attribute(attr.attr_id)
attr_dict["is_required"] = attr.is_required
attr_dict["order"] = attr.order
attr_dict["default_show"] = attr.default_show
result.append(attr_dict)
return result
@staticmethod
def _check(type_id, attr_ids):
CITypeManager.check_is_existed(type_id)
if not attr_ids or not isinstance(attr_ids, list):
return abort(400, "Attributes are required")
for attr_id in attr_ids:
AttributeCache.get(attr_id) or abort(404, "Attribute <{0}> is not existed".format(attr_id))
@classmethod
def add(cls, type_id, attr_ids=None, **kwargs):
"""
add attributes to CIType
:param type_id:
:param attr_ids: list
:param kwargs:
:return:
"""
cls._check(type_id, attr_ids)
for attr_id in attr_ids:
existed = CITypeAttribute.get_by(type_id=type_id,
attr_id=attr_id,
first=True,
to_dict=False)
if existed is not None:
continue
current_app.logger.debug(attr_id)
CITypeAttribute.create(type_id=type_id, attr_id=attr_id, **kwargs)
CITypeAttributeCache.clean(type_id)
@classmethod
def update(cls, type_id, attributes):
"""
update a CIType's attributes
:param type_id:
:param attributes: list
:return:
"""
cls._check(type_id, [i.get('attr_id') for i in attributes])
for attr in attributes:
existed = CITypeAttribute.get_by(type_id=type_id,
attr_id=attr.get("attr_id"),
first=True,
to_dict=False)
if existed is None:
continue
existed.update(**attr)
CITypeAttributeCache.clean(type_id)
@classmethod
def delete(cls, type_id, attr_ids=None):
"""
delete attributes from CIType
:param type_id:
:param attr_ids: list
:return:
"""
cls._check(type_id, attr_ids)
for attr_id in attr_ids:
existed = CITypeAttribute.get_by(type_id=type_id,
attr_id=attr_id,
first=True,
to_dict=False)
if existed is not None:
existed.soft_delete()
CITypeAttributeCache.clean(type_id)
class CITypeRelationManager(object):
"""
manage relation between CITypes
"""
@staticmethod
def get():
res = CITypeRelation.get_by(to_dict=False)
for idx, item in enumerate(res):
_item = item.to_dict()
res[idx] = _item
res[idx]['parent'] = item.parent.to_dict()
res[idx]['child'] = item.child.to_dict()
res[idx]['relation_type'] = item.relation_type.to_dict()
return res
@staticmethod
def get_child_type_ids(type_id, level):
ids = [type_id]
query = db.session.query(CITypeRelation).filter(CITypeRelation.deleted.is_(False))
for _ in range(0, level):
ids = [i.child_id for i in query.filter(CITypeRelation.parent_id.in_(ids))]
return ids
@staticmethod
def _wrap_relation_type_dict(type_id, relation_inst):
ci_type_dict = CITypeCache.get(type_id).to_dict()
ci_type_dict["ctr_id"] = relation_inst.id
ci_type_dict["attributes"] = CITypeAttributeManager.get_attributes_by_type_id(ci_type_dict["id"])
ci_type_dict["relation_type"] = relation_inst.relation_type.name
return ci_type_dict
@classmethod
def get_children(cls, parent_id):
children = CITypeRelation.get_by(parent_id=parent_id, to_dict=False)
return [cls._wrap_relation_type_dict(child.child_id, child) for child in children]
@classmethod
def get_parents(cls, child_id):
parents = CITypeRelation.get_by(child_id=child_id, to_dict=False)
return [cls._wrap_relation_type_dict(parent.parent_id, parent) for parent in parents]
@staticmethod
def _get(parent_id, child_id):
return CITypeRelation.get_by(parent_id=parent_id,
child_id=child_id,
to_dict=False,
first=True)
@classmethod
def add(cls, parent, child, relation_type_id):
p = CITypeManager.check_is_existed(parent)
c = CITypeManager.check_is_existed(child)
existed = cls._get(p.id, c.id)
if existed is not None:
existed.update(relation_type_id=relation_type_id)
else:
existed = CITypeRelation.create(parent_id=p.id,
child_id=c.id,
relation_type_id=relation_type_id)
return existed.id
@staticmethod
def delete(_id):
ctr = CITypeRelation.get_by_id(_id) or abort(404, "Type relation <{0}> is not found".format(_id))
ctr.soft_delete()
@classmethod
def delete_2(cls, parent, child):
ctr = cls._get(parent, child)
return cls.delete(ctr.id)
class CITypeAttributeGroupManager(object):
@staticmethod
def get_by_type_id(type_id, need_other=None):
groups = CITypeAttributeGroup.get_by(type_id=type_id)
groups = sorted(groups, key=lambda x: x["order"])
grouped = list()
for group in groups:
items = CITypeAttributeGroupItem.get_by(group_id=group["id"], to_dict=False)
items = sorted(items, key=lambda x: x.order)
group["attributes"] = [AttributeCache.get(i.attr_id).to_dict() for i in items]
grouped.extend([i.attr_id for i in items])
if need_other is not None:
grouped = set(grouped)
attributes = CITypeAttributeManager.get_attributes_by_type_id(type_id)
other_attributes = [attr for attr in attributes if attr["id"] not in grouped]
groups.append(dict(attributes=other_attributes))
return groups
@staticmethod
def create_or_update(type_id, name, attr_order, group_order=0):
"""
create or update
:param type_id:
:param name:
:param group_order: group order
:param attr_order:
:return:
"""
existed = CITypeAttributeGroup.get_by(type_id=type_id, name=name, first=True, to_dict=False)
existed = existed or CITypeAttributeGroup.create(type_id=type_id, name=name, order=group_order)
existed.update(order=group_order)
attr_order = dict(attr_order)
current_app.logger.info(attr_order)
existed_items = CITypeAttributeGroupItem.get_by(group_id=existed.id, to_dict=False)
for item in existed_items:
if item.attr_id not in attr_order:
item.soft_delete()
else:
item.update(order=attr_order[item.attr_id])
existed_items = {item.attr_id: 1 for item in existed_items}
for attr_id, order in attr_order.items():
if attr_id not in existed_items:
CITypeAttributeGroupItem.create(group_id=existed.id, attr_id=attr_id, order=order)
return existed
@classmethod
def update(cls, group_id, name, attr_order, group_order=0):
group = CITypeAttributeGroup.get_by_id(group_id) or abort(404, "Group <{0}> does not exist".format(group_id))
other = CITypeAttributeGroup.get_by(type_id=group.type_id, name=name, first=True, to_dict=False)
if other is not None and other.id != group.id:
return abort(400, "Group <{0}> duplicate".format(name))
if name is not None:
group.update(name=name)
cls.create_or_update(group.type_id, name, attr_order, group_order)
@staticmethod
def delete(group_id):
group = CITypeAttributeGroup.get_by_id(group_id) \
or abort(404, "AttributeGroup <{0}> does not exist".format(group_id))
group.soft_delete()
items = CITypeAttributeGroupItem.get_by(group_id=group_id, to_dict=False)
for item in items:
item.soft_delete()
return group_id
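Attribute binding, attribute grouping and type-to-type relations above all follow the same add/update/delete shape. A sketch with placeholder ids, assuming "application" and "server" CITypes exist:

type_id, ip_attr_id, os_attr_id, contain_type_id = 1, 2, 3, 4  # placeholders
# bind two attributes to the CIType and show them by default
CITypeAttributeManager.add(type_id, attr_ids=[ip_attr_id, os_attr_id], default_show=True)
# group them on the form; attr_order is a list of (attr_id, order) pairs
CITypeAttributeGroupManager.create_or_update(type_id, "Base", [(ip_attr_id, 0), (os_attr_id, 1)])
# declare that an application contains servers
ctr_id = CITypeRelationManager.add("application", "server", contain_type_id)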

api/lib/cmdb/const.py   Normal file   (57 lines added)

@ -0,0 +1,57 @@
# -*- coding:utf-8 -*-
from api.lib.utils import BaseEnum
class ValueTypeEnum(BaseEnum):
INT = "0"
FLOAT = "1"
TEXT = "2"
DATETIME = "3"
DATE = "4"
TIME = "5"
class CIStatusEnum(BaseEnum):
REVIEW = "0"
VALIDATE = "1"
class ExistPolicy(BaseEnum):
REJECT = "reject"
NEED = "need"
IGNORE = "ignore"
REPLACE = "replace"
class OperateType(BaseEnum):
ADD = "0"
DELETE = "1"
UPDATE = "2"
class RetKey(BaseEnum):
ID = "id"
NAME = "name"
ALIAS = "alias"
class ResourceType(BaseEnum):
CI = "CIType"
class PermEnum(BaseEnum):
ADD = "add"
UPDATE = "update"
DELETE = "delete"
READ = "read"
class RoleEnum(BaseEnum):
CONFIG = "admin"
CMDB_QUEUE = "cmdb_async"
REDIS_PREFIX_CI = "CMDB_CI"
REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"
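These enums are plain string constants, so they can be passed straight to the managers and compared against request parameters. A tiny sketch:

from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.const import RetKey

print(ExistPolicy.REPLACE, ExistPolicy.REJECT)  # "replace" "reject"
print(OperateType.ADD, OperateType.DELETE)      # "0" "1"
print(RetKey.NAME, RetKey.ALIAS)                # "name" "alias"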

api/lib/cmdb/history.py   Normal file   (124 lines added)

@ -0,0 +1,124 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import g
from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import RelationTypeCache
from api.lib.cmdb.const import OperateType
from api.lib.perm.acl.cache import UserCache
from api.models.cmdb import Attribute
from api.models.cmdb import AttributeHistory
from api.models.cmdb import CIRelationHistory
from api.models.cmdb import OperationRecord
class AttributeHistoryManger(object):
@staticmethod
def get_records(start, end, username, page, page_size):
records = db.session.query(OperationRecord).filter(OperationRecord.deleted.is_(False))
numfound = db.session.query(db.func.count(OperationRecord.id)).filter(OperationRecord.deleted.is_(False))
if start:
records = records.filter(OperationRecord.created_at >= start)
numfound = numfound.filter(OperationRecord.created_at >= start)
if end:
records = records.filter(OperationRecord.created_at <= end)
numfound = numfound.filter(OperationRecord.created_at <= end)
if username:
user = UserCache.get(username)
if user:
records = records.filter(OperationRecord.uid == user.uid)
else:
return abort(404, "User <{0}> is not found".format(username))
records = records.order_by(-OperationRecord.id).offset(page_size * (page - 1)).limit(page_size).all()
total = len(records)
numfound = numfound.first()[0]
res = []
for record in records:
_res = record.to_dict()
_res["user"] = UserCache.get(_res.get("uid")).nickname or UserCache.get(_res.get("uid")).username
attr_history = AttributeHistory.get_by(record_id=_res.get("id"), to_dict=False)
_res["attr_history"] = [AttributeCache.get(h.attr_id).alias for h in attr_history]
rel_history = CIRelationHistory.get_by(record_id=_res.get("id"), to_dict=False)
rel_statis = {}
for rel in rel_history:
if rel.operate_type not in rel_statis:
rel_statis[rel.operate_type] = 1
else:
rel_statis[rel.operate_type] += 1
_res["rel_history"] = rel_statis
res.append(_res)
return numfound, total, res
@staticmethod
def get_by_ci_id(ci_id):
res = db.session.query(AttributeHistory, Attribute, OperationRecord).join(
Attribute, Attribute.id == AttributeHistory.attr_id).join(
OperationRecord, OperationRecord.id == AttributeHistory.record_id).filter(
AttributeHistory.ci_id == ci_id).order_by(OperationRecord.id.desc())
return [dict(attr_name=i.Attribute.name,
attr_alias=i.Attribute.alias,
operate_type=i.AttributeHistory.operate_type,
username=UserCache.get(i.OperationRecord.uid).nickname,
old=i.AttributeHistory.old,
new=i.AttributeHistory.new,
created_at=i.OperationRecord.created_at.strftime('%Y-%m-%d %H:%M:%S'),
record_id=i.OperationRecord.id,
hid=i.AttributeHistory.id
) for i in res]
@staticmethod
def get_record_detail(record_id):
from api.lib.cmdb.ci import CIManager
record = OperationRecord.get_by_id(record_id) or abort(404, "Record <{0}> is not found".format(record_id))
username = UserCache.get(record.uid).nickname or UserCache.get(record.uid).username
timestamp = record.created_at.strftime("%Y-%m-%d %H:%M:%S")
attr_history = AttributeHistory.get_by(record_id=record_id, to_dict=False)
rel_history = CIRelationHistory.get_by(record_id=record_id, to_dict=False)
attr_dict, rel_dict = dict(), {"add": [], "delete": []}
for attr_h in attr_history:
attr_dict[AttributeCache.get(attr_h.attr_id).alias] = dict(
old=attr_h.old,
new=attr_h.new,
operate_type=attr_h.operate_type)
for rel_h in rel_history:
first = CIManager.get_ci_by_id(rel_h.first_ci_id)
second = CIManager.get_ci_by_id(rel_h.second_ci_id)
rel_dict[rel_h.operate_type].append((first, RelationTypeCache.get(rel_h.relation_type_id).name, second))
return username, timestamp, attr_dict, rel_dict
@staticmethod
def add(ci_id, history_list):
record = OperationRecord.create(uid=g.user.uid)
for attr_id, operate_type, old, new in history_list or []:
AttributeHistory.create(attr_id=attr_id,
operate_type=operate_type,
old=old,
new=new,
ci_id=ci_id,
record_id=record.id)
class CIRelationHistoryManager(object):
@staticmethod
def add(rel_obj, operate_type=OperateType.ADD):
record = OperationRecord.create(uid=g.user.uid)
CIRelationHistory.create(relation_id=rel_obj.id,
record_id=record.id,
operate_type=operate_type,
first_ci_id=rel_obj.first_ci_id,
second_ci_id=rel_obj.second_ci_id,
relation_type_id=rel_obj.relation_type_id)
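AttributeHistoryManger.add writes one OperationRecord for the current user plus one AttributeHistory row per (attr_id, operate_type, old, new) tuple, which is what CIManager feeds it on every value change. A sketch with placeholder ids, assuming a request context so that g.user is set:

ci_id, hostname_attr_id = 1, 2  # placeholder ids
AttributeHistoryManger.add(ci_id, [
    (hostname_attr_id, OperateType.UPDATE, "web-01", "web-02"),
])
# per-CI timeline, newest operation record first
for change in AttributeHistoryManger.get_by_ci_id(ci_id):
    print(change["attr_name"], change["old"], "->", change["new"])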

api/lib/cmdb/preference.py   Normal file   (148 lines added)

@ -0,0 +1,148 @@
# -*- coding:utf-8 -*-
import json
import six
import toposort
from flask import abort
from flask import g
from api.extensions import db
from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import PreferenceRelationView
from api.models.cmdb import PreferenceShowAttributes
from api.models.cmdb import PreferenceTreeView
class PreferenceManager(object):
@staticmethod
def get_types(instance=False, tree=False):
types = db.session.query(PreferenceShowAttributes.type_id).filter(
PreferenceShowAttributes.uid == g.user.uid).filter(
PreferenceShowAttributes.deleted.is_(False)).group_by(PreferenceShowAttributes.type_id).all() \
if instance else []
tree_types = PreferenceTreeView.get_by(uid=g.user.uid, to_dict=False) if tree else []
type_ids = list(set([i.type_id for i in types + tree_types]))
return [CITypeCache.get(type_id).to_dict() for type_id in type_ids]
@staticmethod
def get_show_attributes(type_id):
if not isinstance(type_id, six.integer_types):
type_id = CITypeCache.get(type_id).id
attrs = db.session.query(PreferenceShowAttributes, CITypeAttribute.order).join(
CITypeAttribute, CITypeAttribute.attr_id == PreferenceShowAttributes.attr_id).filter(
PreferenceShowAttributes.uid == g.user.uid).filter(
PreferenceShowAttributes.type_id == type_id).filter(
PreferenceShowAttributes.deleted.is_(False)).filter(CITypeAttribute.deleted.is_(False)).filter(
CITypeAttribute.type_id == type_id).order_by(
CITypeAttribute.order).all()
result = [i.PreferenceShowAttributes.attr.to_dict() for i in attrs]
is_subscribed = True
if not attrs:
attrs = db.session.query(CITypeAttribute).filter(
CITypeAttribute.type_id == type_id).filter(
CITypeAttribute.deleted.is_(False)).filter(
CITypeAttribute.default_show.is_(True)).order_by(CITypeAttribute.order)
result = [i.attr.to_dict() for i in attrs]
is_subscribed = False
for i in result:
if i["is_choice"]:
i.update(dict(choice_value=AttributeManager.get_choice_values(i["id"], i["value_type"])))
return is_subscribed, result
@classmethod
def create_or_update_show_attributes(cls, type_id, attr_order):
existed_all = PreferenceShowAttributes.get_by(type_id=type_id, uid=g.user.uid, to_dict=False)
for _attr, order in attr_order:
attr = AttributeCache.get(_attr) or abort(404, "Attribute <{0}> does not exist".format(_attr))
existed = PreferenceShowAttributes.get_by(type_id=type_id,
uid=g.user.uid,
attr_id=attr.id,
first=True,
to_dict=False)
if existed is None:
PreferenceShowAttributes.create(type_id=type_id,
uid=g.user.uid,
attr_id=attr.id,
order=order)
else:
existed.update(order=order)
attr_dict = {int(i): j for i, j in attr_order}
for i in existed_all:
if i.attr_id not in attr_dict:
i.soft_delete()
@staticmethod
def get_tree_view():
res = PreferenceTreeView.get_by(uid=g.user.uid, to_dict=True)
for item in res:
if item["levels"]:
item.update(CITypeCache.get(item['type_id']).to_dict())
item.update(dict(levels=[AttributeCache.get(l).to_dict()
for l in item["levels"].split(",") if AttributeCache.get(l)]))
return res
@staticmethod
def create_or_update_tree_view(type_id, levels):
attrs = CITypeAttributeCache.get(type_id)
for idx, i in enumerate(levels):
for attr in attrs:
attr = AttributeCache.get(attr.attr_id)
if i == attr.id or i == attr.name or i == attr.alias:
levels[idx] = str(attr.id)
levels = ",".join(levels)
existed = PreferenceTreeView.get_by(uid=g.user.uid, type_id=type_id, to_dict=False, first=True)
if existed is not None:
if not levels:
existed.soft_delete()
return existed
return existed.update(levels=levels)
elif levels:
return PreferenceTreeView.create(levels=levels, type_id=type_id, uid=g.user.uid)
@staticmethod
def get_relation_view():
views = PreferenceRelationView.get_by(to_dict=True)
result = dict()
name2id = list()
for view in views:
result.setdefault(view['name'], []).extend(json.loads(view['cr_ids']))
name2id.append([view['name'], view['id']])
id2type = dict()
for view_name in result:
for i in result[view_name]:
id2type[i['parent_id']] = None
id2type[i['child_id']] = None
result[view_name] = toposort.toposort_flatten(
{i['child_id']: {i['parent_id']} for i in result[view_name]})
for type_id in id2type:
id2type[type_id] = CITypeCache.get(type_id).to_dict()
return result, id2type, sorted(name2id, key=lambda x: x[1])
@classmethod
def create_or_update_relation_view(cls, name, cr_ids):
existed = PreferenceRelationView.get_by(name=name, to_dict=False, first=True)
if existed is None:
PreferenceRelationView.create(name=name, cr_ids=json.dumps(cr_ids))
return cls.get_relation_view()
@staticmethod
def delete_relation_view(name):
for existed in PreferenceRelationView.get_by(name=name, to_dict=False):
existed.soft_delete()
return name
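get_relation_view stores each view as a list of CITypeRelation edges and flattens them with toposort into a parent-before-child ordering of type ids. A minimal illustration of that flattening step, with arbitrary ids:

import toposort

# edges shaped {child_id: {parent_id}}, exactly as get_relation_view builds them
edges = {2: {1}, 3: {2}}
print(toposort.toposort_flatten(edges))  # [1, 2, 3]: parents come before children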


@ -3,13 +3,10 @@
from flask import abort
from api.lib.cmdb.resp_format import ErrFormat
from api.models.cmdb import RelationType
class RelationTypeManager(object):
cls = RelationType
@staticmethod
def get_all():
return RelationType.get_by(to_dict=False)
@ -24,21 +21,17 @@ class RelationTypeManager(object):
@staticmethod
def add(name):
RelationType.get_by(name=name, first=True, to_dict=False) and abort(
400, ErrFormat.relation_type_exists.format(name))
RelationType.get_by(name=name, first=True, to_dict=False) and abort(400, "It's already existed")
return RelationType.create(name=name)
@staticmethod
def update(rel_id, name):
existed = RelationType.get_by_id(rel_id) or abort(
404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
existed = RelationType.get_by_id(rel_id) or abort(404, "RelationType <{0}> does not exist".format(rel_id))
return existed.update(name=name)
@staticmethod
def delete(rel_id):
existed = RelationType.get_by_id(rel_id) or abort(
404, ErrFormat.relation_type_not_found.format("id={}".format(rel_id)))
existed = RelationType.get_by_id(rel_id) or abort(404, "RelationType <{0}> does not exist".format(rel_id))
existed.soft_delete()


@ -1,2 +1,3 @@
# -*- coding:utf-8 -*-
__all__ = ['db', 'es']


@ -1,5 +1,7 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
QUERY_CIS_BY_VALUE_TABLE = """
SELECT attr.name AS attr_name,
@ -42,7 +44,7 @@ FACET_QUERY1 = """
FACET_QUERY = """
SELECT {0}.value,
count(distinct({0}.ci_id))
count({0}.ci_id)
FROM {0}
INNER JOIN ({1}) AS F ON F.ci_id={0}.ci_id
WHERE {0}.attr_id={2:d}
@ -61,3 +63,4 @@ QUERY_CI_BY_TYPE = """
FROM c_cis
WHERE c_cis.type_id in ({0})
"""


@ -0,0 +1,367 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
import time
from flask import current_app
from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci.db.query_sql import FACET_QUERY
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_ATTR_NAME
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_TYPE
from api.lib.cmdb.utils import TableMap
from api.lib.utils import handle_arg_list
from api.models.cmdb import CI
class Search(object):
def __init__(self, query=None,
fl=None,
facet_field=None,
page=1,
ret_key=RetKey.NAME,
count=1,
sort=None,
ci_ids=None):
self.orig_query = query
self.fl = fl
self.facet_field = facet_field
self.page = page
self.ret_key = ret_key
self.count = count
self.sort = sort
self.ci_ids = ci_ids or []
self.query_sql = ""
self.type_id_list = []
self.only_type_query = False
@staticmethod
def _operator_proc(key):
operator = "&"
if key.startswith("+"):
key = key[1:].strip()
elif key.startswith("-"):
operator = "|"
key = key[1:].strip()
elif key.startswith("~"):
operator = "~"
key = key[1:].strip()
return operator, key
def _attr_name_proc(self, key):
operator, key = self._operator_proc(key)
if key in ('ci_type', 'type', '_type'):
return '_type', ValueTypeEnum.TEXT, operator, None
if key in ('id', 'ci_id', '_id'):
return '_id', ValueTypeEnum.TEXT, operator, None
attr = AttributeCache.get(key)
if attr:
return attr.name, attr.value_type, operator, attr
else:
raise SearchError("{0} is not existed".format(key))
def _type_query_handler(self, v):
new_v = v[1:-1].split(";") if v.startswith("(") and v.endswith(")") else [v]
for _v in new_v:
ci_type = CITypeCache.get(_v)
if ci_type is not None:
self.type_id_list.append(str(ci_type.id))
if self.type_id_list:
type_ids = ",".join(self.type_id_list)
_query_sql = QUERY_CI_BY_TYPE.format(type_ids)
if self.only_type_query:
return _query_sql
else:
return ""
return ""
@staticmethod
def _in_query_handler(attr, v):
new_v = v[1:-1].split(";")
table_name = TableMap(attr_name=attr.name).table_name
in_query = " OR {0}.value ".format(table_name).join(['LIKE "{0}"'.format(_v.replace("*", "%")) for _v in new_v])
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, in_query)
return _query_sql
@staticmethod
def _range_query_handler(attr, v):
start, end = [x.strip() for x in v[1:-1].split("_TO_")]
table_name = TableMap(attr_name=attr.name).table_name
range_query = "BETWEEN '{0}' AND '{1}'".format(start.replace("*", "%"), end.replace("*", "%"))
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, range_query)
return _query_sql
@staticmethod
def _comparison_query_handler(attr, v):
table_name = TableMap(attr_name=attr.name).table_name
if v.startswith(">=") or v.startswith("<="):
comparison_query = "{0} '{1}'".format(v[:2], v[2:].replace("*", "%"))
else:
comparison_query = "{0} '{1}'".format(v[0], v[1:].replace("*", "%"))
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, comparison_query)
return _query_sql
@staticmethod
def __sort_by(field):
field = field or ""
sort_type = "ASC"
if field.startswith("+"):
field = field[1:]
elif field.startswith("-"):
field = field[1:]
sort_type = "DESC"
return field, sort_type
def __sort_by_id(self, sort_type, query_sql):
ret_sql = "SELECT SQL_CALC_FOUND_ROWS DISTINCT B.ci_id FROM ({0}) AS B {1}"
if self.only_type_query:
return ret_sql.format(query_sql, "ORDER BY B.ci_id {1} LIMIT {0:d}, {2};".format(
(self.page - 1) * self.count, sort_type, self.count))
elif self.type_id_list:
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({0}) ".format(
",".join(self.type_id_list)))
return ret_sql.format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({3}) "
"ORDER BY B.ci_id {1} LIMIT {0:d}, {2};".format(
(self.page - 1) * self.count, sort_type, self.count, ",".join(self.type_id_list)))
else:
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id ")
return ret_sql.format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id "
"ORDER BY B.ci_id {1} LIMIT {0:d}, {2};".format((self.page - 1) * self.count, sort_type, self.count))
def __sort_by_field(self, field, sort_type, query_sql):
attr = AttributeCache.get(field)
attr_id = attr.id
table_name = TableMap(attr_name=attr.name).table_name
_v_query_sql = """SELECT {0}.ci_id, {1}.value
FROM ({2}) AS {0} INNER JOIN {1} ON {1}.ci_id = {0}.ci_id
WHERE {1}.attr_id = {3}""".format("ALIAS", table_name, query_sql, attr_id)
new_table = _v_query_sql
if self.only_type_query or not self.type_id_list:
return "SELECT SQL_CALC_FOUND_ROWS DISTINCT C.ci_id, C.value " \
"FROM ({0}) AS C " \
"ORDER BY C.value {2} " \
"LIMIT {1:d}, {3};".format(new_table, (self.page - 1) * self.count, sort_type, self.count)
elif self.type_id_list:
self.query_sql = """SELECT C.ci_id
FROM ({0}) AS C
INNER JOIN c_cis on c_cis.id=C.ci_id
WHERE c_cis.type_id IN ({1})""".format(new_table, ",".join(self.type_id_list))
return """SELECT SQL_CALC_FOUND_ROWS DISTINCT C.ci_id, C.value
FROM ({0}) AS C
INNER JOIN c_cis on c_cis.id=C.ci_id
WHERE c_cis.type_id IN ({4})
ORDER BY C.value {2}
LIMIT {1:d}, {3};""".format(new_table,
(self.page - 1) * self.count,
sort_type, self.count,
",".join(self.type_id_list))
def _sort_query_handler(self, field, query_sql):
field, sort_type = self.__sort_by(field)
if field in ("_id", "ci_id") or not field:
return self.__sort_by_id(sort_type, query_sql)
else:
return self.__sort_by_field(field, sort_type, query_sql)
@staticmethod
def _wrap_sql(operator, alias, _query_sql, query_sql):
if operator == "&":
query_sql = """SELECT * FROM ({0}) as {1}
INNER JOIN ({2}) as {3} USING(ci_id)""".format(query_sql, alias, _query_sql, alias + "A")
elif operator == "|":
query_sql = "SELECT * FROM ({0}) as {1} UNION ALL ({2})".format(query_sql, alias, _query_sql)
elif operator == "~":
query_sql = """SELECT * FROM ({0}) as {1} LEFT JOIN ({2}) as {3} USING(ci_id)
WHERE {3}.ci_id is NULL""".format(query_sql, alias, _query_sql, alias + "A")
return query_sql
def _execute_sql(self, query_sql):
v_query_sql = self._sort_query_handler(self.sort, query_sql)
start = time.time()
execute = db.session.execute
current_app.logger.debug(v_query_sql)
res = execute(v_query_sql).fetchall()
end_time = time.time()
current_app.logger.debug("query ci ids time is: {0}".format(end_time - start))
numfound = execute("SELECT FOUND_ROWS();").fetchall()[0][0]
current_app.logger.debug("statistics ci ids time is: {0}".format(time.time() - end_time))
return numfound, res
def __confirm_type_first(self, queries):
for q in queries:
if q.startswith("_type"):
queries.remove(q)
queries.insert(0, q)
if len(queries) == 1 or queries[1].startswith("-") or queries[1].startswith("~"):
self.only_type_query = True
return queries
def __query_build_by_field(self, queries):
query_sql, alias, operator = "", "A", "&"
is_first, only_type_query_special = True, True
for q in queries:
_query_sql = ""
if ":" in q:
k = q.split(":")[0].strip()
v = ":".join(q.split(":")[1:]).strip()
current_app.logger.debug(v)
field, field_type, operator, attr = self._attr_name_proc(k)
if field == "_type":
_query_sql = self._type_query_handler(v)
current_app.logger.debug(_query_sql)
elif field == "_id": # exclude all others
ci = CI.get_by_id(v)
if ci is not None:
return 1, [str(v)]
elif field:
if attr is None:
raise SearchError("{0} is not found".format(field))
# in query
if v.startswith("(") and v.endswith(")"):
_query_sql = self._in_query_handler(attr, v)
# range query
elif v.startswith("[") and v.endswith("]") and "_TO_" in v:
_query_sql = self._range_query_handler(attr, v)
# comparison query
elif v.startswith(">=") or v.startswith("<=") or v.startswith(">") or v.startswith("<"):
_query_sql = self._comparison_query_handler(attr, v)
else:
table_name = TableMap(attr_name=attr.name).table_name
_query_sql = QUERY_CI_BY_ATTR_NAME.format(
table_name, attr.id, 'LIKE "{0}"'.format(v.replace("*", "%")))
else:
raise SearchError("argument q format invalid: {0}".format(q))
elif q:
raise SearchError("argument q format invalid: {0}".format(q))
if is_first and _query_sql and not self.only_type_query:
query_sql = "SELECT * FROM ({0}) AS {1}".format(_query_sql, alias)
is_first = False
alias += "A"
elif self.only_type_query and only_type_query_special:
is_first = False
only_type_query_special = False
query_sql = _query_sql
elif _query_sql:
query_sql = self._wrap_sql(operator, alias, _query_sql, query_sql)
alias += "AA"
return None, query_sql
def _filter_ids(self, query_sql):
if self.ci_ids:
return "SELECT * FROM ({0}) AS IN_QUERY WHERE IN_QUERY.ci_id IN ({1})".format(
query_sql, ",".join(list(map(str, self.ci_ids))))
return query_sql
def _query_build_raw(self):
queries = handle_arg_list(self.orig_query)
queries = self.__confirm_type_first(queries)
current_app.logger.debug(queries)
ret, query_sql = self.__query_build_by_field(queries)
if ret is not None:
return ret, query_sql
s = time.time()
if query_sql:
query_sql = self._filter_ids(query_sql)
self.query_sql = query_sql
current_app.logger.debug(query_sql)
numfound, res = self._execute_sql(query_sql)
current_app.logger.debug("query ci ids is: {0}".format(time.time() - s))
return numfound, [_res[0] for _res in res]
return 0, []
def _facet_build(self):
facet = {}
for f in self.facet_field:
k, field_type, _, attr = self._attr_name_proc(f)
if k:
table_name = TableMap(attr_name=k).table_name
query_sql = FACET_QUERY.format(table_name, self.query_sql, attr.id)
current_app.logger.debug(query_sql)
result = db.session.execute(query_sql).fetchall()
facet[k] = result
facet_result = dict()
for k, v in facet.items():
if not k.startswith('_'):
a = getattr(AttributeCache.get(k), self.ret_key)
facet_result[a] = [(f[0], f[1], a) for f in v]
return facet_result
def _fl_build(self):
_fl = list()
for f in self.fl:
k, _, _, _ = self._attr_name_proc(f)
if k:
_fl.append(k)
return _fl
def search(self):
numfound, ci_ids = self._query_build_raw()
ci_ids = list(map(str, ci_ids))
_fl = self._fl_build()
if self.facet_field and numfound:
facet = self._facet_build()
else:
facet = dict()
response, counter = [], {}
if ci_ids:
response = CIManager.get_cis_by_ids(ci_ids, ret_key=self.ret_key, fields=_fl)
for res in response:
ci_type = res.get("ci_type")
if ci_type not in counter.keys():
counter[ci_type] = 0
counter[ci_type] += 1
total = len(response)
return response, counter, total, self.page, numfound, facet
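The DB search takes a comma-separated q expression: a leading "-" ORs a clause, "~" negates it, "(a;b)" is an IN query, "[x _TO_ y]" a range, and ">=", "<=", ">", "<" are comparisons; sort accepts an optional +/- prefix. A hedged usage sketch, where "server", hostname, private_ip and idc are assumed type and attribute names:

s = Search(query="_type:server,hostname:web*,-private_ip:10.0.0.*",
           fl=["hostname", "private_ip"],
           facet_field=["idc"],
           page=1,
           count=25,
           sort="-hostname")
response, counter, total, page, numfound, facet = s.search()
print(numfound, counter)  # e.g. 42 {"server": 25}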


@ -3,14 +3,12 @@
from __future__ import unicode_literals
import six
from flask import current_app
from api.extensions import es
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.utils import handle_arg_list
@ -23,11 +21,9 @@ class Search(object):
ret_key=RetKey.NAME,
count=1,
sort=None,
ci_ids=None,
excludes=None):
ci_ids=None):
self.orig_query = query
self.fl = fl or []
self.excludes = excludes or []
self.fl = fl
self.facet_field = facet_field
self.page = page
self.ret_key = ret_key
@ -42,9 +38,6 @@ class Search(object):
operator = "&"
if key.startswith("+"):
key = key[1:].strip()
elif key.startswith("-~"):
operator = "|~"
key = key[2:].strip()
elif key.startswith("-"):
operator = "|"
key = key[1:].strip()
@ -59,8 +52,6 @@ class Search(object):
return self.query['query']['bool']['must']
elif operator == "|":
return self.query['query']['bool']['should']
elif operator == "|~":
return self.query['query']['bool']['should']
else:
return self.query['query']['bool']['must_not']
@ -77,37 +68,17 @@ class Search(object):
if attr:
return attr.name, attr.value_type, operator
else:
raise SearchError(ErrFormat.attribute_not_found.format(key))
raise SearchError("{0} is not existed".format(key))
def _in_query_handle(self, attr, v, is_not):
def _in_query_handle(self, attr, v):
terms = v[1:-1].split(";")
operator = "|"
if attr in ('_type', 'ci_type', 'type_id') and terms and terms[0].isdigit():
attr = "type_id"
terms = map(int, terms)
current_app.logger.warning(terms)
for term in terms:
if is_not:
self._operator2query(operator).append({
"bool": {
"must_not": [
{
"term": {
attr: term
}
}
]
}
})
else:
self._operator2query(operator).append({
"term": {
attr: term
}
})
self._operator2query(operator).append({
"term": {
attr: term
}
})
def _filter_ids(self):
if self.ci_ids:
@ -119,36 +90,18 @@ class Search(object):
return int(float(s))
return s
def _range_query_handle(self, attr, v, operator, is_not):
def _range_query_handle(self, attr, v, operator):
left, right = v.split("_TO_")
left, right = left.strip()[1:], right.strip()[:-1]
if is_not:
self._operator2query(operator).append({
"bool": {
"must_not": [
{
"range": {
attr: {
"lte": self._digit(right),
"gte": self._digit(left),
"boost": 2.0
}
}
}
]
self._operator2query(operator).append({
"range": {
attr: {
"lte": self._digit(right),
"gte": self._digit(left),
"boost": 2.0
}
})
else:
self._operator2query(operator).append({
"range": {
attr: {
"lte": self._digit(right),
"gte": self._digit(left),
"boost": 2.0
}
}
})
}
})
def _comparison_query_handle(self, attr, v, operator):
if v.startswith(">="):
@ -168,50 +121,21 @@ class Search(object):
}
})
def _match_query_handle(self, attr, v, operator, is_not):
def _match_query_handle(self, attr, v, operator):
if "*" in v:
if is_not:
self._operator2query(operator).append({
"bool": {
"must_not": [
{
"wildcard": {
attr: v.lower() if isinstance(v, six.string_types) else v
}
}
]
}
})
else:
self._operator2query(operator).append({
"wildcard": {
attr: v.lower() if isinstance(v, six.string_types) else v
}
})
self._operator2query(operator).append({
"wildcard": {
attr: v
}
})
else:
if attr == "ci_type" and v.isdigit():
attr = "type_id"
if is_not:
self._operator2query(operator).append({
"bool": {
"must_not": [
{
"term": {
attr: v.lower() if isinstance(v, six.string_types) else v
}
}
]
}
})
else:
self._operator2query(operator).append({
"term": {
attr: v.lower() if isinstance(v, six.string_types) else v
}
})
self._operator2query(operator).append({
"term": {
attr: v
}
})
def __query_build_by_field(self, queries):
@ -221,28 +145,25 @@ class Search(object):
v = ":".join(q.split(":")[1:]).strip()
field_name, field_type, operator = self._attr_name_proc(k)
if field_name:
is_not = True if operator == "|~" else False
# in query
if v.startswith("(") and v.endswith(")"):
self._in_query_handle(field_name, v, is_not)
self._in_query_handle(field_name, v)
# range query
elif v.startswith("[") and v.endswith("]") and "_TO_" in v:
self._range_query_handle(field_name, v, operator, is_not)
self._range_query_handle(field_name, v, operator)
# comparison query
elif v.startswith(">=") or v.startswith("<=") or v.startswith(">") or v.startswith("<"):
self._comparison_query_handle(field_name, v, operator)
else:
self._match_query_handle(field_name, v, operator, is_not)
self._match_query_handle(field_name, v, operator)
else:
raise SearchError(ErrFormat.argument_invalid.format("q"))
raise SearchError("argument q format invalid: {0}".format(q))
elif q:
raise SearchError(ErrFormat.argument_invalid.format("q"))
raise SearchError("argument q format invalid: {0}".format(q))
def _query_build_raw(self):
queries = handle_arg_list(self.orig_query)
current_app.logger.debug(queries)
self.__query_build_by_field(queries)
@ -264,7 +185,7 @@ class Search(object):
for field in self.facet_field:
attr = AttributeCache.get(field)
if not attr:
raise SearchError(ErrFormat.attribute_not_found(field))
raise SearchError("Facet by <{0}> does not exist".format(field))
aggregations['aggs'].update({
field: {
"terms": {
@ -295,10 +216,10 @@ class Search(object):
attr = AttributeCache.get(field)
if not attr:
raise SearchError(ErrFormat.attribute_not_found.format(field))
raise SearchError("Sort by <{0}> does not exist".format(field))
sort_by = ("{0}.keyword".format(field)
if attr.value_type not in (ValueTypeEnum.INT, ValueTypeEnum.FLOAT) else field)
sort_by = "{0}.keyword".format(field) \
if attr.value_type not in (ValueTypeEnum.INT, ValueTypeEnum.FLOAT) else field
sorts.append({sort_by: {"order": sort_type}})
self.query.update(dict(sort=sorts))
@ -315,7 +236,7 @@ class Search(object):
numfound, cis, facet = self._query_build_raw()
except Exception as e:
current_app.logger.error(str(e))
raise SearchError(ErrFormat.unknown_search_error)
raise SearchError("unknown search error")
total = len(cis)


@ -0,0 +1,77 @@
# -*- coding:utf-8 -*-
import json
from flask import abort
from flask import current_app
from api.extensions import rd
from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
from api.models.cmdb import CI
class Search(object):
def __init__(self, root_id, level=1, query=None, fl=None, facet_field=None, page=1, count=None, sort=None):
self.orig_query = query
self.fl = fl
self.facet_field = facet_field
self.page = page
self.count = count or current_app.config.get("DEFAULT_PAGE_COUNT")
self.sort = sort or ("ci_id" if current_app.config.get("USE_ES") else None)
self.root_id = root_id
self.level = int(level)
def search(self):
ci = CI.get_by_id(self.root_id) or abort(404, "CI <{0}> does not exist".format(self.root_id))
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
for _ in range(0, self.level):
print(rd.get(ids, REDIS_PREFIX_CI_RELATION))
_tmp = list(map(json.loads, filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or [])))
ids = [j for i in _tmp for j in i]
if not self.orig_query or ("_type:" not in self.orig_query
and "type_id:" not in self.orig_query
and "ci_type:" not in self.orig_query):
type_ids = CITypeRelationManager.get_child_type_ids(ci.type_id, self.level)
self.orig_query = "_type:({0}),{1}".format(";".join(list(map(str, type_ids))), self.orig_query)
if not ids:
# cis, counter, total, self.page, numfound, facet_
return [], {}, 0, self.page, 0, {}
if current_app.config.get("USE_ES"):
return SearchFromES(self.orig_query,
fl=self.fl,
facet_field=self.facet_field,
page=self.page,
count=self.count,
sort=self.sort,
ci_ids=ids).search()
else:
return SearchFromDB(self.orig_query,
fl=self.fl,
facet_field=self.facet_field,
page=self.page,
count=self.count,
sort=self.sort,
ci_ids=ids).search()
def statistics(self):
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
for l in range(0, self.level):
if l == 0:
_tmp = list(map(json.loads, [i or '[]' for i in rd.get(ids, REDIS_PREFIX_CI_RELATION) or []]))
else:
for idx, i in enumerate(_tmp):
if i:
__tmp = list(map(json.loads, filter(lambda x: x is not None,
rd.get(i, REDIS_PREFIX_CI_RELATION) or [])))
_tmp[idx] = [j for i in __tmp for j in i]
else:
_tmp[idx] = []
return {_id: len(_tmp[idx]) for idx, _id in enumerate(ids)}
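For reference, a minimal usage sketch of the relation search above, assuming a Flask application context and that CI #1 exists with its relations cached in Redis; the id, level and query values are illustrative only:
# Hedged sketch, not part of the diff: ids and query string are made up.
s = Search(root_id=1, level=2, query="_type:server", page=1, count=25)
result = s.search()                                     # delegates to SearchFromDB or SearchFromES
children = Search(root_id=[1], level=1).statistics()    # {ci_id: number_of_direct_children}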

116
api/lib/cmdb/utils.py Normal file
View File

@ -0,0 +1,116 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
import datetime
import six
from markupsafe import escape
import api.models.cmdb as model
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ValueTypeEnum
def string2int(x):
return int(float(x))
def str2datetime(x):
try:
return datetime.datetime.strptime(x, "%Y-%m-%d")
except ValueError:
pass
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")
class ValueTypeMap(object):
deserialize = {
ValueTypeEnum.INT: string2int,
ValueTypeEnum.FLOAT: float,
ValueTypeEnum.TEXT: lambda x: escape(x).encode('utf-8').decode('utf-8'),
ValueTypeEnum.TIME: lambda x: escape(x).encode('utf-8').decode('utf-8'),
ValueTypeEnum.DATETIME: str2datetime,
ValueTypeEnum.DATE: str2datetime,
}
serialize = {
ValueTypeEnum.INT: int,
ValueTypeEnum.FLOAT: float,
ValueTypeEnum.TEXT: lambda x: x if isinstance(x, six.text_type) else str(x),
ValueTypeEnum.TIME: lambda x: x if isinstance(x, six.text_type) else str(x),
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d"),
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S"),
}
serialize2 = {
ValueTypeEnum.INT: int,
ValueTypeEnum.FLOAT: float,
ValueTypeEnum.TEXT: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
ValueTypeEnum.TIME: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATE: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATETIME: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
}
choice = {
ValueTypeEnum.INT: model.IntegerChoice,
ValueTypeEnum.FLOAT: model.FloatChoice,
ValueTypeEnum.TEXT: model.TextChoice,
}
table = {
ValueTypeEnum.INT: model.CIValueInteger,
ValueTypeEnum.TEXT: model.CIValueText,
ValueTypeEnum.DATETIME: model.CIValueDateTime,
ValueTypeEnum.DATE: model.CIValueDateTime,
ValueTypeEnum.TIME: model.CIValueText,
ValueTypeEnum.FLOAT: model.CIValueFloat,
'index_{0}'.format(ValueTypeEnum.INT): model.CIIndexValueInteger,
'index_{0}'.format(ValueTypeEnum.TEXT): model.CIIndexValueText,
'index_{0}'.format(ValueTypeEnum.DATETIME): model.CIIndexValueDateTime,
'index_{0}'.format(ValueTypeEnum.DATE): model.CIIndexValueDateTime,
'index_{0}'.format(ValueTypeEnum.TIME): model.CIIndexValueText,
'index_{0}'.format(ValueTypeEnum.FLOAT): model.CIIndexValueFloat,
}
table_name = {
ValueTypeEnum.INT: 'c_value_integers',
ValueTypeEnum.TEXT: 'c_value_texts',
ValueTypeEnum.DATETIME: 'c_value_datetime',
ValueTypeEnum.DATE: 'c_value_datetime',
ValueTypeEnum.TIME: 'c_value_texts',
ValueTypeEnum.FLOAT: 'c_value_floats',
'index_{0}'.format(ValueTypeEnum.INT): 'c_value_index_integers',
'index_{0}'.format(ValueTypeEnum.TEXT): 'c_value_index_texts',
'index_{0}'.format(ValueTypeEnum.DATETIME): 'c_value_index_datetime',
'index_{0}'.format(ValueTypeEnum.DATE): 'c_value_index_datetime',
'index_{0}'.format(ValueTypeEnum.TIME): 'c_value_index_texts',
'index_{0}'.format(ValueTypeEnum.FLOAT): 'c_value_index_floats',
}
es_type = {
ValueTypeEnum.INT: 'long',
ValueTypeEnum.TEXT: 'text',
ValueTypeEnum.DATETIME: 'text',
ValueTypeEnum.DATE: 'text',
ValueTypeEnum.TIME: 'text',
ValueTypeEnum.FLOAT: 'float'
}
class TableMap(object):
def __init__(self, attr_name=None):
self.attr_name = attr_name
@property
def table(self):
attr = AttributeCache.get(self.attr_name)
i = "index_{0}".format(attr.value_type) if attr.is_index else attr.value_type
return ValueTypeMap.table.get(i)
@property
def table_name(self):
attr = AttributeCache.get(self.attr_name)
i = "index_{0}".format(attr.value_type) if attr.is_index else attr.value_type
return ValueTypeMap.table_name.get(i)
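The two deserialize helpers above behave as shown in this standalone sketch (values are illustrative); TableMap then routes an attribute to its value table, or to the index table when is_index is set, using these maps:
print(string2int("3.0"))                     # -> 3
print(str2datetime("2019-11-30"))            # -> datetime.datetime(2019, 11, 30, 0, 0)
print(str2datetime("2019-11-30 23:07:12"))   # -> same date with the time component kept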

147
api/lib/cmdb/value.py Normal file
View File

@ -0,0 +1,147 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from flask import abort
from api.extensions import db
from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.cmdb.utils import TableMap
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.utils import handle_arg_list
class AttributeValueManager(object):
"""
manage CI attribute values
"""
def __init__(self):
pass
@staticmethod
def _get_attr(key):
"""
:param key: id, name or alias
:return: attribute instance
"""
return AttributeCache.get(key)
def get_attr_values(self, fields, ci_id, ret_key="name", unique_key=None, use_master=False):
"""
:param fields:
:param ci_id:
:param ret_key: It can be name or alias
:param unique_key: primary attribute
:param use_master: Only for master-slave read-write separation
:return:
"""
res = dict()
for field in fields:
attr = self._get_attr(field)
if not attr:
continue
value_table = TableMap(attr_name=attr.name).table
rs = value_table.get_by(ci_id=ci_id,
attr_id=attr.id,
use_master=use_master,
to_dict=False)
field_name = getattr(attr, ret_key)
if attr.is_list:
res[field_name] = [ValueTypeMap.serialize[attr.value_type](i.value) for i in rs]
else:
res[field_name] = ValueTypeMap.serialize[attr.value_type](rs[0].value) if rs else None
if unique_key is not None and attr.id == unique_key.id and rs:
res['unique'] = unique_key.name
return res
@staticmethod
def __deserialize_value(value_type, value):
if not value:
return value
deserialize = ValueTypeMap.deserialize[value_type]
try:
v = deserialize(value)
return v
except ValueError:
return abort(400, "attribute value <{0}> is invalid".format(value))
@staticmethod
def __check_is_choice(attr_id, value_type, value):
choice_values = AttributeManager.get_choice_values(attr_id, value_type)
if value not in choice_values:
return abort(400, "{0} does not exist in choice values".format(value))
@staticmethod
def __check_is_unique(value_table, attr_id, ci_id, value):
existed = db.session.query(value_table.attr_id).filter(
value_table.attr_id == attr_id).filter(value_table.deleted.is_(False)).filter(
value_table.value == value).filter(value_table.ci_id != ci_id).first()
existed and abort(400, "attribute <{0}> value {1} must be unique".format(attr_id, value))
def _validate(self, attr, value, value_table, ci_id):
v = self.__deserialize_value(attr.value_type, value)
attr.is_choice and value and self.__check_is_choice(attr.id, attr.value_type, v)
attr.is_unique and self.__check_is_unique(value_table, attr.id, ci_id, v)
return v
@staticmethod
def _write_change(ci_id, attr_id, operate_type, old, new):
AttributeHistoryManger.add(ci_id, [(attr_id, operate_type, old, new)])
def create_or_update_attr_value(self, key, value, ci_id, _no_attribute_policy=ExistPolicy.IGNORE):
"""
add or update attribute value, then write history
:param key: id, name or alias
:param value:
:param ci_id:
:param _no_attribute_policy: ignore or reject
:return:
"""
attr = self._get_attr(key)
if attr is None:
if _no_attribute_policy == ExistPolicy.IGNORE:
return
if _no_attribute_policy == ExistPolicy.REJECT:
return abort(400, 'attribute {0} does not exist'.format(key))
value_table = TableMap(attr_name=attr.name).table
existed_attr = value_table.get_by(attr_id=attr.id,
ci_id=ci_id,
first=True,
to_dict=False)
existed_value = existed_attr and existed_attr.value
operate_type = OperateType.ADD if existed_attr is None else OperateType.UPDATE
value_list = handle_arg_list(value) if attr.is_list else [value]
if not isinstance(value, list):
value_list = [value]
for v in value_list:
v = self._validate(attr, v, value_table, ci_id)
if not v and attr.value_type != ValueTypeEnum.TEXT:
v = None
if operate_type == OperateType.ADD:
if v is not None:
value_table.create(ci_id=ci_id, attr_id=attr.id, value=v)
self._write_change(ci_id, attr.id, operate_type, None, v)
elif existed_attr.value != v:
if v is not None:
existed_attr.update(value=v)
else:
existed_attr.delete()
self._write_change(ci_id, attr.id, operate_type, existed_value, v)
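A hedged usage sketch for AttributeValueManager, assuming an application/database context and an existing attribute named "hostname" on CI #1 (both illustrative):
manager = AttributeValueManager()
manager.create_or_update_attr_value("hostname", "web-01", ci_id=1)   # writes the value and a history record
values = manager.get_attr_values(["hostname"], ci_id=1)              # -> {"hostname": "web-01"}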

127
api/lib/database.py Normal file
View File

@ -0,0 +1,127 @@
# -*- coding:utf-8 -*-
import datetime
import six
from api.extensions import db
from api.lib.exception import CommitException
class FormatMixin(object):
def to_dict(self):
res = dict()
for k in getattr(self, "__table__").columns:
if not isinstance(getattr(self, k.name), datetime.datetime):
res[k.name] = getattr(self, k.name)
else:
res[k.name] = getattr(self, k.name).strftime('%Y-%m-%d %H:%M:%S')
return res
@classmethod
def get_columns(cls):
return {k.name: 1 for k in getattr(cls, "__mapper__").c.values()}
class CRUDMixin(FormatMixin):
@classmethod
def create(cls, flush=False, **kwargs):
return cls(**kwargs).save(flush=flush)
def update(self, flush=False, **kwargs):
kwargs.pop("id", None)
for attr, value in six.iteritems(kwargs):
if value is not None:
setattr(self, attr, value)
if flush:
return self.save(flush=flush)
return self.save()
def save(self, commit=True, flush=False):
db.session.add(self)
try:
if flush:
db.session.flush()
elif commit:
db.session.commit()
except Exception as e:
db.session.rollback()
raise CommitException(str(e))
return self
def delete(self, flush=False):
db.session.delete(self)
try:
if flush:
return db.session.flush()
return db.session.commit()
except Exception as e:
db.session.rollback()
raise CommitException(str(e))
def soft_delete(self, flush=False):
setattr(self, "deleted", True)
setattr(self, "deleted_at", datetime.datetime.now())
self.save(flush=flush)
@classmethod
def get_by_id(cls, _id):
if any((isinstance(_id, six.string_types) and _id.isdigit(),
isinstance(_id, (six.integer_types, float))), ):
return getattr(cls, "query").get(int(_id)) or None
@classmethod
def get_by(cls, first=False, to_dict=True, fl=None, exclude=None, deleted=False, use_master=False, **kwargs):
db_session = db.session if not use_master else db.session().using_bind("master")
fl = fl.strip().split(",") if fl and isinstance(fl, six.string_types) else (fl or [])
exclude = exclude.strip().split(",") if exclude and isinstance(exclude, six.string_types) else (exclude or [])
keys = cls.get_columns()
fl = [k for k in fl if k in keys]
fl = [k for k in keys if k not in exclude and not k.isupper()] if exclude else fl
fl = list(filter(lambda x: "." not in x, fl))
if hasattr(cls, "deleted") and deleted is not None:
kwargs["deleted"] = deleted
if fl:
query = db_session.query(*[getattr(cls, k) for k in fl])
query = query.filter_by(**kwargs)
result = [{k: getattr(i, k) for k in fl} for i in query]
else:
result = [i.to_dict() if to_dict else i for i in getattr(cls, 'query').filter_by(**kwargs)]
return result[0] if first and result else (None if first else result)
@classmethod
def get_by_like(cls, to_dict=True, **kwargs):
query = db.session.query(cls)
for k, v in kwargs.items():
query = query.filter(getattr(cls, k).ilike('%{0}%'.format(v)))
return [i.to_dict() if to_dict else i for i in query]
class SoftDeleteMixin(object):
deleted_at = db.Column(db.DateTime)
deleted = db.Column(db.Boolean, index=True, default=False)
class TimestampMixin(object):
created_at = db.Column(db.DateTime, default=lambda: datetime.datetime.now())
updated_at = db.Column(db.DateTime, onupdate=lambda: datetime.datetime.now())
class SurrogatePK(object):
__table_args__ = {"extend_existing": True}
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
class Model(SoftDeleteMixin, TimestampMixin, CRUDMixin, db.Model, SurrogatePK):
__abstract__ = True
class CRUDModel(db.Model, CRUDMixin):
__abstract__ = True
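A sketch of how these mixins are meant to be combined; the Server model below is hypothetical and only illustrates the CRUD/soft-delete surface defined above:
class Server(Model):                      # SurrogatePK id + soft delete + timestamps
    __tablename__ = "c_servers_example"   # hypothetical table name
    hostname = db.Column(db.String(64), index=True)

srv = Server.create(hostname="web-01")              # INSERT and commit
Server.get_by(hostname="web-01", first=True)        # -> dict of column values
srv.update(hostname="web-02")                       # UPDATE and commit
srv.soft_delete()                                   # sets deleted / deleted_at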

35
api/lib/decorator.py Normal file
View File

@ -0,0 +1,35 @@
# -*- coding:utf-8 -*-
from functools import wraps
from flask import abort
from flask import request
def kwargs_required(*required_args):
def decorate(func):
@wraps(func)
def wrapper(*args, **kwargs):
for arg in required_args:
if arg not in kwargs:
return abort(400, "Argument <{0}> is required".format(arg))
return func(*args, **kwargs)
return wrapper
return decorate
def args_required(*required_args):
def decorate(func):
@wraps(func)
def wrapper(*args, **kwargs):
for arg in required_args:
if arg not in request.values:
return abort(400, "Argument <{0}> is required".format(arg))
return func(*args, **kwargs)
return wrapper
return decorate
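A hypothetical Flask view wired up with the two decorators above; the route and argument name are illustrative:
from flask import Flask

app = Flask(__name__)

@app.route("/cis")
@args_required("type_id")           # aborts with 400 if ?type_id= is missing from the request
def list_cis():
    return "ok"

@kwargs_required("type_id")         # aborts with 400 if the keyword argument is missing
def rebuild_cache(**kwargs):
    return kwargs["type_id"]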

5
api/lib/exception.py Normal file
View File

@ -0,0 +1,5 @@
# -*- coding:utf-8 -*-
class CommitException(Exception):
pass

View File

@ -1,25 +1,25 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
import hashlib
import requests
from flask import abort
from flask import current_app
from flask_login import current_user
from flask import g
from future.moves.urllib.parse import urlparse
def build_api_key(path, params):
current_user is not None or abort(403, u"You must log in to perform this operation")
key = current_user.key
secret = current_user.secret
g.user is not None or abort(403, "You must log in to perform this operation")
key = g.user.key
secret = g.user.secret
values = "".join([str(params[k]) for k in sorted(params.keys())
if params[k] is not None]) if params.keys() else ""
_secret = "".join([path, secret, values]).encode("utf-8")
params["_secret"] = hashlib.sha1(_secret).hexdigest()
params["_key"] = key
return params
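The same signature can be reproduced outside the application, which is how API clients sign key/secret requests; a standalone sketch with made-up credentials:
import hashlib

path, key, secret = "/api/v0.1/ci", "my-key", "my-secret"      # illustrative values only
params = {"type_id": 5, "page": 1}
values = "".join(str(params[k]) for k in sorted(params) if params[k] is not None)
params["_secret"] = hashlib.sha1((path + secret + values).encode("utf-8")).hexdigest()
params["_key"] = key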

View File

@ -1,54 +1,51 @@
# -*- coding:utf-8 -*-
import smtplib
import time
from email import Utils
from email.header import Header
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import make_msgid
from flask import current_app
def send_mail(sender, receiver, subject, content, ctype="html", pics=()):
"""subject and body are unicode objects"""
if not receiver:
return
if not sender:
sender = current_app.config.get("DEFAULT_MAIL_SENDER")
smtpserver = current_app.config.get("MAIL_SERVER")
smtp_server = current_app.config.get("MAIL_SERVER")
if ctype == "html":
msg = MIMEText(content, 'html', 'utf-8')
else:
msg = MIMEText(content, 'plain', 'utf-8')
if len(pics) != 0:
msgRoot = MIMEMultipart('related')
msgText = MIMEText(content, 'html', 'utf-8')
msgRoot.attach(msgText)
msg_root = MIMEMultipart('related')
msg_text = MIMEText(content, 'html', 'utf-8')
msg_root.attach(msg_text)
i = 1
for pic in pics:
fp = open(pic, "rb")
image = MIMEImage(fp.read())
fp.close()
image.add_header('Content-ID', '<img%02d>' % i)
msgRoot.attach(image)
msg_root.attach(image)
i += 1
msg = msgRoot
msg = msg_root
msg['Subject'] = Header(subject, 'utf-8')
msg['From'] = sender
msg['To'] = ';'.join(receiver)
msg['Message-ID'] = make_msgid()
msg['Message-ID'] = Utils.make_msgid()
msg['date'] = time.strftime('%a, %d %b %Y %H:%M:%S %z')
if current_app.config.get("MAIL_USE_SSL") or current_app.config.get("MAIL_USE_TLS"):
smtp = smtplib.SMTP_SSL(smtpserver)
else:
smtp = smtplib.SMTP()
smtp.connect(smtpserver, 25)
if current_app.config.get("MAIL_PASSWORD") != "":
smtp.login(current_app.config.get("MAIL_USERNAME"), current_app.config.get("MAIL_PASSWORD"))
smtp = smtplib.SMTP()
smtp.connect(smtp_server, 25)
username, password = current_app.config.get("MAIL_USERNAME"), current_app.config.get("MAIL_PASSWORD")
if username and password:
smtp.login(username, password)
smtp.sendmail(sender, receiver, msg.as_string())
smtp.quit()
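A hedged usage sketch, assuming a Flask application context with the MAIL_* settings configured; the addresses are placeholders:
send_mail(sender=None,                           # falls back to DEFAULT_MAIL_SENDER
          receiver=["ops@example.com"],
          subject="CMDB notification",
          content="<b>relation view updated</b>")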

View File

@ -0,0 +1,23 @@
# -*- coding:utf-8 -*-
from functools import wraps
from flask import request
from flask import abort
from api.lib.perm.acl.cache import AppCache
def validate_app(func):
@wraps(func)
def wrapper(*args, **kwargs):
app_id = request.values.get('app_id')
app = AppCache.get(app_id)
if app is None:
return abort(400, "App <{0}> does not exist".format(app_id))
request.values['app_id'] = app.id
return func(*args, **kwargs)
return wrapper

139
api/lib/perm/acl/acl.py Normal file
View File

@ -0,0 +1,139 @@
# -*- coding:utf-8 -*-
import functools
import six
from flask import current_app, g, request
from flask import session, abort
from api.extensions import cache
def get_access_token():
return
class AccessTokenCache(object):
@classmethod
def get(cls):
if cache.get("AccessToken") is not None:
return cache.get("AccessToken")
res = get_access_token() or ""
cache.set("AccessToken", res, timeout=60 * 60)
return res
@classmethod
def clean(cls):
cache.delete("AccessToken")
class ACLManager(object):
def __init__(self):
self.access_token = AccessTokenCache.get()
self.acl_session = dict(uid=session.get("uid"),
token=self.access_token)
self.user_info = session["acl"] if "acl" in session else {}
def add_resource(self, name, resource_type_name=None):
pass
def grant_resource_to_role(self, name, role, resource_type_name=None):
pass
def del_resource(self, name, resource_type_name=None):
pass
def get_user_info(self, username):
return dict()
def get_resources(self, resource_type_name=None):
if "acl" not in session:
abort(405)
return []
def has_permission(self, resource_name, resource_type, perm):
if "acl" not in session:
abort(405)
return True
def validate_permission(resources, resource_type, perm):
if not resources:
return
if current_app.config.get("USE_ACL"):
if g.user.username == "worker":
return
resources = [resources] if isinstance(resources, six.string_types) else resources
for resource in resources:
if not ACLManager().has_permission(resource, resource_type, perm):
return abort(403, "has no permission")
def can_access_resources(resource_type):
def decorator_can_access_resources(func):
@functools.wraps(func)
def wrapper_can_access_resources(*args, **kwargs):
if current_app.config.get("USE_ACL"):
res = ACLManager().get_resources(resource_type)
result = {i.get("name"): i.get("permissions") for i in res}
if hasattr(g, "resources"):
g.resources.update({resource_type: result})
else:
g.resources = {resource_type: result}
return func(*args, **kwargs)
return wrapper_can_access_resources
return decorator_can_access_resources
def has_perm(resources, resource_type, perm):
def decorator_has_perm(func):
@functools.wraps(func)
def wrapper_has_perm(*args, **kwargs):
if not resources:
return
if current_app.config.get("USE_ACL"):
validate_permission(resources, resource_type, perm)
return func(*args, **kwargs)
return wrapper_has_perm
return decorator_has_perm
def has_perm_from_args(arg_name, resource_type, perm, callback=None):
def decorator_has_perm(func):
@functools.wraps(func)
def wrapper_has_perm(*args, **kwargs):
if not arg_name:
return
resource = request.view_args.get(arg_name) or request.values.get(arg_name)
if callback is not None and resource:
resource = callback(resource)
if current_app.config.get("USE_ACL") and resource:
validate_permission(resource, resource_type, perm)
return func(*args, **kwargs)
return wrapper_has_perm
return decorator_has_perm
def role_required(role_name):
def decorator_role_required(func):
@functools.wraps(func)
def wrapper_role_required(*args, **kwargs):
if not role_name:
return
if current_app.config.get("USE_ACL"):
if role_name not in session.get("acl", {}).get("parentRoles", []):
return abort(403, "Role {0} is required".format(role_name))
return func(*args, **kwargs)
return wrapper_role_required
return decorator_role_required
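A hypothetical view protected with the decorators above; the resource type "CIType", the permission "read" and the role "admin" are illustrative names, and the checks only apply when USE_ACL is enabled:
from flask import Flask

app = Flask(__name__)

@app.route("/ci_types/<int:type_id>")
@role_required("admin")
@has_perm_from_args("type_id", "CIType", "read")
def get_ci_type(type_id):
    return str(type_id)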

173
api/lib/perm/acl/cache.py Normal file
View File

@ -0,0 +1,173 @@
# -*- coding:utf-8 -*-
from api.extensions import cache
from api.models.acl import App
from api.models.acl import Permission
from api.models.acl import Role
from api.models.acl import User
class AppCache(object):
PREFIX_ID = "App::id::{0}"
PREFIX_NAME = "App::name::{0}"
@classmethod
def get(cls, key):
app = cache.get(cls.PREFIX_ID.format(key)) or cache.get(cls.PREFIX_NAME.format(key))
if app is None:
app = App.get_by_id(key) or App.get_by(name=key, to_dict=False, first=True)
if app is not None:
cls.set(app)
return app
@classmethod
def set(cls, app):
cache.set(cls.PREFIX_ID.format(app.id), app)
cache.set(cls.PREFIX_NAME.format(app.name), app)
@classmethod
def clean(cls, app):
cache.delete(cls.PREFIX_ID.format(app.id))
cache.delete(cls.PREFIX_NAME.format(app.name))
class UserCache(object):
PREFIX_ID = "User::uid::{0}"
PREFIX_NAME = "User::username::{0}"
PREFIX_NICK = "User::nickname::{0}"
@classmethod
def get(cls, key):
user = cache.get(cls.PREFIX_ID.format(key)) or \
cache.get(cls.PREFIX_NAME.format(key)) or \
cache.get(cls.PREFIX_NICK.format(key))
if not user:
user = User.query.get(key) or \
User.query.get_by_username(key) or \
User.query.get_by_nickname(key)
if user:
cls.set(user)
return user
@classmethod
def set(cls, user):
cache.set(cls.PREFIX_ID.format(user.uid), user)
cache.set(cls.PREFIX_NAME.format(user.username), user)
cache.set(cls.PREFIX_NICK.format(user.nickname), user)
@classmethod
def clean(cls, user):
cache.delete(cls.PREFIX_ID.format(user.uid))
cache.delete(cls.PREFIX_NAME.format(user.username))
cache.delete(cls.PREFIX_NICK.format(user.nickname))
class RoleCache(object):
PREFIX_ID = "Role::id::{0}"
PREFIX_NAME = "Role::app_id::{0}::name::{1}"
@classmethod
def get_by_name(cls, app_id, name):
role = cache.get(cls.PREFIX_NAME.format(app_id, name))
if role is None:
role = Role.get_by(app_id=app_id, name=name, first=True, to_dict=False)
if role is not None:
cache.set(cls.PREFIX_NAME.format(app_id, name), role)
return role
@classmethod
def get(cls, rid):
role = cache.get(cls.PREFIX_ID.format(rid))
if role is None:
role = Role.get_by_id(rid)
if role is not None:
cache.set(cls.PREFIX_ID.format(rid), role)
return role
@classmethod
def clean(cls, rid):
cache.delete(cls.PREFIX_ID.format(rid))
@classmethod
def clean_by_name(cls, app_id, name):
cache.delete(cls.PREFIX_NAME.format(app_id, name))
class RoleRelationCache(object):
PREFIX_PARENT = "RoleRelationParent::id::{0}"
PREFIX_CHILDREN = "RoleRelationChildren::id::{0}"
PREFIX_RESOURCES = "RoleRelationResources::id::{0}"
@classmethod
def get_parent_ids(cls, rid):
parent_ids = cache.get(cls.PREFIX_PARENT.format(rid))
if not parent_ids:
from api.lib.perm.acl.role import RoleRelationCRUD
parent_ids = RoleRelationCRUD.get_parent_ids(rid)
cache.set(cls.PREFIX_PARENT.format(rid), parent_ids, timeout=0)
return parent_ids
@classmethod
def get_child_ids(cls, rid):
child_ids = cache.get(cls.PREFIX_CHILDREN.format(rid))
if not child_ids:
from api.lib.perm.acl.role import RoleRelationCRUD
child_ids = RoleRelationCRUD.get_child_ids(rid)
cache.set(cls.PREFIX_CHILDREN.format(rid), child_ids, timeout=0)
return child_ids
@classmethod
def get_resources(cls, rid):
"""
:param rid:
:return: {id2perms: {resource_id: [perm,]}, group2perms: {group_id: [perm, ]}}
"""
resources = cache.get(cls.PREFIX_RESOURCES.format(rid))
if not resources:
from api.lib.perm.acl.role import RoleCRUD
resources = RoleCRUD.get_resources(rid)
cache.set(cls.PREFIX_RESOURCES.format(rid), resources, timeout=0)
return resources or {}
@classmethod
def rebuild(cls, rid):
cls.clean(rid)
cls.get_parent_ids(rid)
cls.get_child_ids(rid)
cls.get_resources(rid)
@classmethod
def clean(cls, rid):
cache.delete(cls.PREFIX_PARENT.format(rid))
cache.delete(cls.PREFIX_CHILDREN.format(rid))
cache.delete(cls.PREFIX_RESOURCES.format(rid))
class PermissionCache(object):
PREFIX_ID = "Permission::id::{0}"
PREFIX_NAME = "Permission::name::{0}"
@classmethod
def get(cls, key):
perm = cache.get(cls.PREFIX_ID.format(key))
perm = perm or cache.get(cls.PREFIX_NAME.format(key))
if perm is None:
perm = Permission.get_by_id(key)
perm = perm or Permission.get_by(name=key, first=True, to_dict=False)
if perm is not None:
cache.set(cls.PREFIX_ID.format(key), perm)
return perm
@classmethod
def clean(cls, key):
cache.delete(cls.PREFIX_ID.format(key))
cache.delete(cls.PREFIX_NAME.format(key))

View File

@ -0,0 +1,4 @@
# -*- coding:utf-8 -*-
ACL_QUEUE = "acl_async"

View File

@ -0,0 +1,42 @@
# -*- coding:utf-8 -*-
from api.lib.perm.acl.cache import PermissionCache
from api.lib.perm.acl.cache import RoleCache
from api.models.acl import RolePermission
class PermissionCRUD(object):
@staticmethod
def get_all(resource_id=None, group_id=None):
result = dict()
if resource_id is not None:
perms = RolePermission.get_by(resource_id=resource_id, to_dict=False)
else:
perms = RolePermission.get_by(group_id=group_id, to_dict=False)
for perm in perms:
perm_dict = PermissionCache.get(perm.perm_id).to_dict()
perm_dict.update(dict(rid=perm.rid))
result.setdefault(RoleCache.get(perm.rid).name, []).append(perm_dict)
return result
@staticmethod
def grant(rid, perms, resource_id=None, group_id=None):
for perm in perms:
perm = PermissionCache.get(perm)
existed = RolePermission.get_by(rid=rid, perm_id=perm.id, group_id=group_id, resource_id=resource_id)
existed or RolePermission.create(rid=rid, perm_id=perm.id, group_id=group_id, resource_id=resource_id)
@staticmethod
def revoke(rid, perms, resource_id=None, group_id=None):
for perm in perms:
perm = PermissionCache.get(perm)
existed = RolePermission.get_by(rid=rid,
perm_id=perm.id,
group_id=group_id,
resource_id=resource_id,
first=True,
to_dict=False)
existed and existed.soft_delete()
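A hedged sketch of granting and revoking permissions with the helpers above; the role id, resource id and permission names are illustrative:
PermissionCRUD.grant(rid=2, perms=["read", "update"], resource_id=10)
PermissionCRUD.revoke(rid=2, perms=["update"], resource_id=10)    # soft-deletes the RolePermission row
grants = PermissionCRUD.get_all(resource_id=10)                   # -> {role_name: [perm dicts]}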

View File

@ -0,0 +1,175 @@
# -*- coding:utf-8 -*-
from flask import abort
from api.extensions import db
from api.models.acl import Permission
from api.models.acl import Resource
from api.models.acl import ResourceGroup
from api.models.acl import ResourceGroupItems
from api.models.acl import ResourceType
class ResourceTypeCRUD(object):
@staticmethod
def search(q, app_id, page=1, page_size=None):
query = db.session.query(ResourceType).filter(
ResourceType.deleted.is_(False)).filter(ResourceType.app_id == app_id)
if q:
query = query.filter(ResourceType.name.ilike('%{0}%'.format(q)))
numfound = query.count()
res = query.offset((page - 1) * page_size).limit(page_size)
rt_ids = [i.id for i in res]
perms = db.session.query(Permission).filter(Permission.deleted.is_(False)).filter(
Permission.resource_type_id.in_(rt_ids))
id2perms = dict()
for perm in perms:
id2perms.setdefault(perm.resource_type_id, []).append(perm.to_dict())
return numfound, res, id2perms
@staticmethod
def get_perms(rt_id):
perms = Permission.get_by(resource_type_id=rt_id, to_dict=False)
return [i.to_dict() for i in perms]
@classmethod
def add(cls, app_id, name, perms):
ResourceType.get_by(name=name, app_id=app_id) and abort(
400, "ResourceType <{0}> already exists".format(name))
rt = ResourceType.create(name=name, app_id=app_id)
cls.update_perms(rt.id, perms, app_id)
return rt
@classmethod
def update(cls, rt_id, **kwargs):
kwargs.pop('app_id', None)
rt = ResourceType.get_by_id(rt_id) or abort(404, "ResourceType <{0}> is not found".format(rt_id))
if 'name' in kwargs:
other = ResourceType.get_by(name=kwargs['name'], app_id=rt.app_id, to_dict=False, first=True)
if other and other.id != rt_id:
return abort(400, "ResourceType <{0}> is duplicated".format(kwargs['name']))
if 'perms' in kwargs:
cls.update_perms(rt_id, kwargs.pop('perms'), rt.app_id)
return rt.update(**kwargs)
@classmethod
def delete(cls, rt_id):
rt = ResourceType.get_by_id(rt_id) or abort(404, "ResourceType <{0}> is not found".format(rt_id))
cls.update_perms(rt_id, [], rt.app_id)
rt.soft_delete()
@classmethod
def update_perms(cls, rt_id, perms, app_id):
existed = Permission.get_by(resource_type_id=rt_id, to_dict=False)
existed_names = [i.name for i in existed]
for i in existed:
if i.name not in perms:
i.soft_delete()
for i in perms:
if i not in existed_names:
Permission.create(resource_type_id=rt_id,
name=i,
app_id=app_id)
class ResourceGroupCRUD(object):
@staticmethod
def search(q, app_id, page=1, page_size=None):
query = db.session.query(ResourceGroup).filter(
ResourceGroup.deleted.is_(False)).filter(ResourceGroup.app_id == app_id)
if q:
query = query.filter(ResourceGroup.name.ilike("%{0}%".format(q)))
numfound = query.count()
return numfound, query.offset((page - 1) * page_size).limit(page_size)
@staticmethod
def get_items(rg_id):
items = ResourceGroupItems.get_by(group_id=rg_id, to_dict=False)
return [i.resource.to_dict() for i in items]
@staticmethod
def add(name, type_id, app_id):
ResourceGroup.get_by(name=name, resource_type_id=type_id, app_id=app_id) and abort(
400, "ResourceGroup <{0}> already exists".format(name))
return ResourceGroup.create(name=name, resource_type_id=type_id, app_id=app_id)
@staticmethod
def update(rg_id, items):
existed = ResourceGroupItems.get_by(group_id=rg_id, to_dict=False)
existed_ids = [i.resource_id for i in existed]
for i in existed:
if i.resource_id not in items:
i.soft_delete()
for _id in items:
if _id not in existed_ids:
ResourceGroupItems.create(group_id=rg_id, resource_id=_id)
@staticmethod
def delete(rg_id):
rg = ResourceGroup.get_by_id(rg_id) or abort(404, "ResourceGroup <{0}> is not found".format(rg_id))
rg.soft_delete()
items = ResourceGroupItems.get_by(group_id=rg_id, to_dict=False)
for item in items:
item.soft_delete()
class ResourceCRUD(object):
@staticmethod
def search(q, app_id, resource_type_id=None, page=1, page_size=None):
query = db.session.query(Resource).filter(
Resource.deleted.is_(False)).filter(Resource.app_id == app_id)
if q:
query = query.filter(Resource.name.ilike("%{0}%".format(q)))
if resource_type_id:
query = query.filter(Resource.resource_type_id == resource_type_id)
numfound = query.count()
return numfound, query.offset((page - 1) * page_size).limit(page_size)
@staticmethod
def add(name, type_id, app_id):
Resource.get_by(name=name, resource_type_id=type_id, app_id=app_id) and abort(
400, "Resource <{0}> already exists".format(name))
return Resource.create(name=name, resource_type_id=type_id, app_id=app_id)
@staticmethod
def update(_id, name):
resource = Resource.get_by_id(_id) or abort(404, "Resource <{0}> is not found".format(_id))
other = Resource.get_by(name=name, resource_type_id=resource.resource_type_id, to_dict=False, first=True)
if other and other.id != _id:
return abort(400, "Resource <{0}> is duplicated".format(name))
return resource.update(name=name)
@staticmethod
def delete(_id):
resource = Resource.get_by_id(_id) or abort(404, "Resource <{0}> is not found".format(_id))
resource.soft_delete()
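A hedged sketch of registering a resource type and a resource through the CRUD classes above, assuming a database context; app_id 1 and the names are illustrative:
rt = ResourceTypeCRUD.add(app_id=1, name="CIType", perms=["read", "update", "delete"])
res = ResourceCRUD.add(name="server", type_id=rt.id, app_id=1)
ResourceCRUD.update(res.id, name="web-server")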

207
api/lib/perm/acl/role.py Normal file
View File

@ -0,0 +1,207 @@
# -*- coding:utf-8 -*-
import six
from flask import abort
from api.extensions import db
from api.lib.perm.acl.cache import RoleCache
from api.lib.perm.acl.cache import RoleRelationCache
from api.lib.perm.acl.const import ACL_QUEUE
from api.models.acl import Resource
from api.models.acl import ResourceGroupItems
from api.models.acl import Role
from api.models.acl import RolePermission
from api.models.acl import RoleRelation
from api.tasks.acl import role_rebuild
class RoleRelationCRUD(object):
@staticmethod
def get_parents(rids=None, uids=None):
rid2uid = dict()
if uids is not None:
uids = [uids] if isinstance(uids, six.integer_types) else uids
rids = db.session.query(Role).filter(Role.deleted.is_(False)).filter(Role.uid.in_(uids))
rid2uid = {i.rid: i.uid for i in rids}
rids = [i.rid for i in rids]
else:
rids = [rids] if isinstance(rids, six.integer_types) else rids
res = db.session.query(RoleRelation).filter(
RoleRelation.child_id.in_(rids)).filter(RoleRelation.deleted.is_(False))
id2parents = {}
for i in res:
id2parents.setdefault(rid2uid.get(i.child_id, i.child_id), []).append(RoleCache.get(i.parent_id).to_dict())
return id2parents
@staticmethod
def get_parent_ids(rid):
res = RoleRelation.get_by(child_id=rid, to_dict=False)
return [i.parent_id for i in res]
@staticmethod
def get_child_ids(rid):
res = RoleRelation.get_by(parent_id=rid, to_dict=False)
return [i.child_id for i in res]
@classmethod
def recursive_parent_ids(cls, rid):
all_parent_ids = set()
def _get_parent(_id):
all_parent_ids.add(_id)
parent_ids = RoleRelationCache.get_parent_ids(_id)
for parent_id in parent_ids:
_get_parent(parent_id)
_get_parent(rid)
return all_parent_ids
@classmethod
def recursive_child_ids(cls, rid):
all_child_ids = set()
def _get_children(_id):
all_child_ids.add(_id)
child_ids = RoleRelationCache.get_child_ids(_id)
for child_id in child_ids:
_get_children(child_id)
_get_children(rid)
return all_child_ids
@staticmethod
def add(parent_id, child_id):
RoleRelation.get_by(parent_id=parent_id, child_id=child_id) and abort(400, "It already exists")
return RoleRelation.create(parent_id=parent_id, child_id=child_id)
@staticmethod
def delete(_id):
existed = RoleRelation.get_by_id(_id) or abort(400, "RoleRelation <{0}> does not exist".format(_id))
existed.soft_delete()
@staticmethod
def delete2(parent_id, child_id):
existed = RoleRelation.get_by(parent_id=parent_id, child_id=child_id)
existed or abort(400, "RoleRelation < {0} -> {1} > does not exist".format(parent_id, child_id))
existed.soft_delete()
class RoleCRUD(object):
@staticmethod
def search(q, app_id, page=1, page_size=None):
query = db.session.query(Role).filter(Role.deleted.is_(False)).filter(
Role.app_id == app_id).filter(Role.uid.is_(None))
if q:
query = query.filter(Role.name.ilike('%{0}%'.format(q)))
numfound = query.count()
return numfound, query.offset((page - 1) * page_size).limit(page_size)
@staticmethod
def add_role(name, app_id, is_app_admin=False, uid=None):
Role.get_by(name=name, app_id=app_id) and abort(400, "Role <{0}> already exists".format(name))
return Role.create(name=name,
app_id=app_id,
is_app_admin=is_app_admin,
uid=uid)
@staticmethod
def update_role(rid, **kwargs):
kwargs.pop('app_id', None)
role = Role.get_by_id(rid) or abort(404, "Role <{0}> does not exist".format(rid))
RoleCache.clean(rid)
return role.update(**kwargs)
@classmethod
def delete_role(cls, rid):
role = Role.get_by_id(rid) or abort(404, "Role <{0}> does not exist".format(rid))
parent_ids = RoleRelationCRUD.get_parent_ids(rid)
child_ids = RoleRelationCRUD.get_child_ids(rid)
for i in RoleRelation.get_by(parent_id=rid, to_dict=False):
i.soft_delete()
for i in RoleRelation.get_by(child_id=rid, to_dict=False):
i.soft_delete()
for i in RolePermission.get_by(rid=rid, to_dict=False):
i.soft_delete()
role_rebuild.apply_async(args=(parent_ids + child_ids,), queue=ACL_QUEUE)
RoleCache.clean(rid)
RoleRelationCache.clean(rid)
role.soft_delete()
@staticmethod
def get_resources(rid):
res = RolePermission.get_by(rid=rid, to_dict=False)
id2perms = dict(id2perms={}, group2perms={})
for i in res:
if i.resource_id:
id2perms['id2perms'].setdefault(i.resource_id, []).append(i.perm.name)
elif i.group_id:
id2perms['group2perms'].setdefault(i.group_id, []).append(i.perm.name)
return id2perms
@staticmethod
def get_group_ids(resource_id):
return [i.group_id for i in ResourceGroupItems.get_by(resource_id=resource_id, to_dict=False)]
@classmethod
def has_permission(cls, rid, resource_name, perm):
resource = Resource.get_by(name=resource_name, first=True, to_dict=False)
resource = resource or abort(403, "Resource <{0}> is not in ACL".format(resource_name))
parent_ids = RoleRelationCRUD.recursive_parent_ids(rid)
group_ids = cls.get_group_ids(resource.id)
for parent_id in parent_ids:
id2perms = RoleRelationCache.get_resources(parent_id)
perms = id2perms['id2perms'].get(resource.id, [])
if perms and {perm}.issubset(set(perms)):
return True
for group_id in group_ids:
perms = id2perms['group2perms'].get(group_id, [])
if perms and {perm}.issubset(set(perms)):
return True
return False
@classmethod
def get_permissions(cls, rid, resource_name):
resource = Resource.get_by(name=resource_name, first=True, to_dict=False)
resource = resource or abort(403, "Resource <{0}> is not in ACL".format(resource_name))
parent_ids = RoleRelationCRUD.recursive_parent_ids(rid)
group_ids = cls.get_group_ids(resource.id)
perms = []
for parent_id in parent_ids:
id2perms = RoleRelationCache.get_resources(parent_id)
perms += id2perms['id2perms'].get(resource.id, [])
for group_id in group_ids:
perms += id2perms['group2perms'].get(group_id, [])
return set(perms)
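The recursive parent/child walks above rely only on the cached id lists; a standalone illustration with an in-memory stand-in for RoleRelationCache.get_parent_ids:
parents = {1: [2], 2: [3], 3: []}           # child rid -> parent rids (made up)

def recursive_parent_ids(rid):
    all_parent_ids = set()

    def _get_parent(_id):
        all_parent_ids.add(_id)
        for parent_id in parents.get(_id, []):
            _get_parent(parent_id)

    _get_parent(rid)
    return all_parent_ids

print(recursive_parent_ids(1))              # -> {1, 2, 3}, i.e. the role itself plus all ancestors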

66
api/lib/perm/acl/user.py Normal file
View File

@ -0,0 +1,66 @@
# -*- coding:utf-8 -*-
import uuid
import string
import random
from flask import abort
from flask import g
from api.extensions import db
from api.lib.perm.acl.cache import UserCache
from api.models.acl import User
class UserCRUD(object):
@staticmethod
def search(q, page=1, page_size=None):
query = db.session.query(User).filter(User.deleted.is_(False))
if q:
query = query.filter(User.username.ilike('%{0}%'.format(q)))
numfound = query.count()
return numfound, query.offset((page - 1) * page_size).limit(page_size)
@staticmethod
def _gen_key_secret():
key = uuid.uuid4().hex
secret = ''.join(random.sample(string.ascii_letters + string.digits + '~!@#$%^&*?', 32))
return key, secret
@classmethod
def add(cls, **kwargs):
existed = User.get_by(username=kwargs['username'], email=kwargs['email'])
existed and abort(400, "User <{0}> already exists".format(kwargs['username']))
kwargs['nickname'] = kwargs.get('nickname') or kwargs['username']
kwargs['block'] = 0
kwargs['key'], kwargs['secret'] = cls._gen_key_secret()
return User.create(**kwargs)
@staticmethod
def update(uid, **kwargs):
user = User.get_by(uid=uid, to_dict=False, first=True) or abort(404, "User <{0}> does not exist".format(uid))
UserCache.clean(user)
return user.update(**kwargs)
@classmethod
def reset_key_secret(cls):
key, secret = cls._gen_key_secret()
g.user.update(key=key, secret=secret)
return key, secret
@classmethod
def delete(cls, uid):
user = User.get_by(uid=uid, to_dict=False, first=True) or abort(404, "User <{0}> does not exist".format(uid))
UserCache.clean(user)
user.soft_delete()
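A hedged sketch of the user CRUD above, assuming an application/database context; every field value is made up:
user = UserCRUD.add(username="demo", email="demo@example.com", password="change-me")
UserCRUD.update(user.uid, nickname="Demo")
numfound, users = UserCRUD.search(q="demo", page=1, page_size=20)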

104
api/lib/perm/auth.py Normal file
View File

@ -0,0 +1,104 @@
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from functools import wraps
import jwt
from flask import abort
from flask import current_app
from flask import g
from flask import request
from flask import session
from flask_login import login_user
from api.models.acl import User
from api.lib.perm.acl.cache import UserCache
def _auth_with_key():
key = request.values.get('_key')
secret = request.values.get('_secret')
path = request.path
keys = sorted(request.values.keys())
req_args = [request.values[k] for k in keys if k not in ("_key", "_secret")]
user, authenticated = User.query.authenticate_with_key(key, secret, req_args, path)
if user and authenticated:
login_user(user)
return True
return False
def _auth_with_session():
if isinstance(getattr(g, 'user', None), User):
login_user(g.user)
return True
if "acl" in session and "userName" in (session["acl"] or {}):
login_user(UserCache.get(session["acl"]["userName"]))
return True
return False
def _auth_with_token():
auth_headers = request.headers.get('Access-Token', '').strip()
if not auth_headers:
return False
try:
token = auth_headers
data = jwt.decode(token, current_app.config['SECRET_KEY'])
user = User.query.filter_by(email=data['sub']).first()
if not user:
return False
login_user(user)
return True
except jwt.ExpiredSignatureError:
return False
except (jwt.InvalidTokenError, Exception):
return False
def _auth_with_ip_white_list():
ip = request.remote_addr
key = request.values.get('_key')
secret = request.values.get('_secret')
if not key and not secret and ip.strip() in current_app.config.get("WHITE_LIST", []): # TODO
user = UserCache.get("worker")
login_user(user)
return True
return False
def auth_required(func):
if request.json is not None:
setattr(request, 'values', request.json)
else:
setattr(request, 'values', request.values.to_dict())
current_app.logger.debug(request.values)
@wraps(func)
def wrapper(*args, **kwargs):
if not getattr(func, 'authenticated', True):
return func(*args, **kwargs)
if _auth_with_session() or _auth_with_key() or _auth_with_token() or _auth_with_ip_white_list():
return func(*args, **kwargs)
abort(401)
return wrapper
def auth_abandoned(func):
setattr(func, "authenticated", False)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
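A hypothetical endpoint guarded by the decorators above; any of the four checks (session, key/secret, Access-Token header, IP white list) lets the request through, while auth_abandoned opts a view out entirely:
from flask import Flask

app = Flask(__name__)

@app.route("/api/v0.1/ci")
@auth_required
def list_ci():
    return "authorized"

@app.route("/api/v0.1/ping")
@auth_abandoned
def ping():
    return "pong"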

View File

@ -1,16 +1,32 @@
# -*- coding:utf-8 -*-
import base64
import elasticsearch
import redis
import six
from Crypto.Cipher import AES
from elasticsearch import Elasticsearch
from flask import current_app
from api.lib.secrets.inner import InnerCrypt
from api.lib.secrets.inner import KeyManage
def get_page(page):
try:
page = int(page)
except ValueError:
page = 1
return page if page >= 1 else 1
def get_page_size(page_size):
if page_size == "all":
return page_size
try:
page_size = int(page_size)
except (ValueError, TypeError):
page_size = current_app.config.get("DEFAULT_PAGE_COUNT")
return page_size if page_size >= 1 else current_app.config.get("DEFAULT_PAGE_COUNT")
def handle_arg_list(arg):
return list(filter(lambda x: x != "", arg.strip().split(","))) if isinstance(arg, six.string_types) else arg
class BaseEnum(object):
@ -31,46 +47,6 @@ class BaseEnum(object):
return cls._ALL_
def get_page(page):
try:
page = int(page)
except (TypeError, ValueError):
page = 1
return page if page >= 1 else 1
def get_page_size(page_size):
if page_size == "all":
return page_size
try:
page_size = int(page_size)
except (ValueError, TypeError):
page_size = current_app.config.get("DEFAULT_PAGE_COUNT")
return page_size if page_size >= 1 else current_app.config.get("DEFAULT_PAGE_COUNT")
def handle_bool_arg(arg):
if arg in current_app.config.get("BOOL_TRUE"):
return True
return False
def handle_arg_list(arg):
if isinstance(arg, (list, dict)):
return arg
if arg == 0:
return [0]
if not arg:
return []
if isinstance(arg, (six.integer_types, float)):
return [arg]
return list(filter(lambda x: x != "", arg.strip().split(","))) if isinstance(arg, six.string_types) else arg
class RedisHandler(object):
def __init__(self, flask_app=None):
self.flask_app = flask_app
@ -84,8 +60,7 @@ class RedisHandler(object):
max_connections=config.get("REDIS_MAX_CONN"),
host=config.get("CACHE_REDIS_HOST"),
port=config.get("CACHE_REDIS_PORT"),
password=config.get("CACHE_REDIS_PASSWORD"),
db=config.get("REDIS_DB") or 0)
db=config.get("REDIS_DB"))
self.r = redis.Redis(connection_pool=pool)
except Exception as e:
current_app.logger.warning(str(e))
@ -112,27 +87,10 @@ class RedisHandler(object):
try:
ret = self.r.hdel(prefix, key_id)
if not ret:
current_app.logger.warning("[{0}] is not in redis".format(key_id))
current_app.logger.warn("[{0}] is not in redis".format(key_id))
except Exception as e:
current_app.logger.error("delete redis key error, {0}".format(str(e)))
def set_str(self, key, value, expired=None):
try:
if expired:
self.r.setex(key, expired, value)
else:
self.r.set(key, value)
except Exception as e:
current_app.logger.error("set redis error, {0}".format(str(e)))
def get_str(self, key):
try:
value = self.r.get(key)
except Exception as e:
current_app.logger.error("get redis error, {0}".format(str(e)))
return
return value
class ESHandler(object):
def __init__(self, flask_app=None):
@ -143,23 +101,9 @@ class ESHandler(object):
def init_app(self, app):
self.flask_app = app
config = self.flask_app.config
if config.get('ES_USER') and config.get('ES_PASSWORD'):
uri = "http://{}:{}@{}:{}/".format(config.get('ES_USER'), config.get('ES_PASSWORD'),
config.get('ES_HOST'), config.get('ES_PORT'))
else:
uri = "{}:{}".format(config.get('ES_HOST'), config.get('ES_PORT') or 9200)
self.es = Elasticsearch(uri,
timeout=10,
max_retries=3,
retry_on_timeout=True,
retry_on_status=(502, 503, 504, "N/A"),
maxsize=10)
try:
if not self.es.indices.exists(index=self.index):
self.es.indices.create(index=self.index)
except elasticsearch.exceptions.RequestError as ex:
if ex.error != 'resource_already_exists_exception':
raise
self.es = Elasticsearch(config.get("ES_HOST"))
if not self.es.indices.exists(index=self.index):
self.es.indices.create(index=self.index)
def update_mapping(self, field, value_type, other):
body = {
@ -174,12 +118,6 @@ class ESHandler(object):
)
def get_index_id(self, ci_id):
try:
return self._get_index_id(ci_id)
except:
return self._get_index_id(ci_id)
def _get_index_id(self, ci_id):
query = {
'query': {
'match': {'ci_id': ci_id}
@ -220,70 +158,8 @@ class ESHandler(object):
res = self.es.search(index=self.index, body=query, filter_path=filter_path)
if res['hits'].get('hits'):
return (res['hits']['total']['value'],
[i['_source'] for i in res['hits']['hits']],
res.get("aggregations", {}))
return res['hits']['total']['value'], \
[i['_source'] for i in res['hits']['hits']], \
res.get("aggregations", {})
else:
return 0, [], {}
class AESCrypto(object):
BLOCK_SIZE = 16 # Bytes
pad = lambda s: s + ((AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE) *
chr(AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE))
unpad = lambda s: s[:-ord(s[len(s) - 1:])]
iv = '0102030405060708'
@staticmethod
def key():
key = current_app.config.get("SECRET_KEY")[:16]
if len(key) < 16:
key = "{}{}".format(key, (16 - len(key)) * "x")
return key.encode('utf8')
@classmethod
def encrypt(cls, data):
data = cls.pad(data)
cipher = AES.new(cls.key(), AES.MODE_CBC, cls.iv.encode('utf8'))
return base64.b64encode(cipher.encrypt(data.encode('utf8'))).decode('utf8')
@classmethod
def decrypt(cls, data):
encode_bytes = base64.decodebytes(data.encode('utf8'))
cipher = AES.new(cls.key(), AES.MODE_CBC, cls.iv.encode('utf8'))
text_decrypted = cipher.decrypt(encode_bytes)
return cls.unpad(text_decrypted).decode('utf8')
class Crypto(AESCrypto):
@classmethod
def encrypt(cls, data):
from api.lib.secrets.secrets import InnerKVManger
if not KeyManage(backend=InnerKVManger()).is_seal():
res, status = InnerCrypt().encrypt(data)
if status:
return res
return AESCrypto().encrypt(data)
@classmethod
def decrypt(cls, data):
from api.lib.secrets.secrets import InnerKVManger
if not KeyManage(backend=InnerKVManger()).is_seal():
try:
res, status = InnerCrypt().decrypt(data)
if status:
return res
except:
pass
try:
return AESCrypto().decrypt(data)
except:
return data
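The pad/unpad pair above implements simple PKCS#7-style block padding; a standalone illustration:
BLOCK_SIZE = 16
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * chr(BLOCK_SIZE - len(s) % BLOCK_SIZE)
unpad = lambda s: s[:-ord(s[len(s) - 1:])]

padded = pad("cmdb")          # 16 chars: "cmdb" followed by twelve chr(12) characters
assert unpad(padded) == "cmdb"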

View File

@ -1,4 +1,4 @@
# -*- coding:utf-8 -*-
# -*- coding:utf-8 -*-
from .cmdb import *

199
api/models/acl.py Normal file
View File

@ -0,0 +1,199 @@
# -*- coding:utf-8 -*-
import copy
import hashlib
from datetime import datetime
from flask import current_app
from flask_sqlalchemy import BaseQuery
from api.extensions import db
from api.lib.database import CRUDModel
from api.lib.database import Model
from api.lib.database import SoftDeleteMixin
class App(Model):
__tablename__ = "acl_apps"
name = db.Column(db.String(64), index=True)
description = db.Column(db.Text)
app_id = db.Column(db.Text)
secret_key = db.Column(db.Text)
class UserQuery(BaseQuery):
def _join(self, *args, **kwargs):
super(UserQuery, self)._join(*args, **kwargs)
def authenticate(self, login, password):
user = self.filter(db.or_(User.username == login,
User.email == login)).first()
if user:
current_app.logger.info(user)
authenticated = user.check_password(password)
else:
authenticated = False
return user, authenticated
def authenticate_with_key(self, key, secret, args, path):
user = self.filter(User.key == key).filter(User.block == 0).first()
if not user:
return None, False
if user and hashlib.sha1('{0}{1}{2}'.format(
path, user.secret, "".join(args)).encode("utf-8")).hexdigest() == secret:
authenticated = True
else:
authenticated = False
return user, authenticated
def search(self, key):
query = self.filter(db.or_(User.email == key,
User.nickname.ilike('%' + key + '%'),
User.username.ilike('%' + key + '%')))
return query
def get_by_username(self, username):
user = self.filter(User.username == username).first()
return user
def get_by_nickname(self, nickname):
user = self.filter(User.nickname == nickname).first()
return user
def get(self, uid):
user = self.filter(User.uid == uid).first()
return copy.deepcopy(user)
class User(CRUDModel, SoftDeleteMixin):
__tablename__ = 'users'
# __bind_key__ = "user"
query_class = UserQuery
uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.String(32), unique=True)
nickname = db.Column(db.String(20), nullable=True)
department = db.Column(db.String(20))
catalog = db.Column(db.String(64))
email = db.Column(db.String(100), unique=True, nullable=False)
mobile = db.Column(db.String(14), unique=True)
_password = db.Column("password", db.String(80))
key = db.Column(db.String(32), nullable=False)
secret = db.Column(db.String(32), nullable=False)
date_joined = db.Column(db.DateTime, default=datetime.utcnow)
last_login = db.Column(db.DateTime, default=datetime.utcnow)
block = db.Column(db.Boolean, default=False)
has_logined = db.Column(db.Boolean, default=False)
wx_id = db.Column(db.String(32))
avatar = db.Column(db.String(128))
def __str__(self):
return self.username
def is_active(self):
return not self.block
def get_id(self):
return self.uid
@staticmethod
def is_authenticated():
return True
def _get_password(self):
return self._password
def _set_password(self, password):
self._password = hashlib.md5(password.encode('utf-8')).hexdigest()
password = db.synonym("_password", descriptor=property(_get_password, _set_password))
def check_password(self, password):
if self.password is None:
return False
return self.password == password
class Role(Model):
__tablename__ = "acl_roles"
name = db.Column(db.Text, nullable=False)
is_app_admin = db.Column(db.Boolean, default=False)
app_id = db.Column(db.Integer, db.ForeignKey("acl_apps.id"))
uid = db.Column(db.Integer, db.ForeignKey("users.uid"))
class RoleRelation(Model):
__tablename__ = "acl_role_relations"
parent_id = db.Column(db.Integer, db.ForeignKey('acl_roles.id'))
child_id = db.Column(db.Integer, db.ForeignKey('acl_roles.id'))
__table_args__ = (
db.UniqueConstraint("parent_id", "child_id", name="role_relation_unique"),)
class ResourceType(Model):
__tablename__ = "acl_resource_types"
name = db.Column(db.String(64), index=True)
description = db.Column(db.Text)
app_id = db.Column(db.Integer, db.ForeignKey('acl_apps.id'))
class ResourceGroup(Model):
__tablename__ = "acl_resource_groups"
name = db.Column(db.String(64), index=True, nullable=False)
resource_type_id = db.Column(db.Integer, db.ForeignKey("acl_resource_types.id"))
app_id = db.Column(db.Integer, db.ForeignKey('acl_apps.id'))
__table_args__ = (db.UniqueConstraint("name", "resource_type_id", "app_id", name="resource_group_app_unique"),)
class Resource(Model):
__tablename__ = "acl_resources"
name = db.Column(db.String(128), nullable=False)
resource_type_id = db.Column(db.Integer, db.ForeignKey("acl_resource_types.id"))
app_id = db.Column(db.Integer, db.ForeignKey("acl_apps.id"))
__table_args__ = (db.UniqueConstraint("name", "resource_type_id", "app_id", name="resource_name_app_unique"),)
class ResourceGroupItems(Model):
__tablename__ = "acl_resource_group_items"
group_id = db.Column(db.Integer, db.ForeignKey('acl_resource_groups.id'), nullable=False)
resource_id = db.Column(db.Integer, db.ForeignKey('acl_resources.id'), nullable=False)
class Permission(Model):
__tablename__ = "acl_permissions"
name = db.Column(db.String(64), nullable=False)
resource_type_id = db.Column(db.Integer, db.ForeignKey("acl_resource_types.id"))
app_id = db.Column(db.Integer, db.ForeignKey("acl_apps.id"))
__table_args__ = (db.UniqueConstraint("name", "resource_type_id", "app_id", name="perm_name_app_unique"),)
class RolePermission(Model):
__tablename__ = "acl_role_permissions"
rid = db.Column(db.Integer, db.ForeignKey('acl_roles.id'))
resource_id = db.Column(db.Integer, db.ForeignKey('acl_resources.id'))
group_id = db.Column(db.Integer, db.ForeignKey('acl_resource_groups.id'))
perm_id = db.Column(db.Integer, db.ForeignKey('acl_permissions.id'))
perm = db.relationship("Permission", backref='acl_role_permissions.perm_id')

310
api/models/cmdb.py Normal file
View File

@ -0,0 +1,310 @@
# -*- coding:utf-8 -*-
import datetime
from api.extensions import db
from api.lib.cmdb.const import CIStatusEnum
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.database import Model
# template
class RelationType(Model):
__tablename__ = "c_relation_types"
name = db.Column(db.String(16), index=True)
class CITypeGroup(Model):
__tablename__ = "c_ci_type_groups"
name = db.Column(db.String(32))
class CITypeGroupItem(Model):
__tablename__ = "c_ci_type_group_items"
group_id = db.Column(db.Integer, db.ForeignKey("c_ci_type_groups.id"), nullable=False)
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
order = db.Column(db.SmallInteger, default=0)
class CIType(Model):
__tablename__ = "c_ci_types"
name = db.Column(db.String(32))
alias = db.Column(db.String(32))
unique_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
enabled = db.Column(db.Boolean, default=True, nullable=False)
is_attached = db.Column(db.Boolean, default=False, nullable=False)
icon_url = db.Column(db.String(256))
order = db.Column(db.SmallInteger, default=0, nullable=False)
unique_key = db.relationship("Attribute", backref="c_ci_types.unique_id")
class CITypeRelation(Model):
__tablename__ = "c_ci_type_relations"
parent_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
child_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
parent = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.parent_id")
child = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.child_id")
relation_type = db.relationship("RelationType", backref="c_ci_type_relations.relation_type_id")
class Attribute(Model):
__tablename__ = "c_attributes"
name = db.Column(db.String(32), nullable=False)
alias = db.Column(db.String(32), nullable=False)
value_type = db.Column(db.Enum(*ValueTypeEnum.all()), default=ValueTypeEnum.TEXT, nullable=False)
is_choice = db.Column(db.Boolean, default=False)
is_list = db.Column(db.Boolean, default=False)
is_unique = db.Column(db.Boolean, default=False)
is_index = db.Column(db.Boolean, default=False)
is_link = db.Column(db.Boolean, default=False)
is_password = db.Column(db.Boolean, default=False)
is_sortable = db.Column(db.Boolean, default=False)
class CITypeAttribute(Model):
__tablename__ = "c_ci_type_attributes"
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
order = db.Column(db.Integer, default=0)
is_required = db.Column(db.Boolean, default=False)
default_show = db.Column(db.Boolean, default=True)
attr = db.relationship("Attribute", backref="c_ci_type_attributes.attr_id")
class CITypeAttributeGroup(Model):
__tablename__ = "c_ci_type_attribute_groups"
name = db.Column(db.String(64))
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
order = db.Column(db.SmallInteger, default=0)
class CITypeAttributeGroupItem(Model):
__tablename__ = "c_ci_type_attribute_group_items"
group_id = db.Column(db.Integer, db.ForeignKey("c_ci_type_attribute_groups.id"), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
order = db.Column(db.SmallInteger, default=0)
# instance
class CI(Model):
__tablename__ = "c_cis"
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
status = db.Column(db.Enum(*CIStatusEnum.all(), name="status"))
heartbeat = db.Column(db.DateTime, default=lambda: datetime.datetime.now())
ci_type = db.relationship("CIType", backref="c_cis.type_id")
class CIRelation(Model):
__tablename__ = "c_ci_relations"
first_ci_id = db.Column(db.Integer, db.ForeignKey("c_cis.id"), nullable=False)
second_ci_id = db.Column(db.Integer, db.ForeignKey("c_cis.id"), nullable=False)
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
more = db.Column(db.Integer, db.ForeignKey("c_cis.id"))
first_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.first_ci_id")
second_ci = db.relationship("CI", primaryjoin="CI.id==CIRelation.second_ci_id")
relation_type = db.relationship("RelationType", backref="c_ci_relations.relation_type_id")
class IntegerChoice(Model):
__tablename__ = 'c_choice_integers'
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Integer, nullable=False)
attr = db.relationship("Attribute", backref="c_choice_integers.attr_id")
class FloatChoice(Model):
__tablename__ = 'c_choice_floats'
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Float, nullable=False)
attr = db.relationship("Attribute", backref="c_choice_floats.attr_id")
class TextChoice(Model):
__tablename__ = 'c_choice_texts'
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Text, nullable=False)
attr = db.relationship("Attribute", backref="c_choice_texts.attr_id")
class CIIndexValueInteger(Model):
__tablename__ = "c_value_index_integers"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Integer, nullable=False)
ci = db.relationship("CI", backref="c_value_index_integers.ci_id")
attr = db.relationship("Attribute", backref="c_value_index_integers.attr_id")
__table_args__ = (db.Index("integer_attr_value_index", "attr_id", "value"),)
class CIIndexValueFloat(Model):
__tablename__ = "c_value_index_floats"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Float, nullable=False)
ci = db.relationship("CI", backref="c_value_index_floats.ci_id")
attr = db.relationship("Attribute", backref="c_value_index_floats.attr_id")
__table_args__ = (db.Index("float_attr_value_index", "attr_id", "value"),)
class CIIndexValueText(Model):
__tablename__ = "c_value_index_texts"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.String(128), nullable=False)
ci = db.relationship("CI", backref="c_value_index_texts.ci_id")
attr = db.relationship("Attribute", backref="c_value_index_texts.attr_id")
__table_args__ = (db.Index("text_attr_value_index", "attr_id", "value"),)
class CIIndexValueDateTime(Model):
__tablename__ = "c_value_index_datetime"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.DateTime, nullable=False)
ci = db.relationship("CI", backref="c_value_index_datetime.ci_id")
attr = db.relationship("Attribute", backref="c_value_index_datetime.attr_id")
__table_args__ = (db.Index("datetime_attr_value_index", "attr_id", "value"),)
class CIValueInteger(Model):
__tablename__ = "c_value_integers"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Integer, nullable=False)
ci = db.relationship("CI", backref="c_value_integers.ci_id")
attr = db.relationship("Attribute", backref="c_value_integers.attr_id")
class CIValueFloat(Model):
__tablename__ = "c_value_floats"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Float, nullable=False)
ci = db.relationship("CI", backref="c_value_floats.ci_id")
attr = db.relationship("Attribute", backref="c_value_floats.attr_id")
class CIValueText(Model):
__tablename__ = "c_value_texts"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.Text, nullable=False)
ci = db.relationship("CI", backref="c_value_texts.ci_id")
attr = db.relationship("Attribute", backref="c_value_texts.attr_id")
class CIValueDateTime(Model):
__tablename__ = "c_value_datetime"
ci_id = db.Column(db.Integer, db.ForeignKey('c_cis.id'), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
value = db.Column(db.DateTime, nullable=False)
ci = db.relationship("CI", backref="c_value_datetime.ci_id")
attr = db.relationship("Attribute", backref="c_value_datetime.attr_id")
# history
class OperationRecord(Model):
__tablename__ = "c_records"
uid = db.Column(db.Integer, index=True, nullable=False)
origin = db.Column(db.String(32))
ticket_id = db.Column(db.String(32))
reason = db.Column(db.Text)
class AttributeHistory(Model):
__tablename__ = "c_attribute_histories"
operate_type = db.Column(db.Enum(*OperateType.all(), name="operate_type"))
record_id = db.Column(db.Integer, db.ForeignKey("c_records.id"), nullable=False)
ci_id = db.Column(db.Integer, index=True, nullable=False)
attr_id = db.Column(db.Integer, index=True)
old = db.Column(db.Text)
new = db.Column(db.Text)
class CIRelationHistory(Model):
__tablename__ = "c_relation_histories"
operate_type = db.Column(db.Enum(OperateType.ADD, OperateType.DELETE, name="operate_type"))
record_id = db.Column(db.Integer, db.ForeignKey("c_records.id"), nullable=False)
first_ci_id = db.Column(db.Integer)
second_ci_id = db.Column(db.Integer)
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"))
relation_id = db.Column(db.Integer, nullable=False)
# preference
class PreferenceShowAttributes(Model):
__tablename__ = "c_preference_show_attributes"
uid = db.Column(db.Integer, index=True, nullable=False)
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
order = db.Column(db.SmallInteger, default=0)
ci_type = db.relationship("CIType", backref="c_preference_show_attributes.type_id")
attr = db.relationship("Attribute", backref="c_preference_show_attributes.attr_id")
class PreferenceTreeView(Model):
__tablename__ = "c_preference_tree_views"
uid = db.Column(db.Integer, index=True, nullable=False)
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
levels = db.Column(db.Text) # TODO: JSON
class PreferenceRelationView(Model):
__tablename__ = "c_preference_relation_views"
name = db.Column(db.String(8), index=True, nullable=False)
cr_ids = db.Column(db.TEXT) # [{parent_id: x, child_id: y}] TODO: JSON
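The models above use an EAV layout: each attribute value of a CI is stored in the c_value_* table whose value column matches the attribute's value_type, and attributes flagged is_index are written to the c_value_index_* tables instead. A minimal illustrative sketch of that routing under stated assumptions — ValueTypeEnum member names other than TEXT are guesses, and value_table_for is a hypothetical helper, not part of this change:

# Illustrative sketch only: route an attribute's value to the matching EAV table.
# ValueTypeEnum member names other than TEXT are assumptions; check the const module.
from api.lib.cmdb.const import ValueTypeEnum
from api.models.cmdb import CIValueDateTime, CIValueFloat, CIValueInteger, CIValueText

VALUE_TABLES = {
    ValueTypeEnum.INT: CIValueInteger,
    ValueTypeEnum.FLOAT: CIValueFloat,
    ValueTypeEnum.TEXT: CIValueText,
    ValueTypeEnum.DATETIME: CIValueDateTime,
}

def value_table_for(attr):
    """Return the model whose `value` column matches attr.value_type (hypothetical helper)."""
    return VALUE_TABLES.get(attr.value_type, CIValueText)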

View File

@ -2,12 +2,10 @@
import os
import sys
from inspect import getmembers
from inspect import isclass
from inspect import getmembers, isclass
import six
from flask import jsonify
from flask import send_file
from flask_restful import Resource
from api.lib.perm.auth import auth_required
@ -23,20 +21,17 @@ class APIView(Resource):
def jsonify(*args, **kwargs):
return jsonify(*args, **kwargs)
@staticmethod
def send_file(*args, **kwargs):
return send_file(*args, **kwargs)
API_PACKAGE = os.path.abspath(os.path.dirname(__file__))
API_PACKAGE = "api"
def register_resources(resource_path, rest_api):
for root, _, files in os.walk(os.path.join(resource_path)):
for filename in files:
if not filename.startswith("_") and filename.endswith("py"):
if root not in sys.path:
sys.path.insert(1, root)
module_path = os.path.join(API_PACKAGE, root[root.index("views"):])
if module_path not in sys.path:
sys.path.insert(1, module_path)
view = __import__(os.path.splitext(filename)[0])
resource_list = [o[0] for o in getmembers(view) if isclass(o[1]) and issubclass(o[1], Resource)]
resource_list = [i for i in resource_list if i != "APIView"]
@ -46,4 +41,5 @@ def register_resources(resource_path, rest_api):
resource_cls.url_prefix = ("",)
if isinstance(resource_cls.url_prefix, six.string_types):
resource_cls.url_prefix = (resource_cls.url_prefix,)
rest_api.add_resource(resource_cls, *resource_cls.url_prefix)
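register_resources above walks a views directory, imports every non-underscore *.py module, and registers each Resource subclass except the APIView base at its url_prefix. A small sketch of a view module the loader would pick up on its own; the module path, class name and route are made up for illustration:

# api/views/cmdb/ping.py (hypothetical) -- auto-registered by register_resources.
from api.resource import APIView

class PingView(APIView):
    url_prefix = "/ping"        # a tuple of several routes also works

    def get(self):
        return self.jsonify(message="pong")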

api/settings.py.example Normal file (81 lines)

@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
"""Application configuration.
Most configuration is set via environment variables.
For local development, use a .env file to set
environment variables.
"""
from environs import Env
env = Env()
env.read_env()
ENV = env.str("FLASK_ENV", default="production")
DEBUG = ENV == "development"
SECRET_KEY = env.str("SECRET_KEY")
BCRYPT_LOG_ROUNDS = env.int("BCRYPT_LOG_ROUNDS", default=13)
DEBUG_TB_ENABLED = DEBUG
DEBUG_TB_INTERCEPT_REDIRECTS = False
ERROR_CODES = [400, 401, 403, 404, 405, 500, 502]
# # database
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
SQLALCHEMY_BINDS = {
"user": 'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
}
SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ENGINE_OPTIONS = {
'pool_recycle': 300,
}
# # cache
CACHE_TYPE = "redis"
CACHE_REDIS_HOST = "127.0.0.1"
CACHE_REDIS_PORT = 6379
CACHE_KEY_PREFIX = "CMDB::"
CACHE_DEFAULT_TIMEOUT = 3000
# # log
LOG_PATH = './logs/app.log'
LOG_LEVEL = 'DEBUG'
# # mail
MAIL_SERVER = ''
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = True
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
DEFAULT_MAIL_SENDER = ''
# # queue
CELERY_RESULT_BACKEND = "redis://127.0.0.1:6379/2"
BROKER_URL = 'redis://127.0.0.1:6379/2'
BROKER_VHOST = '/'
# # SSO
CAS_SERVER = "http://sso.xxx.com"
CAS_VALIDATE_SERVER = "http://sso.xxx.com"
CAS_LOGIN_ROUTE = "/cas/login"
CAS_LOGOUT_ROUTE = "/cas/logout"
CAS_VALIDATE_ROUTE = "/cas/serviceValidate"
CAS_AFTER_LOGIN = "/"
DEFAULT_SERVICE = "http://127.0.0.1:8000"
# # pagination
DEFAULT_PAGE_COUNT = 50
# # permission
WHITE_LIST = ["127.0.0.1"]
USE_ACL = False
# # elastic search
ES_HOST = '127.0.0.1'
USE_ES = False
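The two database URIs above keep {user}/{password}/{db} placeholders, so they have to be filled in when settings.py.example is copied to settings.py. One hedged way to do that with the same environs pattern the file already uses; the CMDB_DB_* variable names and defaults are assumptions, not something this change defines:

# settings.py (local copy) -- fill the MySQL placeholders from environment variables.
from environs import Env

env = Env()
env.read_env()

_db_user = env.str("CMDB_DB_USER", default="cmdb")            # assumed variable names
_db_password = env.str("CMDB_DB_PASSWORD", default="123456")
_db_name = env.str("CMDB_DB_NAME", default="cmdb")

SQLALCHEMY_DATABASE_URI = (
    'mysql+pymysql://{user}:{password}@127.0.0.1:3306/{db}?charset=utf8'
    .format(user=_db_user, password=_db_password, db=_db_name)
)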

api/tasks/acl.py Normal file (16 lines)

@ -0,0 +1,16 @@
# -*- coding:utf-8 -*-
from flask import current_app
from api.extensions import celery
from api.lib.perm.acl.cache import RoleRelationCache
from api.lib.perm.acl.const import ACL_QUEUE
@celery.task(name="acl.role_rebuild", queue=ACL_QUEUE)
def role_rebuild(rids):
rids = rids if isinstance(rids, list) else [rids]
for rid in rids:
RoleRelationCache.rebuild(rid)
current_app.logger.info("{0} rebuild..........".format(rids))  # rids is a list, so "%d" would raise TypeError
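Like the other Celery tasks in this change, role_rebuild is meant to be queued rather than called inline; a minimal dispatch sketch (the rid is a placeholder):

# Queue a rebuild of one role's relation cache on the ACL queue.
from api.lib.perm.acl.const import ACL_QUEUE
from api.tasks.acl import role_rebuild

role_rebuild.apply_async(args=(123,), queue=ACL_QUEUE)   # 123 is a placeholder rid; a list of rids also works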

api/tasks/cmdb.py Normal file (66 lines)

@ -0,0 +1,66 @@
# -*- coding:utf-8 -*-
import json
import time
from flask import current_app
import api.lib.cmdb.ci
from api.extensions import celery
from api.extensions import db
from api.extensions import es
from api.extensions import rd
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
@celery.task(name="cmdb.ci_cache", queue=CMDB_QUEUE)
def ci_cache(ci_id):
time.sleep(0.01)
db.session.close()
m = api.lib.cmdb.ci.CIManager()
ci = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
if current_app.config.get("USE_ES"):
es.create_or_update(ci_id, ci)
else:
rd.create_or_update({ci_id: json.dumps(ci)}, REDIS_PREFIX_CI)
current_app.logger.info("{0} flush..........".format(ci_id))
@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
def ci_delete(ci_id):
current_app.logger.info(ci_id)
if current_app.config.get("USE_ES"):
es.delete(ci_id)
else:
rd.delete(ci_id, REDIS_PREFIX_CI)
current_app.logger.info("{0} delete..........".format(ci_id))
@celery.task(name="cmdb.ci_relation_cache", queue=CMDB_QUEUE)
def ci_relation_cache(parent_id, child_id):
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else []
children.append(child_id)
rd.create_or_update({parent_id: json.dumps(list(set(children)))}, REDIS_PREFIX_CI_RELATION)
current_app.logger.info("ADD ci relation cache: {0} -> {1}".format(parent_id, child_id))
@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
def ci_relation_delete(parent_id, child_id):
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else []
if child_id in children:
children.remove(child_id)
rd.create_or_update({parent_id: json.dumps(list(set(children)))}, REDIS_PREFIX_CI_RELATION)
current_app.logger.info("DELETE ci relation cache: {0} -> {1}".format(parent_id, child_id))
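ci_relation_cache keeps, per parent CI id, a JSON-encoded list of child CI ids under REDIS_PREFIX_CI_RELATION. A short sketch of reading that cache back, mirroring the rd.get call used above (the parent id is a placeholder):

# Read the cached children of one parent CI, as written by ci_relation_cache.
import json

from api.extensions import rd
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION

raw = rd.get([42], REDIS_PREFIX_CI_RELATION)[0]   # 42 is a placeholder parent ci_id
children = json.loads(raw) if raw is not None else []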

api/tasks/test.py Normal file (10 lines)

@ -0,0 +1,10 @@
# -*- coding:utf-8 -*-
from flask import current_app
from api.extensions import celery
@celery.task(queue="ticket_web")
def test_task():
current_app.logger.info("test task.............................")

View File

@ -6,9 +6,7 @@ from flask import Blueprint
from flask_restful import Api
from api.resource import register_resources
from .account import AuthWithKeyView
from .account import LoginView
from .account import LogoutView
from .account import LoginView, LogoutView
HERE = os.path.abspath(os.path.dirname(__file__))
@ -17,19 +15,15 @@ blueprint_account = Blueprint('account_api', __name__, url_prefix='/api')
account_rest = Api(blueprint_account)
account_rest.add_resource(LoginView, LoginView.url_prefix)
account_rest.add_resource(LogoutView, LogoutView.url_prefix)
account_rest.add_resource(AuthWithKeyView, AuthWithKeyView.url_prefix)
# cmdb
blueprint_cmdb_v01 = Blueprint('cmdb_api_v01', __name__, url_prefix='/api/v0.1')
rest = Api(blueprint_cmdb_v01)
register_resources(os.path.join(HERE, "cmdb"), rest)
# acl
blueprint_acl_v1 = Blueprint('acl_api_v1', __name__, url_prefix='/api/v1/acl')
rest = Api(blueprint_acl_v1)
register_resources(os.path.join(HERE, "acl"), rest)
# common_setting
blueprint_cs_v1 = Blueprint('common_setting_api_v1', __name__, url_prefix='/api/common-setting/v1')
rest = Api(blueprint_cs_v1)
register_resources(os.path.join(HERE, "common_setting"), rest)

View File

@ -3,16 +3,14 @@
import datetime
import jwt
import six
from flask import abort
from flask import current_app
from flask import request
from flask_login import login_user
from flask_login import logout_user
from flask_login import login_user, logout_user
from api.lib.decorator import args_required
from api.lib.perm.acl.cache import User
from api.lib.perm.auth import auth_abandoned
from api.models.acl import User
from api.resource import APIView
@ -26,8 +24,10 @@ class LoginView(APIView):
username = request.values.get("username") or request.values.get("email")
password = request.values.get("password")
user, authenticated = User.query.authenticate(username, password)
if not user:
return abort(403, "User <{0}> does not exist".format(username))
if not authenticated:
return abort(401, "invalid username or password")
return abort(403, "invalid username or password")
login_user(user)
@ -37,30 +37,7 @@ class LoginView(APIView):
'exp': datetime.datetime.now() + datetime.timedelta(minutes=24 * 60 * 7)},
current_app.config['SECRET_KEY'])
return self.jsonify(token=token.decode() if six.PY2 else token, username=username)
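The token returned here is a JWT signed with the app's SECRET_KEY and a 7-day expiry (the rest of the payload is not visible in this hunk). A hedged sketch of verifying it on the consumer side; HS256 is assumed only because it is PyJWT's default for encode():

# Verify a token issued by LoginView (HS256 assumed, see note above).
import jwt

def verify_token(token, secret_key):
    """Return the decoded payload, or None if the token is invalid or expired."""
    try:
        return jwt.decode(token, secret_key, algorithms=["HS256"])
    except jwt.InvalidTokenError:
        return None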
class AuthWithKeyView(APIView):
url_prefix = "/auth_with_key"
@args_required("key")
@args_required("secret")
@args_required("path")
@auth_abandoned
def post(self):
key = request.values.get('key')
secret = request.values.get('secret')
path = six.moves.urllib.parse.urlparse(request.values.get('path')).path
payload = request.values.get('payload') or {}
payload.pop('_key', None)
payload.pop('_secret', None)
req_args = [str(payload[k]) for k in sorted(payload.keys())]
user, authenticated = User.query.authenticate_with_key(key, secret, req_args, path)
return self.jsonify(user=user.to_dict() if user else {},
authenticated=authenticated)
return self.jsonify(token=token.decode())
class LogoutView(APIView):

View File

@ -0,0 +1,40 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.decorator import args_required
from api.lib.perm.acl.permission import PermissionCRUD
from api.lib.utils import handle_arg_list
from api.resource import APIView
class ResourcePermissionView(APIView):
url_prefix = ("/resources/<int:resource_id>/permissions", "/resource_groups/<int:group_id>/permissions")
def get(self, resource_id=None, group_id=None):
return self.jsonify(PermissionCRUD.get_all(resource_id, group_id))
class RolePermissionGrantView(APIView):
url_prefix = ('/roles/<int:rid>/resources/<int:resource_id>/grant',
'/roles/<int:rid>/resource_groups/<int:group_id>/grant')
@args_required('perms')
def post(self, rid, resource_id=None, group_id=None):
perms = handle_arg_list(request.values.get("perms"))
PermissionCRUD.grant(rid, perms, resource_id=resource_id, group_id=group_id)
return self.jsonify(rid=rid, resource_id=resource_id, group_id=group_id, perms=perms)
class RolePermissionRevokeView(APIView):
url_prefix = ('/roles/<int:rid>/resources/<int:resource_id>/revoke',
'/roles/<int:rid>/resource_groups/<int:group_id>/revoke')
@args_required('perms')
def post(self, rid, resource_id=None, group_id=None):
perms = handle_arg_list(request.values.get("perms"))
PermissionCRUD.revoke(rid, perms, resource_id=resource_id, group_id=group_id)
return self.jsonify(rid=rid, resource_id=resource_id, group_id=group_id, perms=perms)
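With the ACL blueprint mounted at /api/v1/acl in entry.py above, grants land on /api/v1/acl/roles/<rid>/resources/<resource_id>/grant. A hedged usage sketch; host, ids and permission names are placeholders, session/auth handling is omitted, and perms is sent comma-separated on the assumption that handle_arg_list splits on commas:

# Grant two permissions on a resource to a role (all values are placeholders).
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/v1/acl/roles/1/resources/2/grant",
    data={"perms": "read,update"},
)
print(resp.json())   # echoes rid, resource_id and the granted perms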

View File

@ -1,16 +1,12 @@
# -*- coding:utf-8 -*-
from flask import request
from flask_login import current_user
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl import validate_app
from api.lib.perm.acl.resource import ResourceCRUD
from api.lib.perm.acl.resource import ResourceGroupCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD
from api.lib.perm.auth import auth_only_for_acl
from api.lib.perm.auth import auth_with_app_token
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
@ -20,8 +16,8 @@ from api.resource import APIView
class ResourceTypeView(APIView):
url_prefix = ("/resource_types", "/resource_types/<int:type_id>")
@args_required('app_id')
@validate_app
@auth_with_app_token
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
@ -37,28 +33,23 @@ class ResourceTypeView(APIView):
id2perms=id2perms)
@args_required('name')
@args_required('app_id')
@args_required('perms')
@validate_app
@auth_only_for_acl
@args_validate(ResourceTypeCRUD.cls, exclude_args=['app_id'])
def post(self):
name = request.values.get('name')
app_id = request.values.get('app_id')
description = request.values.get('description', '')
perms = request.values.get('perms')
rt = ResourceTypeCRUD.add(app_id, name, description, perms)
rt = ResourceTypeCRUD.add(app_id, name, perms)
return self.jsonify(rt.to_dict())
@auth_only_for_acl
@args_validate(ResourceTypeCRUD.cls, exclude_args=['app_id'])
def put(self, type_id):
rt = ResourceTypeCRUD.update(type_id, **request.values)
return self.jsonify(rt.to_dict())
@auth_only_for_acl
def delete(self, type_id):
ResourceTypeCRUD.delete(type_id)
@ -68,7 +59,6 @@ class ResourceTypeView(APIView):
class ResourceTypePermsView(APIView):
url_prefix = "/resource_types/<int:type_id>/perms"
@auth_with_app_token
def get(self, type_id):
return self.jsonify(ResourceTypeCRUD.get_perms(type_id))
@ -76,43 +66,36 @@ class ResourceTypePermsView(APIView):
class ResourceView(APIView):
url_prefix = ("/resources", "/resources/<int:resource_id>")
@args_required('app_id')
@validate_app
@auth_with_app_token
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
q = request.values.get('q')
u = request.values.get('u')
resource_type_id = request.values.get('resource_type_id')
app_id = request.values.get('app_id')
numfound, res = ResourceCRUD.search(q, u, app_id, resource_type_id, page, page_size)
numfound, res = ResourceCRUD.search(q, app_id, resource_type_id, page, page_size)
return self.jsonify(numfound=numfound,
page=page,
page_size=page_size,
resources=res)
resources=[i.to_dict() for i in res])
@args_required('name')
@args_required('type_id')
@args_required('app_id')
@validate_app
@auth_only_for_acl
@args_validate(ResourceCRUD.cls, exclude_args=['app_id'])
def post(self):
name = request.values.get('name')
type_id = request.values.get('type_id')
app_id = request.values.get('app_id')
uid = request.values.get('uid')
if not uid and hasattr(current_user, "uid"):
uid = current_user.uid
resource = ResourceCRUD.add(name, type_id, app_id, uid)
resource = ResourceCRUD.add(name, type_id, app_id)
return self.jsonify(resource.to_dict())
@args_required('name')
@auth_only_for_acl
@args_validate(ResourceCRUD.cls, exclude_args=['app_id'])
def put(self, resource_id):
name = request.values.get('name')
@ -120,7 +103,6 @@ class ResourceView(APIView):
return self.jsonify(resource.to_dict())
@auth_only_for_acl
def delete(self, resource_id):
ResourceCRUD.delete(resource_id)
@ -130,16 +112,15 @@ class ResourceView(APIView):
class ResourceGroupView(APIView):
url_prefix = ("/resource_groups", "/resource_groups/<int:group_id>")
@args_required('app_id')
@validate_app
@auth_with_app_token
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
q = request.values.get('q')
app_id = request.values.get('app_id')
resource_type_id = request.values.get('resource_type_id')
numfound, res = ResourceGroupCRUD.search(q, app_id, resource_type_id, page, page_size)
numfound, res = ResourceGroupCRUD.search(q, app_id, page, page_size)
return self.jsonify(numfound=numfound,
page=page,
@ -148,9 +129,8 @@ class ResourceGroupView(APIView):
@args_required('name')
@args_required('type_id')
@args_required('app_id')
@validate_app
@auth_only_for_acl
@args_validate(ResourceGroupCRUD.cls, exclude_args=['app_id'])
def post(self):
name = request.values.get('name')
type_id = request.values.get('type_id')
@ -161,8 +141,6 @@ class ResourceGroupView(APIView):
return self.jsonify(group.to_dict())
@args_required('items')
@auth_only_for_acl
@args_validate(ResourceGroupCRUD.cls, exclude_args=['app_id'])
def put(self, group_id):
items = handle_arg_list(request.values.get("items"))
@ -172,7 +150,6 @@ class ResourceGroupView(APIView):
return self.jsonify(items)
@auth_only_for_acl
def delete(self, group_id):
ResourceGroupCRUD.delete(group_id)
@ -182,7 +159,6 @@ class ResourceGroupView(APIView):
class ResourceGroupItemsView(APIView):
url_prefix = "/resource_groups/<int:group_id>/items"
@auth_with_app_token
def get(self, group_id):
items = ResourceGroupCRUD.get_items(group_id)

api/views/acl/role.py Normal file (74 lines)

@ -0,0 +1,74 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.decorator import args_required
from api.lib.perm.acl import validate_app
from api.lib.perm.acl.role import RoleCRUD
from api.lib.perm.acl.role import RoleRelationCRUD
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.resource import APIView
class RoleView(APIView):
url_prefix = ("/roles", "/roles/<int:rid>")
@args_required('app_id')
@validate_app
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
q = request.values.get('q')
app_id = request.values.get('app_id')
numfound, roles = RoleCRUD.search(q, app_id, page, page_size)
id2parents = RoleRelationCRUD.get_parents([i.id for i in roles])
return self.jsonify(numfound=numfound,
page=page,
page_size=page_size,
id2parents=id2parents,
roles=[i.to_dict() for i in roles])
@args_required('name')
@args_required('app_id')
@validate_app
def post(self):
name = request.values.get('name')
app_id = request.values.get('app_id')
is_app_admin = request.values.get('is_app_admin', False)
role = RoleCRUD.add_role(name, app_id, is_app_admin=is_app_admin)
return self.jsonify(role.to_dict())
def put(self, rid):
role = RoleCRUD.update_role(rid, **request.values)
return self.jsonify(role.to_dict())
def delete(self, rid):
RoleCRUD.delete_role(rid)
return self.jsonify(rid=rid)
class RoleRelationView(APIView):
url_prefix = "/roles/<int:child_id>/parents"
@args_required('parent_id')
def post(self, child_id):
parent_id = request.values.get('parent_id')
res = RoleRelationCRUD.add(parent_id, child_id)
return self.jsonify(res.to_dict())
@args_required('parent_id')
def delete(self, child_id):
parent_id = request.values.get('parent_id')
RoleRelationCRUD.delete2(parent_id, child_id)
return self.jsonify(parent_id=parent_id, child_id=child_id)
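A hedged sketch of driving the two role endpoints above over HTTP (same /api/v1/acl prefix). Host, app_id and the parent rid are placeholders, auth is omitted, and the role id is assumed to be present in to_dict():

# Create a role inside an ACL app, then hang it under a parent role.
import requests

BASE = "http://127.0.0.1:8000/api/v1/acl"                 # placeholder host

role = requests.post(BASE + "/roles",
                     data={"name": "ops", "app_id": "cmdb"}).json()
requests.post(BASE + "/roles/{}/parents".format(role["id"]),
              data={"parent_id": 1})                      # placeholder parent rid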

api/views/acl/user.py Normal file (77 lines)

@ -0,0 +1,77 @@
# -*- coding:utf-8 -*-
from flask import request
from flask import session
from flask_login import current_user
from api.lib.decorator import args_required
from api.lib.perm.acl.role import RoleRelationCRUD
from api.lib.perm.acl.user import UserCRUD
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.resource import APIView
class GetUserInfoView(APIView):
url_prefix = "/users/info"
def get(self):
name = session.get("acl", {}).get("nickName") or session.get("CAS_USERNAME") or current_user.nickname
role = dict(permissions=session.get("acl", {}).get("parentRoles", []) or ["admin"])
avatar = session.get("acl", {}).get("avatar") or current_user.avatar
return self.jsonify(result=dict(name=name,
role=role,
avatar=avatar))
class UserView(APIView):
url_prefix = ("/users", "/users/<int:uid>")
def get(self):
page = get_page(request.values.get('page', 1))
page_size = get_page_size(request.values.get('page_size'))
q = request.values.get("q")
numfound, users = UserCRUD.search(q, page, page_size)
id2parents = RoleRelationCRUD.get_parents(uids=[i.uid for i in users])
users = [i.to_dict() for i in users]
for u in users:
u.pop('password', None)
u.pop('key', None)
u.pop('secret', None)
return self.jsonify(numfound=numfound,
page=page,
page_size=page_size,
id2parents=id2parents,
users=users)
@args_required('username')
@args_required('email')
def post(self):
user = UserCRUD.add(**request.values)
return self.jsonify(user.to_dict())
def put(self, uid):
user = UserCRUD.update(uid, **request.values)
return self.jsonify(user.to_dict())
def delete(self, uid):
UserCRUD.delete(uid)
return self.jsonify(uid=uid)
class UserResetKeySecretView(APIView):
url_prefix = "/users/reset_key_secret"
def post(self):
key, secret = UserCRUD.reset_key_secret()
return self.jsonify(key=key, secret=secret)
def put(self):
return self.post()

View File

@ -6,9 +6,9 @@ from flask import current_app
from flask import request
from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.const import RoleEnum
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import role_required
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
@ -33,8 +33,7 @@ class AttributeSearchView(APIView):
class AttributeView(APIView):
url_prefix = ("/attributes", "/attributes/<string:attr_name>", "/attributes/<int:attr_id>",
"/attributes/<int:attr_id>/calc_computed_attribute")
url_prefix = ("/attributes", "/attributes/<string:attr_name>", "/attributes/<int:attr_id>")
def get(self, attr_name=None, attr_id=None):
attr_manager = AttributeManager()
@ -43,46 +42,33 @@ class AttributeView(APIView):
attr_dict = attr_manager.get_attribute_by_name(attr_name)
if attr_dict is None:
attr_dict = attr_manager.get_attribute_by_alias(attr_name)
if not attr_dict:
return abort(404, ErrFormat.attribute_not_found.format("name={}".format(attr_name)))
elif attr_id is not None:
attr_dict = attr_manager.get_attribute_by_id(attr_id)
if attr_dict is not None:
return self.jsonify(attribute=attr_dict)
abort(404, "Attribute is not found")
if not attr_dict:
return abort(404, ErrFormat.attribute_not_found.format("name={}".format(attr_name)))
return self.jsonify(attribute=attr_dict)
@role_required(RoleEnum.CONFIG)
@args_required("name")
@args_validate(AttributeManager.cls)
def post(self):
choice_value = handle_arg_list(request.values.get("choice_value"))
params = request.values
params["choice_value"] = choice_value
current_app.logger.debug(params)
attr_id = AttributeManager.add(**params)
return self.jsonify(attr_id=attr_id)
@args_validate(AttributeManager.cls)
@role_required(RoleEnum.CONFIG)
def put(self, attr_id):
if request.url.endswith("/calc_computed_attribute"):
AttributeManager.calc_computed_attribute(attr_id)
return self.jsonify(attr_id=attr_id)
choice_value = handle_arg_list(request.values.get("choice_value"))
params = request.values
params["choice_value"] = choice_value
current_app.logger.debug(params)
AttributeManager().update(attr_id, **params)
return self.jsonify(attr_id=attr_id)
@role_required(RoleEnum.CONFIG)
def delete(self, attr_id):
attr_name = AttributeManager.delete(attr_id)
return self.jsonify(message="attribute {0} deleted".format(attr_name))
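With the cmdb blueprint mounted at /api/v0.1, the endpoints above live under /api/v0.1/attributes. A hedged example of creating a choice attribute; the values are placeholders, the caller is assumed to hold the CONFIG role required by the decorator, value_type is left to its TEXT default, and choice_value is sent comma-separated on the assumption that handle_arg_list splits on commas:

# Create a text attribute with a fixed choice list (all values are placeholders).
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/v0.1/attributes",
    data={
        "name": "idc",
        "alias": "IDC",
        "is_choice": 1,
        "choice_value": "beijing,shanghai",
    },
)
print(resp.json())   # {"attr_id": ...}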

View File

@ -9,15 +9,14 @@ from flask import request
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.const import ResourceTypeEnum, PermEnum
from api.lib.cmdb.const import ResourceType, PermEnum
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.perms import has_perm_for_ci
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.decorator import args_required
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.perm.auth import auth_abandoned
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
@ -63,67 +62,52 @@ class CIView(APIView):
ret_key = RetKey.NAME
manager = CIManager()
ci = manager.get_ci_by_id_from_db(ci_id, ret_key=ret_key, fields=fields, valid=True)
ci = manager.get_ci_by_id_from_db(ci_id, ret_key=ret_key, fields=fields)
return self.jsonify(ci_id=ci_id, ci=ci)
@staticmethod
def _wrap_ci_dict():
ci_dict = {k: v.strip() if isinstance(v, six.string_types) else v for k, v in request.values.items()
if k != "ci_type" and not k.startswith("_")}
ci_dict = dict()
for k, v in request.values.items():
if k != "ci_type" and not k.startswith("_"):
ci_dict[k] = v.strip() if isinstance(v, six.string_types) else v
return ci_dict
@has_perm_for_ci("ci_type", ResourceTypeEnum.CI, PermEnum.ADD, lambda x: CITypeCache.get(x))
@has_perm_from_args("ci_type", ResourceType.CI, PermEnum.ADD)
def post(self):
ci_type = request.values.get("ci_type")
ticket_id = request.values.pop("ticket_id", None)
_no_attribute_policy = request.values.get("no_attribute_policy", ExistPolicy.IGNORE)
exist_policy = request.values.pop('exist_policy', None)
_no_attribute_policy = request.values.get("_no_attribute_policy", ExistPolicy.IGNORE)
ci_dict = self._wrap_ci_dict()
manager = CIManager()
current_app.logger.debug(ci_dict)
ci_id = manager.add(ci_type,
exist_policy=exist_policy or ExistPolicy.REJECT,
_no_attribute_policy=_no_attribute_policy,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
exist_policy=ExistPolicy.REJECT,
_no_attribute_policy=_no_attribute_policy, **ci_dict)
return self.jsonify(ci_id=ci_id)
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type)
@has_perm_from_args("ci_id", ResourceType.CI, PermEnum.UPDATE, CIManager.get_type_name)
def put(self, ci_id=None):
args = request.values
ci_type = args.get("ci_type")
ticket_id = request.values.pop("ticket_id", None)
_no_attribute_policy = args.get("no_attribute_policy", ExistPolicy.IGNORE)
_no_attribute_policy = args.get("_no_attribute_policy", ExistPolicy.IGNORE)
ci_dict = self._wrap_ci_dict()
manager = CIManager()
if ci_id is not None:
manager.update(ci_id,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
manager.update(ci_id, **ci_dict)
else:
request.values.pop('exist_policy', None)
ci_id = manager.add(ci_type,
exist_policy=ExistPolicy.REPLACE,
_no_attribute_policy=_no_attribute_policy,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
return self.jsonify(ci_id=ci_id)
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.DELETE, CIManager.get_type)
@has_perm_from_args("ci_id", ResourceType.CI, PermEnum.DELETE, CIManager.get_type_name)
def delete(self, ci_id):
manager = CIManager()
manager.delete(ci_id)
return self.jsonify(message="ok")
@ -132,13 +116,13 @@ class CIDetailView(APIView):
def get(self, ci_id):
_ci = CI.get_by_id(ci_id).to_dict()
return self.jsonify(**_ci)
class CISearchView(APIView):
url_prefix = ("/ci/s", "/ci/search")
@auth_abandoned
def get(self):
"""@params: q: query statement
fl: filter by column
@ -146,32 +130,28 @@ class CISearchView(APIView):
ret_key: id, name, alias
facet: statistic
"""
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count") or request.values.get("page_size"))
query = request.values.get('q', "")
fl = handle_arg_list(request.values.get('fl', ""))
excludes = handle_arg_list(request.values.get('excludes', ""))
ret_key = request.values.get('ret_key', RetKey.NAME)
if ret_key not in (RetKey.NAME, RetKey.ALIAS, RetKey.ID):
ret_key = RetKey.NAME
facet = handle_arg_list(request.values.get("facet", ""))
sort = request.values.get("sort")
use_id_filter = request.values.get("use_id_filter", False) in current_app.config.get('BOOL_TRUE')
start = time.time()
s = ci_search(query, fl, facet, page, ret_key, count, sort, excludes, use_id_filter=use_id_filter)
if current_app.config.get("USE_ES"):
s = SearchFromES(query, fl, facet, page, ret_key, count, sort)
else:
s = SearchFromDB(query, fl, facet, page, ret_key, count, sort)
try:
response, counter, total, page, numfound, facet = s.search()
except SearchError as e:
return abort(400, str(e))
if request.values.get('need_children') in current_app.config.get('BOOL_TRUE') and len(response) == 1:
children = CIRelationManager.get_children(response[0]['_id'], ret_key=ret_key) # one floor
response[0].update(children)
current_app.logger.debug("search time is: {0}".format(time.time() - start))
current_app.logger.debug("search time is :{0}".format(time.time() - start))
return self.jsonify(numfound=numfound,
total=total,
page=page,
@ -179,18 +159,15 @@ class CISearchView(APIView):
counter=counter,
result=response)
def post(self):
return self.get()
class CIUnique(APIView):
url_prefix = "/ci/<int:ci_id>/unique"
@has_perm_from_args("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type_name)
@has_perm_from_args("ci_id", ResourceType.CI, PermEnum.UPDATE, CIManager.get_type_name)
def put(self, ci_id):
params = request.values
unique_name = list(params.keys())[0]
unique_value = list(params.values())[0]
unique_name = params.keys()[0]
unique_value = params.values()[0]
CIManager.update_unique_value(ci_id, unique_name, unique_value)
@ -227,52 +204,14 @@ class CIHeartbeatView(APIView):
class CIFlushView(APIView):
url_prefix = ("/ci/flush", "/ci/<int:ci_id>/flush")
@auth_abandoned
def get(self, ci_id=None):
from api.tasks.cmdb import ci_cache
from api.lib.cmdb.const import CMDB_QUEUE
if ci_id is not None:
ci_cache.apply_async(args=(ci_id, None, None), queue=CMDB_QUEUE)
ci_cache.apply_async([ci_id], queue=CMDB_QUEUE)
else:
cis = CI.get_by(to_dict=False)
for ci in cis:
ci_cache.apply_async(args=(ci.id, None, None), queue=CMDB_QUEUE)
ci_cache.apply_async([ci.id], queue=CMDB_QUEUE)
return self.jsonify(code=200)
class CIAutoDiscoveryStatisticsView(APIView):
url_prefix = "/ci/adc/statistics"
def get(self):
return self.jsonify(CIManager.get_ad_statistics())
class CIPasswordView(APIView):
url_prefix = "/ci/<int:ci_id>/attributes/<int:attr_id>/password"
def get(self, ci_id, attr_id):
return self.jsonify(ci_id=ci_id, attr_id=attr_id, value=CIManager.load_password(ci_id, attr_id))
def post(self, ci_id, attr_id):
return self.get(ci_id, attr_id)
class CIBaselineView(APIView):
url_prefix = ("/ci/baseline", "/ci/<int:ci_id>/baseline/rollback")
@args_required("before_date")
def get(self):
ci_ids = handle_arg_list(request.values.get('ci_ids'))
before_date = request.values.get('before_date')
return self.jsonify(CIManager().baseline(list(map(int, ci_ids)), before_date))
@args_required("before_date")
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type)
def post(self, ci_id):
if 'rollback' in request.url:
before_date = request.values.get('before_date')
return self.jsonify(**CIManager().rollback(ci_id, before_date))
return self.get(ci_id)
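A hedged sketch of the CI write path above: ci_type plus any field not starting with '_' is posted as form data and ends up in ci_dict. The collection route for CIView is not visible in this hunk, so /api/v0.1/ci is assumed from the sibling routes (/ci/s, /ci/flush); host, type name and attribute fields are placeholders, and the ADD permission check and session handling are omitted:

# Create a CI of a given type; every extra form field becomes an attribute value.
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/v0.1/ci",      # route assumed, see note above
    data={
        "ci_type": "server",                  # placeholder CIType name
        "hostname": "web-01",                 # placeholder attribute fields
        "idc": "beijing",
    },
)
print(resp.json())   # {"ci_id": ...}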

View File

@ -0,0 +1,133 @@
# -*- coding:utf-8 -*-
import time
from flask import abort
from flask import current_app
from flask import request
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci_relation.search import Search
from api.lib.perm.auth import auth_abandoned
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
from api.resource import APIView
class CIRelationSearchView(APIView):
url_prefix = ("/ci_relations/s", "/ci_relations/search")
@auth_abandoned
def get(self):
"""@params: q: query statement
fl: filter by column
count: page size (the number of CIs per page)
root_id: ci id
level: default is 1
facet: statistic
"""
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count") or request.values.get("page_size"))
root_id = request.values.get('root_id')
level = request.values.get('level', 1)
query = request.values.get('q', "")
fl = handle_arg_list(request.values.get('fl', ""))
facet = handle_arg_list(request.values.get("facet", ""))
sort = request.values.get("sort")
start = time.time()
s = Search(root_id, level, query, fl, facet, page, count, sort)
try:
response, counter, total, page, numfound, facet = s.search()
except SearchError as e:
return abort(400, str(e))
current_app.logger.debug("search time is :{0}".format(time.time() - start))
return self.jsonify(numfound=numfound,
total=total,
page=page,
facet=facet,
counter=counter,
result=response)
class CIRelationStatisticsView(APIView):
url_prefix = "/ci_relations/statistics"
@auth_abandoned
def get(self):
root_ids = list(map(int, handle_arg_list(request.values.get('root_ids'))))
level = request.values.get('level', 1)
start = time.time()
s = Search(root_ids, level)
try:
result = s.statistics()
except SearchError as e:
return abort(400, str(e))
current_app.logger.debug("search time is :{0}".format(time.time() - start))
return self.jsonify(result)
class GetSecondCIsView(APIView):
url_prefix = "/ci_relations/<int:first_ci_id>/second_cis"
def get(self, first_ci_id):
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count"))
relation_type = request.values.get("relation_type", "contain")
manager = CIRelationManager()
numfound, total, second_cis = manager.get_second_cis(
first_ci_id, page=page, per_page=count, relation_type=relation_type)
return self.jsonify(numfound=numfound,
total=total,
page=page,
second_cis=second_cis)
class GetFirstCIsView(APIView):
url_prefix = "/ci_relations/<int:second_ci_id>/first_cis"
def get(self, second_ci_id):
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count"))
manager = CIRelationManager()
numfound, total, first_cis = manager.get_first_cis(second_ci_id, per_page=count, page=page)
return self.jsonify(numfound=numfound,
total=total,
page=page,
first_cis=first_cis)
class CIRelationView(APIView):
url_prefix = "/ci_relations/<int:first_ci_id>/<int:second_ci_id>"
def post(self, first_ci_id, second_ci_id):
manager = CIRelationManager()
res = manager.add(first_ci_id, second_ci_id)
return self.jsonify(cr_id=res)
def delete(self, first_ci_id, second_ci_id):
manager = CIRelationManager()
manager.delete_2(first_ci_id, second_ci_id)
return self.jsonify(message="CIType Relation is deleted")
class DeleteCIRelationView(APIView):
url_prefix = "/ci_relations/<int:cr_id>"
def delete(self, cr_id):
manager = CIRelationManager()
manager.delete(cr_id)
return self.jsonify(message="CIType Relation is deleted")
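A hedged sketch of the relation endpoints above: bind two CIs, then walk one level of the tree through /ci_relations/s. Host and ids are placeholders; auth handling is omitted:

# Relate two CIs, then search the relation tree one level below the parent.
import requests

BASE = "http://127.0.0.1:8000/api/v0.1"          # placeholder host

requests.post(BASE + "/ci_relations/1/2")        # 1 -> 2, placeholder ci ids
tree = requests.get(BASE + "/ci_relations/s",
                    params={"root_id": 1, "level": 1, "q": ""}).json()
print(tree["numfound"], tree["result"])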

api/views/cmdb/ci_type.py Normal file (202 lines)

@ -0,0 +1,202 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app
from flask import request
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci_type import CITypeAttributeGroupManager
from api.lib.cmdb.ci_type import CITypeAttributeManager
from api.lib.cmdb.ci_type import CITypeGroupManager
from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.const import RoleEnum
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import role_required
from api.lib.utils import handle_arg_list
from api.resource import APIView
class CITypeView(APIView):
url_prefix = ("/ci_types", "/ci_types/<int:type_id>", "/ci_types/<string:type_name>")
def get(self, type_id=None, type_name=None):
q = request.args.get("type_name")
if type_id is not None:
ci_types = [CITypeCache.get(type_id).to_dict()]
elif type_name is not None:
ci_types = [CITypeCache.get(type_name).to_dict()]
else:
ci_types = CITypeManager().get_ci_types(q)
count = len(ci_types)
return self.jsonify(numfound=count, ci_types=ci_types)
@role_required(RoleEnum.CONFIG)
@args_required("name")
def post(self):
params = request.values
type_name = params.get("name")
type_alias = params.get("alias")
type_alias = type_name if not type_alias else type_alias
params['alias'] = type_alias
manager = CITypeManager()
type_id = manager.add(**params)
return self.jsonify(type_id=type_id)
@role_required(RoleEnum.CONFIG)
def put(self, type_id):
params = request.values
manager = CITypeManager()
manager.update(type_id, **params)
return self.jsonify(type_id=type_id)
@role_required(RoleEnum.CONFIG)
def delete(self, type_id):
CITypeManager.delete(type_id)
return self.jsonify(type_id=type_id)
class CITypeGroupView(APIView):
url_prefix = ("/ci_types/groups", "/ci_types/groups/<int:gid>")
def get(self):
need_other = request.values.get("need_other")
return self.jsonify(CITypeGroupManager.get(need_other))
@role_required(RoleEnum.CONFIG)
@args_required("name")
def post(self):
name = request.values.get("name")
group = CITypeGroupManager.add(name)
return self.jsonify(group.to_dict())
@role_required(RoleEnum.CONFIG)
def put(self, gid):
name = request.values.get('name')
type_ids = request.values.get('type_ids')
CITypeGroupManager.update(gid, name, type_ids)
return self.jsonify(gid=gid)
@role_required(RoleEnum.CONFIG)
def delete(self, gid):
CITypeGroupManager.delete(gid)
return self.jsonify(gid=gid)
class CITypeQueryView(APIView):
url_prefix = "/ci_types/query"
@args_required("q")
def get(self):
q = request.args.get("q")
res = CITypeManager.query(q)
return self.jsonify(ci_type=res)
class EnableCITypeView(APIView):
url_prefix = "/ci_types/<int:type_id>/enable"
@role_required(RoleEnum.CONFIG)
def post(self, type_id):
enable = request.values.get("enable", True)
CITypeManager.set_enabled(type_id, enabled=enable)
return self.jsonify(type_id=type_id, enable=enable)
class CITypeAttributeView(APIView):
url_prefix = ("/ci_types/<int:type_id>/attributes", "/ci_types/<string:type_name>/attributes")
def get(self, type_id=None, type_name=None):
t = CITypeCache.get(type_id) or CITypeCache.get(type_name) or abort(404, "CIType does not exist")
type_id = t.id
unique_id = t.unique_id
unique = AttributeCache.get(unique_id).name
return self.jsonify(attributes=CITypeAttributeManager.get_attributes_by_type_id(type_id),
type_id=type_id,
unique_id=unique_id,
unique=unique)
@role_required(RoleEnum.CONFIG)
@args_required("attr_id")
def post(self, type_id=None):
attr_id_list = handle_arg_list(request.values.get("attr_id"))
params = request.values
params.pop("attr_id", "")
CITypeAttributeManager.add(type_id, attr_id_list, **params)
return self.jsonify(attributes=attr_id_list)
@role_required(RoleEnum.CONFIG)
@args_required("attributes")
def put(self, type_id=None):
"""
attributes is a list; only raw data requests are supported
:param type_id:
:return:
"""
attributes = request.values.get("attributes")
current_app.logger.debug(attributes)
if not isinstance(attributes, list):
return abort(400, "attributes must be list")
CITypeAttributeManager.update(type_id, attributes)
return self.jsonify(attributes=attributes)
@role_required(RoleEnum.CONFIG)
@args_required("attr_id")
def delete(self, type_id=None):
"""
Form request: attr_id is a string, separated by commas
Raw data request: attr_id is a list
:param type_id:
:return:
"""
attr_id_list = handle_arg_list(request.values.get("attr_id", ""))
CITypeAttributeManager.delete(type_id, attr_id_list)
return self.jsonify(attributes=attr_id_list)
class CITypeAttributeGroupView(APIView):
url_prefix = ("/ci_types/<int:type_id>/attribute_groups",
"/ci_types/attribute_groups/<int:group_id>")
def get(self, type_id):
need_other = request.values.get("need_other")
return self.jsonify(CITypeAttributeGroupManager.get_by_type_id(type_id, need_other))
@role_required(RoleEnum.CONFIG)
@args_required("name")
def post(self, type_id):
name = request.values.get("name").strip()
order = request.values.get("order") or 0
attrs = handle_arg_list(request.values.get("attributes", ""))
orders = list(range(len(attrs)))
attr_order = list(zip(attrs, orders))
group = CITypeAttributeGroupManager.create_or_update(type_id, name, attr_order, order)
current_app.logger.warning(group.id)
return self.jsonify(group_id=group.id)
@role_required(RoleEnum.CONFIG)
def put(self, group_id):
name = request.values.get("name")
order = request.values.get("order") or 0
attrs = handle_arg_list(request.values.get("attributes", ""))
orders = list(range(len(attrs)))
attr_order = list(zip(attrs, orders))
CITypeAttributeGroupManager.update(group_id, name, attr_order, order)
return self.jsonify(group_id=group_id)
@role_required(RoleEnum.CONFIG)
def delete(self, group_id):
CITypeAttributeGroupManager.delete(group_id)
return self.jsonify(group_id=group_id)
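A hedged sketch of the CIType flow above: create a type, then attach existing attributes to it. Host, names and ids are placeholders, the caller is assumed to hold the CONFIG role, and attr_id is sent comma-separated on the assumption that handle_arg_list splits on commas:

# Create a CIType and bind two existing attributes to it.
import requests

BASE = "http://127.0.0.1:8000/api/v0.1"                  # placeholder host

ci_type = requests.post(BASE + "/ci_types",
                        data={"name": "server"}).json()  # placeholder type name
requests.post(BASE + "/ci_types/{}/attributes".format(ci_type["type_id"]),
              data={"attr_id": "1,2"})                   # placeholder attr ids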

View File

@ -0,0 +1,58 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import RoleEnum
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import role_required
from api.resource import APIView
class GetChildrenView(APIView):
url_prefix = "/ci_type_relations/<int:parent_id>/children"
def get(self, parent_id):
return self.jsonify(children=CITypeRelationManager.get_children(parent_id))
class GetParentsView(APIView):
url_prefix = "/ci_type_relations/<int:child_id>/parents"
def get(self, child_id):
return self.jsonify(parents=CITypeRelationManager.get_parents(child_id))
class CITypeRelationView(APIView):
url_prefix = ("/ci_type_relations", "/ci_type_relations/<int:parent_id>/<int:child_id>")
@role_required(RoleEnum.CONFIG)
def get(self):
res = CITypeRelationManager.get()
return self.jsonify(res)
@role_required(RoleEnum.CONFIG)
@args_required("relation_type_id")
def post(self, parent_id, child_id):
relation_type_id = request.values.get("relation_type_id")
ctr_id = CITypeRelationManager.add(parent_id, child_id, relation_type_id)
return self.jsonify(ctr_id=ctr_id)
@role_required(RoleEnum.CONFIG)
def delete(self, parent_id, child_id):
CITypeRelationManager.delete_2(parent_id, child_id)
return self.jsonify(code=200, parent_id=parent_id, child_id=child_id)
class CITypeRelationDelete2View(APIView):
url_prefix = "/ci_type_relations/<int:ctr_id>"
@role_required(RoleEnum.CONFIG)
def delete(self, ctr_id):
CITypeRelationManager.delete(ctr_id)
return self.jsonify(code=200, ctr_id=ctr_id)
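CITypeRelationView links a parent CIType to a child CIType through an existing relation type. A short hedged sketch (host and ids are placeholders, CONFIG role assumed):

# Link two CITypes (parent -> child) with a given relation type.
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/v0.1/ci_type_relations/1/2",   # placeholder type ids
    data={"relation_type_id": 1},                             # placeholder relation type id
)
print(resp.json())   # {"ctr_id": ...}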

api/views/cmdb/history.py Normal file (63 lines)

@ -0,0 +1,63 @@
# -*- coding:utf-8 -*-
import datetime
from flask import abort
from flask import request
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.resource import APIView
class RecordView(APIView):
url_prefix = "/history/records"
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
_start = request.values.get("start")
_end = request.values.get("end")
username = request.values.get("username", "")
start, end = None, None
if _start:
try:
start = datetime.datetime.strptime(_start, '%Y-%m-%d %H:%M:%S')
except ValueError:
abort(400, 'incorrect start date time')
if _end:
try:
end = datetime.datetime.strptime(_end, '%Y-%m-%d %H:%M:%S')
except ValueError:
abort(400, 'incorrect end date time')
numfound, total, res = AttributeHistoryManger.get_records(start, end, username, page, page_size)
return self.jsonify(numfound=numfound,
records=res,
page=page,
total=total,
start=_start,
end=_end,
username=username)
class CIHistoryView(APIView):
url_prefix = "/history/ci/<int:ci_id>"
def get(self, ci_id):
result = AttributeHistoryManger.get_by_ci_id(ci_id)
return self.jsonify(result)
class RecordDetailView(APIView):
url_prefix = "/history/records/<int:record_id>"
def get(self, record_id):
username, timestamp, attr_dict, rel_dict = AttributeHistoryManger.get_record_detail(record_id)
return self.jsonify(username=username,
timestamp=timestamp,
attr_history=attr_dict,
rel_history=rel_dict)
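The record list above accepts an optional time window whose format is fixed to '%Y-%m-%d %H:%M:%S'. A short usage sketch (host and dates are placeholders):

# List operation records inside a time window.
import requests

resp = requests.get(
    "http://127.0.0.1:8000/api/v0.1/history/records",
    params={"start": "2019-11-01 00:00:00",
            "end": "2019-11-30 23:59:59",
            "page": 1},
)
print(resp.json()["numfound"])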

View File

@ -0,0 +1,95 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.const import ResourceType, PermEnum, RoleEnum
from api.lib.cmdb.preference import PreferenceManager
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.perm.acl.acl import role_required
from api.lib.utils import handle_arg_list
from api.resource import APIView
class PreferenceShowCITypesView(APIView):
url_prefix = "/preference/ci_types"
def get(self):
instance = request.values.get("instance")
tree = request.values.get("tree")
return self.jsonify(PreferenceManager.get_types(instance, tree))
class PreferenceShowAttributesView(APIView):
url_prefix = "/preference/ci_types/<id_or_name>/attributes"
def get(self, id_or_name):
is_subscribed, attributes = PreferenceManager.get_show_attributes(id_or_name)
return self.jsonify(attributes=attributes, is_subscribed=is_subscribed)
@has_perm_from_args("id_or_name", ResourceType.CI, PermEnum.READ, CITypeManager.get_name_by_id)
@args_required("attr")
def post(self, id_or_name):
id_or_name = int(id_or_name)
attr_list = handle_arg_list(request.values.get("attr", ""))
orders = list(range(len(attr_list)))
PreferenceManager.create_or_update_show_attributes(id_or_name, list(zip(attr_list, orders)))
return self.jsonify(type_id=id_or_name,
attr_order=list(zip(attr_list, orders)))
@has_perm_from_args("id_or_name", ResourceType.CI, PermEnum.READ, CITypeManager.get_name_by_id)
def put(self, id_or_name):
return self.post(id_or_name)
class PreferenceTreeApiView(APIView):
url_prefix = "/preference/tree/view"
def get(self):
return self.jsonify(PreferenceManager.get_tree_view())
@has_perm_from_args("type_id", ResourceType.CI, PermEnum.READ, CITypeManager.get_name_by_id)
@args_required("type_id")
@args_required("levels")
def post(self):
type_id = request.values.get("type_id")
levels = handle_arg_list(request.values.get("levels"))
res = PreferenceManager.create_or_update_tree_view(type_id, levels)
return self.jsonify(res and res.to_dict() or {})
def put(self):
return self.post()
class PreferenceRelationApiView(APIView):
url_prefix = "/preference/relation/view"
def get(self):
views, id2type, name2id = PreferenceManager.get_relation_view()
return self.jsonify(views=views, id2type=id2type, name2id=name2id)
@role_required(RoleEnum.CONFIG)
@args_required("name")
def post(self):
name = request.values.get("name")
cr_ids = request.values.get("cr_ids")
views, id2type, name2id = PreferenceManager.create_or_update_relation_view(name, cr_ids)
return self.jsonify(views=views, id2type=id2type, name2id=name2id)
def put(self):
return self.post()
@args_required("name")
def delete(self):
name = request.values.get("name")
PreferenceManager.delete_relation_view(name)
return self.jsonify(name=name)
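PreferenceRelationApiView stores a named relation view whose cr_ids payload follows the [{parent_id: x, child_id: y}] shape noted on the PreferenceRelationView model earlier in this change. A hedged sketch; host, name and ids are placeholders, the CONFIG role is assumed, and cr_ids is sent as a JSON string, which is one plausible encoding for a TEXT column (the parsing done by PreferenceManager is not shown here):

# Define a relation view named "App" spanning two CIType relations.
import json

import requests

requests.post(
    "http://127.0.0.1:8000/api/v0.1/preference/relation/view",
    data={"name": "App",
          "cr_ids": json.dumps([{"parent_id": 1, "child_id": 2},    # placeholder ids
                                {"parent_id": 2, "child_id": 3}])},
)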

View File

@ -0,0 +1,37 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import request
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.relation_type import RelationTypeManager
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import role_required
from api.resource import APIView
class RelationTypeView(APIView):
url_prefix = ("/relation_types", "/relation_types/<int:rel_id>")
def get(self):
return self.jsonify([i.to_dict() for i in RelationTypeManager.get_all()])
@role_required(RoleEnum.CONFIG)
@args_required("name")
def post(self):
name = request.values.get("name") or abort(400, "Name cannot be empty")
rel = RelationTypeManager.add(name)
return self.jsonify(rel.to_dict())
@role_required(RoleEnum.CONFIG)
@args_required("name")
def put(self, rel_id):
name = request.values.get("name") or abort(400, "Name cannot be empty")
rel = RelationTypeManager.update(rel_id, name)
return self.jsonify(rel.to_dict())
@role_required(RoleEnum.CONFIG)
def delete(self, rel_id):
RelationTypeManager.delete(rel_id)
return self.jsonify(rel_id=rel_id)

autoapp.py Normal file (15 lines)

@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
"""Create an application instance."""
from flask import g
from flask_login import current_user
from api.app import create_app
app = create_app()
@app.before_request
def before_request():
g.user = current_user

View File

@ -3,7 +3,8 @@
from api.app import create_app
from api.extensions import celery
# celery -A celery_worker.celery worker -l DEBUG -E -Q xxxx
# celery worker -A celery_worker.celery -l DEBUG -E -Q <queue_name>
print(celery)
app = create_app()
app.app_context().push()

View File

@ -1,7 +0,0 @@
# Environment variable overrides for local development
FLASK_APP=autoapp.py
FLASK_DEBUG=1
FLASK_ENV=development
GUNICORN_WORKERS=2
LOG_LEVEL=debug
SECRET_KEY='<YourSecretKey>'

View File

@ -1,79 +0,0 @@
line-length = 120
cache-dir = ".ruff_cache"
target-version = "py310"
unsafe-fixes = true
show-fixes = true
[lint]
select = [
"E",
"F",
"I",
"TCH",
# W
"W505",
# PT
"PT018",
# SIM
"SIM101",
"SIM114",
# PGH
"PGH004",
# PL
"PLE1142",
# RUF
"RUF100",
# UP
"UP007"
]
preview = true
ignore = ["FURB101"]
[lint.flake8-pytest-style]
mark-parentheses = false
parametrize-names-type = "list"
parametrize-values-row-type = "list"
parametrize-values-type = "tuple"
[lint.flake8-unused-arguments]
ignore-variadic-names = true
[lint.isort]
lines-between-types = 1
order-by-type = true
[lint.per-file-ignores]
"**/api/v1/*.py" = ["TCH"]
"**/model/*.py" = ["TCH003"]
"**/models/__init__.py" = ["F401", "F403"]
"**/tests/*.py" = ["E402"]
"celery_worker.py" = ["F401"]
"api/views/entry.py" = ["I001"]
"migrations/*.py" = ["I001", "E402"]
"*.py" = ["I001"]
"api/views/common_setting/department.py" = ["F841"]
"api/lib/common_setting/upload_file.py" = ["F841"]
"api/lib/common_setting/acl.py" = ["F841"]
"**/__init__.py" = ["F822"]
"api/tasks/*.py" = ["E722"]
"api/views/cmdb/*.py" = ["E722"]
"api/views/acl/*.py" = ["E722"]
"api/lib/secrets/*.py" = ["E722", "F841"]
"api/lib/utils.py" = ["E722", "E731"]
"api/lib/perm/authentication/cas/*" = ["E113", "F841"]
"api/lib/perm/acl/*" = ["E722"]
"api/lib/*" = ["E721", "F722"]
"api/lib/cmdb/*" = ["F722", "E722"]
"api/lib/cmdb/search/ci/es/search.py" = ["F841", "SIM114"]
"api/lib/cmdb/search/ci/db/search.py" = ["F841"]
"api/lib/cmdb/value.py" = ["F841"]
"api/lib/cmdb/history.py" = ["E501"]
"api/commands/common.py" = ["E722"]
"api/commands/click_cmdb.py" = ["E722"]
"api/lib/perm/auth.py" = ["SIM114"]
[format]
preview = true
quote-style = "single"
docstring-code-format = true
skip-magic-trailing-comma = false

View File

@ -1,88 +0,0 @@
[[source]]
url = "https://mirrors.aliyun.com/pypi/simple"
verify_ssl = true
name = "pypi"
[packages]
# Flask
Flask = "==2.2.5"
Werkzeug = "==2.2.3"
click = ">=5.0"
# Api
Flask-RESTful = "==0.3.10"
# Database
Flask-SQLAlchemy = "==3.0.5"
SQLAlchemy = "==1.4.49"
PyMySQL = "==1.1.0"
redis = "==4.6.0"
python-redis-lock = "==4.0.0"
# Migrations
Flask-Migrate = "==2.5.2"
# Deployment
gunicorn = "==21.0.1"
supervisor = "==4.0.3"
# Auth
Flask-Login = ">=0.6.2"
Flask-Bcrypt = "==1.0.1"
Flask-Cors = ">=3.0.8"
ldap3 = "==2.9.1"
pycryptodome = "==3.12.0"
cryptography = ">=41.0.2"
# i18n
flask-babel = "==4.0.0"
# Caching
Flask-Caching = ">=1.0.0"
# Environment variable parsing
environs = "==4.2.0"
marshmallow = "==2.20.2"
# async tasks
celery = "==5.3.1"
celery_once = "==3.0.1"
more-itertools = "==5.0.0"
kombu = ">=5.3.1"
# common setting
timeout-decorator = "==0.5.0"
WTForms = "==3.0.0"
email-validator = "==1.3.1"
treelib = "==1.6.1"
flasgger = "==0.9.5"
Pillow = ">=10.0.1"
# other
six = "==1.16.0"
bs4 = ">=0.0.1"
toposort = ">=1.5"
requests = ">=2.22.0"
requests_oauthlib = "==1.3.1"
markdownify = "==0.11.6"
PyJWT = "==2.4.0"
elasticsearch = "==7.17.9"
future = "==0.18.3"
itsdangerous = "==2.1.2"
Jinja2 = "==3.1.2"
jinja2schema = "==0.1.4"
msgpack-python = "==0.5.6"
alembic = "==1.7.7"
hvac = "==2.0.0"
colorama = ">=0.4.6"
pycryptodomex = ">=3.19.0"
lz4 = ">=4.3.2"
python-magic = "==0.4.27"
jsonpath = "==0.82.2"
networkx = ">=3.1"
ipaddress = ">=1.0.23"
[dev-packages]
# Testing
pytest = "==4.6.5"
WebTest = "==2.0.33"
factory-boy = "==2.12.*"
pdbpp = "==0.10.0"
# Lint and code style
flake8 = "==3.7.7"
flake8-blind-except = "==0.1.1"
flake8-debugger = "==3.1.0"
flake8-docstrings = "==1.3.0"
flake8-isort = "==2.7.0"
isort = "==4.3.21"
pep8-naming = "==0.8.2"
pydocstyle = "==3.0.0"

View File

@ -1,56 +0,0 @@
import click
from flask.cli import with_appcontext
from api.lib.perm.acl.user import UserCRUD
@click.command()
@with_appcontext
def init_acl():
"""
acl init
"""
from api.models.acl import Role
from api.models.acl import App
from api.tasks.acl import role_rebuild
from api.lib.perm.acl.const import ACL_QUEUE
roles = Role.get_by(to_dict=False)
apps = App.get_by(to_dict=False)
for role in roles:
if role.app_id:
role_rebuild.apply_async(args=(role.id, role.app_id), queue=ACL_QUEUE)
else:
for app in apps:
role_rebuild.apply_async(args=(role.id, app.id), queue=ACL_QUEUE)
@click.command()
@with_appcontext
def add_user():
"""
create a user
is_admin: default is False
"""
from api.models.acl import App
from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.cache import RoleCache
from api.lib.perm.acl.role import RoleCRUD
from api.lib.perm.acl.role import RoleRelationCRUD
username = click.prompt('Enter username', confirmation_prompt=False)
password = click.prompt('Enter password', hide_input=True, confirmation_prompt=True)
email = click.prompt('Enter email ', confirmation_prompt=False)
is_admin = click.prompt('Admin (Y/N) ', confirmation_prompt=False, type=bool, default=False)
UserCRUD.add(username=username, password=password, email=email)
if is_admin:
app = AppCache.get('acl') or App.create(name='acl')
acl_admin = RoleCache.get_by_name(app.id, 'acl_admin') or RoleCRUD.add_role('acl_admin', app.id, True)
rid = RoleCache.get_by_name(None, username).id
RoleRelationCRUD.add(acl_admin, acl_admin.id, [rid], app.id)
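# A minimal sketch of exercising these commands from a test, assuming the application
# factory is api.app.create_app and that init_acl/add_user are registered on the Flask
# CLI under their click names ("init-acl", "add-user"); adjust to the real entry point.
from api.app import create_app

app = create_app()
runner = app.test_cli_runner()
# add_user prompts for username, password (twice), email and the admin flag, in that order.
result = runner.invoke(args=["add-user"], input="demo\ndemo-pass\ndemo-pass\ndemo@example.com\nN\n")
print(result.output)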

@@ -1,587 +0,0 @@
# -*- coding:utf-8 -*-
import click
import copy
import datetime
import json
import requests
import time
import uuid
from flask import current_app
from flask.cli import with_appcontext
from flask_login import login_user
import api.lib.cmdb.ci
from api.extensions import db
from api.extensions import rd
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.dcim.rack import RackManager
from api.lib.exception import AbortException
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import UserCache
from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.resource import ResourceCRUD
from api.lib.perm.acl.resource import ResourceTypeCRUD
from api.lib.perm.acl.role import RoleCRUD
from api.lib.secrets.inner import KeyManage
from api.lib.secrets.inner import global_key_threshold
from api.lib.secrets.secrets import InnerKVManger
from api.models.acl import App
from api.models.acl import ResourceType
from api.models.cmdb import Attribute
from api.models.cmdb import AttributeHistory
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import CIType
from api.models.cmdb import CITypeTrigger
from api.models.cmdb import OperationRecord
from api.models.cmdb import PreferenceRelationView
from api.tasks.cmdb import batch_ci_cache
@click.command()
@with_appcontext
def cmdb_init_cache():
db.session.remove()
ci_relations = CIRelation.get_by(to_dict=False)
relations = dict()
relations2 = dict()
for cr in ci_relations:
relations.setdefault(cr.first_ci_id, {}).update({cr.second_ci_id: cr.second_ci.type_id})
if cr.ancestor_ids:
relations2.setdefault('{},{}'.format(cr.ancestor_ids, cr.first_ci_id), {}).update(
{cr.second_ci_id: cr.second_ci.type_id})
for i in relations:
relations[i] = json.dumps(relations[i])
for i in relations2:
relations2[i] = json.dumps(relations2[i])
if relations:
rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
if relations2:
rd.create_or_update(relations2, REDIS_PREFIX_CI_RELATION2)
es = None
if current_app.config.get("USE_ES"):
from api.extensions import es
from api.models.cmdb import Attribute
from api.lib.cmdb.utils import ValueTypeMap
attributes = Attribute.get_by(to_dict=False)
for attr in attributes:
other = dict()
other['index'] = True if attr.is_index else False
if attr.value_type == ValueTypeEnum.TEXT:
other['analyzer'] = 'ik_max_word'
other['search_analyzer'] = 'ik_smart'
if attr.is_index:
other["fields"] = {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
try:
es.update_mapping(attr.name, ValueTypeMap.es_type[attr.value_type], other)
except Exception as e:
print(e)
cis = CI.get_by(to_dict=False)
for ci in cis:
if current_app.config.get("USE_ES"):
res = es.get_index_id(ci.id)
if res:
continue
else:
res = rd.get([ci.id], REDIS_PREFIX_CI)
if res and list(filter(lambda x: x, res)):
continue
m = api.lib.cmdb.ci.CIManager()
ci_dict = m.get_ci_by_id_from_db(ci.id, need_children=False, use_master=False)
if current_app.config.get("USE_ES"):
es.create(ci_dict)
else:
rd.create_or_update({ci.id: json.dumps(ci_dict)}, REDIS_PREFIX_CI)
db.session.remove()
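# Illustrative only (hypothetical CI ids and type id): the payloads handed to
# rd.create_or_update above map a first-level CI id to a JSON object of
# {second_ci_id: type_id}; the second cache is keyed by "ancestor_ids,first_ci_id".
relations_example = {1001: '{"2002": 7, "2003": 7}'}      # REDIS_PREFIX_CI_RELATION
relations2_example = {'900,1001': '{"2002": 7}'}          # REDIS_PREFIX_CI_RELATION2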
@click.command()
@with_appcontext
def cmdb_init_acl():
_app = AppCache.get('cmdb') or App.create(name='cmdb')
app_id = _app.id
current_app.test_request_context().push()
# 1. add resource type
for resource_type in ResourceTypeEnum.all():
try:
perms = PermEnum.all()
if resource_type in (ResourceTypeEnum.CI_FILTER, ResourceTypeEnum.PAGE):
perms = [PermEnum.READ]
elif resource_type == ResourceTypeEnum.CI_TYPE_RELATION:
perms = [PermEnum.ADD, PermEnum.DELETE, PermEnum.GRANT]
elif resource_type in (ResourceTypeEnum.RELATION_VIEW, ResourceTypeEnum.TOPOLOGY_VIEW):
perms = [PermEnum.READ, PermEnum.UPDATE, PermEnum.DELETE, PermEnum.GRANT]
ResourceTypeCRUD.add(app_id, resource_type, '', perms)
except AbortException:
pass
# 2. add role
try:
RoleCRUD.add_role(RoleEnum.CONFIG, app_id, True)
except AbortException:
pass
try:
RoleCRUD.add_role(RoleEnum.CMDB_READ_ALL, app_id, False)
except AbortException:
pass
# 3. add resource and grant
ci_types = CIType.get_by(to_dict=False)
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
for ci_type in ci_types:
try:
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
except AbortException:
pass
ACLManager().grant_resource_to_role(ci_type.name,
RoleEnum.CMDB_READ_ALL,
ResourceTypeEnum.CI,
[PermEnum.READ])
relation_views = PreferenceRelationView.get_by(to_dict=False)
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.RELATION_VIEW, first=True, to_dict=False).id
for view in relation_views:
try:
ResourceCRUD.add(view.name, resource_type_id, app_id)
except AbortException:
pass
ACLManager().grant_resource_to_role(view.name,
RoleEnum.CMDB_READ_ALL,
ResourceTypeEnum.RELATION_VIEW,
[PermEnum.READ])
@click.command()
@with_appcontext
def cmdb_counter():
"""
Dashboard calculations
"""
from api.lib.cmdb.cache import CMDBCounterCache
current_app.test_request_context().push()
if not UserCache.get('worker'):
from api.lib.perm.acl.user import UserCRUD
UserCRUD.add(username='worker', password=uuid.uuid4().hex, email='worker@xxx.com')
login_user(UserCache.get('worker'))
i = 0
today = datetime.date.today()
while True:
try:
db.session.commit()
CMDBCounterCache.reset()
if i % 5 == 0:
CMDBCounterCache.flush_adc_counter()
i = 0
if datetime.date.today() != today:
CMDBCounterCache.clear_ad_exec_history()
today = datetime.date.today()
CMDBCounterCache.flush_sub_counter()
RackManager().check_u_slot()
i += 1
except:
import traceback
print(traceback.format_exc())
time.sleep(60)
@click.command()
@with_appcontext
def cmdb_trigger():
"""
Trigger execution for date attribute
"""
from api.lib.cmdb.ci import CITriggerManager
current_app.test_request_context().push()
if not UserCache.get('worker'):
from api.lib.perm.acl.user import UserCRUD
UserCRUD.add(username='worker', password=uuid.uuid4().hex, email='worker@xxx.com')
login_user(UserCache.get('worker'))
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
trigger2cis = dict()
trigger2completed = dict()
i = 0
while True:
try:
db.session.remove()
if datetime.datetime.today().strftime("%Y-%m-%d") != current_day:
trigger2cis = dict()
trigger2completed = dict()
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
if i == 3 or i == 0:
i = 0
triggers = CITypeTrigger.get_by(to_dict=False, __func_isnot__key_attr_id=None)
for trigger in triggers:
try:
ready_cis = CITriggerManager.waiting_cis(trigger)
except Exception as e:
print(e)
continue
if trigger.id not in trigger2cis:
trigger2cis[trigger.id] = (trigger, ready_cis)
else:
cur = trigger2cis[trigger.id]
cur_ci_ids = {_ci.ci_id for _ci in cur[1]}
trigger2cis[trigger.id] = (
trigger, cur[1] + [_ci for _ci in ready_cis if _ci.ci_id not in cur_ci_ids
and _ci.ci_id not in trigger2completed.get(trigger.id, {})])
for tid in trigger2cis:
trigger, cis = trigger2cis[tid]
for ci in copy.deepcopy(cis):
if CITriggerManager.trigger_notify(trigger, ci):
trigger2completed.setdefault(trigger.id, set()).add(ci.ci_id)
for _ci in cis:
if _ci.ci_id == ci.ci_id:
cis.remove(_ci)
i += 1
time.sleep(10)
except Exception as e:
import traceback
print(traceback.format_exc())
current_app.logger.error("cmdb trigger exception: {}".format(e))
time.sleep(60)
@click.command()
@with_appcontext
def cmdb_index_table_upgrade():
"""
Migrate data from tables c_value_integers, c_value_floats, and c_value_datetime
"""
for attr in Attribute.get_by(to_dict=False):
if attr.value_type not in {ValueTypeEnum.TEXT, ValueTypeEnum.JSON} and not attr.is_index:
attr.update(is_index=True)
AttributeCache.clean(attr)
from api.models.cmdb import CIValueInteger, CIIndexValueInteger
from api.models.cmdb import CIValueFloat, CIIndexValueFloat
from api.models.cmdb import CIValueDateTime, CIIndexValueDateTime
for i in CIValueInteger.get_by(to_dict=False):
CIIndexValueInteger.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
i.delete(commit=False)
db.session.commit()
for i in CIValueFloat.get_by(to_dict=False):
CIIndexValueFloat.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
i.delete(commit=False)
db.session.commit()
for i in CIValueDateTime.get_by(to_dict=False):
CIIndexValueDateTime.create(ci_id=i.ci_id, attr_id=i.attr_id, value=i.value, commit=False)
i.delete(commit=False)
db.session.commit()
def valid_address(address):
if not address:
return False
if not address.startswith(("http://127.0.0.1", "https://127.0.0.1")):
response = {
"message": "Address should start with http://127.0.0.1 or https://127.0.0.1",
"status": "failed"
}
KeyManage.print_response(response)
return False
return True
@click.command()
@click.option(
'-a',
'--address',
help='inner cmdb api, http://127.0.0.1:8000',
)
@with_appcontext
def cmdb_inner_secrets_init(address):
"""
init inner secrets for password feature
"""
res, ok = KeyManage(backend=InnerKVManger()).init()
if not ok:
if res.get("status") == "failed":
KeyManage.print_response(res)
return
token = res.get("details", {}).get("root_token", "")
if valid_address(address):
token = current_app.config.get("INNER_TRIGGER_TOKEN", "") if not token else token
if not token:
token = click.prompt('Enter root token', hide_input=True, confirmation_prompt=False)
assert token is not None
resp = requests.post("{}/api/v0.1/secrets/auto_seal".format(address.strip("/")),
headers={"Inner-Token": token})
if resp.status_code == 200:
KeyManage.print_response(resp.json())
else:
KeyManage.print_response({"message": resp.text or resp.status_code, "status": "failed"})
else:
KeyManage.print_response(res)
@click.command()
@click.option(
'-a',
'--address',
help='inner cmdb api, http://127.0.0.1:8000',
required=True,
)
@with_appcontext
def cmdb_inner_secrets_unseal(address):
"""
unseal the secrets feature
"""
# if not valid_address(address):
# return
address = "{}/api/v0.1/secrets/unseal".format(address.strip("/"))
for i in range(global_key_threshold):
token = click.prompt(f'Enter unseal token {i + 1}', hide_input=True, confirmation_prompt=False)
assert token is not None
resp = requests.post(address, headers={"Unseal-Token": token}, timeout=5)
if resp.status_code == 200:
KeyManage.print_response(resp.json())
if resp.json().get("status") in ["success", "skip"]:
return
else:
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
return
@click.command()
@click.option(
'-a',
'--address',
help='inner cmdb api, http://127.0.0.1:8000',
required=True,
)
@click.option(
'-k',
'--token',
help='root token',
prompt=True,
hide_input=True,
)
@with_appcontext
def cmdb_inner_secrets_seal(address, token):
"""
seal the secrets feature
"""
assert address is not None
assert token is not None
if not valid_address(address):
return
address = "{}/api/v0.1/secrets/seal".format(address.strip("/"))
resp = requests.post(address, headers={
"Inner-Token": token,
})
if resp.status_code == 200:
KeyManage.print_response(resp.json())
else:
KeyManage.print_response({"message": resp.status_code, "status": "failed"})
@click.command()
@with_appcontext
def cmdb_password_data_migrate():
"""
Migrate CI password data, version >= v2.3.6
"""
from api.models.cmdb import CIIndexValueText
from api.models.cmdb import CIValueText
from api.lib.secrets.inner import InnerCrypt
from api.lib.secrets.vault import VaultClient
attrs = Attribute.get_by(to_dict=False)
for attr in attrs:
if attr.is_password:
value_table = CIIndexValueText if attr.is_index else CIValueText
failed = False
for i in value_table.get_by(attr_id=attr.id, to_dict=False):
if current_app.config.get("SECRETS_ENGINE", 'inner') == 'inner':
_, status = InnerCrypt().decrypt(i.value)
if status:
continue
encrypt_value, status = InnerCrypt().encrypt(i.value)
if status:
CIValueText.create(ci_id=i.ci_id, attr_id=attr.id, value=encrypt_value)
else:
failed = True
continue
elif current_app.config.get("SECRETS_ENGINE") == 'vault':
if i.value == '******':
continue
vault = VaultClient(current_app.config.get('VAULT_URL'), current_app.config.get('VAULT_TOKEN'))
try:
vault.update("/{}/{}".format(i.ci_id, i.attr_id), dict(v=i.value))
except Exception as e:
print('save password to vault failed: {}'.format(e))
failed = True
continue
else:
continue
i.delete()
if not failed and attr.is_index:
attr.update(is_index=False)
@click.command()
@with_appcontext
def cmdb_agent_init():
"""
Initialize the agent's permissions and obtain the key and secret
"""
from api.models.acl import User
user = User.get_by(username="cmdb_agent", first=True, to_dict=False)
if user is None:
click.echo(
click.style('user cmdb_agent does not exist, please use flask add-user to create it first', fg='red'))
return
# grant
_app = AppCache.get('cmdb') or App.create(name='cmdb')
app_id = _app.id
ci_types = CIType.get_by(to_dict=False)
resource_type_id = ResourceType.get_by(name=ResourceTypeEnum.CI, first=True, to_dict=False).id
for ci_type in ci_types:
try:
ResourceCRUD.add(ci_type.name, resource_type_id, app_id)
except AbortException:
pass
ACLManager().grant_resource_to_role(ci_type.name,
"cmdb_agent",
ResourceTypeEnum.CI,
[PermEnum.READ, PermEnum.UPDATE, PermEnum.ADD, PermEnum.DELETE])
click.echo("Key : {}".format(click.style(user.key, bg='red')))
click.echo("Secret: {}".format(click.style(user.secret, bg='red')))
@click.command()
@click.option(
'-v',
'--version',
help='input cmdb version, e.g. 2.4.6',
required=True,
)
@with_appcontext
def cmdb_patch(version):
"""
CMDB upgrade patch
"""
version = version[1:] if version.lower().startswith("v") else version
try:
if version >= '2.4.6':
from api.models.cmdb import CITypeRelation
for cr in CITypeRelation.get_by(to_dict=False):
if hasattr(cr, 'parent_attr_id') and cr.parent_attr_id and not cr.parent_attr_ids:
parent_attr_ids, child_attr_ids = [cr.parent_attr_id], [cr.child_attr_id]
cr.update(parent_attr_ids=parent_attr_ids, child_attr_ids=child_attr_ids, commit=False)
db.session.commit()
from api.models.cmdb import AutoDiscoveryCIType, AutoDiscoveryCITypeRelation
from api.lib.cmdb.cache import CITypeCache, AttributeCache
for adt in AutoDiscoveryCIType.get_by(to_dict=False):
if adt.relation:
if not AutoDiscoveryCITypeRelation.get_by(ad_type_id=adt.type_id):
peer_type = CITypeCache.get(list(adt.relation.values())[0]['type_name'])
peer_type_id = peer_type and peer_type.id
peer_attr = AttributeCache.get(list(adt.relation.values())[0]['attr_name'])
peer_attr_id = peer_attr and peer_attr.id
if peer_type_id and peer_attr_id:
AutoDiscoveryCITypeRelation.create(ad_type_id=adt.type_id,
ad_key=list(adt.relation.keys())[0],
peer_type_id=peer_type_id,
peer_attr_id=peer_attr_id,
commit=False)
if hasattr(adt, 'interval') and adt.interval and not adt.cron:
adt.cron = "*/{} * * * *".format(adt.interval // 60 or 1)
db.session.commit()
if version >= "2.4.7":
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
from api.models.cmdb import AutoDiscoveryRule
for i in DEFAULT_INNER:
existed = AutoDiscoveryRule.get_by(name=i['name'], first=True, to_dict=False)
if existed is not None:
if "en" in i['option'] and 'en' not in (existed.option or {}):
option = copy.deepcopy(existed.option)
option['en'] = i['option']['en']
existed.update(option=option, commit=False)
db.session.commit()
if version >= "2.4.14": # update ci columns: updated_at and updated_by
ci_ids = []
for i in CI.get_by(only_query=True).filter(CI.updated_at.is_(None)):
hist = AttributeHistory.get_by(ci_id=i.id, only_query=True).order_by(AttributeHistory.id.desc()).first()
if hist is not None:
record = OperationRecord.get_by_id(hist.record_id)
if record is not None:
u = UserCache.get(record.uid)
i.update(updated_at=record.created_at, updated_by=u and u.nickname, flush=True)
ci_ids.append(i.id)
db.session.commit()
batch_ci_cache.apply_async(args=(ci_ids,))
except Exception as e:
print("cmdb patch failed: {}".format(e))

@@ -1,230 +0,0 @@
import click
from flask import current_app
from flask.cli import with_appcontext
from werkzeug.datastructures import MultiDict
from api.lib.common_setting.acl import ACLManager
from api.lib.common_setting.employee import EmployeeAddForm, GrantEmployeeACLPerm
from api.lib.common_setting.resp_format import ErrFormat
from api.lib.common_setting.utils import CheckNewColumn
from api.models.common_setting import Employee, Department
class InitEmployee(object):
def __init__(self):
self.log = current_app.logger
def import_user_from_acl(self):
"""
Import users from ACL
"""
InitDepartment().init()
acl = ACLManager('acl')
user_list = acl.get_all_users()
username_list = [e['username'] for e in Employee.get_by()]
for user in user_list:
acl_uid = user['uid']
block = 1 if user['block'] else 0
acl_rid = self.get_rid_by_uid(acl_uid)
if user['username'] in username_list:
existed = Employee.get_by(first=True, username=user['username'], to_dict=False)
if existed:
existed.update(
acl_uid=acl_uid,
acl_rid=acl_rid,
block=block,
)
continue
try:
form = EmployeeAddForm(MultiDict(user))
if not form.validate():
raise Exception(
','.join(['{}: {}'.format(field, ','.join(msg)) for field, msg in form.errors.items()]))
data = form.data
data['acl_uid'] = acl_uid
data['acl_rid'] = acl_rid
data['block'] = block
data.pop('password')
Employee.create(
**data
)
except Exception as e:
self.log.error(ErrFormat.acl_import_user_failed.format(user['username'], str(e)))
self.log.error(e)
@staticmethod
def get_rid_by_uid(uid):
from api.models.acl import Role
role = Role.get_by(first=True, uid=uid)
return role['id'] if role is not None else 0
class InitDepartment(object):
def __init__(self):
self.log = current_app.logger
def init(self):
self.init_wide_company()
@staticmethod
def hard_delete(department_id, department_name):
existed_deleted_list = Department.query.filter(
Department.department_name == department_name,
Department.department_id == department_id,
Department.deleted == 1,
).all()
for existed in existed_deleted_list:
existed.delete()
@staticmethod
def get_department(department_name):
return Department.query.filter(
Department.department_name == department_name,
Department.deleted == 0,
).first()
def run(self, department_id, department_name, department_parent_id):
self.hard_delete(department_id, department_name)
res = self.get_department(department_name)
if res:
if res.department_id == department_id:
return
else:
res.update(
department_id=department_id,
department_parent_id=department_parent_id,
)
return
Department.create(
department_id=department_id,
department_name=department_name,
department_parent_id=department_parent_id,
)
new_d = self.get_department(department_name)
if new_d.department_id != department_id:
new_d.update(
department_id=department_id,
department_parent_id=department_parent_id,
)
self.log.info(f"init {department_name} success.")
def run_common(self, department_id, department_name, department_parent_id):
try:
self.run(department_id, department_name, department_parent_id)
except Exception as e:
current_app.logger.error(f"init {department_name} err:")
current_app.logger.error(e)
raise Exception(e)
def init_wide_company(self):
department_id = 0
department_name = '全公司'
department_parent_id = -1
self.run_common(department_id, department_name, department_parent_id)
@staticmethod
def create_acl_role_with_department():
acl = ACLManager('acl')
role_name_map = {role['name']: role for role in acl.get_all_roles()}
d_list = Department.query.filter(
Department.deleted == 0, Department.department_parent_id != -1).all()
for department in d_list:
if department.acl_rid > 0:
continue
role = role_name_map.get(department.department_name)
if not role:
payload = {
'app_id': 'acl',
'name': department.department_name,
}
role = acl.create_role(payload)
acl_rid = role.get('id') if role else 0
department.update(
acl_rid=acl_rid
)
info = f"update department acl_rid: {acl_rid}"
current_app.logger.info(info)
def init_backend_resource(self):
acl = self.check_app('backend')
acl_rid = self.get_admin_user_rid()
if acl_rid == 0:
return
GrantEmployeeACLPerm(acl).grant_by_rid(acl_rid, True)
@staticmethod
def check_app(app_name):
acl = ACLManager(app_name)
payload = dict(
name=app_name,
description=app_name
)
app = acl.validate_app()
if not app:
acl.create_app(payload)
return acl
@staticmethod
def get_admin_user_rid():
admin = Employee.get_by(first=True, username='admin', to_dict=False)
return admin.acl_rid if admin else 0
@click.command()
@with_appcontext
def init_import_user_from_acl():
"""
Import users from ACL
"""
InitEmployee().import_user_from_acl()
@click.command()
@with_appcontext
def init_department():
"""
Department initialization
"""
cli = InitDepartment()
cli.init_wide_company()
cli.create_acl_role_with_department()
cli.init_backend_resource()
@click.command()
@with_appcontext
def common_check_new_columns():
"""
add new columns to tables
"""
CheckNewColumn().run()
@click.command()
@with_appcontext
def common_sync_file_to_db():
from api.lib.common_setting.upload_file import CommonFileCRUD
CommonFileCRUD.sync_file_to_db()
@click.command()
@with_appcontext
@click.option('--value', type=click.INT, default=-1)
def set_auth_auto_redirect_enable(value):
if value < 0:
return
from api.lib.common_setting.common_data import CommonDataCRUD
CommonDataCRUD.set_auth_auto_redirect_enable(value)

@@ -1,464 +0,0 @@
# -*- coding:utf-8 -*-
from flask import abort
from flask import current_app
from flask import session
from flask_login import current_user
from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributesCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.const import BUILTIN_KEYWORDS
from api.lib.cmdb.const import CITypeOperateType
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.history import CITypeHistoryManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.decorator import kwargs_required
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import validate_permission
from api.lib.webhook import webhook_request
from api.models.cmdb import Attribute
from api.models.cmdb import CIType
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import CITypeAttributeGroupItem
from api.models.cmdb import PreferenceShowAttributes
class AttributeManager(object):
"""
CI attributes manager
"""
cls = Attribute
def __init__(self):
pass
@staticmethod
def _get_choice_values_from_webhook(choice_webhook, payload=None):
ret_key = choice_webhook.get('ret_key')
try:
res = webhook_request(choice_webhook, payload or {}).json()
if ret_key:
ret_key_list = ret_key.strip().split("##")
for key in ret_key_list[:-1]:
if key in res:
res = res[key]
if isinstance(res, list):
return [[i[ret_key_list[-1]], {}] for i in res if i.get(ret_key_list[-1])]
return [[i, {}] for i in (res.get(ret_key_list[-1]) or [])]
except Exception as e:
current_app.logger.error("get choice values failed: {}".format(e))
return []
@staticmethod
def _get_choice_values_from_other(choice_other):
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search
if choice_other.get('type_ids'):
type_ids = choice_other.get('type_ids')
attr_id = choice_other.get('attr_id')
other_filter = choice_other.get('filter') or ''
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
s = search(query, fl=[str(attr_id)], facet=[str(attr_id)], count=1)
try:
_, _, _, _, _, facet = s.search()
return [[i[0], {}] for i in (list(facet.values()) or [[]])[0]]
except SearchError as e:
current_app.logger.error("get choice values from other ci failed: {}".format(e))
return []
elif choice_other.get('script'):
try:
x = compile(choice_other['script'], '', "exec")
local_ns = {}
exec(x, {}, local_ns)
res = local_ns['ChoiceValue']().values() or []
return [[i, {}] for i in res]
except Exception as e:
current_app.logger.error("get choice values from script: {}".format(e))
return []
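# Illustrative only (hypothetical payload): the `script` branch above compiles the stored
# source and expects it to define a class named ChoiceValue whose values() method returns
# the list of options, which _get_choice_values_from_other then wraps as [[value, {}], ...].
example_choice_other = {
    "script": "class ChoiceValue(object):\n"
              "    def values(self):\n"
              "        return ['dev', 'staging', 'prod']\n"
}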
@classmethod
def get_choice_values(cls, attr_id, value_type, choice_web_hook, choice_other,
choice_web_hook_parse=True, choice_other_parse=True):
if choice_web_hook:
if choice_web_hook_parse and isinstance(choice_web_hook, dict):
return cls._get_choice_values_from_webhook(choice_web_hook)
else:
return []
elif choice_other:
if choice_other_parse and isinstance(choice_other, dict):
return cls._get_choice_values_from_other(choice_other)
else:
return []
choice_table = ValueTypeMap.choice.get(value_type)
if not choice_table:
return []
choice_values = choice_table.get_by(fl=["value", "option"], attr_id=attr_id)
return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option'] or
{"label": ValueTypeMap.serialize[value_type](choice_value['value'])}]
for choice_value in choice_values]
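# Illustrative return shape of get_choice_values for a plain choice attribute
# (hypothetical values): each entry is a [value, option] pair, with the option falling
# back to a label derived from the value when none is stored.
example_choice_values = [["dev", {"label": "Dev"}], ["prod", {"label": "prod"}]]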
@staticmethod
def add_choice_values(_id, value_type, choice_values):
choice_table = ValueTypeMap.choice.get(value_type)
if choice_table is None:
return
choice_table.get_by(attr_id=_id, only_query=True).delete()
for v, option in choice_values:
choice_table.create(attr_id=_id, value=v, option=option, commit=False)
try:
db.session.flush()
except Exception as e:
current_app.logger.warning("add choice values failed: {}".format(e))
return abort(400, ErrFormat.invalid_choice_values)
@staticmethod
def _del_choice_values(_id, value_type):
choice_table = ValueTypeMap.choice.get(value_type)
choice_table and choice_table.get_by(attr_id=_id, only_query=True).delete()
db.session.flush()
@classmethod
def get_enum_map(cls, _attr_id, _attr=None):
attr = AttributeCache.get(_attr_id) if _attr_id else _attr
if attr and attr.is_choice:
choice_values = cls.get_choice_values(attr.id, attr.value_type, None, None)
return {i[0]: i[1]['label'] for i in choice_values if i[1] and i[1].get('label')}
return {}
@classmethod
def search_attributes(cls, name=None, alias=None, page=1, page_size=None):
"""
:param name:
:param alias:
:param page:
:param page_size:
:return: attribute, if name is None, then return all attributes
"""
if name is not None:
attrs = Attribute.get_by_like(name=name)
elif alias is not None:
attrs = Attribute.get_by_like(alias=alias)
else:
attrs = Attribute.get_by()
numfound = len(attrs)
attrs = attrs[(page - 1) * page_size:][:page_size]
res = list()
for attr in attrs:
attr["is_choice"] and attr.update(
dict(choice_value=cls.get_choice_values(attr["id"], attr["value_type"],
attr["choice_web_hook"], attr.get("choice_other"))))
attr['is_choice'] and attr.pop('choice_web_hook', None)
res.append(attr)
return numfound, res
def get_attribute_by_name(self, name):
attr = Attribute.get_by(name=name, first=True)
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
return attr
def get_attribute_by_alias(self, alias):
attr = Attribute.get_by(alias=alias, first=True)
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
return attr
def get_attribute_by_id(self, _id):
attr = Attribute.get_by_id(_id).to_dict()
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
return attr
def get_attribute(self, key, choice_web_hook_parse=True, choice_other_parse=True):
attr = AttributeCache.get(key) or dict()
attr = attr and attr.to_dict()
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(
attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"),
choice_web_hook_parse=choice_web_hook_parse,
choice_other_parse=choice_other_parse,
)
return attr
@staticmethod
def can_create_computed_attribute():
if RoleEnum.CONFIG not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin('cmdb'):
return abort(403, ErrFormat.role_required.format(RoleEnum.CONFIG))
@classmethod
def calc_computed_attribute(cls, attr_id):
"""
calculate computed attribute for all ci
:param attr_id:
:return:
"""
cls.can_create_computed_attribute()
from api.tasks.cmdb import calc_computed_attribute
calc_computed_attribute.apply_async(args=(attr_id, current_user.uid), queue=CMDB_QUEUE)
@classmethod
@kwargs_required("name")
def add(cls, **kwargs):
choice_value = kwargs.pop("choice_value", [])
kwargs.pop("is_choice", None)
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
name = kwargs.pop("name")
if name in BUILTIN_KEYWORDS or kwargs.get('alias') in BUILTIN_KEYWORDS:
return abort(400, ErrFormat.attribute_name_cannot_be_builtin)
while kwargs.get('choice_other'):
if isinstance(kwargs['choice_other'], dict):
if kwargs['choice_other'].get('script'):
break
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
break
return abort(400, ErrFormat.attribute_choice_other_invalid)
alias = kwargs.pop("alias", "")
alias = name if not alias else alias
Attribute.get_by(name=name, first=True) and abort(400, ErrFormat.attribute_name_duplicate.format(name))
if kwargs.get('default') and not (isinstance(kwargs['default'], dict) and 'default' in kwargs['default']):
kwargs['default'] = dict(default=kwargs['default'])
kwargs.get('is_computed') and cls.can_create_computed_attribute()
kwargs.get('choice_other') and kwargs['choice_other'].get('script') and cls.can_create_computed_attribute()
attr = Attribute.create(flush=True,
name=name,
alias=alias,
is_choice=is_choice,
uid=current_user.uid,
**kwargs)
if choice_value:
cls.add_choice_values(attr.id, attr.value_type, choice_value)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error("add attribute error, {0}".format(str(e)))
return abort(400, ErrFormat.add_attribute_failed.format(name))
AttributeCache.clean(attr)
if current_app.config.get("USE_ES"):
from api.extensions import es
other = dict()
other['index'] = True if attr.is_index else False
if attr.value_type == ValueTypeEnum.TEXT:
other['analyzer'] = 'ik_max_word'
other['search_analyzer'] = 'ik_smart'
if attr.is_index:
other["fields"] = {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
es.update_mapping(name, ValueTypeMap.es_type[attr.value_type], other)
return attr.id
@staticmethod
def _clean_ci_type_attributes_cache(attr_id):
for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False):
CITypeAttributesCache.clean(i.type_id)
@staticmethod
def _change_index(attr, old, new):
from api.lib.cmdb.utils import TableMap
from api.tasks.cmdb import batch_ci_cache
from api.lib.cmdb.const import CMDB_QUEUE
old_table = TableMap(attr=attr, is_index=old).table
new_table = TableMap(attr=attr, is_index=new).table
ci_ids = []
for i in old_table.get_by(attr_id=attr.id, to_dict=False):
new_table.create(ci_id=i.ci_id, attr_id=attr.id, value=i.value, flush=True)
ci_ids.append(i.ci_id)
old_table.get_by(attr_id=attr.id, only_query=True).delete()
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error(str(e))
return abort(400, ErrFormat.attribute_index_change_failed)
batch_ci_cache.apply_async(args=(ci_ids,), queue=CMDB_QUEUE)
@staticmethod
def _can_edit_attribute(attr):
from api.lib.cmdb.ci_type import CITypeManager
if attr.uid == current_user.uid:
return True
for i in CITypeAttribute.get_by(attr_id=attr.id, to_dict=False):
resource = CITypeManager.get_name_by_id(i.type_id)
if resource:
validate_permission(resource, ResourceTypeEnum.CI, PermEnum.CONFIG, "cmdb")
return True
def update(self, _id, **kwargs):
attr = Attribute.get_by_id(_id) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_id)))
if kwargs.get("name"):
other = Attribute.get_by(name=kwargs['name'], first=True, to_dict=False)
if other and other.id != attr.id:
return abort(400, ErrFormat.attribute_name_duplicate.format(kwargs['name']))
if attr.value_type != kwargs.get('value_type'):
return abort(400, ErrFormat.attribute_value_type_cannot_change)
if "is_list" in kwargs and kwargs['is_list'] != attr.is_list:
return abort(400, ErrFormat.attribute_list_value_cannot_change)
if "is_index" in kwargs and kwargs['is_index'] != attr.is_index:
if not is_app_admin("cmdb"):
return abort(400, ErrFormat.attribute_index_cannot_change)
self._change_index(attr, attr.is_index, kwargs['is_index'])
while kwargs.get('choice_other'):
if isinstance(kwargs['choice_other'], dict):
if kwargs['choice_other'].get('script'):
break
if kwargs['choice_other'].get('type_ids') and kwargs['choice_other'].get('attr_id'):
break
return abort(400, ErrFormat.attribute_choice_other_invalid)
existed2 = attr.to_dict()
if not existed2['choice_web_hook'] and not existed2.get('choice_other') and existed2['is_choice']:
existed2['choice_value'] = self.get_choice_values(attr.id, attr.value_type, None, None)
choice_value = kwargs.pop("choice_value", False)
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
kwargs['is_choice'] = is_choice
if kwargs.get('default') and not (isinstance(kwargs['default'], dict) and 'default' in kwargs['default']):
kwargs['default'] = dict(default=kwargs['default'])
kwargs.get('is_computed') and self.can_create_computed_attribute()
is_changed = False
for k in kwargs:
if kwargs[k] != getattr(attr, k, None):
is_changed = True
if is_changed and not self._can_edit_attribute(attr):
return abort(403, ErrFormat.cannot_edit_attribute)
attr.update(flush=True, filter_none=False, **kwargs)
if is_choice and choice_value:
self.add_choice_values(attr.id, attr.value_type, choice_value)
elif existed2['is_choice']:
self._del_choice_values(attr.id, attr.value_type)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error("update attribute error, {0}".format(str(e)))
return abort(400, ErrFormat.update_attribute_failed.format(("id={}".format(_id))))
new = attr.to_dict()
if not new['choice_web_hook'] and new['is_choice']:
new['choice_value'] = choice_value
CITypeHistoryManager.add(CITypeOperateType.UPDATE_ATTRIBUTE, None, attr_id=attr.id,
change=dict(old=existed2, new=new))
AttributeCache.clean(attr)
self._clean_ci_type_attributes_cache(_id)
return attr.id
@staticmethod
def delete(_id):
attr = Attribute.get_by_id(_id) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_id)))
name = attr.name
if CIType.get_by(unique_id=attr.id, first=True, to_dict=False) is not None:
return abort(400, ErrFormat.attribute_is_unique_id)
ref = CITypeAttribute.get_by(attr_id=_id, to_dict=False, first=True)
if ref is not None:
ci_type = CITypeCache.get(ref.type_id)
return abort(400, ErrFormat.attribute_is_ref_by_type.format(ci_type and ci_type.alias or ref.type_id))
if attr.uid != current_user.uid and not is_app_admin('cmdb'):
return abort(403, ErrFormat.cannot_delete_attribute)
if attr.is_choice:
choice_table = ValueTypeMap.choice.get(attr.value_type)
choice_table.get_by(attr_id=_id, only_query=True).delete()
attr.soft_delete()
AttributeCache.clean(attr)
for i in PreferenceShowAttributes.get_by(attr_id=_id, to_dict=False):
i.soft_delete(commit=False)
for i in CITypeAttributeGroupItem.get_by(attr_id=_id, to_dict=False):
i.soft_delete(commit=False)
db.session.commit()
return name

@@ -1,996 +0,0 @@
# -*- coding:utf-8 -*-
import copy
import datetime
import json
import jsonpath
import os
from flask import abort
from flask import current_app
from flask_login import current_user
from sqlalchemy import func
from api.extensions import db
from api.lib.cmdb.auto_discovery.const import CLOUD_MAP
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
from api.lib.cmdb.auto_discovery.const import PRIVILEGED_USERS
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import AutoDiscoveryMappingCache
from api.lib.cmdb.cache import CITypeAttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci_type import CITypeGroupManager
from api.lib.cmdb.const import AutoDiscoveryType
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.custom_dashboard import SystemConfigManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.mixin import DBMixin
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import validate_permission
from api.lib.utils import AESCrypto
from api.models.cmdb import AutoDiscoveryAccount
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryCITypeRelation
from api.models.cmdb import AutoDiscoveryCounter
from api.models.cmdb import AutoDiscoveryExecHistory
from api.models.cmdb import AutoDiscoveryRule
from api.models.cmdb import AutoDiscoveryRuleSyncHistory
from api.tasks.cmdb import build_relations_for_ad_accept
from api.tasks.cmdb import write_ad_rule_sync_history
PWD = os.path.abspath(os.path.dirname(__file__))
app_cli = CMDBApp()
def parse_plugin_script(script):
attributes = []
try:
x = compile(script, '', "exec")
local_ns = {}
exec(x, {}, local_ns)
unique_key = local_ns['AutoDiscovery']().unique_key
attrs = local_ns['AutoDiscovery']().attributes() or []
except Exception as e:
return abort(400, str(e))
if not isinstance(attrs, list):
return abort(400, ErrFormat.adr_plugin_attributes_list_required)
for i in attrs:
if len(i) == 3:
name, _type, desc = i
elif len(i) == 2:
name, _type = i
desc = ""
else:
continue
attributes.append(dict(name=name, type=_type, desc=desc))
return unique_key, attributes
def check_plugin_script(**kwargs):
kwargs['unique_key'], kwargs['attributes'] = parse_plugin_script(kwargs['plugin_script'])
if not kwargs.get('unique_key'):
return abort(400, ErrFormat.adr_unique_key_required)
if not kwargs.get('attributes'):
return abort(400, ErrFormat.adr_plugin_attributes_list_no_empty)
return kwargs
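# Illustrative only (hypothetical plugin): parse_plugin_script above expects the stored
# source to define a class named AutoDiscovery that exposes `unique_key` and an
# attributes() method returning (name, type, desc) tuples; a minimal plugin body:
example_plugin_script = (
    "class AutoDiscovery(object):\n"
    "    unique_key = 'sn'\n"
    "\n"
    "    def attributes(self):\n"
    "        return [('sn', 'text', 'serial number'), ('cpu_count', 'int', '')]\n"
)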
class AutoDiscoveryRuleCRUD(DBMixin):
cls = AutoDiscoveryRule
@classmethod
def get_by_name(cls, name):
return cls.cls.get_by(name=name, first=True, to_dict=False)
@classmethod
def get_by_id(cls, _id):
return cls.cls.get_by_id(_id)
def get_by_inner(self):
return self.cls.get_by(is_inner=True, to_dict=True)
def import_template(self, rules):
for rule in rules:
rule.pop("id", None)
rule.pop("created_at", None)
rule.pop("updated_at", None)
existed = self.cls.get_by(name=rule['name'], first=True, to_dict=False)
if existed is not None:
existed.update(**rule)
else:
self.cls.create(**rule)
def _can_add(self, valid=True, **kwargs):
self.cls.get_by(name=kwargs['name']) and abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
if kwargs.get('is_plugin') and kwargs.get('plugin_script') and valid:
kwargs = check_plugin_script(**kwargs)
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.create_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.create_plugin))
kwargs['owner'] = current_user.uid
return kwargs
def _can_update(self, valid=True, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
if 'name' in kwargs and not kwargs['name']:
return abort(400, ErrFormat.argument_value_required.format('name'))
if kwargs.get('name'):
other = self.cls.get_by(name=kwargs['name'], first=True, to_dict=False)
if other and other.id != existed.id:
return abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
if existed.is_plugin and valid:
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.update_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.update_plugin))
return existed
def update(self, _id, **kwargs):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
for item in AutoDiscoveryCIType.get_by(adr_id=_id, to_dict=False):
item.update(updated_at=datetime.datetime.now())
return super(AutoDiscoveryRuleCRUD, self).update(_id, filter_none=False, **kwargs)
def _can_delete(self, **kwargs):
if AutoDiscoveryCIType.get_by(adr_id=kwargs['_id'], first=True):
return abort(400, ErrFormat.adr_referenced)
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
if existed.is_plugin:
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.delete_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.delete_plugin))
return existed
class AutoDiscoveryCITypeCRUD(DBMixin):
cls = AutoDiscoveryCIType
@classmethod
def get_all(cls, type_ids=None):
res = cls.cls.get_by(to_dict=False)
return [i for i in res if type_ids is None or i.type_id in type_ids]
@classmethod
def get_by_id(cls, _id):
return cls.cls.get_by_id(_id)
@classmethod
def get_by_type_id(cls, type_id):
return cls.cls.get_by(type_id=type_id, to_dict=False)
@classmethod
def get_ad_attributes(cls, type_id):
result = []
adts = cls.get_by_type_id(type_id)
for adt in adts:
adr = AutoDiscoveryRuleCRUD.get_by_id(adt.adr_id)
if not adr:
continue
if adr.type == AutoDiscoveryType.HTTP:
for i in DEFAULT_INNER:
if adr.name == i['name']:
attrs = AutoDiscoveryHTTPManager.get_attributes(
i['en'], (adt.extra_option or {}).get('category')) or []
result.extend([i.get('name') for i in attrs])
break
elif adr.type == AutoDiscoveryType.SNMP:
attributes = AutoDiscoverySNMPManager.get_attributes()
result.extend([i.get('name') for i in (attributes or [])])
else:
result.extend([i.get('name') for i in (adr.attributes or [])])
return sorted(list(set(result)))
@classmethod
def get(cls, ci_id, oneagent_id, oneagent_name, last_update_at=None):
"""
OneAgent sync rules
:param ci_id:
:param oneagent_id:
:param oneagent_name:
:param last_update_at:
:return:
"""
result = []
rules = cls.cls.get_by(to_dict=True)
for rule in rules:
if not rule['enabled']:
continue
if isinstance(rule.get("extra_option"), dict):
decrypt_account(rule['extra_option'], rule['uid'])
if rule['extra_option'].get('_reference'):
rule['extra_option'].pop('password', None)
rule['extra_option'].pop('secret', None)
rule['extra_option'].update(
AutoDiscoveryAccountCRUD().get_config_by_id(rule['extra_option']['_reference']))
if oneagent_id and rule['agent_id'] == oneagent_id:
result.append(rule)
elif rule['query_expr']:
query = rule['query_expr'].lstrip('q').lstrip('=')
s = ci_search(query, fl=['_id'], count=1000000)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
return abort(400, str(e))
for i in (response or []):
if i.get('_id') == ci_id:
result.append(rule)
break
elif not rule['agent_id'] and not rule['query_expr'] and rule['adr_id']:
try:
if not int(oneagent_id, 16): # excludes master
continue
except Exception:
pass
adr = AutoDiscoveryRuleCRUD.get_by_id(rule['adr_id'])
if not adr:
continue
if adr.type in (AutoDiscoveryType.SNMP, AutoDiscoveryType.HTTP):
continue
result.append(rule)
ad_rules_updated_at = (SystemConfigManager.get('ad_rules_updated_at') or {}).get('option', {}).get('v') or ""
new_last_update_at = ""
for i in result:
i['adr'] = AutoDiscoveryRule.get_by_id(i['adr_id']).to_dict()
i['adr'].pop("attributes", None)
__last_update_at = max([i['updated_at'] or "", i['created_at'] or "",
i['adr']['created_at'] or "", i['adr']['updated_at'] or "", ad_rules_updated_at])
if new_last_update_at < __last_update_at:
new_last_update_at = __last_update_at
write_ad_rule_sync_history.apply_async(args=(result, oneagent_id, oneagent_name, datetime.datetime.now()),
queue=CMDB_QUEUE)
if not last_update_at or new_last_update_at > last_update_at:
return result, new_last_update_at
else:
return [], new_last_update_at
@staticmethod
def __valid_exec_target(agent_id, query_expr):
_is_app_admin = is_app_admin("cmdb")
if not agent_id and not query_expr and not _is_app_admin:
return abort(403, ErrFormat.adt_target_all_no_permission)
if _is_app_admin:
return
if agent_id and isinstance(agent_id, str) and agent_id.startswith("0x"):
agent_id = agent_id.strip()
q = "op_duty:{0},-rd_duty:{0},oneagent_id:{1}"
s = ci_search(q.format(current_user.username, agent_id.strip()))
try:
response, _, _, _, _, _ = s.search()
if response:
return
except SearchError as e:
current_app.logger.warning(e)
return abort(400, str(e))
s = ci_search(q.format(current_user.nickname, agent_id.strip()))
try:
response, _, _, _, _, _ = s.search()
if response:
return
except SearchError as e:
current_app.logger.warning(e)
return abort(400, str(e))
if query_expr.strip():
query_expr = query_expr.strip()
if query_expr.startswith('q='):
query_expr = query_expr[2:]
s = ci_search(query_expr, count=1000000)
try:
response, _, _, _, _, _ = s.search()
for i in response:
if (current_user.username not in (i.get('rd_duty') or []) and
current_user.username not in (i.get('op_duty') or []) and
current_user.nickname not in (i.get('rd_duty') or []) and
current_user.nickname not in (i.get('op_duty') or [])):
return abort(403, ErrFormat.adt_target_expr_no_permission.format(
i.get("{}_name".format(i.get('ci_type')))))
except SearchError as e:
current_app.logger.warning(e)
return abort(400, str(e))
@staticmethod
def _can_add(**kwargs):
if kwargs.get('adr_id'):
adr = AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['adr_id'])))
if adr.type == AutoDiscoveryType.HTTP:
kwargs.setdefault('extra_option', dict())
en_name = None
for i in DEFAULT_INNER:
if i['name'] == adr.name:
en_name = i['en']
break
if en_name and kwargs['extra_option'].get('category'):
for item in CLOUD_MAP[en_name]:
if item["collect_key_map"].get(kwargs['extra_option']['category']):
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
kwargs['extra_option']['category']]
kwargs["extra_option"]["provider"] = en_name
break
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
for i in DEFAULT_INNER:
if i['name'] == adr.name:
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
break
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('extra_option'))
ci_type = CITypeCache.get(kwargs['type_id'])
unique = AttributeCache.get(ci_type.unique_id)
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
kwargs['uid'] = current_user.uid
return kwargs
def _can_update(self, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.ad_not_found.format("id={}".format(kwargs['_id'])))
adr = AutoDiscoveryRule.get_by_id(existed.adr_id) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(existed.adr_id)))
if adr.type == AutoDiscoveryType.HTTP:
kwargs.setdefault('extra_option', dict())
en_name = None
for i in DEFAULT_INNER:
if i['name'] == adr.name:
en_name = i['en']
break
if en_name and kwargs['extra_option'].get('category'):
for item in CLOUD_MAP[en_name]:
if item["collect_key_map"].get(kwargs['extra_option']['category']):
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
kwargs['extra_option']['category']]
kwargs["extra_option"]["provider"] = en_name
break
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
for i in DEFAULT_INNER:
if i['name'] == adr.name:
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
break
if 'attributes' in kwargs:
self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
ci_type = CITypeCache.get(existed.type_id)
unique = AttributeCache.get(ci_type.unique_id)
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('password'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
return existed
def update(self, _id, **kwargs):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('extra_option'))
inst = self._can_update(_id=_id, **kwargs)
if len(kwargs) == 1 and 'enabled' in kwargs: # enable or disable
pass
elif inst.agent_id != kwargs.get('agent_id') or inst.query_expr != kwargs.get('query_expr'):
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
item.delete(commit=False)
db.session.commit()
SystemConfigManager.create_or_update("ad_rules_updated_at",
dict(v=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
obj = inst.update(_id=_id, filter_none=False, **kwargs)
return obj
def _can_delete(self, **kwargs):
if AutoDiscoveryCICRUD.get_by_adt_id(kwargs['_id']):
return abort(400, ErrFormat.cannot_delete_adt)
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.ad_not_found.format("id={}".format(kwargs['_id'])))
return existed
def delete(self, _id):
inst = self._can_delete(_id=_id)
inst.soft_delete()
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
item.delete(commit=False)
db.session.commit()
attributes = self.get_ad_attributes(inst.type_id)
for item in AutoDiscoveryCITypeRelationCRUD.get_by_type_id(inst.type_id):
if item.ad_key not in attributes:
item.soft_delete()
return inst
class AutoDiscoveryCITypeRelationCRUD(DBMixin):
cls = AutoDiscoveryCITypeRelation
@classmethod
def get_all(cls, type_ids=None):
res = cls.cls.get_by(to_dict=False)
return [i for i in res if type_ids is None or i.ad_type_id in type_ids]
@classmethod
def get_by_type_id(cls, type_id, to_dict=False):
return cls.cls.get_by(ad_type_id=type_id, to_dict=to_dict)
def upsert(self, ad_type_id, relations):
existed = self.cls.get_by(ad_type_id=ad_type_id, to_dict=False)
existed = {(i.ad_key, i.peer_type_id, i.peer_attr_id): i for i in existed}
new = []
for r in relations:
k = (r.get('ad_key'), r.get('peer_type_id'), r.get('peer_attr_id'))
if len(list(filter(lambda x: x, k))) == 3 and k not in existed:
self.cls.create(ad_type_id=ad_type_id, **r)
new.append(k)
for deleted in set(existed.keys()) - set(new):
existed[deleted].soft_delete()
return self.get_by_type_id(ad_type_id, to_dict=True)
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
class AutoDiscoveryCICRUD(DBMixin):
cls = AutoDiscoveryCI
@classmethod
def get_by_adt_id(cls, adt_id):
return cls.cls.get_by(adt_id=adt_id, to_dict=False)
@classmethod
def get_type_name(cls, adc_id):
adc = cls.cls.get_by_id(adc_id) or abort(404, ErrFormat.adc_not_found)
ci_type = CITypeCache.get(adc.type_id)
return ci_type and ci_type.name
@staticmethod
def get_ci_types(need_other):
result = CITypeGroupManager.get(need_other, False)
adt = {i.type_id for i in AutoDiscoveryCITypeCRUD.get_all()}
for item in result:
item['ci_types'] = [i for i in (item.get('ci_types') or []) if i['id'] in adt]
return result
@staticmethod
def get_attributes_by_type_id(type_id):
from api.lib.cmdb.ci_type import CITypeAttributeManager
attributes = [i for i in CITypeAttributeManager.get_attributes_by_type_id(type_id) or []]
attr_names = set()
adts = AutoDiscoveryCITypeCRUD.get_by_type_id(type_id)
for adt in adts:
attr_names |= set((adt.attributes or {}).values())
return [attr for attr in attributes if attr['name'] in attr_names]
@classmethod
def search(cls, page, page_size, fl=None, **kwargs):
type_id = kwargs['type_id']
adts = AutoDiscoveryCITypeCRUD.get_by_type_id(type_id)
if not adts:
return 0, []
adt2attr_map = {i.id: i.attributes or {} for i in adts}
query = db.session.query(cls.cls).filter(cls.cls.deleted.is_(False))
count_query = db.session.query(func.count(cls.cls.id)).filter(cls.cls.deleted.is_(False))
for k in kwargs:
if hasattr(cls.cls, k):
query = query.filter(getattr(cls.cls, k) == kwargs[k])
count_query = count_query.filter(getattr(cls.cls, k) == kwargs[k])
query = query.order_by(cls.cls.is_accept.desc()).order_by(cls.cls.id.desc())
result = []
for i in query.offset((page - 1) * page_size).limit(page_size):
item = i.to_dict()
adt_id = item['adt_id']
item['instance'] = {adt2attr_map[adt_id][k]: v for k, v in item.get('instance').items()
if (not fl or k in fl) and adt2attr_map.get(adt_id, {}).get(k)}
result.append(item)
numfound = query.count()
return numfound, result
@staticmethod
def _get_unique_key(type_id):
ci_type = CITypeCache.get(type_id)
if ci_type:
attr = CITypeAttributeCache.get(type_id, ci_type.unique_id)
return attr and attr.name
def _can_add(self, **kwargs):
pass
def upsert(self, **kwargs):
adt = AutoDiscoveryCIType.get_by_id(kwargs['adt_id']) or abort(404, ErrFormat.adt_not_found)
existed = self.cls.get_by(type_id=kwargs['type_id'],
unique_value=kwargs.get("unique_value"),
first=True, to_dict=False)
changed = False
if existed is not None:
if existed.instance != kwargs['instance']:
instance = copy.deepcopy(existed.instance) or {}
instance.update(kwargs['instance'])
kwargs['instance'] = instance
existed.update(filter_none=False, **kwargs)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="update resource: {}".format(kwargs.get('unique_value')))
changed = True
else:
existed = self.cls.create(**kwargs)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="add resource: {}".format(kwargs.get('unique_value')))
changed = True
if adt.auto_accept and changed:
try:
self.accept(existed)
except Exception as e:
current_app.logger.error(e)
return abort(400, str(e))
elif changed:
existed.update(is_accept=False, accept_time=None, accept_by=None, filter_none=False)
return existed
def _can_update(self, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(404, ErrFormat.adc_not_found)
return existed
def _can_delete(self, **kwargs):
return self._can_update(**kwargs)
def delete(self, _id):
inst = self._can_delete(_id=_id)
inst.delete()
adt = AutoDiscoveryCIType.get_by_id(inst.adt_id)
if adt:
adt.update(updated_at=datetime.datetime.now())
AutoDiscoveryExecHistoryCRUD().add(type_id=inst.type_id,
stdout="delete resource: {}".format(inst.unique_value))
self._after_delete(inst)
return inst
@classmethod
def delete2(cls, type_id, unique_value):
existed = cls.cls.get_by(type_id=type_id, unique_value=unique_value, first=True, to_dict=False) or abort(
404, ErrFormat.adc_not_found)
if current_app.config.get("USE_ACL"):
ci_type = CITypeCache.get(type_id) or abort(404, ErrFormat.ci_type_not_found)
not is_app_admin("cmdb") and validate_permission(ci_type.name, ResourceTypeEnum.CI, PermEnum.DELETE, "cmdb")
existed.delete()
adt = AutoDiscoveryCIType.get_by_id(existed.adt_id)
if adt:
adt.update(updated_at=datetime.datetime.now())
AutoDiscoveryExecHistoryCRUD().add(type_id=type_id,
stdout="delete resource: {}".format(unique_value))
# TODO: delete ci
@classmethod
def accept(cls, adc, adc_id=None, nickname=None):
if adc_id is not None:
adc = cls.cls.get_by_id(adc_id) or abort(404, ErrFormat.adc_not_found)
adt = AutoDiscoveryCITypeCRUD.get_by_id(adc.adt_id) or abort(404, ErrFormat.adt_not_found)
ci_id = None
ad_key2attr = adt.attributes or {}
if ad_key2attr:
ci_dict = {ad_key2attr[k]: None if not v and isinstance(v, (list, dict)) else v
for k, v in adc.instance.items() if k in ad_key2attr}
extra_option = adt.extra_option or {}
mapping, path_mapping = AutoDiscoveryHTTPManager.get_predefined_value_mapping(
extra_option.get('provider'), extra_option.get('category'))
if mapping:
ci_dict = {k: (mapping.get(k) or {}).get(str(v), v) for k, v in ci_dict.items()}
if path_mapping:
ci_dict = {k: jsonpath.jsonpath(v, path_mapping[k]) if k in path_mapping else v
for k, v in ci_dict.items()}
ci_id = CIManager.add(adc.type_id, is_auto_discovery=True, _is_admin=True, **ci_dict)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="accept resource: {}".format(adc.unique_value))
build_relations_for_ad_accept.apply_async(args=(adc.to_dict(), ci_id, ad_key2attr), queue=CMDB_QUEUE)
adc.update(is_accept=True,
accept_by=nickname or current_user.nickname,
accept_time=datetime.datetime.now(),
ci_id=ci_id)
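
The dict comprehensions in accept() are dense: the rule's attributes map renames collected keys to CI attribute names, a predefined value mapping translates provider-specific values, and jsonpath extracts nested fields. A minimal stand-alone sketch of those three passes, using made-up sample data rather than a real AutoDiscoveryCI row:

# Stand-alone sketch of the three dict passes inside accept(); all sample
# keys, values, and mappings below are hypothetical.
import jsonpath  # the same "jsonpath" package the module itself uses

adc_instance = {"InstanceName": "web-01",
                "Status": "Running",
                "VpcAttributes": {"PrivateIpAddress": {"IpAddress": ["10.0.0.8"]}},
                "NotInRule": "dropped"}
ad_key2attr = {"InstanceName": "hostname", "Status": "status", "VpcAttributes": "private_ip"}
mapping = {"status": {"Running": "running"}}                      # predefined value mapping
path_mapping = {"private_ip": "PrivateIpAddress.IpAddress[0]"}    # jsonpath remainder of the key

# 1) rename collected keys to CI attribute names, normalizing empty lists/dicts to None
ci_dict = {ad_key2attr[k]: (None if not v and isinstance(v, (list, dict)) else v)
           for k, v in adc_instance.items() if k in ad_key2attr}
# 2) translate predefined values, e.g. provider status codes to CMDB values
ci_dict = {k: (mapping.get(k) or {}).get(str(v), v) for k, v in ci_dict.items()}
# 3) pull nested values out with jsonpath where a path remainder is defined
ci_dict = {k: jsonpath.jsonpath(v, path_mapping[k]) if k in path_mapping else v
           for k, v in ci_dict.items()}

# ci_dict now looks roughly like
# {'hostname': 'web-01', 'status': 'running', 'private_ip': ['10.0.0.8']}
# and is what accept() hands to CIManager.add(adc.type_id, is_auto_discovery=True, _is_admin=True, **ci_dict)
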
class AutoDiscoveryHTTPManager(object):
@staticmethod
def get_categories(name):
categories = copy.deepcopy(CLOUD_MAP.get(name) or [])
# strip internal template/collect-key details from the copy that is returned
for item in categories:
    item.pop('map', None)
    item.pop('collect_key_map', None)
return categories
def get_resources(self, name):
en_name = None
for i in DEFAULT_INNER:
if i['name'] == name:
en_name = i['en']
break
if en_name:
categories = self.get_categories(en_name)
return [j for i in categories for j in i['items']]
return []
@staticmethod
def get_attributes(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
tpt = item['map'][_resource]
if isinstance(tpt, dict):
tpt = tpt.get('template')
if tpt and os.path.exists(os.path.join(PWD, tpt)):
with open(os.path.join(PWD, tpt)) as f:
return json.loads(f.read())
return []
@staticmethod
def get_mapping(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
mapping = item['map'][_resource]
if not isinstance(mapping, dict):
return {}
name = mapping.get('mapping')
mapping = AutoDiscoveryMappingCache.get(name)
if isinstance(mapping, dict):
return {mapping[key][provider]['key'].split('.')[0]: key for key in mapping if
(mapping[key].get(provider) or {}).get('key')}
return {}
@staticmethod
def get_predefined_value_mapping(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
mapping = item['map'][_resource]
if not isinstance(mapping, dict):
return {}, {}
name = mapping.get('mapping')
mapping = AutoDiscoveryMappingCache.get(name)
if isinstance(mapping, dict):
return ({key: mapping[key][provider].get('map') for key in mapping if
mapping[key].get(provider, {}).get('map')},
{key: mapping[key][provider]['key'].split('.', 1)[1] for key in mapping if
((mapping[key].get(provider) or {}).get('key') or '').split('.')[1:]})
return {}, {}
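
get_mapping() and get_predefined_value_mapping() read the same cache entry and split each attribute's provider key on its first dot: the leading segment is the collected field name, anything after it is treated as a jsonpath, and an optional map holds predefined value translations. A small sketch with a hypothetical cache entry (real entries come from AutoDiscoveryMappingCache):

# Hypothetical mapping-cache entry; field names and values are made up.
sample_mapping = {
    "hostname": {"aliyun": {"key": "InstanceName"}},
    "private_ip": {"aliyun": {"key": "VpcAttributes.PrivateIpAddress.IpAddress[0]"}},
    "status": {"aliyun": {"key": "Status", "map": {"Running": "running", "Stopped": "stopped"}}},
}
provider = "aliyun"

# get_mapping(): first dot segment of "key" (the collected field) -> CI attribute name
key2attr = {v[provider]["key"].split(".")[0]: attr
            for attr, v in sample_mapping.items() if (v.get(provider) or {}).get("key")}
# -> {'InstanceName': 'hostname', 'VpcAttributes': 'private_ip', 'Status': 'status'}

# get_predefined_value_mapping(): ('map' tables, jsonpath remainder after the first dot)
value_map = {attr: v[provider]["map"]
             for attr, v in sample_mapping.items() if (v.get(provider) or {}).get("map")}
path_map = {attr: v[provider]["key"].split(".", 1)[1]
            for attr, v in sample_mapping.items()
            if ((v.get(provider) or {}).get("key") or "").split(".")[1:]}
# -> value_map == {'status': {'Running': 'running', 'Stopped': 'stopped'}}
# -> path_map  == {'private_ip': 'PrivateIpAddress.IpAddress[0]'}
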
class AutoDiscoverySNMPManager(object):
@staticmethod
def get_attributes():
if os.path.exists(os.path.join(PWD, "templates/net_device.json")):
with open(os.path.join(PWD, "templates/net_device.json")) as f:
return json.loads(f.read())
return []
class AutoDiscoveryComponentsManager(object):
@staticmethod
def get_attributes(name):
if os.path.exists(os.path.join(PWD, "templates/{}.json".format(name))):
with open(os.path.join(PWD, "templates/{}.json".format(name))) as f:
return json.loads(f.read())
return []
class AutoDiscoveryRuleSyncHistoryCRUD(DBMixin):
cls = AutoDiscoveryRuleSyncHistory
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
def upsert(self, **kwargs):
existed = self.cls.get_by(adt_id=kwargs.get('adt_id'),
oneagent_id=kwargs.get('oneagent_id'),
oneagent_name=kwargs.get('oneagent_name'),
first=True,
to_dict=False)
if existed is not None:
existed.update(**kwargs)
else:
self.cls.create(**kwargs)
class AutoDiscoveryExecHistoryCRUD(DBMixin):
cls = AutoDiscoveryExecHistory
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
class AutoDiscoveryCounterCRUD(DBMixin):
cls = AutoDiscoveryCounter
def get(self, type_id):
res = self.cls.get_by(type_id=type_id, first=True, to_dict=True)
if res is None:
return dict(rule_count=0, exec_target_count=0, instance_count=0, accept_count=0,
this_month_count=0, this_week_count=0, last_month_count=0, last_week_count=0)
return res
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
def encrypt_account(config):
if isinstance(config, dict):
if config.get('secret'):
config['secret'] = AESCrypto.encrypt(config['secret'])
if config.get('password'):
config['password'] = AESCrypto.encrypt(config['password'])
def decrypt_account(config, uid):
if isinstance(config, dict):
if config.get('password'):
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
config.pop('password', None)
else:
try:
config['password'] = AESCrypto.decrypt(config['password'])
except Exception as e:
current_app.logger.error('decrypt account failed: {}'.format(e))
if config.get('secret'):
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
config.pop('secret', None)
else:
try:
config['secret'] = AESCrypto.decrypt(config['secret'])
except Exception as e:
current_app.logger.error('decrypt account failed: {}'.format(e))
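
encrypt_account() stores password/secret encrypted, and decrypt_account() only returns them in clear text to the account owner (matching uid) or to PRIVILEGED_USERS; anyone else has the fields stripped. A self-contained sketch of that gate, with AESCrypto and current_user replaced by stand-ins:

# Self-contained sketch of the ownership gate in decrypt_account(); the sample
# config and the decrypt stand-in are hypothetical.
SKETCH_PRIVILEGED = ("cmdb_agent", "worker", "admin")   # mirrors PRIVILEGED_USERS

def redact_or_decrypt(config, owner_uid, username, uid, decrypt=lambda s: s):
    config = dict(config)
    for field in ("password", "secret"):
        if not config.get(field):
            continue
        if username in SKETCH_PRIVILEGED or uid == owner_uid:
            config[field] = decrypt(config[field])      # AESCrypto.decrypt in the real code
        else:
            config.pop(field, None)                     # others never see the ciphertext
    return config

print(redact_or_decrypt({"ak": "AK...", "secret": "<ciphertext>"}, owner_uid=2, username="alice", uid=3))
# {'ak': 'AK...'}                              -- secret stripped for a non-owner
print(redact_or_decrypt({"ak": "AK...", "secret": "<ciphertext>"}, owner_uid=2, username="admin", uid=3))
# {'ak': 'AK...', 'secret': '<ciphertext>'}    -- privileged user gets it back decrypted
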
class AutoDiscoveryAccountCRUD(DBMixin):
cls = AutoDiscoveryAccount
def get(self, adr_id):
res = self.cls.get_by(adr_id=adr_id, to_dict=True)
for i in res:
decrypt_account(i.get('config'), i['uid'])
return res
def get_config_by_id(self, _id):
res = self.cls.get_by_id(_id)
if not res:
return {}
config = res.to_dict().get('config') or {}
decrypt_account(config, res.uid)
return config
def _can_add(self, **kwargs):
encrypt_account(kwargs.get('config'))
kwargs['uid'] = current_user.uid
return kwargs
def upsert(self, adr_id, accounts):
existed_all = self.cls.get_by(adr_id=adr_id, to_dict=False)
account_names = {i['name'] for i in accounts}
name_changed = dict()
for account in accounts:
existed = None
if account.get('id'):
existed = self.cls.get_by_id(account.get('id'))
if existed is None:
continue
account.pop('id')
name_changed[existed.name] = account.get('name')
else:
account = self._can_add(**account)
if existed is not None:
if current_user.uid == existed.uid:
config = copy.deepcopy(existed.config) or {}
config.update(account.get('config') or {})
account['config'] = config
existed.update(**account)
else:
self.cls.create(adr_id=adr_id, **account)
for item in existed_all:
if name_changed.get(item.name, item.name) not in account_names:
if current_user.uid == item.uid:
item.soft_delete()
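
The loop above soft-deletes any of the caller's stored accounts whose (possibly renamed) name no longer appears in the submitted list. A pure-Python sketch of that rule with hypothetical rows standing in for AutoDiscoveryAccount objects:

existing = [{"id": 1, "name": "prod", "uid": 2}, {"id": 2, "name": "dev", "uid": 2}]
submitted = [{"id": 1, "name": "production"}]        # "prod" renamed, "dev" omitted
caller_uid = 2

name_changed = {e["name"]: s["name"]
                for s in submitted if s.get("id")
                for e in existing if e["id"] == s["id"]}
survivors = {s["name"] for s in submitted}
to_soft_delete = [e for e in existing
                  if name_changed.get(e["name"], e["name"]) not in survivors
                  and e["uid"] == caller_uid]
print(to_soft_delete)   # [{'id': 2, 'name': 'dev', 'uid': 2}]
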
def _can_update(self, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(404, ErrFormat.not_found)
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('secret'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('password'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
return existed
def update(self, _id, **kwargs):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('config'))
inst = self._can_update(_id=_id, **kwargs)
obj = inst.update(_id=_id, filter_none=False, **kwargs)
return obj
def _can_delete(self, **kwargs):
pass


@@ -1,351 +0,0 @@
# -*- coding:utf-8 -*-
from api.lib.cmdb.const import AutoDiscoveryType
PRIVILEGED_USERS = ("cmdb_agent", "worker", "admin")
DEFAULT_INNER = [
dict(name="阿里云", en="aliyun", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aliyun'}, "en": "aliyun"}),
dict(name="腾讯云", en="tencentcloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-tengxunyun'}, "en": "tencentcloud"}),
dict(name="华为云", en="huaweicloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-huaweiyun'}, "en": "huaweicloud"}),
dict(name="AWS", en="aws", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aws'}, "en": "aws"}),
dict(name="VCenter", en="vcenter", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'cmdb-vcenter'}, "category": "private_cloud", "en": "vcenter"}),
dict(name="KVM", en="kvm", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'ops-KVM'}, "category": "private_cloud", "en": "kvm"}),
dict(name="Nginx", en="nginx", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-nginx'}, "en": "nginx", "collect_key": "nginx"}),
dict(name="Apache", en="apache", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-apache'}, "en": "apache", "collect_key": "apache"}),
dict(name="Tomcat", en="tomcat", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-tomcat'}, "en": "tomcat", "collect_key": "tomcat"}),
dict(name="MySQL", en="mysql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-mySQL'}, "en": "mysql", "collect_key": "mysql"}),
dict(name="MSSQL", en="mssql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-SQLServer'}, "en": "mssql", "collect_key": "sqlserver"}),
dict(name="Oracle", en="oracle", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-oracle'}, "en": "oracle", "collect_key": "oracle"}),
dict(name="Redis", en="redis", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-redis'}, "en": "redis", "collect_key": "redis"}),
dict(name="交换机", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-jiaohuanji'}}),
dict(name="路由器", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-luyouqi'}}),
dict(name="防火墙", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-fanghuoqiang'}}),
dict(name="打印机", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-dayinji'}}),
]
CLOUD_MAP = {
"aliyun": [
{
"category": "计算",
"items": ["云服务器 ECS", "云服务器 Disk"],
"map": {
"云服务器 ECS": {"template": "templates/aliyun_ecs.json", "mapping": "ecs"},
"云服务器 Disk": {"template": "templates/aliyun_ecs_disk.json", "mapping": "evs"},
},
"collect_key_map": {
"云服务器 ECS": "ali.ecs",
"云服务器 Disk": "ali.ecs_disk",
},
},
{
"category": "网络与CDN",
"items": [
"内容分发CDN",
"负载均衡SLB",
"专有网络VPC",
"交换机Switch",
],
"map": {
"内容分发CDN": {"template": "templates/aliyun_cdn.json", "mapping": "CDN"},
"负载均衡SLB": {"template": "templates/aliyun_slb.json", "mapping": "loadbalancer"},
"专有网络VPC": {"template": "templates/aliyun_vpc.json", "mapping": "vpc"},
"交换机Switch": {"template": "templates/aliyun_switch.json", "mapping": "vswitch"},
},
"collect_key_map": {
"内容分发CDN": "ali.cdn",
"负载均衡SLB": "ali.slb",
"专有网络VPC": "ali.vpc",
"交换机Switch": "ali.switch",
},
},
{
"category": "存储",
"items": ["块存储EBS", "对象存储OSS"],
"map": {
"块存储EBS": {"template": "templates/aliyun_ebs.json", "mapping": "evs"},
"对象存储OSS": {"template": "templates/aliyun_oss.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"块存储EBS": "ali.ebs",
"对象存储OSS": "ali.oss",
},
},
{
"category": "数据库",
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL", "云数据库 Redis"],
"map": {
"云数据库RDS MySQL": {"template": "templates/aliyun_rds_mysql.json", "mapping": "mysql"},
"云数据库RDS PostgreSQL": {"template": "templates/aliyun_rds_postgre.json", "mapping": "postgresql"},
"云数据库 Redis": {"template": "templates/aliyun_redis.json", "mapping": "redis"},
},
"collect_key_map": {
"云数据库RDS MySQL": "ali.rds_mysql",
"云数据库RDS PostgreSQL": "ali.rds_postgre",
"云数据库 Redis": "ali.redis",
},
},
],
"tencentcloud": [
{
"category": "计算",
"items": ["云服务器 CVM"],
"map": {
"云服务器 CVM": {"template": "templates/tencent_cvm.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 CVM": "tencent.cvm",
},
},
{
"category": "CDN与边缘",
"items": ["内容分发CDN"],
"map": {
"内容分发CDN": {"template": "templates/tencent_cdn.json", "mapping": "CDN"},
},
"collect_key_map": {
"内容分发CDN": "tencent.cdn",
},
},
{
"category": "网络",
"items": ["负载均衡CLB", "私有网络VPC", "子网"],
"map": {
"负载均衡CLB": {"template": "templates/tencent_clb.json", "mapping": "loadbalancer"},
"私有网络VPC": {"template": "templates/tencent_vpc.json", "mapping": "vpc"},
"子网": {"template": "templates/tencent_subnet.json", "mapping": "vswitch"},
},
"collect_key_map": {
"负载均衡CLB": "tencent.clb",
"私有网络VPC": "tencent.vpc",
"子网": "tencent.subnet",
},
},
{
"category": "存储",
"items": ["云硬盘CBS", "对象存储COS"],
"map": {
"云硬盘CBS": {"template": "templates/tencent_cbs.json", "mapping": "evs"},
"对象存储COS": {"template": "templates/tencent_cos.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"云硬盘CBS": "tencent.cbs",
"对象存储COS": "tencent.cos",
},
},
{
"category": "数据库",
"items": ["云数据库 MySQL", "云数据库 PostgreSQL", "云数据库 Redis"],
"map": {
"云数据库 MySQL": {"template": "templates/tencent_rdb.json", "mapping": "mysql"},
"云数据库 PostgreSQL": {"template": "templates/tencent_postgres.json", "mapping": "postgresql"},
"云数据库 Redis": {"template": "templates/tencent_redis.json", "mapping": "redis"},
},
"collect_key_map": {
"云数据库 MySQL": "tencent.rdb",
"云数据库 PostgreSQL": "tencent.rds_postgres",
"云数据库 Redis": "tencent.redis",
},
},
],
"huaweicloud": [
{
"category": "计算",
"items": ["云服务器 ECS"],
"map": {
"云服务器 ECS": {"template": "templates/huaweicloud_ecs.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 ECS": "huawei.ecs",
},
},
{
"category": "CDN与智能边缘",
"items": ["内容分发网络CDN"],
"map": {
"内容分发网络CDN": {"template": "templates/huawei_cdn.json", "mapping": "CDN"},
},
"collect_key_map": {
"内容分发网络CDN": "huawei.cdn",
},
},
{
"category": "网络",
"items": ["弹性负载均衡ELB", "虚拟私有云VPC", "子网"],
"map": {
"弹性负载均衡ELB": {"template": "templates/huawei_elb.json", "mapping": "loadbalancer"},
"虚拟私有云VPC": {"template": "templates/huawei_vpc.json", "mapping": "vpc"},
"子网": {"template": "templates/huawei_subnet.json", "mapping": "vswitch"},
},
"collect_key_map": {
"弹性负载均衡ELB": "huawei.elb",
"虚拟私有云VPC": "huawei.vpc",
"子网": "huawei.subnet",
},
},
{
"category": "存储",
"items": ["云硬盘EVS", "对象存储OBS"],
"map": {
"云硬盘EVS": {"template": "templates/huawei_evs.json", "mapping": "evs"},
"对象存储OBS": {"template": "templates/huawei_obs.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"云硬盘EVS": "huawei.evs",
"对象存储OBS": "huawei.obs",
},
},
{
"category": "数据库",
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL"],
"map": {
"云数据库RDS MySQL": {"template": "templates/huawei_rds_mysql.json", "mapping": "mysql"},
"云数据库RDS PostgreSQL": {"template": "templates/huawei_rds_postgre.json", "mapping": "postgresql"},
},
"collect_key_map": {
"云数据库RDS MySQL": "huawei.rds_mysql",
"云数据库RDS PostgreSQL": "huawei.rds_postgre",
},
},
{
"category": "应用中间件",
"items": ["分布式缓存Redis"],
"map": {
"分布式缓存Redis": {"template": "templates/huawei_dcs.json", "mapping": "redis"},
},
"collect_key_map": {
"分布式缓存Redis": "huawei.dcs",
},
},
],
"aws": [
{
"category": "计算",
"items": ["云服务器 EC2"],
"map": {
"云服务器 EC2": {"template": "templates/aws_ec2.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 EC2": "aws.ec2",
},
},
{"category": "网络与CDN", "items": [], "map": {}, "collect_key_map": {}},
],
"vcenter": [
{
"category": "计算",
"items": [
"主机",
"虚拟机",
"主机集群"
],
"map": {
"主机": "templates/vsphere_host.json",
"虚拟机": "templates/vsphere_vm.json",
"主机集群": "templates/vsphere_cluster.json",
},
"collect_key_map": {
"主机": "vsphere.host",
"虚拟机": "vsphere.vm",
"主机集群": "vsphere.cluster",
},
},
{
"category": "网络",
"items": [
"网络",
"标准交换机",
"分布式交换机",
],
"map": {
"网络": "templates/vsphere_network.json",
"标准交换机": "templates/vsphere_standard_switch.json",
"分布式交换机": "templates/vsphere_distributed_switch.json",
},
"collect_key_map": {
"网络": "vsphere.network",
"标准交换机": "vsphere.standard_switch",
"分布式交换机": "vsphere.distributed_switch",
},
},
{
"category": "存储",
"items": ["数据存储", "数据存储集群"],
"map": {
"数据存储": "templates/vsphere_datastore.json",
"数据存储集群": "templates/vsphere_storage_pod.json",
},
"collect_key_map": {
"数据存储": "vsphere.datastore",
"数据存储集群": "vsphere.storage_pod",
},
},
{
"category": "其他",
"items": ["资源池", "数据中心", "文件夹"],
"map": {
"资源池": "templates/vsphere_pool.json",
"数据中心": "templates/vsphere_datacenter.json",
"文件夹": "templates/vsphere_folder.json",
},
"collect_key_map": {
"资源池": "vsphere.pool",
"数据中心": "vsphere.datacenter",
"文件夹": "vsphere.folder",
},
},
],
"kvm": [
{
"category": "计算",
"items": ["虚拟机"],
"map": {
"虚拟机": "templates/kvm_vm.json",
},
"collect_key_map": {
"虚拟机": "kvm.vm",
},
},
{
"category": "存储",
"items": ["存储"],
"map": {
"存储": "templates/kvm_storage.json",
},
"collect_key_map": {
"存储": "kvm.storage",
},
},
{
"category": "network",
"items": ["网络"],
"map": {
"网络": "templates/kvm_network.json",
},
"collect_key_map": {
"网络": "kvm.network",
},
},
],
}
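
Note that CLOUD_MAP "map" entries come in two shapes: public-cloud resources point to a dict carrying a template path plus a mapping name, while vcenter and kvm resources point directly to a template path string. get_attributes() above accepts both; a minimal sketch of that resolution, assuming a base_dir argument in place of the module-level PWD:

import json
import os

def resolve_template(cloud_map, provider, resource, base_dir="."):
    for item in (cloud_map.get(provider) or []):
        tpt = (item.get("map") or {}).get(resource)   # either a path string or a dict
        if tpt is None:
            continue
        if isinstance(tpt, dict):                     # public clouds: {"template": ..., "mapping": ...}
            tpt = tpt.get("template")
        if tpt and os.path.exists(os.path.join(base_dir, tpt)):
            with open(os.path.join(base_dir, tpt)) as f:
                return json.load(f)
    return []

# e.g. resolve_template(CLOUD_MAP, "aliyun", "云服务器 ECS", base_dir=PWD)
# would load templates/aliyun_ecs.json, if that template file exists.
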

Some files were not shown because too many files have changed in this diff.