Compare commits

..

324 Commits

Author SHA1 Message Date
pycook 0b3cad8215 feat: update docker-compose 2024-01-04 10:26:47 +08:00
pycook ed41a72900 release: v2.3.10 2024-01-03 19:23:42 +08:00
wang-liang0615 27854d4e0a
fix(acl-ui):operation history (#363) 2024-01-03 17:41:42 +08:00
wang-liang0615 0372459f9d
fix(ui) (#362) 2024-01-03 17:09:25 +08:00
simontigers f56378e2b5
fix(api): common edit department return (#359) 2024-01-03 16:42:14 +08:00
wang-liang0615 00b276d5e7
format(ui) (#361) 2024-01-03 16:35:50 +08:00
wang-liang0615 3faefbe8d3
feat(ui):i18n (#360)
* feat(ui):i18n

* i18n

* feat(ui):i18n
2024-01-03 16:03:15 +08:00
simontigers 4261f6fb57
fix(api): common department allow parent (#358)
* fix(api): common department edit method

* fix(api): common department edit method

* fix(api): common department allow parent
2024-01-03 15:19:08 +08:00
wang-liang0615 c98199e98e
feat(ui):i18n (#357)
* feat(ui):i18n

* i18n
2024-01-03 14:58:47 +08:00
simontigers 82ea1ddc79
fix(api): common department edit method (#356)
* fix(api): common department edit method

* fix(api): common department edit method
2024-01-03 14:31:54 +08:00
simontigers 995e581315
fix(api): common department edit method (#355) 2024-01-03 14:26:40 +08:00
pycook ec8f626b8f
docs: update install (#354) 2024-01-03 13:58:35 +08:00
wang-liang0615 ec884c92e1
feat(ui):i18n (#352) 2024-01-03 13:29:38 +08:00
simontigers 9ee2776bdd
fix(api): common i18n wide (#351)
* fix(api): common_i18n wide

* fix(api): department i18n
2024-01-03 13:26:58 +08:00
simontigers 7186bdac9c
fix(api): common_i18n wide (#350) 2024-01-03 12:29:49 +08:00
wang-liang0615 0a9964375a
feat(ui):add packages (#349) 2024-01-03 09:42:32 +08:00
wang-liang0615 bff45d00b6
i18n (#348) 2024-01-02 18:04:53 +08:00
wang-liang0615 e429ad59ff
feat(ui):i18n (#347)
* feat(acl-ui):i18n

* feat(base-ui):i18n

* feat(cmdb-ui):i18n
2024-01-02 17:53:07 +08:00
pycook ace160ae19
Dev api 231229 (#345)
* fix(api): predefined value for float

* feat(api): update public clouds config

* feat(api): commands add-user support is_admin
2023-12-29 13:44:23 +08:00
wang-liang0615 7036fe023c
style(ui):global.less (#344) 2023-12-28 11:00:00 +08:00
wang-liang0615 341f5dba53
fix(cmdb-ui):model relation table (#343) 2023-12-27 13:24:01 +08:00
wang-liang0615 5883c1616e
feat:Batch import and download templates support predefined values (#342) 2023-12-27 13:18:05 +08:00
simontigers bee0a3a3e5
feat(api): common i18n (#340) 2023-12-26 10:06:15 +08:00
pycook 7a79a8bbf7
feat(api): i18n 2023-12-25 21:51:44 +08:00
pycook 100a889cb8
fix(api): CI revoke permission (#337) 2023-12-25 12:15:20 +08:00
kdyq007 b093569453
[Update] Fix LDAP login failure (#336)
Co-authored-by: sherlock <sherlock@gmail.com>
2023-12-25 09:36:31 +08:00
pycook 3919dfdfbb release: 2.3.9 2023-12-23 12:51:09 +08:00
pycook ef85ba2542
feat(api): update cmdb-init-acl commands (#335) 2023-12-23 12:44:01 +08:00
pycook 3d5c2ec5bc Merge branch 'master' of github.com:veops/cmdb 2023-12-23 12:31:52 +08:00
pycook c143d6ae5b fix(api): role grant 2023-12-23 12:30:52 +08:00
simontigers 10b273ee81
feat(api): add update_last_login_by_uid (#333) 2023-12-22 18:43:20 +08:00
wang-liang0615 855cb91b31
feat(cmdb-ui):ci type import&export,pref(cmdb-ui):download ci xlsx name, pref(cmdb-ui):ci detail history merge row method (#331)
* pref(cmdb-ui):download ci xlsx name

* pref(cmdb-ui):ci detail history merge row method

* feat(cmdb-ui):ci type import&export
2023-12-22 15:42:20 +08:00
pycook ffae57642c
fix(api): ci relation search 2023-12-22 15:35:02 +08:00
simontigers 9ed1108c20
feat(api): add get_file_binary_str and save (#329) 2023-12-22 15:33:05 +08:00
simontigers 72b2f8b6de
fix(api): refresh rid after create and import employee (#328) 2023-12-22 15:24:48 +08:00
pycook 20f3e917fe
pref(api): import and export of CIType templates 2023-12-22 14:32:03 +08:00
pycook c430515377
fix(api): add CI (#326) 2023-12-22 11:19:16 +08:00
simontigers a6e2aca281
fix(api): svg upload (#321) 2023-12-21 18:58:35 +08:00
wang-liang0615 18313b7bd1
fix(acl_ui):permission (#325) 2023-12-21 17:22:49 +08:00
wang-liang0615 dbc44a8ad6
fix(ui):common double menu (#324) 2023-12-21 14:59:39 +08:00
wang-liang0615 f143e30cf5
feat(ui): filter models by create permission in batch import && model configuration page permissions (#323) 2023-12-21 14:23:38 +08:00
wang-liang0615 4beece5a6e
fix:open triggerForm from attributeCard (#322) 2023-12-21 14:18:00 +08:00
pycook 920295d955 feat: Fixed db volume name 2023-12-21 10:26:51 +08:00
pycook 6eb8ae1dac fix(api): CAS authentication 2023-12-20 12:10:00 +08:00
wang-liang0615 f1f86ce25a
feat(ui):api_host annotation (#320) 2023-12-19 14:22:40 +08:00
pycook 090007487d fix(api): oauth2.0 authentication 2023-12-19 13:07:21 +08:00
wang-liang0615 aa98a304c1
feat:add auth common api_host (#319) 2023-12-19 11:23:27 +08:00
pycook fe22e363b4 fix(api): ldap authentication 2023-12-19 00:16:56 +08:00
pycook 2d2fb6e1d6 release: v2.3.8 2023-12-18 20:08:19 +08:00
pycook 9894c77300 feat(ui): lint 2023-12-18 19:25:22 +08:00
simontigers 4ea947f741
fix: auth config (#318) 2023-12-18 18:27:06 +08:00
simontigers e5ab2c2573
fix: auth config (#317) 2023-12-18 16:52:24 +08:00
wang-liang0615 5581fa8d0f
lint(ui) (#316) 2023-12-18 16:41:21 +08:00
wang-liang0615 76c939fe5c
fix(ui):401 redirect && feat(ui):add auth ldap test (#315) 2023-12-18 16:30:02 +08:00
wang-liang0615 6e9ce08e2c
pref(cmdb-ui):change adt key & add adt alias (#314) 2023-12-18 16:07:52 +08:00
pycook 092a8b9b92
pref(api): A CIType allows repeated binding of auto-discovery rules (#313) 2023-12-16 17:56:14 +08:00
wang-liang0615 b45fd0cbbb
fix:is_list edit bug (#312)
* feat(ui):auth setting

* fix:is_list edit bug
2023-12-15 13:19:55 +08:00
simontigers 5320ecfd62
fix(api): common_data (#311) 2023-12-15 10:56:04 +08:00
wang-liang0615 1d253d7ad3
feat(ui):auth setting (#310) 2023-12-15 10:33:38 +08:00
pycook 73d53f0440
pref(api): authentication and login log (#308)
* pref(api): authentication and login log

* feat(api): ldap and OAuth2.0
2023-12-14 19:53:08 +08:00
simontigers d4a37af183
feat(api): auth config api (#309) 2023-12-14 19:39:21 +08:00
wang-liang0615 e03849b054
fix(cmdb-ui):batch upload cancel bug && download error (#306) 2023-12-13 14:50:53 +08:00
simontigers faed3fe6a2
fix(api): get_employee_notice check data None (#305)
* fix(api): get_employee_notice check data None

* fix(api): remove path when save messager url
2023-12-13 09:45:42 +08:00
pycook 21c9d9accd
feat(api): support OAuth2.0 and OIDC authentication, it has been tested with casdoor 2023-12-12 20:29:57 +08:00
wang-liang0615 a06599ce33
pref(cmdb-ui):batch upload for date type (#301) 2023-12-12 14:53:12 +08:00
pycook 2b69217136
fix(api): time data format 2023-12-12 14:37:21 +08:00
wang-liang0615 03a3b8b169
Revert "pref(cmdb-ui):batch upload for date type (#298)" (#299)
This reverts commit cd319421d5.
2023-12-12 13:51:19 +08:00
wang-liang0615 cd319421d5
pref(cmdb-ui):batch upload for date type (#298)
* fix(cmdb-ui):set localstorage '' after unsubscribe ci

* pref(cmdb-ui):batch upload for date type
2023-12-12 13:38:08 +08:00
pycook c918d54ea5
perf(api): ci delete (#297) 2023-12-12 11:09:32 +08:00
wang-liang0615 cf0ad7bad6
fix(cmdb-ui):set localstorage '' after unsubscribe ci (#296) 2023-12-12 09:38:50 +08:00
pycook e0c8263542
feat(api): cas is compatible with casdoor 2023-12-11 20:58:18 +08:00
pycook 275e8b15f3
Dev api 231211 (#294)
* fix(api): cas authentication

* feat(api): add lz4 package
2023-12-11 19:30:09 +08:00
simontigers 6ff942c107
feat(api): upload file save db (#292) 2023-12-11 18:22:33 +08:00
gmailnovo d3c87ee500
feat: Handle '/dev/stdout' in Logger Configuration 2023-12-07 11:27:45 +08:00
simontigers a4f65e7fc6
fix(api): Common create employee (#287)
* fix(api): add add_from arg in create employee

* fix(api): add check in acl call common after add user
2023-12-06 17:12:37 +08:00
wang-liang0615 10527bf9b8
pref(cmdb-ui):ci upload&delete concurrent 6 (#286) 2023-12-06 14:33:25 +08:00
pycook 0414121c27 docs: update local install 2023-11-30 16:18:33 +08:00
simontigers edde467c87
fix(api): common_data delele (#282) 2023-11-30 13:03:09 +08:00
pycook d525e1ec54
feat(api): only the role cmdb_admin can modify the CIType group (#280) 2023-11-29 17:40:12 +08:00
pycook 91c49b690f fix(api): get relation history 2023-11-28 20:37:36 +08:00
pycook 05453becf9 release: 2.3.7 2023-11-24 14:53:53 +08:00
pycook 5fe27f8678
feat(api): issue #212 (#279) 2023-11-24 10:26:48 +08:00
wang-liang0615 0924b8846f
feat(cmdb-ui): many-to-many relations && dashboard color palette adjustments (#271) 2023-11-24 10:25:56 +08:00
loveiwei 62a669159a
doc: change content in readme_en (#278) 2023-11-23 20:10:25 +08:00
loveiwei 2933bf1efa
feature: add a new management script, such as install,start,pause,del… (#277)
* feature: add a new management script, such as install,start,pause,delete,uninstall

* doc: add install.sh method in readme

* doc: add install.sh method in readme, change install.sh for macos support

* doc: add install.sh method in readme

* doc: add install.sh method in readme
2023-11-23 17:45:53 +08:00
loveiwei 981f8b0145
Fix deploy 1700028675 (#272)
* fix: Solving the timezone issue in Redis, as well as the problem of MySQL logs always being in UTC timezone.

* fix: change the config path of slow_log into /tmp directory in mysqld.cnf file
2023-11-22 18:34:58 +08:00
pycook 213bda671c
docs: local install (#270) 2023-11-16 20:54:08 +08:00
loveiwei 837aabfe77
fix: Solving the timezone issue in Redis, as well as the problem of MySQL logs always being in UTC timezone. (#268) 2023-11-15 20:48:52 +08:00
wang-liang0615 cc599d414a
feat(acl-ui):resources table resizable (#267) 2023-11-14 09:37:45 +08:00
pycook 01fcd7f80e
Dev api 231108 (#264)
* perf(api): commands add-user

* feat(api): add commands cmdb-agent-init
2023-11-09 11:32:54 +08:00
pycook af254ddbeb
Dev api 1107 (#263)
feat: update cmdb.sql
2023-11-08 16:11:36 +08:00
wang-liang0615 422e89c6c6
ui package update (#261)
* update(cmdb-ui):update packages && delete yarn.lock

* update:gitignore
2023-11-08 16:06:48 +08:00
wang-liang0615 c2c11b38ed
Revert "Dev UI 231108 (#256)" (#260)
This reverts commit 45d3f57228.
2023-11-08 14:43:37 +08:00
wang-liang0615 45d3f57228
Dev UI 231108 (#256)
* pref(cmdb-ui):update packages

* pref(cmdb-ui):update packages
2023-11-08 14:32:17 +08:00
pycook c711c3d3fd
feat(api): encrypting webhook configurations (#255) 2023-11-07 17:15:24 +08:00
pycook 2ae4aeee67 fix(api): Code scanning alerts (#254) 2023-11-06 14:27:30 +08:00
pycook 46238b8b51 Dev api 1103 (#252)
feat(api): update requirements
2023-11-06 13:24:08 +08:00
dependabot[bot] b1528ec511 build(deps): bump browserify-sign from 4.2.1 to 4.2.2 in /cmdb-ui (#241)
Bumps [browserify-sign](https://githubfast.com/crypto-browserify/browserify-sign) from 4.2.1 to 4.2.2.
- [Changelog](https://githubfast.com/browserify/browserify-sign/blob/main/CHANGELOG.md)
- [Commits](https://githubfast.com/crypto-browserify/browserify-sign/compare/v4.2.1...v4.2.2)

---
updated-dependencies:
- dependency-name: browserify-sign
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-06 13:13:29 +08:00
pycook 8ef4edb7d2 fix(api): search sort misses cache (#251) 2023-11-03 12:01:44 +08:00
pycook 0172206c7c fix(api commands): cmdb-password-data-migrate (#249) 2023-11-02 20:43:39 +08:00
simontigers 9d5bdf1b0d style(common-setting): clean warning (#246) 2023-10-30 17:35:34 +08:00
pycook c0726b228d fix(api): secrets 2023-10-30 17:23:42 +08:00
pycook 5b314aa907 feat: add inner password storage and optimize flask command about inner cmdb (#248)
Co-authored-by: fxiang21 <fxiang21@126.com>
2023-10-30 16:48:53 +08:00
wang-liang0615 c9f0de9838 Dev UI 231030 (#247)
* config(ui):useEncryption default false

* fix(cmdb-ui):ident 4

* fix(cmdb-ui):relation views
2023-10-30 12:38:05 +08:00
pycook 2db41dd992 Dev api password (#244)
* fix: delete CI password data

* fix(api): update CI password to flush cache
2023-10-29 11:42:07 +08:00
pycook 89e492c1f3 fix: delete CI password data (#243) 2023-10-29 10:53:29 +08:00
pycook 1aeb9a2702 fix(cmdb-ui): CI update password 2023-10-28 23:07:01 +08:00
pycook b090a88b76 release: v2.3.6 2023-10-28 17:51:13 +08:00
pycook d5c479f7e5 feat(cmdb-api): ci password 2023-10-28 16:55:02 +08:00
pycook b342258e75 feat(cmdb-api): CI password data store (#242)
* add secrets,for test

* feat: vault SDK (#238)

* feat: vault SDK

* docs: i18n

* perf(vault): format code

* feat(secrets): support vault

* feat: add inner password storage

* feat: secrets

* feat: add inner password storage

* feat: add secrets feature

* perf(secrets): review

---------

Co-authored-by: fxiang21 <fxiang21@126.com>
Co-authored-by: Mimo <osatmnzn@gmail.com>
2023-10-28 16:19:00 +08:00
wang-liang0615 3d716eff3e feat: predefined values support scripts && password storage && assorted bugfixes (#239) 2023-10-27 11:10:43 +08:00
ivonGwy 9791a184e3 Doc (#235)
* change assignees
2023-10-25 16:00:04 +08:00
ivonGwy 142b8c95c5 Doc (#234)
* final template
2023-10-25 15:52:12 +08:00
ivonGwy 363b89011f Doc (#232)
* fix bugs
2023-10-25 14:54:23 +08:00
ivonGwy 321372fa88 Update issue templates 2023-10-25 14:46:44 +08:00
ivonGwy fc27dc6e64 Doc (#231)
* add issue template
2023-10-25 14:36:36 +08:00
ivonGwy 8ad98f4ba4 Update issue templates 2023-10-25 14:06:24 +08:00
ivonGwy 9b050fa7fa Update issue templates 2023-10-25 14:04:53 +08:00
pycook a30826827a fix(api): add ci (#230) 2023-10-25 13:51:29 +08:00
ivonGwy 691b50aec4 Doc (#229)
* change reandme
2023-10-25 13:19:30 +08:00
kdyq007 55ca2d5eb5 Disable front-end password encryption; strengthen LDAP user verification (#216)
* [Update] migrate python-ldap to ldap3

* [Update] disable front-end password encryption; strengthen LDAP user verification

* Update app.js

---------

Co-authored-by: sherlock <sherlock@gmail.com>
Co-authored-by: pycook <pycook@126.com>
2023-10-24 19:47:46 +08:00
pycook 779cd1ea9e feat: Predefined values support executing scripts (#227) 2023-10-24 19:32:43 +08:00
simontigers 7d53180a2f fix: add_employee_from_acl (#225) 2023-10-24 14:20:40 +08:00
wang-liang0615 b6c41c00dd fix: change request parameters of the relation-view delete-relation API (#224)
* fix: abnormal display of newly added ACL users

* fix: change request parameters of the relation-view delete-relation API
2023-10-24 06:04:21 +08:00
wang-liang0615 fff5679f6e fix: abnormal display of newly added ACL users (#223) 2023-10-24 05:59:08 +08:00
pycook 7556dfe56b feat: add cryptography to requirements 2023-10-23 14:37:01 +08:00
pycook 44b6f2b2ad fix: acl cache 2023-10-23 13:57:06 +08:00
Evan Sung f5607d96f3 fix(common): fix 'ACLManager' object has no attribute 'create_app' (#217) 2023-10-21 11:38:19 +08:00
Evan Sung 2e85a9971b fix(ci_cache): ci cache async args (#215) 2023-10-20 12:05:19 +08:00
kdyq007 8d177266dc feat: migrate python-ldap to ldap3 (#214)
Co-authored-by: sherlock <sherlock@gmail.com>
2023-10-20 09:36:38 +08:00
pycook 42d870ea4e Dev api 20231019 (#210)
* fix(acl): get resources

* fix(celery worker): db server has gone away
2023-10-19 11:51:34 +08:00
wang-liang0615 424cad2130 fix:ci relation add type filter (#208) 2023-10-18 14:06:28 +08:00
pycook 430308a679 fix: ci relation statistics 2023-10-18 13:35:01 +08:00
pycook cc7570a4b2 docs: api doc 2023-10-17 12:06:37 +08:00
wang-liang0615 413c0dfdfe feat: webhook body supports non-JSON payloads (#203) 2023-10-17 10:44:38 +08:00
Evan Sung f8fee771c4 Feature db migrate 20231013 (#202)
* feat(db): support flask migrate

* minor

---------

Co-authored-by: s01249 <songbing@smyfinancial.com>
2023-10-13 16:24:49 +08:00
Evan Sung 3d05cef7cd feat(db): support flask migrate (#201)
Co-authored-by: s01249 <songbing@smyfinancial.com>
2023-10-13 15:55:26 +08:00
simontigers 30477f736e fix: common perms (#200) 2023-10-12 16:02:35 +08:00
wang-liang0615 0cd5c0277b pref: batch upload & minor resource management optimizations (#199) 2023-10-12 15:06:39 +08:00
ivonGwy 4fcddd1010 Merge pull request #197 from veops/doc
Doc
2023-10-11 14:34:05 +08:00
ivonGwy b269ef894f change wechat pic size 2023-10-11 14:33:19 +08:00
ivonGwy ea13432e4c change wechat pic 2023-10-11 14:28:49 +08:00
pycook c8a68bd185 Dev api (#196)
* docs: update

* docs: README & Makefile
2023-10-11 13:40:15 +08:00
wang-liang0615 f9ee895f58 Dev UI 231009 (#195)
* pref: user secret key is optional

* fix:chartColor undefined
2023-10-11 09:12:04 +08:00
pycook 59d9f2c79a docs: update (#194) 2023-10-10 16:53:30 +08:00
wang-liang0615 f629558d60 pref: user secret key is optional (#193) 2023-10-10 09:25:24 +08:00
pycook 4cbf70e756 release: v2.3.5 2023-10-09 20:55:30 +08:00
pycook 40b9bec122 feat: get messenger url from common setting 2023-10-09 20:25:27 +08:00
wang-liang0615 17b27d492a Front-end updates (#192)
* fix: add package

* fix: handle notice_info being null

* fix: 2 bugs

* feat: 1. add notification config to common 2. cmdb predefined values via webhook & other models

* fix: JSON does not support predefined values

* fix: JSON does not support predefined values

* fix: remove code
2023-10-09 19:52:19 +08:00
simontigers dd8f66a3fa fix: init company structure resource (#191)
* fix: init company structure resource

* fix: notice_info null
2023-10-09 19:25:49 +08:00
wang-liang0615 d501436e3d Front-end updates (#189)
* fix: add package

* fix: handle notice_info being null

* fix: 2 bugs

* feat: 1. add notification config to common 2. cmdb predefined values via webhook & other models

* fix: JSON does not support predefined values

* fix: JSON does not support predefined values
2023-10-09 17:43:34 +08:00
simontigers 8c6389b4f8 feat: notice_config access messenger (#190) 2023-10-09 17:32:20 +08:00
pycook 63ed73fa14 fix: delete user role 2023-10-09 15:40:18 +08:00
pycook ef90506916 feat: The definition of attribute choice values supports webhook and other model attribute values. 2023-10-09 15:33:18 +08:00
wang-liang0615 c0541b7e50 Dev UI (#186)
* fix:add package

* fix: handle notice_info being null

* fix:2 bugs
2023-09-28 09:45:11 +08:00
pycook 388134b38c release 2.3.4 2023-09-27 11:37:18 +08:00
simontigers f57d640c5d feat: add api get_notice_by_ids (#184) 2023-09-27 09:54:30 +08:00
wang-liang0615 42c506ded8 Front-end updates (#183)
* fix: add package

* fix: handle notice_info being null
2023-09-27 09:18:33 +08:00
pycook 373346e71e feat: ci triggers 2023-09-26 21:18:34 +08:00
wang-liang0615 983cb4041c fix:add package (#182) 2023-09-26 21:12:10 +08:00
wang-liang0615 718fca57d2 Merge pull request #181 from veops/dev_ui
Front-end updates
2023-09-26 20:34:27 +08:00
wang-liang0615 2b1b0333ff Merge branch 'master' into dev_ui 2023-09-26 20:34:14 +08:00
wang-liang0615 9bb787d3f4 fix: bug where the same node appears twice in the topology graph 2023-09-26 20:13:12 +08:00
wang-liang0615 9f5979c1b1 feat: register custom components with wangeditor 2023-09-26 20:07:00 +08:00
wang-liang0615 b58230fd56 delete: remove getwx 2023-09-26 20:04:38 +08:00
simontigers 8e6ce05c04 feat: common notice config (#180) 2023-09-26 19:44:20 +08:00
wang-liang0615 b8d93bc9eb feat: UI update, triggers (#179)
* feat: new APIs & adaptation

* feat: triggers

* add packages & comment out code

* feat: webhook tips
2023-09-26 18:25:04 +08:00
wang-liang0615 968ef93153 feat: webhook tips 2023-09-26 18:17:23 +08:00
wang-liang0615 e0a0113e69 add packages & comment out code 2023-09-26 17:35:41 +08:00
wang-liang0615 7181f2879a feat: triggers 2023-09-26 17:01:31 +08:00
wang-liang0615 ff1626ff07 feat: new APIs & adaptation 2023-09-26 16:26:25 +08:00
pycook dca8781a03 fix: ci_cache 2023-09-25 15:46:07 +08:00
wang-liang0615 43c5d74661 Merge pull request #178 from veops/dev_ui
Front-end update: dashboard optimization
2023-09-25 14:52:09 +08:00
wang-liang0615 b1d0bdb536 pref: dashboard optimization 2023-09-25 14:50:08 +08:00
wang-liang0615 2420631ed5 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-09-25 14:43:34 +08:00
pycook deabebb922 refactor: CI triggers 2023-09-22 17:39:54 +08:00
simontigers cd451060e3 fix: icon svg support (#177) 2023-09-20 15:56:57 +08:00
pycook 82d4c4f961 fix date search 2023-09-18 18:15:02 +08:00
pycook 79f5dac661 fix dashboard compute 2023-09-18 13:04:50 +08:00
pycook 1a7c3fcd21 release v2.3.3 2023-09-15 17:57:39 +08:00
pycook 606be7f8e8 dashboard ui update 2023-09-15 17:36:10 +08:00
simontigers 7f66f7688b feat: init resource for backend (#176) 2023-09-15 15:30:30 +08:00
pycook 25ad2192fd enhance dashboard 2023-09-15 15:26:20 +08:00
pycook 881b8cc1fa cmdb-api/api/lib/resp_format.py 2023-09-12 20:01:30 +08:00
pycook 68905281f2 Detect circular dependencies when adding CIType relationships 2023-09-12 20:00:56 +08:00
wang-liang0615 3cb78f30d2 Computed attributes: trigger recalculation (#174) 2023-09-11 19:16:05 +08:00
pycook ebce839eaa fix upload template and add /api/v0.1/attributes/<int:attr_id>/calc_computed_attribute 2023-09-11 19:15:31 +08:00
wang-liang0615 3170048ea9 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-09-11 17:35:44 +08:00
wang-liang0615 976cac6742 Computed attributes: trigger recalculation 2023-09-11 17:34:51 +08:00
pycook 99e5a932ae release 2.3.2 2023-09-07 13:44:51 +08:00
pycook 058585504f Merge pull request #172 from veops/dev_ui
Create relations when creating CIs and during batch import
2023-09-07 11:04:49 +08:00
wang-liang0615 6d50a13cf0 Create relations when creating CIs and during batch import 2023-09-07 10:25:18 +08:00
pycook 198ecad13a Merge branch 'master' of github.com:veops/cmdb 2023-09-07 10:12:55 +08:00
pycook 1660139b27 Add CI relationship when creating CI, the text value removes the escape 2023-09-07 10:12:42 +08:00
pycook 0155eb98a2 Merge pull request #171 from ronething/fix/makefile
optimize: makefile help
2023-09-05 20:34:16 +08:00
ashing c0c05bca86 fix: review
Signed-off-by: ashing <axingfly@gmail.com>
2023-09-05 20:33:07 +08:00
ashing 32227d375a fix: review
Signed-off-by: ashing <axingfly@gmail.com>
2023-09-05 20:29:29 +08:00
ashing eb9c20a295 fix: review
Signed-off-by: ashing <axingfly@gmail.com>
2023-09-05 20:21:20 +08:00
ashing b826de4195 optimize: makefile help
Signed-off-by: ashing <axingfly@gmail.com>
2023-09-05 20:06:31 +08:00
pycook c3b55c2850 Merge pull request #170 from ronething/feat/xx
feat: support docker deploy mysql and redis
2023-09-05 19:28:47 +08:00
ivonGwy 33313b6206 Merge pull request #169 from veops/doc
add document link
2023-09-05 15:41:52 +08:00
ivonGwy 00fbe15a19 add document link 2023-09-05 15:40:31 +08:00
ashing 9e5c926bfd feat: support docker deploy mysql and redis
Signed-off-by: ashing <axingfly@gmail.com>
2023-09-05 15:26:50 +08:00
wang-liang0615 f0467c3d3b Merge pull request #168 from veops/dev_ui
UI updates
2023-09-05 15:23:43 +08:00
wang-liang0615 6758b994f8 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-09-05 15:22:18 +08:00
wang-liang0615 d8646f8e7c Model association: display reverse relations 2023-09-05 15:22:08 +08:00
pycook 0fa95aed36 Merge branch 'master' of github.com:veops/cmdb 2023-09-05 14:49:53 +08:00
pycook d82356444a move Dockerfile to docs 2023-09-05 14:49:34 +08:00
wang-liang0615 b8fa68ec8d Merge pull request #167 from veops/dev_ui
sub menu color
2023-09-04 16:34:26 +08:00
wang-liang0615 78c542e71d sub menu color 2023-09-04 16:33:35 +08:00
wang-liang0615 6594863934 Merge pull request #166 from veops/dev_ui
UI updates
2023-09-04 13:15:27 +08:00
wang-liang0615 61e8c61fc6 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-09-04 13:14:35 +08:00
wang-liang0615 ec9b5a0fa0 sidebar 2023-09-04 13:14:11 +08:00
pycook 3664994cc5 import format 2023-09-02 12:09:41 +08:00
pycook 366311e59b format 2023-09-01 18:07:44 +08:00
pycook 03f2ff912d fix delete choice values 2023-08-31 16:02:24 +08:00
pycook ff9af51465 fix delete choice values 2023-08-31 15:18:15 +08:00
wang-liang0615 04fc588935 Merge pull request #165 from veops/dev_ui
proxy
2023-08-31 13:31:26 +08:00
wang-liang0615 f215e887c9 proxy 2023-08-31 13:28:15 +08:00
pycook 3e6955fc7a Merge pull request #162 from simontigers/cmdb_icon_manage
feat: add cmdb custom icon manage
2023-08-31 11:15:09 +08:00
pycook b1cfb88308 Merge pull request #163 from veops/dev_ui
Support uploading custom icons
2023-08-31 11:14:42 +08:00
hu.sima d1af0eba79 feat: add cmdb custom icon manage 2023-08-31 10:49:56 +08:00
wang-liang0615 f41873dc8c Support uploading custom icons 2023-08-31 10:05:11 +08:00
pycook 63562007df fix update attribute 2023-08-30 13:34:10 +08:00
pycook a99ecb9ea5 Merge branch 'master' of github.com:veops/cmdb 2023-08-29 14:49:21 +08:00
pycook 08f9bd9071 The default value of USE_ACL is set to True 2023-08-29 14:49:09 +08:00
pycook 17c851f354 Merge pull request #161 from simontigers/common_setting_format
fix: company info create
2023-08-29 11:01:25 +08:00
hu.sima 3382195a25 fix: company info create 2023-08-29 10:56:48 +08:00
pycook cd70b16eb3 Merge branch 'master' of github.com:veops/cmdb 2023-08-25 11:01:24 +08:00
pycook 2a98c00d97 update ad_ci when deleting ci 2023-08-25 10:59:38 +08:00
wang-liang0615 5044af5490 Merge pull request #160 from veops/dev_ui
Front-end updates
2023-08-25 10:12:31 +08:00
wang-liang0615 320dc07676 fix: pressing Enter when creating a new type sends the request twice 2023-08-25 10:11:09 +08:00
wang-liang0615 95d3b0233a fix: pressing Enter when creating a new type sends the request twice 2023-08-25 10:08:04 +08:00
pycook 093466ef06 Merge pull request #158 from EvanSung/perf_20230824_optimize_ad_ci_relation
perf(ad_ci_relation): optimize ad_ci relation
2023-08-24 16:26:43 +08:00
EvanSung ceacc0ab15 perf(ad_ci_relation): optimize ad_ci relation 2023-08-24 14:16:12 +08:00
pycook cb3a9d9432 docker-compose add flask db-setup 2023-08-24 11:32:09 +08:00
pycook 23fd56cf28 vxe-table-plugin-export-xlsx==2.0.0 2023-08-24 11:06:28 +08:00
pycook dd1c783919 add config CACHE_REDIS_PASSWORD and fix delete ci_type 2023-08-23 18:05:28 +08:00
pycook 8296e9a552 fix update ci 2023-08-22 11:34:40 +08:00
pycook 590565bdf0 Register api and commands with absolute paths 2023-08-21 20:08:23 +08:00
pycook 9e2bf3d1f0 fix merge conflict 2023-08-21 11:55:49 +08:00
pycook 7547b67805 fix g.user 2023-08-21 11:54:33 +08:00
pycook 4abe4d7e8f version: 2.3.1 2023-08-20 11:24:53 +08:00
pycook 0ebd52f3fd lint 2023-08-20 11:23:55 +08:00
pycook 4cc2e47f02 Merge pull request #157 from EvanSung/fix_20230817_guser_issue
fix(acl): g user issue
2023-08-17 22:12:45 +08:00
EvanSung b16bfdfa03 fix(acl): g user issue 2023-08-17 18:40:45 +08:00
pycook ece24080d5 fix MyJSONEncoder 2023-08-16 21:28:27 +08:00
pycook a0ea8a193b Merge pull request #155 from veops/dev_ui
Front-end updates
2023-08-16 13:01:13 +08:00
pycook d8e09d449e Merge branch 'master' of github.com:veops/cmdb 2023-08-16 13:00:44 +08:00
pycook 44c99e0f2e Delete user without soft delete 2023-08-16 13:00:30 +08:00
wang-liang0615 7110b4bcb3 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-08-16 10:09:47 +08:00
wang-liang0615 3989859945 delete user 2023-08-16 10:09:25 +08:00
pycook 29ed4dd708 Merge pull request #154 from simontigers/common_setting_format
fix: init-import-user-from-acl
2023-08-15 20:47:36 +08:00
hu.sima dbcbe33ba0 fix: init-import-user-from-acl 2023-08-15 20:45:28 +08:00
pycook ed340a1c33 Merge pull request #153 from simontigers/common_setting_format
fix: import_user_from_acl
2023-08-15 20:25:09 +08:00
hu.sima 17ee7622b4 fix: import_user_from_acl 2023-08-15 20:19:45 +08:00
pycook 51877778bf Merge branch 'master' of github.com:veops/cmdb 2023-08-15 19:48:11 +08:00
pycook 1de8b492ea [update] delete roles, users, attributes 2023-08-15 19:47:59 +08:00
wang-liang0615 1fe38c4ec3 Merge pull request #152 from veops/dev_ui
Front-end updates
2023-08-15 19:47:03 +08:00
wang-liang0615 92bff08b84 Attribute library 2023-08-15 19:34:17 +08:00
wang-liang0615 37d0dacd2e Attribute library 2023-08-15 19:26:49 +08:00
wang-liang0615 e22f45fd25 Attribute library 2023-08-15 19:21:09 +08:00
wang-liang0615 56ef0a055a Attribute library 2023-08-15 19:10:26 +08:00
wang-liang0615 522991e23b Admin backend, model association: relation deletion && filtering 2023-08-15 15:02:46 +08:00
pycook ff061d4d2e update gitattributes 2023-08-15 13:41:45 +08:00
wang-liang0615 5b84a704b1 Merge pull request #150 from EvanSung/optimize_20230810_acl_resource_fe
refactor(fe): reduce the width of resource mgt table
2023-08-10 19:32:49 +08:00
pycook 3ba83821f2 Merge pull request #148 from simontigers/common_setting_format
fix: default arg value
2023-08-10 19:31:18 +08:00
pycook 45b82aa52d Merge pull request #149 from veops/dev_ui
UI update: password
2023-08-10 19:28:24 +08:00
EvanSung c99790bda2 refactor(fe): reduce the width of resource mgt table 2023-08-10 19:23:41 +08:00
wang-liang0615 7272880062 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-08-10 19:21:28 +08:00
wang-liang0615 8130dd92ab Add plaintext password transmission 2023-08-10 19:21:10 +08:00
hu.sima 9ca2d38307 fix: default arg value 2023-08-10 19:05:56 +08:00
pycook e61bbdbcb7 Merge pull request #147 from simontigers/common_setting_format
fix: remove useless
2023-08-10 19:01:25 +08:00
hu.sima 88960c3082 fix: remove useless 2023-08-10 18:55:32 +08:00
pycook 0aa433882e Merge pull request #146 from simontigers/common_setting_format
Common setting format
2023-08-10 18:23:24 +08:00
hu.sima d4f5713e0a fix: remove unused column 2023-08-10 16:29:52 +08:00
hu.sima 790204ea28 style: format common setting 2023-08-10 15:30:01 +08:00
pycook 927ed393d3 Merge pull request #145 from EvanSung/optimize_20230810_auth_require
optimize(auth): auth request json
2023-08-10 11:24:23 +08:00
EvanSung bf92b89a5f optimize(auth): auth request json 2023-08-10 10:43:59 +08:00
pycook 7ddaa143da fix celery config 2023-08-08 16:33:24 +08:00
pycook 157361cc7a Merge branch 'master' of github.com:veops/cmdb 2023-08-08 13:16:14 +08:00
pycook dcf768376a upgrade celery 2023-08-08 13:16:07 +08:00
pycook e339ad2c23 Merge pull request #144 from veops/dev_ui
UI update: fix preferenceList=>attrList
2023-08-08 09:21:10 +08:00
wang-liang0615 8b10c5c72d Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-08-08 09:11:24 +08:00
wang-liang0615 bd13f6b744 fix preferenceList=>attrList 2023-08-08 09:11:03 +08:00
pycook 934d00e87d upgrade flask to 2.3.2 and replace g.user with current_user 2023-08-06 21:54:18 +08:00
pycook 9d421993a0 Merge pull request #138 from lovvvve/fix_ldap
fix ldap login
2023-08-04 11:31:58 +08:00
pycook 6751f673d5 Merge pull request #142 from veops/dev_ui
Asynchronous processing of CI batch update and delete
2023-08-04 09:27:55 +08:00
wang-liang0615 c420a2195a Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-08-03 16:54:47 +08:00
wang-liang0615 ec3a6e0b6e Asynchronous processing of CI batch update and delete 2023-08-03 16:54:27 +08:00
pycook ab0a5399f7 Merge pull request #139 from EvanSung/fix-post-acltrigger-session-invalid
fix(trigger): session invalid issue
2023-08-02 19:33:05 +08:00
songbing01249 33e63c762f fix(trigger): session invalid issue 2023-08-02 18:22:42 +08:00
lovvvve 53b2a228ae fix ldap login 2023-08-01 11:27:29 +00:00
pycook c687e7ad9b Merge pull request #134 from veops/dependabot/pip/cmdb-api/pillow-9.3.0
Bump pillow from 9.2.0 to 9.3.0 in /cmdb-api
2023-08-01 15:57:02 +08:00
pycook ea3cdbd5f5 Merge pull request #135 from simontigers/remove_pandas
fix: remove pandas
2023-08-01 15:55:15 +08:00
hu.sima 1ebe74d966 fix: remove pandas 2023-08-01 15:32:44 +08:00
dependabot[bot] 1f935d25a2 Bump pillow from 9.2.0 to 9.3.0 in /cmdb-api
Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.2.0 to 9.3.0.
- [Release notes](https://github.com/python-pillow/Pillow/releases)
- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
- [Commits](https://github.com/python-pillow/Pillow/compare/9.2.0...9.3.0)

---
updated-dependencies:
- dependency-name: pillow
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-08-01 05:51:16 +00:00
pycook 14e4dbacac Merge branch 'master' of github.com:veops/cmdb 2023-08-01 13:47:34 +08:00
pycook f7a9257e16 fix dependabot alerts 2023-08-01 13:47:11 +08:00
pycook 99562df6cd fix dependabot alerts 2023-08-01 13:46:47 +08:00
pycook 630013cb85 Merge pull request #130 from veops/dependabot/pip/cmdb-api/certifi-2023.7.22
Bump certifi from 2023.5.7 to 2023.7.22 in /cmdb-api
2023-08-01 13:14:25 +08:00
pycook 6351d3af64 Merge pull request #132 from veops/dev_ui
Role deletion related changes
2023-07-31 19:54:19 +08:00
wang-liang0615 2d96048828 Role deletion related changes 2023-07-31 19:52:06 +08:00
pycook 3e67bb94bc Merge branch 'master' of github.com:veops/cmdb 2023-07-31 18:39:46 +08:00
pycook a26c9ff542 fix delete ci_type 2023-07-31 18:39:33 +08:00
pycook 252003f76d Merge pull request #131 from veops/dev_ui
Front-end ACL
2023-07-28 18:03:36 +08:00
wang-liang0615 928149f2a0 common-setting 2023-07-27 15:47:13 +08:00
wang-liang0615 0283af812c acl 2023-07-27 15:30:27 +08:00
wang-liang0615 6ccb2e74e0 fix acl change page size 2023-07-27 15:08:25 +08:00
dependabot[bot] 5016464016 Bump certifi from 2023.5.7 to 2023.7.22 in /cmdb-api
Bumps [certifi](https://github.com/certifi/python-certifi) from 2023.5.7 to 2023.7.22.
- [Commits](https://github.com/certifi/python-certifi/compare/2023.05.07...2023.07.22)

---
updated-dependencies:
- dependency-name: certifi
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-25 23:35:30 +00:00
pycook b787bd9b7b Merge pull request #129 from veops/dev_ui
Front-end updates
2023-07-25 18:19:47 +08:00
wang-liang0615 11f69d8679 Merge branch 'master' of github.com:veops/cmdb into dev_ui 2023-07-25 13:11:03 +08:00
wang-liang0615 c3168035ed Highlight hint for granted permissions 2023-07-25 13:10:45 +08:00
pycook 5140c0a58f add command cmdb-index-table-upgrade 2023-07-25 10:31:30 +08:00
wang-liang0615 ee97582579 style: fix misaligned rows when creating a new attribute 2023-07-25 10:18:22 +08:00
pycook 83e9172722 Deprecate 3 tables (c_value_datetime, c_value_floats, c_value_integers); add write validation for time-type attribute values 2023-07-24 21:55:00 +08:00
pycook 341e687987 Forbid deleting the unique-identifier attribute 2023-07-21 15:58:41 +08:00
pycook 93d9804127 Merge branch 'master' of github.com:veops/cmdb 2023-07-20 18:37:14 +08:00
pycook f3d42cb356 fix docker-compose 2023-07-20 18:36:32 +08:00
pycook 04b9a3c929 Merge pull request #127 from veops/dev_ui
fix currentValueType
2023-07-20 15:39:11 +08:00
wang-liang0615 e98160b84a fix currentValueType 2023-07-20 15:30:12 +08:00
pycook 7c769a53da Update architecture diagram 2023-07-20 11:01:25 +08:00
pycook 08dc343083 update readme 2023-07-20 11:01:25 +08:00
pycook 52e654ff60 update readme 2023-07-20 11:01:25 +08:00
491 changed files with 10520 additions and 61680 deletions

.env (6 lines changed)

@ -1,6 +0,0 @@
MYSQL_ROOT_PASSWORD='123456'
MYSQL_HOST='mysql'
MYSQL_PORT=3306
MYSQL_USER='cmdb'
MYSQL_DATABASE='cmdb'
MYSQL_PASSWORD='123456'

.github/config.yml (new vendored file, 0 lines changed)

@ -1,79 +0,0 @@
name: docker-images-build-and-release
on:
push:
branches:
- master
tags: ["v*"]
# pull_request:
# branches:
# - master
env:
# Use docker.io for Docker Hub if empty
REGISTRY_SERVER_ADDRESS: ghcr.io/veops
TAG: ${{ github.sha }}
jobs:
setup-environment:
timeout-minutes: 30
runs-on: ubuntu-latest
steps:
- name: Checkout Repo
uses: actions/checkout@v4
release-api-images:
runs-on: ubuntu-latest
needs: [setup-environment]
permissions:
contents: read
packages: write
timeout-minutes: 90
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Login to GitHub Package Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push CMDB-API Docker image
uses: docker/build-push-action@v6
with:
file: docker/Dockerfile-API
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-api:${{ env.TAG }}
# release-ui-images:
# runs-on: ubuntu-latest
# needs: [setup-environment]
# permissions:
# contents: read
# packages: write
# timeout-minutes: 90
# steps:
# - name: Checkout Repo
# uses: actions/checkout@v4
# - name: Login to GitHub Package Registry
# uses: docker/login-action@v2
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
# - name: Set up QEMU
# uses: docker/setup-qemu-action@v3
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@v3
# - name: Build and push CMDB-UI Docker image
# uses: docker/build-push-action@v6
# with:
# file: docker/Dockerfile-UI
# context: .
# platforms: linux/amd64,linux/arm64
# push: true
# tags: ${{ env.REGISTRY_SERVER_ADDRESS }}/cmdb-ui:${{ env.TAG }}


@ -1,13 +0,0 @@
# Contributor Code of Conduct
As contributors and maintainers of this project, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
We are committed to making participation in this project a harassment-free experience for everyone, regardless of the level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, age, or religion.
Examples of unacceptable behavior by participants include the use of sexual language or imagery, derogatory comments or personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed from the project team.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.
This Code of Conduct is adapted from the [Contributor Covenant](https://contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/)


@ -1,4 +1,6 @@
include ./Makefile.variable
MYSQL_ROOT_PASSWORD ?= root
MYSQL_PORT ?= 3306
REDIS_PORT ?= 6379
default: help
help: ## display this help
@ -48,25 +50,3 @@ clean: ## remove unwanted files like .pyc's
lint: ## check style with flake8
flake8 --exclude=env .
.PHONY: lint
api-docker-build:
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
docker buildx build \
--builder multi-platform-builder \
--platform=$(BUILD_ARCH) \
--tag $(REGISTRY)/cmdb-api:$(CMDB_DOCKER_VERSION) \
--tag $(REGISTRY)/cmdb-api:latest \
-f docker/Dockerfile-API \
.
ui-docker-build:
export DOCKER_CLI_EXPERIMENTAL=enabled ;\
! ( docker buildx ls | grep multi-platform-builder ) && docker buildx create --use --platform=$(BUILD_ARCH) --name multi-platform-builder ;\
docker buildx build \
--builder multi-platform-builder \
--platform=$(BUILD_ARCH) \
--tag $(REGISTRY)/cmdb-ui:$(CMDB_DOCKER_VERSION) \
--tag $(REGISTRY)/cmdb-ui:latest \
-f docker/Dockerfile-UI \
.
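
The two Makefile targets above (api-docker-build and ui-docker-build) wrap multi-arch docker buildx builds. A hedged sketch of how they would typically be invoked, assuming the defaults from Makefile.variable below (REGISTRY=local, BUILD_ARCH=linux/amd64,linux/arm64) and borrowing ghcr.io/veops from the CI workflow as an example registry:

```shell
# Hypothetical invocations of the targets shown above; variable names come from
# Makefile.variable, the values here are examples only.
make api-docker-build REGISTRY=ghcr.io/veops BUILD_ARCH=linux/amd64
make ui-docker-build REGISTRY=ghcr.io/veops BUILD_ARCH=linux/amd64
```

Command-line assignments override the `?=` defaults, so a one-off build does not require editing the Makefile.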


@ -1,21 +0,0 @@
SHELL := /bin/bash -o pipefail
MYSQL_ROOT_PASSWORD ?= root
MYSQL_PORT ?= 3306
REDIS_PORT ?= 6379
LATEST_TAG_DIFF:=$(shell git describe --tags --abbrev=8)
LATEST_COMMIT:=$(VERSION)-dev-$(shell git rev-parse --short=8 HEAD)
BUILD_ARCH ?= linux/amd64,linux/arm64
# Set your version by env or using latest tags from git
CMDB_VERSION?=$(LATEST_TAG_DIFF)
ifeq ($(CMDB_VERSION),)
#fall back to last commit
CMDB_VERSION=$(LATEST_COMMIT)
endif
COMMIT_VERSION:=$(LATEST_COMMIT)
CMDB_DOCKER_VERSION:=${CMDB_VERSION}
CMDB_CHART_VERSION:=$(shell echo ${CMDB_VERSION} | sed 's/^v//g' )
REGISTRY ?= local

README.md (184 lines changed)

@ -1,138 +1,108 @@
<p align="center">
<a href="https://veops.cn">
<img src="https://github.com/user-attachments/assets/c5cfb272-899b-418d-9e69-8e1dd07db0f6" alt="维易CMDB"/>
</a>
<a href="https://veops.cn"><img src="docs/images/logo.png" alt="维易CMDB" width="300"/></a>
</p>
<h4 align="center">简单、轻量、通用的运维配置管理数据库</h4>
<h3 align="center">简单、轻量、通用的运维配置管理数据库</h3>
<p align="center">
<a href="https://github.com/veops/cmdb/blob/master/LICENSE"><img src="https://img.shields.io/badge/License-AGPLv3-brightgreen" alt="License: GPLv3"></a>
<a href="https://github.com/veops/cmdb/releases"><img alt="the latest release version" src="https://img.shields.io/github/v/release/veops/cmdb?color=75C1C4&include_prereleases&label=Release&logo=github&logoColor=white"></a>
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-green" alt="UI"></a>
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-bright" alt="API"></a>
<a href="https://github.com/veops/cmdb/stargazers"><img src="https://img.shields.io/github/stars/veops/cmdb" alt="Stars Badge"/></a>
<a href="https://github.com/veops/cmdb"><img src="https://img.shields.io/github/forks/veops/cmdb" alt="Forks Badge"/></a>
</p>
<p align="center">
中文(简体) · <a href="docs/README_en.md">English</a>
<a href="https:https://github.com/sendya/ant-design-pro-vue"><img src="https://img.shields.io/badge/UI-Ant%20Design%20Pro%20Vue-brightgreen" alt="UI"></a>
<a href="https://github.com/pallets/flask"><img src="https://img.shields.io/badge/API-Flask-brightgreen" alt="API"></a>
</p>
------------------------------
[English](docs/README_en.md) / [中文](README.md)
- 产品文档:https://veops.cn/docs/
- 在线体验:<a href="https://cmdb.veops.cn" target="_blank">CMDB</a>
- username: demo 或者 admin
- password: 123456
> **重要提示**: `master` 分支在开发过程中可能处于 _不稳定的状态_
> 请通过[releases](https://github.com/veops/cmdb/releases)获取
## 系统介绍
维易CMDB是一个简洁、轻量且高度可定制的运维配置管理数据库(CMDB)。它支持灵活的模型配置和资源自动发现,旨在为企业提供便捷的资产管理解决方案,帮助运维团队高效地管理 IT 基础设施和服务。
### 系统概览
- 产品文档:[https://veops.cn/docs/](https://veops.cn/docs/)
- 在线体验:[https://cmdb.veops.cn](https://cmdb.veops.cn)
- 用户名:demo 或者 admin
- 密码:123456
- **重要提示**:`master` 分支在开发过程中可能处于**不稳定的状态**。请通过 [releases](https://github.com/veops/cmdb/releases) 获取最新稳定版本。
<img src=docs/images/dashboard.png />
[查看更多展示](docs/screenshot.md)
### 相关文章
- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">概要设计</a>
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">API 文档</a>
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">自动发现</a>
- 更多文章可以在公众号 **维易科技OneOps** 里查看
### 特点
- 灵活性
1. 配置灵活,不设定任何运维场景,有内置模板
2. 自动发现、入库 IT 资产
- 安全性
1. 细粒度权限控制
2. 完备操作日志
- 多应用
1. 丰富视图展示维度
2. API简单强大
3. 支持定义属性触发器、计算属性
### 主要功能
- **自定义模型和模型关系**:支持模型属性的自定义,包括下拉列表、字体颜色、计算属性等高级功能,满足不同业务需求。
- **自动发现资源**:支持计算机、网络设备、存储设备、数据库、中间件、公有云资源等自动发现。
- **多维度视图展示**:包括资源视图、层级视图、关系视图等,帮助运维人员全面管理资源。
- **细粒度权限控制**:通过精确的访问控制和完备的操作日志保障系统的安全性。
- **全面的资源搜索功能**:支持灵活的资源和关系搜索,快速定位和操作资源。
- **集成 IP 地址管理(IPAM)和数据中心基础设施管理(DCIM)**:简化网络资源和数据中心设备的管理。
- 模型属性支持索引、多值、默认排序、字体颜色,支持计算属性
- 支持自动发现、定时巡检、文件导入
- 支持资源、层级、关系视图展示
- 支持模型间关系配置和展示
- 细粒度访问控制,完备的操作日志
- 支持跨模型搜索
更多详细功能,请移步 [维易科技官网](https://veops.cn) 进行了解。
### 系统优势
- 灵活性
+ 无需指定固定运维场景,支持自由配置并内置多种模板
+ 支持自动发现和入库 IT 资产,快速搭建资产管理系统
- 安全性
+ 细粒度的权限控制机制,确保资源管理的安全性
+ 完整的操作日志记录,便于审计和问题追踪
- 多应用
+ 提供多种视图展示方式,满足不同场景的需求
+ 强大的 API 接口,支持深度集成
+ 支持定义属性触发器和计算属性,增强数据处理能力
### 技术栈
+ 后端Python [3.8-3.11]
+ 数据存储MySQL、Redis
+ 前端Vue.js
+ UI组件库Ant Design Vue
### 更多功能
### 系统概览
<table style="border-collapse: collapse;">
<tr>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/6d2df835-ae93-4d91-9bd9-213c270eca7a"/>
</td>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/cb8b598a-a1f9-4c74-adf1-6e59aea2c9b3"/>
</td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/b440224f-53c3-4b7f-a9be-285d7a4b848f"/>
</td>
<td style="padding: 5px;background-color:#fff;">
<img width="400" src="https://github.com/user-attachments/assets/f457d5a0-b60b-4949-b94e-020f4c61444b"/>
</td>
</tr>
</table>
## 关注我们
欢迎 Star 加关注,第一时间获取更新动态!
![star us](https://github.com/user-attachments/assets/f9056d5a-171c-4f53-9fec-d40c9e5ff94d)
## 快速开始
### 1. 搭建
+ 方案一Docker 一键快速构建
- 第1步: 安装 Docker 环境和 Docker Compose(v2)
- 第2步: 拷贝项目代码, `git clone https://github.com/veops/cmdb.git`
- 第3步: 进入主目录并启动, `docker compose up -d`
+ 方案二:[本地开发环境搭建](docs/local.md)
+ 方案三:[Makefile 安装](docs/makefile.md)
### 2. 访问
- 打开浏览器并访问: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- 用户名: demo 或者 admin
- 密码: 123456
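
A minimal end-to-end sketch of Option 1 above, assuming Docker and Docker Compose v2 are already installed; the commands, URL and credentials are the ones listed in this README:

```shell
# Clone, start the stack, then open http://127.0.0.1:8000 in a browser
git clone https://github.com/veops/cmdb.git
cd cmdb
docker compose up -d
# Log in as demo or admin with password 123456
```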
> 也欢迎移步[维易科技官网](https://veops.cn),发现更多免费运维系统。
## 接入公司
+ 欢迎使用开源CMDB的公司和团队,在 [#112](https://github.com/veops/cmdb/issues/112) 登记
> 欢迎使用开源CMDB的公司在 [#112](https://github.com/veops/cmdb/issues/112) 登记
## 代码贡献
我们欢迎所有开发者贡献代码,改善和扩展这个项目。请先阅读我们的[贡献指南](docs/CONTRIBUTING.md)。此外,您还可以通过社交媒体、活动和分享来支持 Veops 的开源。
## 安装
<a href="https://github.com/veops/cmdb/graphs/contributors">
<img src="https://contrib.rocks/image?repo=veops/cmdb" />
</a>
### Docker 一键快速构建
> 方法一
- 第一步: 先安装 docker 环境, 以及docker-compose
- 第二步: 拷贝项目
```shell
git clone https://github.com/veops/cmdb.git
```
- 第三步:进入主目录,执行:
```
docker-compose up -d
```
> 方法二, 该方法适用于linux系统
- 第一步: 先安装 docker 环境, 以及docker-compose
- 第二步: 直接使用项目根目录下的install.sh 文件进行 `安装`、`启动`、`暂停`、`查状态`、`删除`、`卸载`
```shell
curl -so install.sh https://raw.githubusercontent.com/veops/cmdb/master/install.sh
sh install.sh install
```
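
Beyond the initial `sh install.sh install`, the script also handles the lifecycle operations listed above. A hedged sketch of day-to-day usage, assuming the subcommand names match those given in the commit message for #277 (install, start, pause, delete, uninstall); run the script itself for the authoritative list:

```shell
# Assumed subcommands, per the operations described above
sh install.sh start      # start the services
sh install.sh pause      # pause the services
sh install.sh uninstall  # remove the deployment
```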
## 更多开源
### [本地开发环境搭建](docs/local.md)
- [OneTerm](https://github.com/veops/oneterm): 一款简单、轻量、灵活的堡垒机服务。
- [messenger](https://github.com/veops/messenger): 一个简单轻量的消息发送服务。
- [ACL](https://github.com/veops/acl): 一个简单通用的权限管理系统设计与实践。
### [Makefile 安装](docs/makefile.md)
## 相关文章
## 验证
- 浏览器打开: [http://127.0.0.1:8000](http://127.0.0.1:8000)
- username: demo 或者 admin
- password: 123456
- <a href="https://mp.weixin.qq.com/s/v3eANth64UBW5xdyOkK3tg" target="_blank">尽可能通用的运维CMDB的设计与实践() - 概览</a>
- <a href="https://mp.weixin.qq.com/s/rQaf4AES7YJsyNQG_MKOLg" target="_blank">尽可能通用的运维CMDB的设计与实践() - 自动发现</a>
- <a href="https://github.com/veops/cmdb/tree/master/docs/cmdb_api.md" target="_blank">CMDB接口文档</a>
更多文章可以在公众号 **维易科技OneOps** 里查看
---
## 与我联系
_**欢迎关注公众号(维易科技OneOps),关注后可加入微信群,进行产品和技术交流。**_
+ 邮箱: <a href="mailto:bd@veops.cn">bd@veops.cn</a>
+ 公众号:**维易科技OneOps**。关注后可以加入微信群,参与产品和技术交流
<img src="docs/images/wechat.png" alt="公众号: 维易科技OneOps" />
![公众号: 维易科技OneOps](docs/images/wechat.png)


@ -1,79 +0,0 @@
line-length = 120
cache-dir = ".ruff_cache"
target-version = "py310"
unsafe-fixes = true
show-fixes = true
[lint]
select = [
"E",
"F",
"I",
"TCH",
# W
"W505",
# PT
"PT018",
# SIM
"SIM101",
"SIM114",
# PGH
"PGH004",
# PL
"PLE1142",
# RUF
"RUF100",
# UP
"UP007"
]
preview = true
ignore = ["FURB101"]
[lint.flake8-pytest-style]
mark-parentheses = false
parametrize-names-type = "list"
parametrize-values-row-type = "list"
parametrize-values-type = "tuple"
[lint.flake8-unused-arguments]
ignore-variadic-names = true
[lint.isort]
lines-between-types = 1
order-by-type = true
[lint.per-file-ignores]
"**/api/v1/*.py" = ["TCH"]
"**/model/*.py" = ["TCH003"]
"**/models/__init__.py" = ["F401", "F403"]
"**/tests/*.py" = ["E402"]
"celery_worker.py" = ["F401"]
"api/views/entry.py" = ["I001"]
"migrations/*.py" = ["I001", "E402"]
"*.py" = ["I001"]
"api/views/common_setting/department.py" = ["F841"]
"api/lib/common_setting/upload_file.py" = ["F841"]
"api/lib/common_setting/acl.py" = ["F841"]
"**/__init__.py" = ["F822"]
"api/tasks/*.py" = ["E722"]
"api/views/cmdb/*.py" = ["E722"]
"api/views/acl/*.py" = ["E722"]
"api/lib/secrets/*.py" = ["E722", "F841"]
"api/lib/utils.py" = ["E722", "E731"]
"api/lib/perm/authentication/cas/*" = ["E113", "F841"]
"api/lib/perm/acl/*" = ["E722"]
"api/lib/*" = ["E721", "F722"]
"api/lib/cmdb/*" = ["F722", "E722"]
"api/lib/cmdb/search/ci/es/search.py" = ["F841", "SIM114"]
"api/lib/cmdb/search/ci/db/search.py" = ["F841"]
"api/lib/cmdb/value.py" = ["F841"]
"api/lib/cmdb/history.py" = ["E501"]
"api/commands/common.py" = ["E722"]
"api/commands/click_cmdb.py" = ["E722"]
"api/lib/perm/auth.py" = ["SIM114"]
[format]
preview = true
quote-style = "single"
docstring-code-format = true
skip-magic-trailing-comma = false


@ -5,17 +5,16 @@ name = "pypi"
[packages]
# Flask
Flask = "==2.2.5"
Werkzeug = "==2.2.3"
Flask = "==2.3.2"
Werkzeug = ">=2.3.6"
click = ">=5.0"
# Api
Flask-RESTful = "==0.3.10"
# Database
Flask-SQLAlchemy = "==3.0.5"
Flask-SQLAlchemy = "==2.5.0"
SQLAlchemy = "==1.4.49"
PyMySQL = "==1.1.0"
redis = "==4.6.0"
python-redis-lock = "==4.0.0"
# Migrations
Flask-Migrate = "==2.5.2"
# Deployment
@ -36,7 +35,7 @@ Flask-Caching = ">=1.0.0"
environs = "==4.2.0"
marshmallow = "==2.20.2"
# async tasks
celery = "==5.3.1"
celery = ">=5.3.1"
celery_once = "==3.0.1"
more-itertools = "==5.0.0"
kombu = ">=5.3.1"
@ -66,10 +65,6 @@ hvac = "==2.0.0"
colorama = ">=0.4.6"
pycryptodomex = ">=3.19.0"
lz4 = ">=4.3.2"
python-magic = "==0.4.27"
jsonpath = "==0.82.2"
networkx = ">=3.1"
ipaddress = ">=1.0.23"
[dev-packages]
# Testing


@ -50,7 +50,7 @@ def add_user():
if is_admin:
app = AppCache.get('acl') or App.create(name='acl')
acl_admin = RoleCache.get_by_name(app.id, 'acl_admin') or RoleCRUD.add_role('acl_admin', app.id, True)
acl_admin = RoleCache.get('acl_admin') or RoleCRUD.add_role('acl_admin', app.id, True)
rid = RoleCache.get_by_name(None, username).id
RoleRelationCRUD.add(acl_admin, acl_admin.id, [rid], app.id)


@ -1,13 +1,14 @@
# -*- coding:utf-8 -*-
import click
import copy
import datetime
import json
import requests
import time
import uuid
import click
import requests
from flask import current_app
from flask.cli import with_appcontext
from flask_login import login_user
@ -23,7 +24,6 @@ from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.dcim.rack import RackManager
from api.lib.exception import AbortException
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import UserCache
@ -37,14 +37,11 @@ from api.lib.secrets.secrets import InnerKVManger
from api.models.acl import App
from api.models.acl import ResourceType
from api.models.cmdb import Attribute
from api.models.cmdb import AttributeHistory
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import CIType
from api.models.cmdb import CITypeTrigger
from api.models.cmdb import OperationRecord
from api.models.cmdb import PreferenceRelationView
from api.tasks.cmdb import batch_ci_cache
@click.command()
@ -58,12 +55,9 @@ def cmdb_init_cache():
for cr in ci_relations:
relations.setdefault(cr.first_ci_id, {}).update({cr.second_ci_id: cr.second_ci.type_id})
if cr.ancestor_ids:
relations2.setdefault('{},{}'.format(cr.ancestor_ids, cr.first_ci_id), {}).update(
{cr.second_ci_id: cr.second_ci.type_id})
relations2.setdefault(cr.ancestor_ids, {}).update({cr.second_ci_id: cr.second_ci.type_id})
for i in relations:
relations[i] = json.dumps(relations[i])
for i in relations2:
relations2[i] = json.dumps(relations2[i])
if relations:
rd.create_or_update(relations, REDIS_PREFIX_CI_RELATION)
if relations2:
@ -121,8 +115,6 @@ def cmdb_init_acl():
_app = AppCache.get('cmdb') or App.create(name='cmdb')
app_id = _app.id
current_app.test_request_context().push()
# 1. add resource type
for resource_type in ResourceTypeEnum.all():
try:
@ -131,7 +123,7 @@ def cmdb_init_acl():
perms = [PermEnum.READ]
elif resource_type == ResourceTypeEnum.CI_TYPE_RELATION:
perms = [PermEnum.ADD, PermEnum.DELETE, PermEnum.GRANT]
elif resource_type in (ResourceTypeEnum.RELATION_VIEW, ResourceTypeEnum.TOPOLOGY_VIEW):
elif resource_type == ResourceTypeEnum.RELATION_VIEW:
perms = [PermEnum.READ, PermEnum.UPDATE, PermEnum.DELETE, PermEnum.GRANT]
ResourceTypeCRUD.add(app_id, resource_type, '', perms)
@ -191,28 +183,11 @@ def cmdb_counter():
UserCRUD.add(username='worker', password=uuid.uuid4().hex, email='worker@xxx.com')
login_user(UserCache.get('worker'))
i = 0
today = datetime.date.today()
while True:
try:
db.session.commit()
db.session.remove()
CMDBCounterCache.reset()
if i % 5 == 0:
CMDBCounterCache.flush_adc_counter()
i = 0
if datetime.date.today() != today:
CMDBCounterCache.clear_ad_exec_history()
today = datetime.date.today()
CMDBCounterCache.flush_sub_counter()
RackManager().check_u_slot()
i += 1
except:
import traceback
print(traceback.format_exc())
@ -228,12 +203,6 @@ def cmdb_trigger():
"""
from api.lib.cmdb.ci import CITriggerManager
current_app.test_request_context().push()
if not UserCache.get('worker'):
from api.lib.perm.acl.user import UserCRUD
UserCRUD.add(username='worker', password=uuid.uuid4().hex, email='worker@xxx.com')
login_user(UserCache.get('worker'))
current_day = datetime.datetime.today().strftime("%Y-%m-%d")
trigger2cis = dict()
trigger2completed = dict()
@ -262,10 +231,10 @@ def cmdb_trigger():
trigger2cis[trigger.id] = (trigger, ready_cis)
else:
cur = trigger2cis[trigger.id]
cur_ci_ids = {_ci.ci_id for _ci in cur[1]}
cur_ci_ids = {i.ci_id for i in cur[1]}
trigger2cis[trigger.id] = (
trigger, cur[1] + [_ci for _ci in ready_cis if _ci.ci_id not in cur_ci_ids
and _ci.ci_id not in trigger2completed.get(trigger.id, {})])
trigger, cur[1] + [i for i in ready_cis if i.ci_id not in cur_ci_ids
and i.ci_id not in trigger2completed.get(trigger.id, {})])
for tid in trigger2cis:
trigger, cis = trigger2cis[tid]
@ -342,7 +311,7 @@ def cmdb_inner_secrets_init(address):
"""
init inner secrets for password feature
"""
res, ok = KeyManage(backend=InnerKVManger()).init()
res, ok = KeyManage(backend=InnerKVManger).init()
if not ok:
if res.get("status") == "failed":
KeyManage.print_response(res)
@ -352,7 +321,7 @@ def cmdb_inner_secrets_init(address):
if valid_address(address):
token = current_app.config.get("INNER_TRIGGER_TOKEN", "") if not token else token
if not token:
token = click.prompt('Enter root token', hide_input=True, confirmation_prompt=False)
token = click.prompt(f'Enter root token', hide_input=True, confirmation_prompt=False)
assert token is not None
resp = requests.post("{}/api/v0.1/secrets/auto_seal".format(address.strip("/")),
headers={"Inner-Token": token})
@ -376,13 +345,13 @@ def cmdb_inner_secrets_unseal(address):
"""
unseal the secrets feature
"""
# if not valid_address(address):
# return
if not valid_address(address):
return
address = "{}/api/v0.1/secrets/unseal".format(address.strip("/"))
for i in range(global_key_threshold):
token = click.prompt(f'Enter unseal token {i + 1}', hide_input=True, confirmation_prompt=False)
assert token is not None
resp = requests.post(address, headers={"Unseal-Token": token}, timeout=5)
resp = requests.post(address, headers={"Unseal-Token": token})
if resp.status_code == 200:
KeyManage.print_response(resp.json())
if resp.json().get("status") in ["success", "skip"]:
@ -509,79 +478,3 @@ def cmdb_agent_init():
click.echo("Key : {}".format(click.style(user.key, bg='red')))
click.echo("Secret: {}".format(click.style(user.secret, bg='red')))
@click.command()
@click.option(
'-v',
'--version',
help='input cmdb version, e.g. 2.4.6',
required=True,
)
@with_appcontext
def cmdb_patch(version):
"""
CMDB upgrade patch
"""
version = version[1:] if version.lower().startswith("v") else version
try:
if version >= '2.4.6':
from api.models.cmdb import CITypeRelation
for cr in CITypeRelation.get_by(to_dict=False):
if hasattr(cr, 'parent_attr_id') and cr.parent_attr_id and not cr.parent_attr_ids:
parent_attr_ids, child_attr_ids = [cr.parent_attr_id], [cr.child_attr_id]
cr.update(parent_attr_ids=parent_attr_ids, child_attr_ids=child_attr_ids, commit=False)
db.session.commit()
from api.models.cmdb import AutoDiscoveryCIType, AutoDiscoveryCITypeRelation
from api.lib.cmdb.cache import CITypeCache, AttributeCache
for adt in AutoDiscoveryCIType.get_by(to_dict=False):
if adt.relation:
if not AutoDiscoveryCITypeRelation.get_by(ad_type_id=adt.type_id):
peer_type = CITypeCache.get(list(adt.relation.values())[0]['type_name'])
peer_type_id = peer_type and peer_type.id
peer_attr = AttributeCache.get(list(adt.relation.values())[0]['attr_name'])
peer_attr_id = peer_attr and peer_attr.id
if peer_type_id and peer_attr_id:
AutoDiscoveryCITypeRelation.create(ad_type_id=adt.type_id,
ad_key=list(adt.relation.keys())[0],
peer_type_id=peer_type_id,
peer_attr_id=peer_attr_id,
commit=False)
if hasattr(adt, 'interval') and adt.interval and not adt.cron:
adt.cron = "*/{} * * * *".format(adt.interval // 60 or 1)
db.session.commit()
if version >= "2.4.7":
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
from api.models.cmdb import AutoDiscoveryRule
for i in DEFAULT_INNER:
existed = AutoDiscoveryRule.get_by(name=i['name'], first=True, to_dict=False)
if existed is not None:
if "en" in i['option'] and 'en' not in (existed.option or {}):
option = copy.deepcopy(existed.option)
option['en'] = i['option']['en']
existed.update(option=option, commit=False)
db.session.commit()
if version >= "2.4.14": # update ci columns: updated_at and updated_by
ci_ids = []
for i in CI.get_by(only_query=True).filter(CI.updated_at.is_(None)):
hist = AttributeHistory.get_by(ci_id=i.id, only_query=True).order_by(AttributeHistory.id.desc()).first()
if hist is not None:
record = OperationRecord.get_by_id(hist.record_id)
if record is not None:
u = UserCache.get(record.uid)
i.update(updated_at=record.created_at, updated_by=u and u.nickname, flush=True)
ci_ids.append(i.id)
db.session.commit()
batch_ci_cache.apply_async(args=(ci_ids,))
except Exception as e:
print("cmdb patch failed: {}".format(e))


@ -4,9 +4,8 @@ from flask.cli import with_appcontext
from werkzeug.datastructures import MultiDict
from api.lib.common_setting.acl import ACLManager
from api.lib.common_setting.employee import EmployeeAddForm, GrantEmployeeACLPerm
from api.lib.common_setting.employee import EmployeeAddForm
from api.lib.common_setting.resp_format import ErrFormat
from api.lib.common_setting.utils import CheckNewColumn
from api.models.common_setting import Employee, Department
@ -159,11 +158,50 @@ class InitDepartment(object):
def init_backend_resource(self):
acl = self.check_app('backend')
resources_types = acl.get_all_resources_types()
perms = ['read', 'grant', 'delete', 'update']
acl_rid = self.get_admin_user_rid()
if acl_rid == 0:
return
GrantEmployeeACLPerm(acl).grant_by_rid(acl_rid, True)
results = list(filter(lambda t: t['name'] == '操作权限', resources_types['groups']))
if len(results) == 0:
payload = dict(
app_id=acl.app_name,
name='操作权限',
description='',
perms=perms
)
resource_type = acl.create_resources_type(payload)
else:
resource_type = results[0]
resource_type_id = resource_type['id']
existed_perms = resources_types.get('id2perms', {}).get(resource_type_id, [])
existed_perms = [p['name'] for p in existed_perms]
new_perms = []
for perm in perms:
if perm not in existed_perms:
new_perms.append(perm)
if len(new_perms) > 0:
resource_type['perms'] = existed_perms + new_perms
acl.update_resources_type(resource_type_id, resource_type)
resource_list = acl.get_resource_by_type(None, None, resource_type['id'])
for name in ['公司信息', '公司架构', '通知设置']:
target = list(filter(lambda r: r['name'] == name, resource_list))
if len(target) == 0:
payload = dict(
type_id=resource_type['id'],
app_id=acl.app_name,
name=name,
)
resource = acl.create_resource(payload)
else:
resource = target[0]
if acl_rid > 0:
acl.grant_resource(acl_rid, resource['id'], perms)
@staticmethod
def check_app(app_name):
@ -210,7 +248,57 @@ def common_check_new_columns():
"""
add new columns to tables
"""
CheckNewColumn().run()
from api.extensions import db
from sqlalchemy import inspect, text
def get_model_by_table_name(_table_name):
registry = getattr(db.Model, 'registry', None)
class_registry = getattr(registry, '_class_registry', None)
for _model in class_registry.values():
if hasattr(_model, '__tablename__') and _model.__tablename__ == _table_name:
return _model
return None
def add_new_column(target_table_name, new_column):
column_type = new_column.type.compile(engine.dialect)
default_value = new_column.default.arg if new_column.default else None
sql = "ALTER TABLE " + target_table_name + " ADD COLUMN " + new_column.name + " " + column_type
if new_column.comment:
sql += f" comment '{new_column.comment}'"
if column_type == 'JSON':
pass
elif default_value:
if column_type.startswith('VAR') or column_type.startswith('Text'):
if default_value is None or len(default_value) == 0:
pass
else:
sql += f" DEFAULT {default_value}"
sql = text(sql)
db.session.execute(sql)
engine = db.get_engine()
inspector = inspect(engine)
table_names = inspector.get_table_names()
for table_name in table_names:
existed_columns = inspector.get_columns(table_name)
existed_column_name_list = [c['name'] for c in existed_columns]
model = get_model_by_table_name(table_name)
if model is None:
continue
model_columns = getattr(getattr(getattr(model, '__table__'), 'columns'), '_all_columns')
for column in model_columns:
if column.name not in existed_column_name_list:
try:
add_new_column(table_name, column)
current_app.logger.info(f"add new column [{column.name}] in table [{table_name}] success.")
except Exception as e:
current_app.logger.error(f"add new column [{column.name}] in table [{table_name}] err:")
current_app.logger.error(e)
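The block above diffs each model's declared columns against what the live database reports and ALTERs in whatever is missing. A condensed sketch of the same idea, assuming a SQLAlchemy engine and a declarative model, for illustration only:

from sqlalchemy import inspect, text

def missing_columns(engine, model):
    # columns declared on the model but absent from the live table
    live = {c['name'] for c in inspect(engine).get_columns(model.__tablename__)}
    return [c for c in model.__table__.columns if c.name not in live]

def add_missing_columns(engine, model):
    for col in missing_columns(engine, model):
        ddl = "ALTER TABLE {} ADD COLUMN {} {}".format(
            model.__tablename__, col.name, col.type.compile(engine.dialect))
        with engine.begin() as conn:
            conn.execute(text(ddl))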
@click.command()

View File

@ -89,19 +89,6 @@ def db_setup():
"""
db.create_all()
try:
db.session.execute("set global sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,"
"ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'")
db.session.commit()
except:
pass
try:
db.session.execute("set global tidb_enable_noop_functions='ON'")
db.session.commit()
except:
pass
@click.group()
def translate():

View File

@ -108,8 +108,7 @@ class AttributeManager(object):
return []
choice_values = choice_table.get_by(fl=["value", "option"], attr_id=attr_id)
return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option'] or
{"label": ValueTypeMap.serialize[value_type](choice_value['value'])}]
return [[ValueTypeMap.serialize[value_type](choice_value['value']), choice_value['option']]
for choice_value in choice_values]
@staticmethod
@ -136,15 +135,6 @@ class AttributeManager(object):
choice_table and choice_table.get_by(attr_id=_id, only_query=True).delete()
db.session.flush()
@classmethod
def get_enum_map(cls, _attr_id, _attr=None):
attr = AttributeCache.get(_attr_id) if _attr_id else _attr
if attr and attr.is_choice:
choice_values = cls.get_choice_values(attr.id, attr.value_type, None, None)
return {i[0]: i[1]['label'] for i in choice_values if i[1] and i[1].get('label')}
return {}
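get_enum_map collapses a choice attribute's values into a value-to-label dict so raw stored values can be rendered with their display labels; a hypothetical usage sketch (the attribute and its labels are made up):

enum_map = {"0": "Running", "1": "Stopped"}   # shape returned by get_enum_map, illustrative values
raw = "1"
label = enum_map.get(raw, raw)                # "Stopped"; unmapped values fall back to the raw value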
@classmethod
def search_attributes(cls, name=None, alias=None, page=1, page_size=None):
"""
@ -177,30 +167,24 @@ class AttributeManager(object):
def get_attribute_by_name(self, name):
attr = Attribute.get_by(name=name, first=True)
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
attr["choice_web_hook"], attr.get("choice_other"))
return attr
def get_attribute_by_alias(self, alias):
attr = Attribute.get_by(alias=alias, first=True)
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
attr["choice_web_hook"], attr.get("choice_other"))
return attr
def get_attribute_by_id(self, _id):
attr = Attribute.get_by_id(_id).to_dict()
if attr.get("is_choice"):
attr["choice_value"] = self.get_choice_values(attr["id"],
attr["value_type"],
attr["choice_web_hook"],
attr.get("choice_other"))
attr["choice_value"] = self.get_choice_values(attr["id"], attr["value_type"],
attr["choice_web_hook"], attr.get("choice_other"))
return attr
@ -245,7 +229,7 @@ class AttributeManager(object):
is_choice = True if choice_value or kwargs.get('choice_web_hook') or kwargs.get('choice_other') else False
name = kwargs.pop("name")
if name in BUILTIN_KEYWORDS or kwargs.get('alias') in BUILTIN_KEYWORDS:
if name in BUILTIN_KEYWORDS:
return abort(400, ErrFormat.attribute_name_cannot_be_builtin)
while kwargs.get('choice_other'):
@ -415,7 +399,7 @@ class AttributeManager(object):
db.session.rollback()
current_app.logger.error("update attribute error, {0}".format(str(e)))
return abort(400, ErrFormat.update_attribute_failed.format(("id={}".format(_id))))
return abort(400, ErrFormat.update_attribute_failed.format(("id=".format(_id))))
new = attr.to_dict()
if not new['choice_web_hook'] and new['is_choice']:

View File

@ -1,51 +1,34 @@
# -*- coding:utf-8 -*-
import copy
import datetime
import json
import jsonpath
import os
from api.extensions import db
from api.lib.cmdb.auto_discovery.const import ClOUD_MAP
from api.lib.cmdb.cache import CITypeAttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.ci_type import CITypeGroupManager
from api.lib.cmdb.const import AutoDiscoveryType
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search
from api.lib.mixin import DBMixin
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import validate_permission
from api.lib.utils import AESCrypto
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryRule
from flask import abort
from flask import current_app
from flask_login import current_user
from sqlalchemy import func
from api.extensions import db
from api.lib.cmdb.auto_discovery.const import CLOUD_MAP
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
from api.lib.cmdb.auto_discovery.const import PRIVILEGED_USERS
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import AutoDiscoveryMappingCache
from api.lib.cmdb.cache import CITypeAttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci_type import CITypeGroupManager
from api.lib.cmdb.const import AutoDiscoveryType
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.custom_dashboard import SystemConfigManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.mixin import DBMixin
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import validate_permission
from api.lib.utils import AESCrypto
from api.models.cmdb import AutoDiscoveryAccount
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryCITypeRelation
from api.models.cmdb import AutoDiscoveryCounter
from api.models.cmdb import AutoDiscoveryExecHistory
from api.models.cmdb import AutoDiscoveryRule
from api.models.cmdb import AutoDiscoveryRuleSyncHistory
from api.tasks.cmdb import build_relations_for_ad_accept
from api.tasks.cmdb import write_ad_rule_sync_history
PWD = os.path.abspath(os.path.dirname(__file__))
app_cli = CMDBApp()
def parse_plugin_script(script):
@ -113,30 +96,14 @@ class AutoDiscoveryRuleCRUD(DBMixin):
else:
self.cls.create(**rule)
def _can_add(self, valid=True, **kwargs):
def _can_add(self, **kwargs):
self.cls.get_by(name=kwargs['name']) and abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
if kwargs.get('is_plugin') and kwargs.get('plugin_script') and valid:
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.create_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.create_plugin))
kwargs['owner'] = current_user.uid
return kwargs
def _can_update(self, valid=True, **kwargs):
def _can_update(self, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
@ -148,22 +115,6 @@ class AutoDiscoveryRuleCRUD(DBMixin):
if other and other.id != existed.id:
return abort(400, ErrFormat.adr_duplicate.format(kwargs['name']))
if existed.is_plugin and valid:
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.update_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.update_plugin))
return existed
def update(self, _id, **kwargs):
@ -171,44 +122,21 @@ class AutoDiscoveryRuleCRUD(DBMixin):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
for item in AutoDiscoveryCIType.get_by(adr_id=_id, to_dict=False):
item.update(updated_at=datetime.datetime.now())
return super(AutoDiscoveryRuleCRUD, self).update(_id, filter_none=False, **kwargs)
def _can_delete(self, **kwargs):
if AutoDiscoveryCIType.get_by(adr_id=kwargs['_id'], first=True):
return abort(400, ErrFormat.adr_referenced)
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['_id'])))
if existed.is_plugin:
acl = ACLManager(app_cli.app_name)
has_perm = True
try:
if not acl.has_permission(app_cli.op.Auto_Discovery,
app_cli.resource_type_name,
app_cli.op.delete_plugin) and not is_app_admin(app_cli.app_name):
has_perm = False
except Exception:
if not is_app_admin(app_cli.app_name):
return abort(403, ErrFormat.role_required.format(app_cli.admin_name))
if not has_perm:
return abort(403, ErrFormat.no_permission.format(
app_cli.op.Auto_Discovery, app_cli.op.delete_plugin))
return existed
return self._can_update(**kwargs)
class AutoDiscoveryCITypeCRUD(DBMixin):
cls = AutoDiscoveryCIType
@classmethod
def get_all(cls, type_ids=None):
res = cls.cls.get_by(to_dict=False)
return [i for i in res if type_ids is None or i.type_id in type_ids]
def get_all(cls):
return cls.cls.get_by(to_dict=False)
@classmethod
def get_by_id(cls, _id):
@ -219,59 +147,25 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
return cls.cls.get_by(type_id=type_id, to_dict=False)
@classmethod
def get_ad_attributes(cls, type_id):
result = []
adts = cls.get_by_type_id(type_id)
for adt in adts:
adr = AutoDiscoveryRuleCRUD.get_by_id(adt.adr_id)
if not adr:
continue
if adr.type == AutoDiscoveryType.HTTP:
for i in DEFAULT_INNER:
if adr.name == i['name']:
attrs = AutoDiscoveryHTTPManager.get_attributes(
i['en'], (adt.extra_option or {}).get('category')) or []
result.extend([i.get('name') for i in attrs])
break
elif adr.type == AutoDiscoveryType.SNMP:
attributes = AutoDiscoverySNMPManager.get_attributes()
result.extend([i.get('name') for i in (attributes or [])])
else:
result.extend([i.get('name') for i in (adr.attributes or [])])
return sorted(list(set(result)))
@classmethod
def get(cls, ci_id, oneagent_id, oneagent_name, last_update_at=None):
"""
OneAgent sync rules
:param ci_id:
:param oneagent_id:
:param oneagent_name:
:param last_update_at:
:return:
"""
def get(cls, ci_id, oneagent_id, last_update_at=None):
result = []
rules = cls.cls.get_by(to_dict=True)
for rule in rules:
if not rule['enabled']:
if rule.get('relation'):
continue
if isinstance(rule.get("extra_option"), dict):
decrypt_account(rule['extra_option'], rule['uid'])
if rule['extra_option'].get('_reference'):
rule['extra_option'].pop('password', None)
if isinstance(rule.get("extra_option"), dict) and rule['extra_option'].get('secret'):
if not (current_user.username == "cmdb_agent" or current_user.uid == rule['uid']):
rule['extra_option'].pop('secret', None)
rule['extra_option'].update(
AutoDiscoveryAccountCRUD().get_config_by_id(rule['extra_option']['_reference']))
else:
rule['extra_option']['secret'] = AESCrypto.decrypt(rule['extra_option']['secret'])
if oneagent_id and rule['agent_id'] == oneagent_id:
result.append(rule)
elif rule['query_expr']:
query = rule['query_expr'].lstrip('q').lstrip('=')
s = ci_search(query, fl=['_id'], count=1000000)
s = search(query, fl=['_id'], count=1000000)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
@ -282,32 +176,25 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
result.append(rule)
break
elif not rule['agent_id'] and not rule['query_expr'] and rule['adr_id']:
try:
if not int(oneagent_id, 16): # excludes master
continue
except Exception:
pass
adr = AutoDiscoveryRuleCRUD.get_by_id(rule['adr_id'])
if not adr:
continue
if adr.type in (AutoDiscoveryType.SNMP, AutoDiscoveryType.HTTP):
continue
if not rule['updated_at']:
continue
result.append(rule)
ad_rules_updated_at = (SystemConfigManager.get('ad_rules_updated_at') or {}).get('option', {}).get('v') or ""
new_last_update_at = ""
for i in result:
i['adr'] = AutoDiscoveryRule.get_by_id(i['adr_id']).to_dict()
i['adr'].pop("attributes", None)
__last_update_at = max([i['updated_at'] or "", i['created_at'] or "",
i['adr']['created_at'] or "", i['adr']['updated_at'] or "", ad_rules_updated_at])
i['adr']['created_at'] or "", i['adr']['updated_at'] or ""])
if new_last_update_at < __last_update_at:
new_last_update_at = __last_update_at
write_ad_rule_sync_history.apply_async(args=(result, oneagent_id, oneagent_name, datetime.datetime.now()),
queue=CMDB_QUEUE)
if not last_update_at or new_last_update_at > last_update_at:
return result, new_last_update_at
else:
@ -326,7 +213,7 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
agent_id = agent_id.strip()
q = "op_duty:{0},-rd_duty:{0},oneagent_id:{1}"
s = ci_search(q.format(current_user.username, agent_id.strip()))
s = search(q.format(current_user.username, agent_id.strip()))
try:
response, _, _, _, _, _ = s.search()
if response:
@ -335,7 +222,7 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
current_app.logger.warning(e)
return abort(400, str(e))
s = ci_search(q.format(current_user.nickname, agent_id.strip()))
s = search(q.format(current_user.nickname, agent_id.strip()))
try:
response, _, _, _, _, _ = s.search()
if response:
@ -349,7 +236,7 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
if query_expr.startswith('q='):
query_expr = query_expr[2:]
s = ci_search(query_expr, count=1000000)
s = search(query_expr, count=1000000)
try:
response, _, _, _, _, _ = s.search()
for i in response:
@ -367,39 +254,19 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
def _can_add(**kwargs):
if kwargs.get('adr_id'):
adr = AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
AutoDiscoveryRule.get_by_id(kwargs['adr_id']) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(kwargs['adr_id'])))
if adr.type == AutoDiscoveryType.HTTP:
kwargs.setdefault('extra_option', dict())
en_name = None
for i in DEFAULT_INNER:
if i['name'] == adr.name:
en_name = i['en']
break
if en_name and kwargs['extra_option'].get('category'):
for item in CLOUD_MAP[en_name]:
if item["collect_key_map"].get(kwargs['extra_option']['category']):
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
kwargs['extra_option']['category']]
kwargs["extra_option"]["provider"] = en_name
break
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
for i in DEFAULT_INNER:
if i['name'] == adr.name:
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
break
# if not adr.is_plugin:
# other = self.cls.get_by(adr_id=adr.id, first=True, to_dict=False)
# if other:
# ci_type = CITypeCache.get(other.type_id)
# return abort(400, ErrFormat.adr_default_ref_once.format(ci_type.alias))
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('extra_option'))
ci_type = CITypeCache.get(kwargs['type_id'])
unique = AttributeCache.get(ci_type.unique_id)
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
kwargs['extra_option']['secret'] = AESCrypto.encrypt(kwargs['extra_option']['secret'])
kwargs['uid'] = current_user.uid
@ -409,44 +276,11 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
existed = self.cls.get_by_id(kwargs['_id']) or abort(
404, ErrFormat.ad_not_found.format("id={}".format(kwargs['_id'])))
adr = AutoDiscoveryRule.get_by_id(existed.adr_id) or abort(
404, ErrFormat.adr_not_found.format("id={}".format(existed.adr_id)))
if adr.type == AutoDiscoveryType.HTTP:
kwargs.setdefault('extra_option', dict())
en_name = None
for i in DEFAULT_INNER:
if i['name'] == adr.name:
en_name = i['en']
break
if en_name and kwargs['extra_option'].get('category'):
for item in CLOUD_MAP[en_name]:
if item["collect_key_map"].get(kwargs['extra_option']['category']):
kwargs["extra_option"]["collect_key"] = item["collect_key_map"][
kwargs['extra_option']['category']]
kwargs["extra_option"]["provider"] = en_name
break
if adr.type == AutoDiscoveryType.COMPONENTS and kwargs.get('extra_option'):
for i in DEFAULT_INNER:
if i['name'] == adr.name:
kwargs['extra_option']['collect_key'] = i['option'].get('collect_key')
break
if 'attributes' in kwargs:
self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
ci_type = CITypeCache.get(existed.type_id)
unique = AttributeCache.get(ci_type.unique_id)
if unique and unique.name not in (kwargs.get('attributes') or {}).values():
current_app.logger.warning((unique.name, kwargs.get('attributes'), ci_type.alias))
return abort(400, ErrFormat.ad_not_unique_key.format(unique.name))
self.__valid_exec_target(kwargs.get('agent_id'), kwargs.get('query_expr'))
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('password'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
return existed
@ -455,22 +289,10 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('extra_option'))
if isinstance(kwargs.get('extra_option'), dict) and kwargs['extra_option'].get('secret'):
kwargs['extra_option']['secret'] = AESCrypto.encrypt(kwargs['extra_option']['secret'])
inst = self._can_update(_id=_id, **kwargs)
if len(kwargs) == 1 and 'enabled' in kwargs: # enable or disable
pass
elif inst.agent_id != kwargs.get('agent_id') or inst.query_expr != kwargs.get('query_expr'):
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
item.delete(commit=False)
db.session.commit()
SystemConfigManager.create_or_update("ad_rules_updated_at",
dict(v=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
obj = inst.update(_id=_id, filter_none=False, **kwargs)
return obj
return super(AutoDiscoveryCITypeCRUD, self).update(_id, filter_none=False, **kwargs)
def _can_delete(self, **kwargs):
if AutoDiscoveryCICRUD.get_by_adt_id(kwargs['_id']):
@ -481,61 +303,6 @@ class AutoDiscoveryCITypeCRUD(DBMixin):
return existed
def delete(self, _id):
inst = self._can_delete(_id=_id)
inst.soft_delete()
for item in AutoDiscoveryRuleSyncHistory.get_by(adt_id=inst.id, to_dict=False):
item.delete(commit=False)
db.session.commit()
attributes = self.get_ad_attributes(inst.type_id)
for item in AutoDiscoveryCITypeRelationCRUD.get_by_type_id(inst.type_id):
if item.ad_key not in attributes:
item.soft_delete()
return inst
class AutoDiscoveryCITypeRelationCRUD(DBMixin):
cls = AutoDiscoveryCITypeRelation
@classmethod
def get_all(cls, type_ids=None):
res = cls.cls.get_by(to_dict=False)
return [i for i in res if type_ids is None or i.ad_type_id in type_ids]
@classmethod
def get_by_type_id(cls, type_id, to_dict=False):
return cls.cls.get_by(ad_type_id=type_id, to_dict=to_dict)
def upsert(self, ad_type_id, relations):
existed = self.cls.get_by(ad_type_id=ad_type_id, to_dict=False)
existed = {(i.ad_key, i.peer_type_id, i.peer_attr_id): i for i in existed}
new = []
for r in relations:
k = (r.get('ad_key'), r.get('peer_type_id'), r.get('peer_attr_id'))
if len(list(filter(lambda x: x, k))) == 3 and k not in existed:
self.cls.create(ad_type_id=ad_type_id, **r)
new.append(k)
for deleted in set(existed.keys()) - set(new):
existed[deleted].soft_delete()
return self.get_by_type_id(ad_type_id, to_dict=True)
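upsert reconciles the desired relations against what already exists, keyed by the (ad_key, peer_type_id, peer_attr_id) triple: missing triples are created and leftover ones are soft-deleted. A toy illustration of that set reconciliation (the triples are made up):

existing = {("hostname", 3, 17), ("ip", 5, 21)}   # (ad_key, peer_type_id, peer_attr_id), illustrative
desired = {("hostname", 3, 17), ("os", 3, 19)}
to_create = desired - existing                     # {("os", 3, 19)}
to_soft_delete = existing - desired                # {("ip", 5, 21)}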
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
class AutoDiscoveryCICRUD(DBMixin):
cls = AutoDiscoveryCI
@ -563,14 +330,15 @@ class AutoDiscoveryCICRUD(DBMixin):
@staticmethod
def get_attributes_by_type_id(type_id):
from api.lib.cmdb.ci_type import CITypeAttributeManager
attributes = [i for i in CITypeAttributeManager.get_attributes_by_type_id(type_id) or []]
from api.lib.cmdb.cache import CITypeAttributesCache
attributes = [i[1] for i in CITypeAttributesCache.get2(type_id) or []]
attr_names = set()
adts = AutoDiscoveryCITypeCRUD.get_by_type_id(type_id)
for adt in adts:
attr_names |= set((adt.attributes or {}).values())
return [attr for attr in attributes if attr['name'] in attr_names]
return [attr.to_dict() for attr in attributes if attr.name in attr_names]
@classmethod
def search(cls, page, page_size, fl=None, **kwargs):
@ -623,24 +391,16 @@ class AutoDiscoveryCICRUD(DBMixin):
changed = False
if existed is not None:
if existed.instance != kwargs['instance']:
instance = copy.deepcopy(existed.instance) or {}
instance.update(kwargs['instance'])
kwargs['instance'] = instance
existed.update(filter_none=False, **kwargs)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="update resource: {}".format(kwargs.get('unique_value')))
changed = True
else:
existed = self.cls.create(**kwargs)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="add resource: {}".format(kwargs.get('unique_value')))
changed = True
if adt.auto_accept and changed:
try:
self.accept(existed)
except Exception as e:
current_app.logger.error(e)
return abort(400, str(e))
elif changed:
existed.update(is_accept=False, accept_time=None, accept_by=None, filter_none=False)
@ -660,13 +420,6 @@ class AutoDiscoveryCICRUD(DBMixin):
inst.delete()
adt = AutoDiscoveryCIType.get_by_id(inst.adt_id)
if adt:
adt.update(updated_at=datetime.datetime.now())
AutoDiscoveryExecHistoryCRUD().add(type_id=inst.type_id,
stdout="delete resource: {}".format(inst.unique_value))
self._after_delete(inst)
return inst
@ -682,13 +435,6 @@ class AutoDiscoveryCICRUD(DBMixin):
not is_app_admin("cmdb") and validate_permission(ci_type.name, ResourceTypeEnum.CI, PermEnum.DELETE, "cmdb")
existed.delete()
adt = AutoDiscoveryCIType.get_by_id(existed.adt_id)
if adt:
adt.update(updated_at=datetime.datetime.now())
AutoDiscoveryExecHistoryCRUD().add(type_id=type_id,
stdout="delete resource: {}".format(unique_value))
# TODO: delete ci
@classmethod
@ -699,24 +445,36 @@ class AutoDiscoveryCICRUD(DBMixin):
adt = AutoDiscoveryCITypeCRUD.get_by_id(adc.adt_id) or abort(404, ErrFormat.adt_not_found)
ci_id = None
if adt.attributes:
ci_dict = {adt.attributes[k]: v for k, v in adc.instance.items() if k in adt.attributes}
ci_id = CIManager.add(adc.type_id, is_auto_discovery=True, **ci_dict)
ad_key2attr = adt.attributes or {}
if ad_key2attr:
ci_dict = {ad_key2attr[k]: None if not v and isinstance(v, (list, dict)) else v
for k, v in adc.instance.items() if k in ad_key2attr}
extra_option = adt.extra_option or {}
mapping, path_mapping = AutoDiscoveryHTTPManager.get_predefined_value_mapping(
extra_option.get('provider'), extra_option.get('category'))
if mapping:
ci_dict = {k: (mapping.get(k) or {}).get(str(v), v) for k, v in ci_dict.items()}
if path_mapping:
ci_dict = {k: jsonpath.jsonpath(v, path_mapping[k]) if k in path_mapping else v
for k, v in ci_dict.items()}
ci_id = CIManager.add(adc.type_id, is_auto_discovery=True, _is_admin=True, **ci_dict)
AutoDiscoveryExecHistoryCRUD().add(type_id=adt.type_id,
stdout="accept resource: {}".format(adc.unique_value))
relation_adts = AutoDiscoveryCIType.get_by(type_id=adt.type_id, adr_id=None, to_dict=False)
for r_adt in relation_adts:
if not r_adt.relation or ci_id is None:
continue
for ad_key in r_adt.relation:
if not adc.instance.get(ad_key):
continue
cmdb_key = r_adt.relation[ad_key]
query = "_type:{},{}:{}".format(cmdb_key.get('type_name'), cmdb_key.get('attr_name'),
adc.instance.get(ad_key))
s = search(query)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
current_app.logger.warning(e)
return abort(400, str(e))
build_relations_for_ad_accept.apply_async(args=(adc.to_dict(), ci_id, ad_key2attr), queue=CMDB_QUEUE)
relation_ci_id = response and response[0]['_id']
if relation_ci_id:
try:
CIRelationManager.add(ci_id, relation_ci_id)
except:
try:
CIRelationManager.add(relation_ci_id, ci_id)
except:
pass
adc.update(is_accept=True,
accept_by=nickname or current_user.nickname,
@ -727,75 +485,17 @@ class AutoDiscoveryCICRUD(DBMixin):
class AutoDiscoveryHTTPManager(object):
@staticmethod
def get_categories(name):
categories = (CLOUD_MAP.get(name) or {}) or []
for item in copy.deepcopy(categories):
item.pop('map', None)
item.pop('collect_key_map', None)
return (ClOUD_MAP.get(name) or {}).get('categories') or []
return categories
def get_resources(self, name):
en_name = None
for i in DEFAULT_INNER:
if i['name'] == name:
en_name = i['en']
break
if en_name:
categories = self.get_categories(en_name)
return [j for i in categories for j in i['items']]
@staticmethod
def get_attributes(name, category):
tpt = ((ClOUD_MAP.get(name) or {}).get('map') or {}).get(category)
if tpt and os.path.exists(os.path.join(PWD, tpt)):
with open(os.path.join(PWD, tpt)) as f:
return json.loads(f.read())
return []
@staticmethod
def get_attributes(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
tpt = item['map'][_resource]
if isinstance(tpt, dict):
tpt = tpt.get('template')
if tpt and os.path.exists(os.path.join(PWD, tpt)):
with open(os.path.join(PWD, tpt)) as f:
return json.loads(f.read())
return []
@staticmethod
def get_mapping(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
mapping = item['map'][_resource]
if not isinstance(mapping, dict):
return {}
name = mapping.get('mapping')
mapping = AutoDiscoveryMappingCache.get(name)
if isinstance(mapping, dict):
return {mapping[key][provider]['key'].split('.')[0]: key for key in mapping if
(mapping[key].get(provider) or {}).get('key')}
return {}
@staticmethod
def get_predefined_value_mapping(provider, resource):
for item in (CLOUD_MAP.get(provider) or {}):
for _resource in (item.get('map') or {}):
if _resource == resource:
mapping = item['map'][_resource]
if not isinstance(mapping, dict):
return {}, {}
name = mapping.get('mapping')
mapping = AutoDiscoveryMappingCache.get(name)
if isinstance(mapping, dict):
return ({key: mapping[key][provider].get('map') for key in mapping if
mapping[key].get(provider, {}).get('map')},
{key: mapping[key][provider]['key'].split('.', 1)[1] for key in mapping if
((mapping[key].get(provider) or {}).get('key') or '').split('.')[1:]})
return {}, {}
class AutoDiscoverySNMPManager(object):
@ -806,191 +506,3 @@ class AutoDiscoverySNMPManager(object):
return json.loads(f.read())
return []
class AutoDiscoveryComponentsManager(object):
@staticmethod
def get_attributes(name):
if os.path.exists(os.path.join(PWD, "templates/{}.json".format(name))):
with open(os.path.join(PWD, "templates/{}.json".format(name))) as f:
return json.loads(f.read())
return []
class AutoDiscoveryRuleSyncHistoryCRUD(DBMixin):
cls = AutoDiscoveryRuleSyncHistory
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
def upsert(self, **kwargs):
existed = self.cls.get_by(adt_id=kwargs.get('adt_id'),
oneagent_id=kwargs.get('oneagent_id'),
oneagent_name=kwargs.get('oneagent_name'),
first=True,
to_dict=False)
if existed is not None:
existed.update(**kwargs)
else:
self.cls.create(**kwargs)
class AutoDiscoveryExecHistoryCRUD(DBMixin):
cls = AutoDiscoveryExecHistory
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
class AutoDiscoveryCounterCRUD(DBMixin):
cls = AutoDiscoveryCounter
def get(self, type_id):
res = self.cls.get_by(type_id=type_id, first=True, to_dict=True)
if res is None:
return dict(rule_count=0, exec_target_count=0, instance_count=0, accept_count=0,
this_month_count=0, this_week_count=0, last_month_count=0, last_week_count=0)
return res
def _can_add(self, **kwargs):
pass
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
def encrypt_account(config):
if isinstance(config, dict):
if config.get('secret'):
config['secret'] = AESCrypto.encrypt(config['secret'])
if config.get('password'):
config['password'] = AESCrypto.encrypt(config['password'])
def decrypt_account(config, uid):
if isinstance(config, dict):
if config.get('password'):
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
config.pop('password', None)
else:
try:
config['password'] = AESCrypto.decrypt(config['password'])
except Exception as e:
current_app.logger.error('decrypt account failed: {}'.format(e))
if config.get('secret'):
if not (current_user.username in PRIVILEGED_USERS or current_user.uid == uid):
config.pop('secret', None)
else:
try:
config['secret'] = AESCrypto.decrypt(config['secret'])
except Exception as e:
current_app.logger.error('decrypt account failed: {}'.format(e))
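encrypt_account and decrypt_account are symmetric wrappers around AESCrypto: secret and password fields are encrypted in place before storage and only decrypted back for the owning uid or a privileged user, otherwise they are popped from the config. A minimal round-trip sketch of the underlying calls (the plaintext is made up):

from api.lib.utils import AESCrypto

config = {"secret": "token-123"}                            # illustrative account config
config["secret"] = AESCrypto.encrypt(config["secret"])      # what encrypt_account does per field
assert AESCrypto.decrypt(config["secret"]) == "token-123"   # what decrypt_account does for the owner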
class AutoDiscoveryAccountCRUD(DBMixin):
cls = AutoDiscoveryAccount
def get(self, adr_id):
res = self.cls.get_by(adr_id=adr_id, to_dict=True)
for i in res:
decrypt_account(i.get('config'), i['uid'])
return res
def get_config_by_id(self, _id):
res = self.cls.get_by_id(_id)
if not res:
return {}
config = res.to_dict().get('config') or {}
decrypt_account(config, res.uid)
return config
def _can_add(self, **kwargs):
encrypt_account(kwargs.get('config'))
kwargs['uid'] = current_user.uid
return kwargs
def upsert(self, adr_id, accounts):
existed_all = self.cls.get_by(adr_id=adr_id, to_dict=False)
account_names = {i['name'] for i in accounts}
name_changed = dict()
for account in accounts:
existed = None
if account.get('id'):
existed = self.cls.get_by_id(account.get('id'))
if existed is None:
continue
account.pop('id')
name_changed[existed.name] = account.get('name')
else:
account = self._can_add(**account)
if existed is not None:
if current_user.uid == existed.uid:
config = copy.deepcopy(existed.config) or {}
config.update(account.get('config') or {})
account['config'] = config
existed.update(**account)
else:
self.cls.create(adr_id=adr_id, **account)
for item in existed_all:
if name_changed.get(item.name, item.name) not in account_names:
if current_user.uid == item.uid:
item.soft_delete()
def _can_update(self, **kwargs):
existed = self.cls.get_by_id(kwargs['_id']) or abort(404, ErrFormat.not_found)
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('secret'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
if isinstance(kwargs.get('config'), dict) and kwargs['config'].get('password'):
if current_user.uid != existed.uid:
return abort(403, ErrFormat.adt_secret_no_permission)
return existed
def update(self, _id, **kwargs):
if kwargs.get('is_plugin') and kwargs.get('plugin_script'):
kwargs = check_plugin_script(**kwargs)
encrypt_account(kwargs.get('config'))
inst = self._can_update(_id=_id, **kwargs)
obj = inst.update(_id=_id, filter_none=False, **kwargs)
return obj
def _can_delete(self, **kwargs):
pass

View File

@ -2,38 +2,15 @@
from api.lib.cmdb.const import AutoDiscoveryType
PRIVILEGED_USERS = ("cmdb_agent", "worker", "admin")
DEFAULT_INNER = [
dict(name="阿里云", en="aliyun", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aliyun'}, "en": "aliyun"}),
dict(name="腾讯云", en="tencentcloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-tengxunyun'}, "en": "tencentcloud"}),
dict(name="华为云", en="huaweicloud", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-huaweiyun'}, "en": "huaweicloud"}),
dict(name="AWS", en="aws", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aws'}, "en": "aws"}),
dict(name="VCenter", en="vcenter", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'cmdb-vcenter'}, "category": "private_cloud", "en": "vcenter"}),
dict(name="KVM", en="kvm", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'ops-KVM'}, "category": "private_cloud", "en": "kvm"}),
dict(name="Nginx", en="nginx", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-nginx'}, "en": "nginx", "collect_key": "nginx"}),
dict(name="Apache", en="apache", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-apache'}, "en": "apache", "collect_key": "apache"}),
dict(name="Tomcat", en="tomcat", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-tomcat'}, "en": "tomcat", "collect_key": "tomcat"}),
dict(name="MySQL", en="mysql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-mySQL'}, "en": "mysql", "collect_key": "mysql"}),
dict(name="MSSQL", en="mssql", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-SQLServer'}, "en": "mssql", "collect_key": "sqlserver"}),
dict(name="Oracle", en="oracle", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-oracle'}, "en": "oracle", "collect_key": "oracle"}),
dict(name="Redis", en="redis", type=AutoDiscoveryType.COMPONENTS, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-redis'}, "en": "redis", "collect_key": "redis"}),
DEFAULT_HTTP = [
dict(name="阿里云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aliyun'}}),
dict(name="腾讯云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-tengxunyun'}}),
dict(name="华为云", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-huaweiyun'}}),
dict(name="AWS", type=AutoDiscoveryType.HTTP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-aws'}}),
dict(name="交换机", type=AutoDiscoveryType.SNMP, is_inner=True, is_plugin=False,
option={'icon': {'name': 'caise-jiaohuanji'}}),
@ -45,307 +22,32 @@ DEFAULT_INNER = [
option={'icon': {'name': 'caise-dayinji'}}),
]
CLOUD_MAP = {
"aliyun": [
{
"category": "计算",
"items": ["云服务器 ECS", "云服务器 Disk"],
"map": {
"云服务器 ECS": {"template": "templates/aliyun_ecs.json", "mapping": "ecs"},
"云服务器 Disk": {"template": "templates/aliyun_ecs_disk.json", "mapping": "evs"},
},
"collect_key_map": {
"云服务器 ECS": "ali.ecs",
"云服务器 Disk": "ali.ecs_disk",
},
},
{
"category": "网络与CDN",
"items": [
"内容分发CDN",
"负载均衡SLB",
"专有网络VPC",
"交换机Switch",
],
"map": {
"内容分发CDN": {"template": "templates/aliyun_cdn.json", "mapping": "CDN"},
"负载均衡SLB": {"template": "templates/aliyun_slb.json", "mapping": "loadbalancer"},
"专有网络VPC": {"template": "templates/aliyun_vpc.json", "mapping": "vpc"},
"交换机Switch": {"template": "templates/aliyun_switch.json", "mapping": "vswitch"},
},
"collect_key_map": {
"内容分发CDN": "ali.cdn",
"负载均衡SLB": "ali.slb",
"专有网络VPC": "ali.vpc",
"交换机Switch": "ali.switch",
},
},
{
"category": "存储",
"items": ["块存储EBS", "对象存储OSS"],
"map": {
"块存储EBS": {"template": "templates/aliyun_ebs.json", "mapping": "evs"},
"对象存储OSS": {"template": "templates/aliyun_oss.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"块存储EBS": "ali.ebs",
"对象存储OSS": "ali.oss",
},
},
{
"category": "数据库",
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL", "云数据库 Redis"],
"map": {
"云数据库RDS MySQL": {"template": "templates/aliyun_rds_mysql.json", "mapping": "mysql"},
"云数据库RDS PostgreSQL": {"template": "templates/aliyun_rds_postgre.json", "mapping": "postgresql"},
"云数据库 Redis": {"template": "templates/aliyun_redis.json", "mapping": "redis"},
},
"collect_key_map": {
"云数据库RDS MySQL": "ali.rds_mysql",
"云数据库RDS PostgreSQL": "ali.rds_postgre",
"云数据库 Redis": "ali.redis",
},
},
],
"tencentcloud": [
{
"category": "计算",
"items": ["云服务器 CVM"],
"map": {
"云服务器 CVM": {"template": "templates/tencent_cvm.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 CVM": "tencent.cvm",
},
},
{
"category": "CDN与边缘",
"items": ["内容分发CDN"],
"map": {
"内容分发CDN": {"template": "templates/tencent_cdn.json", "mapping": "CDN"},
},
"collect_key_map": {
"内容分发CDN": "tencent.cdn",
},
},
{
"category": "网络",
"items": ["负载均衡CLB", "私有网络VPC", "子网"],
"map": {
"负载均衡CLB": {"template": "templates/tencent_clb.json", "mapping": "loadbalancer"},
"私有网络VPC": {"template": "templates/tencent_vpc.json", "mapping": "vpc"},
"子网": {"template": "templates/tencent_subnet.json", "mapping": "vswitch"},
},
"collect_key_map": {
"负载均衡CLB": "tencent.clb",
"私有网络VPC": "tencent.vpc",
"子网": "tencent.subnet",
},
},
{
"category": "存储",
"items": ["云硬盘CBS", "对象存储COS"],
"map": {
"云硬盘CBS": {"template": "templates/tencent_cbs.json", "mapping": "evs"},
"对象存储COS": {"template": "templates/tencent_cos.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"云硬盘CBS": "tencent.cbs",
"对象存储COS": "tencent.cos",
},
},
{
"category": "数据库",
"items": ["云数据库 MySQL", "云数据库 PostgreSQL", "云数据库 Redis"],
"map": {
"云数据库 MySQL": {"template": "templates/tencent_rdb.json", "mapping": "mysql"},
"云数据库 PostgreSQL": {"template": "templates/tencent_postgres.json", "mapping": "postgresql"},
"云数据库 Redis": {"template": "templates/tencent_redis.json", "mapping": "redis"},
},
"collect_key_map": {
"云数据库 MySQL": "tencent.rdb",
"云数据库 PostgreSQL": "tencent.rds_postgres",
"云数据库 Redis": "tencent.redis",
},
},
],
"huaweicloud": [
{
"category": "计算",
"items": ["云服务器 ECS"],
"map": {
"云服务器 ECS": {"template": "templates/huaweicloud_ecs.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 ECS": "huawei.ecs",
},
},
{
"category": "CDN与智能边缘",
"items": ["内容分发网络CDN"],
"map": {
"内容分发网络CDN": {"template": "templates/huawei_cdn.json", "mapping": "CDN"},
},
"collect_key_map": {
"内容分发网络CDN": "huawei.cdn",
},
},
{
"category": "网络",
"items": ["弹性负载均衡ELB", "虚拟私有云VPC", "子网"],
"map": {
"弹性负载均衡ELB": {"template": "templates/huawei_elb.json", "mapping": "loadbalancer"},
"虚拟私有云VPC": {"template": "templates/huawei_vpc.json", "mapping": "vpc"},
"子网": {"template": "templates/huawei_subnet.json", "mapping": "vswitch"},
},
"collect_key_map": {
"弹性负载均衡ELB": "huawei.elb",
"虚拟私有云VPC": "huawei.vpc",
"子网": "huawei.subnet",
},
},
{
"category": "存储",
"items": ["云硬盘EVS", "对象存储OBS"],
"map": {
"云硬盘EVS": {"template": "templates/huawei_evs.json", "mapping": "evs"},
"对象存储OBS": {"template": "templates/huawei_obs.json", "mapping": "objectStorage"},
},
"collect_key_map": {
"云硬盘EVS": "huawei.evs",
"对象存储OBS": "huawei.obs",
},
},
{
"category": "数据库",
"items": ["云数据库RDS MySQL", "云数据库RDS PostgreSQL"],
"map": {
"云数据库RDS MySQL": {"template": "templates/huawei_rds_mysql.json", "mapping": "mysql"},
"云数据库RDS PostgreSQL": {"template": "templates/huawei_rds_postgre.json", "mapping": "postgresql"},
},
"collect_key_map": {
"云数据库RDS MySQL": "huawei.rds_mysql",
"云数据库RDS PostgreSQL": "huawei.rds_postgre",
},
},
{
"category": "应用中间件",
"items": ["分布式缓存Redis"],
"map": {
"分布式缓存Redis": {"template": "templates/huawei_dcs.json", "mapping": "redis"},
},
"collect_key_map": {
"分布式缓存Redis": "huawei.dcs",
},
},
],
"aws": [
{
"category": "计算",
"items": ["云服务器 EC2"],
"map": {
"云服务器 EC2": {"template": "templates/aws_ec2.json", "mapping": "ecs"},
},
"collect_key_map": {
"云服务器 EC2": "aws.ec2",
},
},
{"category": "网络与CDN", "items": [], "map": {}, "collect_key_map": {}},
],
"vcenter": [
{
"category": "计算",
"items": [
"主机",
"虚拟机",
"主机集群"
],
"map": {
"主机": "templates/vsphere_host.json",
"虚拟机": "templates/vsphere_vm.json",
"主机集群": "templates/vsphere_cluster.json",
},
"collect_key_map": {
"主机": "vsphere.host",
"虚拟机": "vsphere.vm",
"主机集群": "vsphere.cluster",
},
},
{
"category": "网络",
"items": [
"网络",
"标准交换机",
"分布式交换机",
],
"map": {
"网络": "templates/vsphere_network.json",
"标准交换机": "templates/vsphere_standard_switch.json",
"分布式交换机": "templates/vsphere_distributed_switch.json",
},
"collect_key_map": {
"网络": "vsphere.network",
"标准交换机": "vsphere.standard_switch",
"分布式交换机": "vsphere.distributed_switch",
},
},
{
"category": "存储",
"items": ["数据存储", "数据存储集群"],
"map": {
"数据存储": "templates/vsphere_datastore.json",
"数据存储集群": "templates/vsphere_storage_pod.json",
},
"collect_key_map": {
"数据存储": "vsphere.datastore",
"数据存储集群": "vsphere.storage_pod",
},
},
{
"category": "其他",
"items": ["资源池", "数据中心", "文件夹"],
"map": {
"资源池": "templates/vsphere_pool.json",
"数据中心": "templates/vsphere_datacenter.json",
"文件夹": "templates/vsphere_folder.json",
},
"collect_key_map": {
"资源池": "vsphere.pool",
"数据中心": "vsphere.datacenter",
"文件夹": "vsphere.folder",
},
},
],
"kvm": [
{
"category": "计算",
"items": ["虚拟机"],
"map": {
"虚拟机": "templates/kvm_vm.json",
},
"collect_key_map": {
"虚拟机": "kvm.vm",
},
},
{
"category": "存储",
"items": ["存储"],
"map": {
"存储": "templates/kvm_storage.json",
},
"collect_key_map": {
"存储": "kvm.storage",
},
},
{
"category": "network",
"items": ["网络"],
"map": {
"网络": "templates/kvm_network.json",
},
"collect_key_map": {
"网络": "kvm.network",
},
},
],
ClOUD_MAP = {
"aliyun": {
"categories": ["云服务器 ECS"],
"map": {
"云服务器 ECS": "templates/aliyun_ecs.json",
}
},
"tencentcloud": {
"categories": ["云服务器 CVM"],
"map": {
"云服务器 CVM": "templates/tencent_cvm.json",
}
},
"huaweicloud": {
"categories": ["云服务器 ECS"],
"map": {
"云服务器 ECS": "templates/huaweicloud_ecs.json",
}
},
"aws": {
"categories": ["云服务器 EC2"],
"map": {
"云服务器 EC2": "templates/aws_ec2.json",
}
},
}
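In the newer CLOUD_MAP layout each provider maps to a list of category blocks, and each block carries its items, an item-to-template/mapping map, and a collect_key_map, whereas the older ClOUD_MAP above kept a flat categories/map pair per provider. A trimmed, hypothetical lookup mirroring what AutoDiscoveryHTTPManager.get_attributes walks through:

CLOUD_MAP = {
    "aliyun": [
        {
            "category": "计算",
            "items": ["云服务器 ECS"],
            "map": {"云服务器 ECS": {"template": "templates/aliyun_ecs.json", "mapping": "ecs"}},
            "collect_key_map": {"云服务器 ECS": "ali.ecs"},
        },
    ],
}

def template_for(provider, resource):
    # walk provider -> category blocks -> map, as get_attributes does
    for block in CLOUD_MAP.get(provider) or []:
        tpt = (block.get("map") or {}).get(resource)
        if isinstance(tpt, dict):
            tpt = tpt.get("template")
        if tpt:
            return tpt

assert template_for("aliyun", "云服务器 ECS") == "templates/aliyun_ecs.json"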

View File

@ -2,27 +2,13 @@
from __future__ import unicode_literals
from collections import defaultdict
import datetime
import os
import yaml
from flask import current_app
import json
from api.extensions import cache
from api.extensions import db
from api.lib.cmdb.custom_dashboard import CustomDashboardManager
from api.models.cmdb import Attribute, AutoDiscoveryExecHistory
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryCITypeRelation
from api.models.cmdb import AutoDiscoveryCounter
from api.models.cmdb import AutoDiscoveryRuleSyncHistory
from api.models.cmdb import CI
from api.models.cmdb import Attribute
from api.models.cmdb import CIType
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import PreferenceShowAttributes
from api.models.cmdb import PreferenceTreeView
from api.models.cmdb import RelationType
@ -240,9 +226,7 @@ class CITypeAttributeCache(object):
class CMDBCounterCache(object):
KEY = 'CMDB::Counter::dashboard'
KEY2 = 'CMDB::Counter::adc'
KEY3 = 'CMDB::Counter::sub'
KEY = 'CMDB::Counter'
@classmethod
def get(cls):
@ -255,7 +239,7 @@ class CMDBCounterCache(object):
@classmethod
def set(cls, result):
cache.set(cls.KEY, json.loads(json.dumps(result)), timeout=0)
cache.set(cls.KEY, result, timeout=0)
@classmethod
def reset(cls):
@ -277,7 +261,7 @@ class CMDBCounterCache(object):
cls.set(result)
return json.loads(json.dumps(result))
return result
@classmethod
def update(cls, custom, flush=True):
@ -299,38 +283,27 @@ class CMDBCounterCache(object):
result[custom['id']] = res
cls.set(result)
return json.loads(json.dumps(res))
return res
@classmethod
def relation_counter(cls, type_id, level, other_filer, type_ids):
@staticmethod
def relation_counter(type_id, level, other_filer, type_ids):
from api.lib.cmdb.search.ci_relation.search import Search as RelSearch
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search
from api.lib.cmdb.attribute import AttributeManager
query = "_type:{}".format(type_id)
if other_filer:
query = "{},{}".format(query, other_filer)
s = search(query, count=1000000)
try:
type_names, _, _, _, _, _ = s.search()
except SearchError as e:
current_app.logger.error(e)
return
root_type = CITypeCache.get(type_id)
show_attr_id = root_type and root_type.show_id
show_attr = AttributeCache.get(show_attr_id)
type_id_names = []
for i in type_names:
attr_value = i.get(show_attr and show_attr.name) or i.get(i.get('unique'))
enum_map = AttributeManager.get_enum_map(show_attr_id or i.get('unique'))
type_id_names = [(str(i.get('_id')), i.get(i.get('unique'))) for i in type_names]
type_id_names.append((str(i.get('_id')), enum_map.get(attr_value, attr_value)))
s = RelSearch([i[0] for i in type_id_names], level)
s = RelSearch([i[0] for i in type_id_names], level, other_filer or '')
try:
stats = s.statistics(type_ids, need_filter=False)
stats = s.statistics(type_ids)
except SearchError as e:
current_app.logger.error(e)
return
@ -358,12 +331,11 @@ class CMDBCounterCache(object):
return result
@classmethod
def attribute_counter(cls, custom):
@staticmethod
def attribute_counter(custom):
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.cmdb.attribute import AttributeManager
custom.setdefault('options', {})
type_id = custom.get('type_id')
@ -379,24 +351,16 @@ class CMDBCounterCache(object):
other_filter = "{}".format(other_filter) if other_filter else ''
if custom['options'].get('ret') == 'cis':
enum_map = {}
for _attr_id in attr_ids:
_attr = AttributeCache.get(_attr_id)
if _attr:
enum_map[_attr.alias] = AttributeManager.get_enum_map(_attr_id)
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
s = search(query, fl=attr_ids, ret_key='alias', count=100)
try:
cis, _, _, _, _, _ = s.search()
cis = [{k: (enum_map.get(k) or {}).get(v, v) for k, v in ci.items()} for ci in cis]
except SearchError as e:
current_app.logger.error(e)
return
return cis
origin_result = dict()
result = dict()
# level = 1
query = "_type:({}),{}".format(";".join(map(str, type_ids)), other_filter)
@ -406,18 +370,13 @@ class CMDBCounterCache(object):
except SearchError as e:
current_app.logger.error(e)
return
enum_map1 = AttributeManager.get_enum_map(attr_ids[0])
for i in (list(facet.values()) or [[]])[0]:
k = ValueTypeMap.serialize2[attr2value_type[0]](str(i[0]))
result[enum_map1.get(k, k)] = i[1]
origin_result[k] = i[1]
result[ValueTypeMap.serialize2[attr2value_type[0]](str(i[0]))] = i[1]
if len(attr_ids) == 1:
return result
# level = 2
enum_map2 = AttributeManager.get_enum_map(attr_ids[1])
for v in origin_result:
for v in result:
query = "_type:({}),{},{}:{}".format(";".join(map(str, type_ids)), other_filter, attr_ids[0], v)
s = search(query, fl=attr_ids, facet=[attr_ids[1]], count=1)
try:
@ -425,22 +384,18 @@ class CMDBCounterCache(object):
except SearchError as e:
current_app.logger.error(e)
return
result[enum_map1.get(v, v)] = dict()
origin_result[v] = dict()
result[v] = dict()
for i in (list(facet.values()) or [[]])[0]:
k = ValueTypeMap.serialize2[attr2value_type[1]](str(i[0]))
result[enum_map1.get(v, v)][enum_map2.get(k, k)] = i[1]
origin_result[v][k] = i[1]
result[v][ValueTypeMap.serialize2[attr2value_type[1]](str(i[0]))] = i[1]
if len(attr_ids) == 2:
return result
# level = 3
enum_map3 = AttributeManager.get_enum_map(attr_ids[2])
for v1 in origin_result:
if not isinstance(result[enum_map1.get(v1, v1)], dict):
for v1 in result:
if not isinstance(result[v1], dict):
continue
for v2 in origin_result[v1]:
for v2 in result[v1]:
query = "_type:({}),{},{}:{},{}:{}".format(";".join(map(str, type_ids)), other_filter,
attr_ids[0], v1, attr_ids[1], v2)
s = search(query, fl=attr_ids, facet=[attr_ids[2]], count=1)
@ -449,10 +404,9 @@ class CMDBCounterCache(object):
except SearchError as e:
current_app.logger.error(e)
return
result[enum_map1.get(v1, v1)][enum_map2.get(v2, v2)] = dict()
result[v1][v2] = dict()
for i in (list(facet.values()) or [[]])[0]:
k = ValueTypeMap.serialize2[attr2value_type[2]](str(i[0]))
result[enum_map1.get(v1, v1)][enum_map2.get(v2, v2)][enum_map3.get(k, k)] = i[1]
result[v1][v2][ValueTypeMap.serialize2[attr2value_type[2]](str(i[0]))] = i[1]
return result
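attribute_counter returns a nested dict of facet counts, one level per configured attribute (up to three); a hypothetical shape for two attributes, say environment and status (all values are made up):

# one attribute  -> {"prod": 12, "dev": 7}
# two attributes -> counts of the second attribute within each value of the first
result = {
    "prod": {"running": 10, "stopped": 2},
    "dev": {"running": 5, "stopped": 2},
}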
@ -475,124 +429,3 @@ class CMDBCounterCache(object):
return
return numfound
@classmethod
def flush_adc_counter(cls):
res = db.session.query(CI.type_id, CI.is_auto_discovery)
result = dict()
for i in res:
result.setdefault(i.type_id, dict(total=0, auto_discovery=0))
result[i.type_id]['total'] += 1
if i.is_auto_discovery:
result[i.type_id]['auto_discovery'] += 1
cache.set(cls.KEY2, result, timeout=0)
res = db.session.query(AutoDiscoveryCI.created_at,
AutoDiscoveryCI.updated_at,
AutoDiscoveryCI.adt_id,
AutoDiscoveryCI.type_id,
AutoDiscoveryCI.is_accept).filter(AutoDiscoveryCI.deleted.is_(False))
today = datetime.datetime.today()
this_month = datetime.datetime(today.year, today.month, 1)
last_month = this_month - datetime.timedelta(days=1)
last_month = datetime.datetime(last_month.year, last_month.month, 1)
this_week = today - datetime.timedelta(days=datetime.date.weekday(today))
this_week = datetime.datetime(this_week.year, this_week.month, this_week.day)
last_week = this_week - datetime.timedelta(days=7)
last_week = datetime.datetime(last_week.year, last_week.month, last_week.day)
result = dict()
for i in res:
if i.type_id not in result:
result[i.type_id] = dict(instance_count=0, accept_count=0,
this_month_count=0, this_week_count=0, last_month_count=0, last_week_count=0)
adts = AutoDiscoveryCIType.get_by(type_id=i.type_id, to_dict=False)
result[i.type_id]['rule_count'] = len(adts) + AutoDiscoveryCITypeRelation.get_by(
ad_type_id=i.type_id, only_query=True).count()
result[i.type_id]['exec_target_count'] = len(
set([j.oneagent_id for adt in adts for j in db.session.query(
AutoDiscoveryRuleSyncHistory.oneagent_id).filter(
AutoDiscoveryRuleSyncHistory.adt_id == adt.id)]))
result[i.type_id]['instance_count'] += 1
if i.is_accept:
result[i.type_id]['accept_count'] += 1
if last_month <= i.created_at < this_month:
result[i.type_id]['last_month_count'] += 1
elif i.created_at >= this_month:
result[i.type_id]['this_month_count'] += 1
if last_week <= i.created_at < this_week:
result[i.type_id]['last_week_count'] += 1
elif i.created_at >= this_week:
result[i.type_id]['this_week_count'] += 1
for type_id in result:
existed = AutoDiscoveryCounter.get_by(type_id=type_id, first=True, to_dict=False)
if existed is None:
AutoDiscoveryCounter.create(type_id=type_id, **result[type_id])
else:
existed.update(**result[type_id])
for i in AutoDiscoveryCounter.get_by(to_dict=False):
if i.type_id not in result:
i.delete()
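flush_adc_counter buckets each discovered instance into month and week windows computed from today; a quick worked sketch of the same boundary arithmetic, assuming today is Wednesday 2024-01-17:

import datetime

today = datetime.datetime(2024, 1, 17)                                       # a Wednesday, for illustration
this_month = datetime.datetime(today.year, today.month, 1)                   # 2024-01-01
last_month = this_month - datetime.timedelta(days=1)                         # 2023-12-31
last_month = datetime.datetime(last_month.year, last_month.month, 1)         # 2023-12-01
this_week = today - datetime.timedelta(days=datetime.date.weekday(today))    # Monday 2024-01-15
last_week = this_week - datetime.timedelta(days=7)                           # Monday 2024-01-08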
@classmethod
def clear_ad_exec_history(cls):
ci_types = CIType.get_by(to_dict=False)
for ci_type in ci_types:
for i in AutoDiscoveryExecHistory.get_by(type_id=ci_type.id, only_query=True).order_by(
AutoDiscoveryExecHistory.id.desc()).offset(50000):
i.delete(commit=False)
db.session.commit()
@classmethod
def get_adc_counter(cls):
return cache.get(cls.KEY2) or cls.flush_adc_counter()
@classmethod
def flush_sub_counter(cls):
result = dict(type_id2users=defaultdict(list))
types = db.session.query(PreferenceShowAttributes.type_id,
PreferenceShowAttributes.uid, PreferenceShowAttributes.created_at).filter(
PreferenceShowAttributes.deleted.is_(False)).group_by(
PreferenceShowAttributes.uid, PreferenceShowAttributes.type_id)
for i in types:
result['type_id2users'][i.type_id].append(i.uid)
types = PreferenceTreeView.get_by(to_dict=False)
for i in types:
if i.uid not in result['type_id2users'][i.type_id]:
result['type_id2users'][i.type_id].append(i.uid)
cache.set(cls.KEY3, result, timeout=0)
return result
@classmethod
def get_sub_counter(cls):
return cache.get(cls.KEY3) or cls.flush_sub_counter()
class AutoDiscoveryMappingCache(object):
PREFIX = 'CMDB::AutoDiscovery::Mapping::{}'
@classmethod
def get(cls, name):
res = cache.get(cls.PREFIX.format(name)) or {}
if not res:
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"auto_discovery/mapping/{}.yaml".format(name))
if os.path.exists(path):
with open(path, 'r') as f:
mapping = yaml.safe_load(f)
res = mapping.get('mapping') or {}
res and cache.set(cls.PREFIX.format(name), res, timeout=0)
return res
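AutoDiscoveryMappingCache lazily loads auto_discovery/mapping/<name>.yaml, keeps its top-level mapping key, and pins the result in cache with timeout=0. A hypothetical fragment of such a mapping and how get_predefined_value_mapping reads its per-provider map (field names are invented):

# shape after yaml.safe_load(...)['mapping'], illustrative only
mapping = {
    "status": {"aliyun": {"key": "instance.Status", "map": {"Running": "running", "Stopped": "stopped"}}},
}
value_map = mapping["status"]["aliyun"]["map"]
assert value_map.get("Running", "Running") == "running"   # raw cloud value -> predefined CMDB value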

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,8 +1,6 @@
# -*- coding:utf-8 -*-
from flask_babel import lazy_gettext as _l
from api.lib.utils import BaseEnum
@ -16,8 +14,6 @@ class ValueTypeEnum(BaseEnum):
JSON = "6"
PASSWORD = TEXT
LINK = TEXT
BOOL = "7"
REFERENCE = INT
class ConstraintEnum(BaseEnum):
@ -45,23 +41,20 @@ class OperateType(BaseEnum):
class CITypeOperateType(BaseEnum):
ADD = "0" # add CIType
UPDATE = "1" # update CIType
DELETE = "2" # delete CIType
ADD_ATTRIBUTE = "3"
UPDATE_ATTRIBUTE = "4"
DELETE_ATTRIBUTE = "5"
ADD_TRIGGER = "6"
UPDATE_TRIGGER = "7"
DELETE_TRIGGER = "8"
ADD_UNIQUE_CONSTRAINT = "9"
UPDATE_UNIQUE_CONSTRAINT = "10"
DELETE_UNIQUE_CONSTRAINT = "11"
ADD_RELATION = "12"
DELETE_RELATION = "13"
ADD_RECONCILIATION = "14"
UPDATE_RECONCILIATION = "15"
DELETE_RECONCILIATION = "16"
ADD = "0" # 新增模型
UPDATE = "1" # 修改模型
DELETE = "2" # 删除模型
ADD_ATTRIBUTE = "3" # 新增属性
UPDATE_ATTRIBUTE = "4" # 修改属性
DELETE_ATTRIBUTE = "5" # 删除属性
ADD_TRIGGER = "6" # 新增触发器
UPDATE_TRIGGER = "7" # 修改触发器
DELETE_TRIGGER = "8" # 删除触发器
ADD_UNIQUE_CONSTRAINT = "9" # 新增联合唯一
UPDATE_UNIQUE_CONSTRAINT = "10" # 修改联合唯一
DELETE_UNIQUE_CONSTRAINT = "11" # 删除联合唯一
ADD_RELATION = "12" # 新增关系
DELETE_RELATION = "13" # 删除关系
class RetKey(BaseEnum):
@ -77,7 +70,6 @@ class ResourceTypeEnum(BaseEnum):
RELATION_VIEW = "RelationView" # read/update/delete/grant
CI_FILTER = "CIFilter" # read
PAGE = "page" # read
TOPOLOGY_VIEW = "TopologyView" # read/update/delete/grant
class PermEnum(BaseEnum):
@ -97,8 +89,7 @@ class RoleEnum(BaseEnum):
class AutoDiscoveryType(BaseEnum):
AGENT = "agent"
SNMP = "snmp"
HTTP = "http" # cloud
COMPONENTS = "components"
HTTP = "http"
class AttributeDefaultValueEnum(BaseEnum):
@ -107,52 +98,12 @@ class AttributeDefaultValueEnum(BaseEnum):
AUTO_INC_ID = "$auto_inc_id"
class ExecuteStatusEnum(BaseEnum):
COMPLETED = '0'
FAILED = '1'
RUNNING = '2'
class RelationSourceEnum(BaseEnum):
ATTRIBUTE_VALUES = "0"
AUTO_DISCOVERY = "1"
class BuiltinModelEnum(BaseEnum):
IPAM_SUBNET = "ipam_subnet"
IPAM_ADDRESS = "ipam_address"
IPAM_SCOPE = "ipam_scope"
DCIM_REGION = "dcim_region"
DCIM_IDC = "dcim_idc"
DCIM_SERVER_ROOM = "dcim_server_room"
DCIM_RACK = "dcim_rack"
BUILTIN_ATTRIBUTES = {
"_updated_at": _l("Update Time"),
"_updated_by": _l("Updated By"),
}
CMDB_QUEUE = "one_cmdb_async"
REDIS_PREFIX_CI = "ONE_CMDB"
REDIS_PREFIX_CI_RELATION = "CMDB_CI_RELATION"
REDIS_PREFIX_CI_RELATION2 = "CMDB_CI_RELATION2"
BUILTIN_KEYWORDS = {'id', '_id', 'ci_id', 'type', '_type', 'ci_type', 'ticket_id', *BUILTIN_ATTRIBUTES.keys()}
class SysComputedAttributes(object):
from api.lib.cmdb.ipam.const import SubnetBuiltinAttributes
type2attr = {
BuiltinModelEnum.IPAM_SUBNET: {
SubnetBuiltinAttributes.HOSTS_COUNT,
SubnetBuiltinAttributes.ASSIGN_COUNT,
SubnetBuiltinAttributes.USED_COUNT,
SubnetBuiltinAttributes.FREE_COUNT
}
}
BUILTIN_KEYWORDS = {'id', '_id', 'ci_id', 'type', '_type', 'ci_type'}
L_TYPE = None
L_CI = None

View File

@ -1 +0,0 @@
# -*- coding:utf-8 -*-

View File

@ -1,33 +0,0 @@
# -*- coding:utf-8 -*-
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import ExistPolicy
class DCIMBase(object):
    def __init__(self):
        self.type_id = None

    @staticmethod
    def add_relation(parent_id, child_id):
        if not parent_id or not child_id:
            return

        CIRelationManager().add(parent_id, child_id, valid=False, apply_async=False)

    def add(self, parent_id, **kwargs):
        ci_id = CIManager().add(self.type_id, exist_policy=ExistPolicy.REJECT, **kwargs)

        if parent_id:
            self.add_relation(parent_id, ci_id)

        return ci_id

    @classmethod
    def update(cls, _id, **kwargs):
        CIManager().update(_id, **kwargs)

    @classmethod
    def delete(cls, _id):
        CIManager().delete(_id)

View File

@ -1,17 +0,0 @@
# -*- coding:utf-8 -*-
from api.lib.utils import BaseEnum
class RackBuiltinAttributes(BaseEnum):
    U_COUNT = 'u_count'
    U_START = 'u_start'
    FREE_U_COUNT = 'free_u_count'
    U_SLOT_ABNORMAL = 'u_slot_abnormal'


class OperateTypeEnum(BaseEnum):
    ADD_DEVICE = "0"
    REMOVE_DEVICE = "1"
    MOVE_DEVICE = "2"

View File

@ -1,40 +0,0 @@
from flask_login import current_user

from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.mixin import DBMixin
from api.models.cmdb import DCIMOperationHistory


class OperateHistoryManager(DBMixin):
    cls = DCIMOperationHistory

    @classmethod
    def search(cls, page, page_size, fl=None, only_query=False, reverse=False, count_query=False,
               last_size=None, **kwargs):
        numfound, result = super(OperateHistoryManager, cls).search(page, page_size, fl, only_query, reverse,
                                                                    count_query, last_size, **kwargs)

        ci_ids = [i['ci_id'] for i in result]
        id2ci = {i['_id']: i for i in (CIManager.get_cis_by_ids(ci_ids) or []) if i}

        type2show_key = dict()
        for i in id2ci.values():
            if i.get('_type') not in type2show_key:
                ci_type = CITypeCache.get(i.get('_type'))
                if ci_type:
                    show_key = AttributeCache.get(ci_type.show_id or ci_type.unique_id)
                    type2show_key[i['_type']] = show_key and show_key.name

        return numfound, result, id2ci, type2show_key

    def _can_add(self, **kwargs):
        kwargs['uid'] = current_user.uid

        return kwargs

    def _can_update(self, **kwargs):
        pass

    def _can_delete(self, **kwargs):
        pass

View File

@ -1,19 +0,0 @@
# -*- coding:utf-8 -*-
from flask import abort
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.dcim.base import DCIMBase
from api.lib.cmdb.resp_format import ErrFormat
class IDCManager(DCIMBase):
    def __init__(self):
        super(IDCManager, self).__init__()

        self.ci_type = CITypeCache.get(BuiltinModelEnum.DCIM_IDC) or abort(
            404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_IDC))
        self.type_id = self.ci_type.id
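A minimal sketch of how these DCIM managers are used, assuming the builtin dcim models are initialized; the parent region CI id and the name attribute are hypothetical. IDCManager inherits add/update/delete from DCIMBase above:

mgr = IDCManager()                                   # resolves the builtin dcim_idc model or aborts with 404
idc_id = mgr.add(parent_id=1001, name="idc-bj-01")   # creates the CI (ExistPolicy.REJECT) and links region -> idc
IDCManager.update(idc_id, name="idc-bj-02")
IDCManager.delete(idc_id)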

View File

@ -1,182 +0,0 @@
# -*- coding:utf-8 -*-
import itertools
import redis_lock
from flask import abort
from api.extensions import rd
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.dcim.base import DCIMBase
from api.lib.cmdb.dcim.const import OperateTypeEnum
from api.lib.cmdb.dcim.const import RackBuiltinAttributes
from api.lib.cmdb.dcim.history import OperateHistoryManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.lib.cmdb.search.ci_relation.search import Search as RelationSearch
class RackManager(DCIMBase):
def __init__(self):
super(RackManager, self).__init__()
self.ci_type = CITypeCache.get(BuiltinModelEnum.DCIM_RACK) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_RACK))
self.type_id = self.ci_type.id
@classmethod
def update(cls, _id, **kwargs):
if RackBuiltinAttributes.U_COUNT in kwargs:
devices, _, _, _, _, _ = RelationSearch(
[_id],
level=[1],
fl=[RackBuiltinAttributes.U_COUNT, RackBuiltinAttributes.U_START],
count=1000000).search()
for device in devices:
u_start = device.get(RackBuiltinAttributes.U_START)
u_count = device.get(RackBuiltinAttributes.U_COUNT) or 2
if u_start and u_start + u_count - 1 > kwargs[RackBuiltinAttributes.U_COUNT]:
return abort(400, ErrFormat.dcim_rack_u_count_invalid)
CIManager().update(_id, _sync=True, **kwargs)
if RackBuiltinAttributes.U_COUNT in kwargs:
payload = {RackBuiltinAttributes.FREE_U_COUNT: cls.calc_u_free_count(_id)}
CIManager().update(_id, _sync=True, **payload)
def delete(self, _id):
super(RackManager, self).delete(_id)
payload = {RackBuiltinAttributes.U_START: None}
_, _, second_cis = CIRelationManager.get_second_cis(_id, per_page='all')
for ci in second_cis:
CIManager().update(ci['_id'], **payload)
@staticmethod
def calc_u_free_count(rack_id, device_id=None, u_start=None, u_count=None):
rack = CIManager.get_ci_by_id(rack_id, need_children=False)
if not rack.get(RackBuiltinAttributes.U_COUNT):
return 0
if device_id is not None and u_count is None:
ci = CIManager().get_ci_by_id(device_id, need_children=False)
u_count = ci.get(RackBuiltinAttributes.U_COUNT) or 2
if u_start and u_start + u_count - 1 > rack.get(RackBuiltinAttributes.U_COUNT):
return abort(400, ErrFormat.dcim_rack_u_slot_invalid)
devices, _, _, _, _, _ = RelationSearch(
[rack_id],
level=[1],
fl=[RackBuiltinAttributes.U_COUNT, RackBuiltinAttributes.U_START],
count=1000000).search()
u_count_sum = 0
for device in devices:
u_count_sum += (device.get(RackBuiltinAttributes.U_COUNT) or 2)
if device_id is not None:
_u_start = device.get(RackBuiltinAttributes.U_START)
_u_count = device.get(RackBuiltinAttributes.U_COUNT) or 2
if not _u_start:
continue
if device.get('_id') != device_id and set(range(u_start, u_start + u_count)) & set(
range(_u_start, _u_start + _u_count)):
return abort(400, ErrFormat.dcim_rack_u_slot_invalid)
return rack[RackBuiltinAttributes.U_COUNT] - u_count_sum
def check_u_slot(self):
racks, _, _, _, _, _ = SearchFromDB(
"_type:{}".format(self.type_id),
count=10000000,
fl=[RackBuiltinAttributes.U_START, RackBuiltinAttributes.U_COUNT, RackBuiltinAttributes.U_SLOT_ABNORMAL],
parent_node_perm_passed=True).search()
for rack in racks:
devices, _, _, _, _, _ = RelationSearch(
[rack['_id']],
level=[1],
fl=[RackBuiltinAttributes.U_COUNT, RackBuiltinAttributes.U_START],
count=1000000).search()
u_slot_sets = []
for device in devices:
u_start = device.get(RackBuiltinAttributes.U_START)
u_count = device.get(RackBuiltinAttributes.U_COUNT) or 2
if u_start is not None and str(u_start).isdigit():
u_slot_sets.append(set(range(u_start, u_start + u_count)))
if len(u_slot_sets) > 1:
u_slot_abnormal = False
for a, b in itertools.combinations(u_slot_sets, 2):
if a.intersection(b):
u_slot_abnormal = True
break
if u_slot_abnormal != rack.get(RackBuiltinAttributes.U_SLOT_ABNORMAL):
payload = {RackBuiltinAttributes.U_SLOT_ABNORMAL: u_slot_abnormal}
CIManager().update(rack['_id'], **payload)
def add_device(self, rack_id, device_id, u_start, u_count=None):
with (redis_lock.Lock(rd.r, "DCIM_RACK_OPERATE_{}".format(rack_id), expire=10)):
self.calc_u_free_count(rack_id, device_id, u_start, u_count)
self.add_relation(rack_id, device_id)
payload = {RackBuiltinAttributes.U_START: u_start}
if u_count:
payload[RackBuiltinAttributes.U_COUNT] = u_count
CIManager().update(device_id, _sync=True, **payload)
payload = {
RackBuiltinAttributes.FREE_U_COUNT: self.calc_u_free_count(rack_id, device_id, u_start, u_count)}
CIManager().update(rack_id, _sync=True, **payload)
OperateHistoryManager().add(operate_type=OperateTypeEnum.ADD_DEVICE, rack_id=rack_id, ci_id=device_id)
def remove_device(self, rack_id, device_id):
with (redis_lock.Lock(rd.r, "DCIM_RACK_OPERATE_{}".format(rack_id), expire=10)):
CIRelationManager.delete_3(rack_id, device_id, apply_async=False, valid=False)
payload = {RackBuiltinAttributes.FREE_U_COUNT: self.calc_u_free_count(rack_id)}
CIManager().update(rack_id, _sync=True, **payload)
payload = {RackBuiltinAttributes.U_START: None}
CIManager().update(device_id, _sync=True, **payload)
OperateHistoryManager().add(operate_type=OperateTypeEnum.REMOVE_DEVICE, rack_id=rack_id, ci_id=device_id)
def move_device(self, rack_id, device_id, to_u_start):
with (redis_lock.Lock(rd.r, "DCIM_RACK_OPERATE_{}".format(rack_id), expire=10)):
payload = {RackBuiltinAttributes.FREE_U_COUNT: self.calc_u_free_count(rack_id, device_id, to_u_start)}
CIManager().update(rack_id, _sync=True, **payload)
CIManager().update(device_id, _sync=True, **{RackBuiltinAttributes.U_START: to_u_start})
OperateHistoryManager().add(operate_type=OperateTypeEnum.MOVE_DEVICE, rack_id=rack_id, ci_id=device_id)
def migrate_device(self, rack_id, device_id, to_rack_id, to_u_start):
with (redis_lock.Lock(rd.r, "DCIM_RACK_OPERATE_{}".format(rack_id), expire=10)):
self.calc_u_free_count(to_rack_id, device_id, to_u_start)
if rack_id != to_rack_id:
CIRelationManager.delete_3(rack_id, device_id, apply_async=False, valid=False)
self.add_relation(to_rack_id, device_id)
payload = {
RackBuiltinAttributes.FREE_U_COUNT: self.calc_u_free_count(to_rack_id, device_id, to_u_start)}
CIManager().update(to_rack_id, _sync=True, **payload)
CIManager().update(device_id, _sync=True, **{RackBuiltinAttributes.U_START: to_u_start})
if rack_id != to_rack_id:
payload = {RackBuiltinAttributes.FREE_U_COUNT: self.calc_u_free_count(rack_id)}
CIManager().update(rack_id, _sync=True, **payload)
OperateHistoryManager().add(operate_type=OperateTypeEnum.REMOVE_DEVICE, rack_id=rack_id, ci_id=device_id)
OperateHistoryManager().add(operate_type=OperateTypeEnum.ADD_DEVICE, rack_id=to_rack_id, ci_id=device_id)
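A usage sketch for the rack operations above; rack/device ids and U positions are hypothetical. Each call takes the per-rack redis lock, re-validates U slots via calc_u_free_count and records a DCIM operation history entry:

rm = RackManager()
rm.add_device(rack_id=1001, device_id=2001, u_start=10, u_count=2)    # occupy U10-U11, refresh free_u_count
rm.move_device(rack_id=1001, device_id=2001, to_u_start=20)           # same rack, new u_start
rm.migrate_device(rack_id=1001, device_id=2001, to_rack_id=1002, to_u_start=5)
rm.remove_device(rack_id=1002, device_id=2001)                        # clears u_start and frees the slots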

View File

@ -1,29 +0,0 @@
# -*- coding:utf-8 -*-
from flask import abort
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.resp_format import ErrFormat
class RegionManager(object):
    def __init__(self):
        self.ci_type = CITypeCache.get(BuiltinModelEnum.DCIM_REGION) or abort(
            404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_REGION))
        self.type_id = self.ci_type.id

    def add(self, **kwargs):
        return CIManager().add(self.type_id, exist_policy=ExistPolicy.REJECT, **kwargs)

    @classmethod
    def update(cls, _id, **kwargs):
        CIManager().update(_id, **kwargs)

    @classmethod
    def delete(cls, _id):
        CIManager().delete(_id)

View File

@ -1,56 +0,0 @@
# -*- coding:utf-8 -*-
from flask import abort
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.dcim.base import DCIMBase
from api.lib.cmdb.dcim.const import RackBuiltinAttributes
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
class ServerRoomManager(DCIMBase):
def __init__(self):
super(ServerRoomManager, self).__init__()
self.ci_type = CITypeCache.get(BuiltinModelEnum.DCIM_SERVER_ROOM) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_SERVER_ROOM))
self.type_id = self.ci_type.id
@staticmethod
def get_racks(_id, q=None):
rack_type = CITypeCache.get(BuiltinModelEnum.DCIM_RACK) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_RACK))
relations = CIRelation.get_by(first_ci_id=_id, only_query=True).join(
CI, CI.id == CIRelation.second_ci_id).filter(CI.type_id == rack_type.id)
rack_ids = [i.second_ci_id for i in relations]
q = "_type:{}".format(rack_type.id) if not q else "_type:{},{}".format(rack_type.id, q)
if rack_ids:
response, _, _, _, numfound, _ = SearchFromDB(
q,
ci_ids=list(rack_ids),
count=1000000,
parent_node_perm_passed=True).search()
else:
response, numfound = [], 0
counter = dict(rack_count=numfound)
u_count = 0
free_u_count = 0
for i in response:
_u_count = i.get(RackBuiltinAttributes.U_COUNT) or 0
u_count += _u_count
free_u_count += (_u_count if i.get(RackBuiltinAttributes.FREE_U_COUNT) is None else
i.get(RackBuiltinAttributes.FREE_U_COUNT))
counter["u_count"] = u_count
counter["u_used_count"] = u_count - free_u_count
counter["device_count"] = CIRelation.get_by(only_query=True).filter(
CIRelation.first_ci_id.in_(rack_ids)).count()
return counter, response
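A sketch of the room-level summary above; the server room CI id and the extra rack query are hypothetical:

counter, racks = ServerRoomManager.get_racks(_id=301, q="name:rack*")
# counter -> {'rack_count': ..., 'u_count': ..., 'u_used_count': ..., 'device_count': ...}
free_u = counter['u_count'] - counter['u_used_count']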

View File

@ -1,85 +0,0 @@
# -*- coding:utf-8 -*-
from collections import defaultdict
from flask import abort
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
class TreeViewManager(object):
@classmethod
def get(cls):
region_type = CITypeCache.get(BuiltinModelEnum.DCIM_REGION) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_REGION))
idc_type = CITypeCache.get(BuiltinModelEnum.DCIM_IDC) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_IDC))
server_room_type = CITypeCache.get(BuiltinModelEnum.DCIM_SERVER_ROOM) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_SERVER_ROOM))
rack_type = CITypeCache.get(BuiltinModelEnum.DCIM_RACK) or abort(
404, ErrFormat.dcim_builtin_model_not_found.format(BuiltinModelEnum.DCIM_RACK))
relations = defaultdict(set)
ids = set()
has_parent_ids = set()
for i in CIRelation.get_by(only_query=True).join(CI, CI.id == CIRelation.first_ci_id).filter(
CI.type_id.in_([region_type.id, idc_type.id])):
relations[i.first_ci_id].add(i.second_ci_id)
ids.add(i.first_ci_id)
ids.add(i.second_ci_id)
has_parent_ids.add(i.second_ci_id)
for i in CIRelation.get_by(only_query=True).join(
CI, CI.id == CIRelation.second_ci_id).filter(CI.type_id.in_([idc_type.id, server_room_type.id])):
relations[i.first_ci_id].add(i.second_ci_id)
ids.add(i.first_ci_id)
ids.add(i.second_ci_id)
has_parent_ids.add(i.second_ci_id)
for i in CI.get_by(only_query=True).filter(CI.type_id.in_([region_type.id, idc_type.id])):
ids.add(i.id)
for _id in ids:
if _id not in has_parent_ids:
relations[None].add(_id)
type2name = dict()
type2name[region_type.id] = AttributeCache.get(region_type.show_id or region_type.unique_id).name
type2name[idc_type.id] = AttributeCache.get(idc_type.show_id or idc_type.unique_id).name
type2name[server_room_type.id] = AttributeCache.get(server_room_type.show_id or server_room_type.unique_id).name
response, _, _, _, _, _ = SearchFromDB(
"_type:({})".format(";".join(map(str, [region_type.id, idc_type.id, server_room_type.id]))),
ci_ids=list(ids),
count=1000000,
fl=list(type2name.values()),
parent_node_perm_passed=True).search()
id2ci = {i['_id']: i for i in response}
def _build_tree(_tree, parent_id=None):
tree = []
for child_id in _tree.get(parent_id, []):
children = sorted(_build_tree(_tree, child_id), key=lambda x: x['_id'])
if not id2ci.get(child_id):
continue
ci = id2ci[child_id]
if ci['ci_type'] == BuiltinModelEnum.DCIM_SERVER_ROOM:
ci['rack_count'] = CIRelation.get_by(first_ci_id=child_id, only_query=True).join(
CI, CI.id == CIRelation.second_ci_id).filter(CI.type_id == rack_type.id).count()
tree.append({'children': children, **ci})
return tree
result = sorted(_build_tree(relations), key=lambda x: x['_id'])
return result, type2name
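The tree returned above is a list of plain CI dicts nested under 'children' and sorted by _id at every level; a hypothetical result looks roughly like this:

result, type2name = TreeViewManager.get()
# result (roughly):
# [{'_id': 1, 'ci_type': 'dcim_region', 'children': [
#     {'_id': 5, 'ci_type': 'dcim_idc', 'children': [
#         {'_id': 9, 'ci_type': 'dcim_server_room', 'rack_count': 4, 'children': []}]}]}]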

View File

@ -10,11 +10,9 @@ from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import RelationTypeCache
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.perm.acl.cache import UserCache
from api.models.cmdb import CI
from api.models.cmdb import Attribute
from api.models.cmdb import AttributeHistory
from api.models.cmdb import CIRelationHistory
@ -23,13 +21,12 @@ from api.models.cmdb import CITypeHistory
from api.models.cmdb import CITypeTrigger
from api.models.cmdb import CITypeUniqueConstraint
from api.models.cmdb import OperationRecord
from api.lib.cmdb.utils import TableMap
class AttributeHistoryManger(object):
@staticmethod
def get_records_for_attributes(start, end, username, page, page_size, operate_type, type_id,
ci_id=None, attr_id=None, ci_ids=None, more=False):
ci_id=None, attr_id=None):
records = db.session.query(OperationRecord, AttributeHistory).join(
AttributeHistory, OperationRecord.id == AttributeHistory.record_id)
@ -51,9 +48,6 @@ class AttributeHistoryManger(object):
if ci_id is not None:
records = records.filter(AttributeHistory.ci_id == ci_id)
if ci_ids and isinstance(ci_ids, list):
records = records.filter(AttributeHistory.ci_id.in_(ci_ids))
if attr_id is not None:
records = records.filter(AttributeHistory.attr_id == attr_id)
@ -61,39 +55,17 @@ class AttributeHistoryManger(object):
total = len(records)
res = {}
show_attr_set = {}
show_attr_cache = {}
for record in records:
record_id = record.OperationRecord.id
type_id = record.OperationRecord.type_id
ci_id = record.AttributeHistory.ci_id
show_attr_set[ci_id] = None
show_attr = show_attr_cache.setdefault(
type_id,
AttributeCache.get(
CITypeCache.get(type_id).show_id or CITypeCache.get(type_id).unique_id) if CITypeCache.get(type_id) else None
)
if show_attr:
attr_table = TableMap(attr=show_attr).table
attr_record = attr_table.get_by(attr_id=show_attr.id, ci_id=ci_id, first=True, to_dict=False)
show_attr_set[ci_id] = attr_record.value if attr_record else None
attr_hist = record.AttributeHistory.to_dict()
attr_hist['attr'] = AttributeCache.get(attr_hist['attr_id'])
if attr_hist['attr']:
attr_hist['attr_name'] = attr_hist['attr'].name
attr_hist['attr_alias'] = attr_hist['attr'].alias
if more:
attr_hist['is_list'] = attr_hist['attr'].is_list
attr_hist['is_computed'] = attr_hist['attr'].is_computed
attr_hist['is_password'] = attr_hist['attr'].is_password
attr_hist['default'] = attr_hist['attr'].default
attr_hist['value_type'] = attr_hist['attr'].value_type
attr_hist.pop("attr")
if record_id not in res:
record_dict = record.OperationRecord.to_dict()
record_dict['show_attr_value'] = show_attr_set.get(ci_id)
record_dict["user"] = UserCache.get(record_dict.get("uid"))
if record_dict["user"]:
record_dict['user'] = record_dict['user'].nickname
@ -189,14 +161,12 @@ class AttributeHistoryManger(object):
record = i.OperationRecord
item = dict(attr_name=attr.name,
attr_alias=attr.alias,
value_type=attr.value_type,
operate_type=hist.operate_type,
username=user and user.nickname,
old=hist.old,
new=hist.new,
created_at=record.created_at.strftime('%Y-%m-%d %H:%M:%S'),
record_id=record.id,
ticket_id=record.ticket_id,
hid=hist.id
)
result.append(item)
@ -230,9 +200,9 @@ class AttributeHistoryManger(object):
return username, timestamp, attr_dict, rel_dict
@staticmethod
def add(record_id, ci_id, history_list, type_id=None, ticket_id=None, flush=False, commit=True):
def add(record_id, ci_id, history_list, type_id=None, flush=False, commit=True):
if record_id is None:
record = OperationRecord.create(uid=current_user.uid, type_id=type_id, ticket_id=ticket_id)
record = OperationRecord.create(uid=current_user.uid, type_id=type_id)
record_id = record.id
for attr_id, operate_type, old, new in history_list or []:
@ -250,8 +220,8 @@ class AttributeHistoryManger(object):
class CIRelationHistoryManager(object):
@staticmethod
def add(rel_obj, operate_type=OperateType.ADD, uid=None):
record = OperationRecord.create(uid=uid or current_user.uid)
def add(rel_obj, operate_type=OperateType.ADD):
record = OperationRecord.create(uid=current_user.uid)
CIRelationHistory.create(relation_id=rel_obj.id,
record_id=record.id,
@ -300,7 +270,7 @@ class CITypeHistoryManager(object):
return numfound, result
@staticmethod
def add(operate_type, type_id, attr_id=None, trigger_id=None, unique_constraint_id=None, change=None, rc_id=None):
def add(operate_type, type_id, attr_id=None, trigger_id=None, unique_constraint_id=None, change=None):
if type_id is None and attr_id is not None:
from api.models.cmdb import CITypeAttribute
type_ids = [i.type_id for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False)]
@ -313,7 +283,6 @@ class CITypeHistoryManager(object):
uid=current_user.uid,
attr_id=attr_id,
trigger_id=trigger_id,
rc_id=rc_id,
unique_constraint_id=unique_constraint_id,
change=change)
@ -325,7 +294,7 @@ class CITriggerHistoryManager(object):
def get(page, page_size, type_id=None, trigger_id=None, operate_type=None):
query = CITriggerHistory.get_by(only_query=True)
if type_id:
query = query.join(CI, CI.id == CITriggerHistory.ci_id).filter(CI.type_id == type_id)
query = query.filter(CITriggerHistory.type_id == type_id)
if trigger_id:
query = query.filter(CITriggerHistory.trigger_id == trigger_id)

View File

@ -1 +0,0 @@
# -*- coding:utf-8 -*-

View File

@ -1,132 +0,0 @@
# -*- coding:utf-8 -*-
import redis_lock
from flask import abort
from api.extensions import rd
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.ipam.const import IPAddressAssignStatus
from api.lib.cmdb.ipam.const import IPAddressBuiltinAttributes
from api.lib.cmdb.ipam.const import OperateTypeEnum
from api.lib.cmdb.ipam.const import SubnetBuiltinAttributes
from api.lib.cmdb.ipam.history import OperateHistoryManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.lib.cmdb.search.ci_relation.search import Search as RelationSearch
class IpAddressManager(object):
def __init__(self):
self.ci_type = CITypeCache.get(BuiltinModelEnum.IPAM_ADDRESS) or abort(
404, ErrFormat.ipam_address_model_not_found.format(BuiltinModelEnum.IPAM_ADDRESS))
self.type_id = self.ci_type.id
@staticmethod
def list_ip_address(parent_id):
numfound, _, result = CIRelationManager.get_second_cis(parent_id, per_page="all")
return numfound, result
def _get_cis(self, subnet_id, ips):
q = "_type:{},{}:({})".format(self.type_id, IPAddressBuiltinAttributes.IP, ";".join(ips or []))
response, _, _, _, _, _ = RelationSearch([subnet_id], level=[1], query=q, count=1000000).search()
return response
@staticmethod
def _add_relation(parent_id, child_id):
if not parent_id or not child_id:
return
CIRelationManager().add(parent_id, child_id, valid=False, apply_async=False)
@staticmethod
def calc_used_count(subnet_id):
q = "{}:(0;2),-{}:true".format(IPAddressBuiltinAttributes.ASSIGN_STATUS, IPAddressBuiltinAttributes.IS_USED)
return len(set(RelationSearch([subnet_id], level=[1], query=q, count=1000000).search(only_ids=True) or []))
@staticmethod
def _calc_assign_count(subnet_id):
q = "{}:(0;2)".format(IPAddressBuiltinAttributes.ASSIGN_STATUS)
return len(set(RelationSearch([subnet_id], level=[1], query=q, count=1000000).search(only_ids=True) or []))
def _update_subnet_count(self, subnet_id, assign_count_computed, used_count=None):
payload = {}
cur = CIManager.get_ci_by_id(subnet_id, need_children=False)
if assign_count_computed:
payload[SubnetBuiltinAttributes.ASSIGN_COUNT] = self._calc_assign_count(subnet_id)
if used_count is not None:
payload[SubnetBuiltinAttributes.USED_COUNT] = used_count
payload[SubnetBuiltinAttributes.FREE_COUNT] = (cur[SubnetBuiltinAttributes.HOSTS_COUNT] -
self.calc_used_count(subnet_id))
CIManager().update(subnet_id, **payload)
def assign_ips(self, ips, subnet_id, cidr, **kwargs):
"""
:param ips: ip list
:param subnet_id: subnet id
:param cidr: subnet cidr
:param kwargs: other attributes for ip address
:return:
"""
if subnet_id is not None:
subnet = CIManager.get_ci_by_id(subnet_id)
else:
cis, _, _, _, _, _ = SearchFromDB("_type:{},{}:{}".format(
BuiltinModelEnum.IPAM_SUBNET, SubnetBuiltinAttributes.CIDR, cidr),
parent_node_perm_passed=True).search()
if cis:
subnet = cis[0]
subnet_id = subnet['_id']
else:
return abort(400, ErrFormat.ipam_address_model_not_found)
with (redis_lock.Lock(rd.r, "IPAM_ASSIGN_ADDRESS_{}".format(subnet_id), expire=10)):
cis = self._get_cis(subnet_id, ips)
ip2ci = {ci[IPAddressBuiltinAttributes.IP]: ci for ci in cis}
ci_ids = []
for ip in ips:
kwargs['name'] = ip
kwargs[IPAddressBuiltinAttributes.IP] = ip
if ip not in ip2ci:
ci_id = CIManager.add(self.type_id, _sync=True, **kwargs)
else:
ci_id = ip2ci[ip]['_id']
CIManager().update(ci_id, _sync=True, **kwargs)
ci_ids.append(ci_id)
self._add_relation(subnet_id, ci_id)
if ips and IPAddressBuiltinAttributes.ASSIGN_STATUS in kwargs:
self._update_subnet_count(subnet_id, True)
if ips and IPAddressBuiltinAttributes.IS_USED in kwargs:
q = "{}:true".format(IPAddressBuiltinAttributes.IS_USED)
cur_used_ids = RelationSearch([subnet_id], level=[1], query=q).search(only_ids=True)
for _id in set(cur_used_ids) - set(ci_ids):
CIManager().update(_id, **{IPAddressBuiltinAttributes.IS_USED: False})
self._update_subnet_count(subnet_id, False, used_count=len(ips))
if kwargs.get(IPAddressBuiltinAttributes.ASSIGN_STATUS) in (
IPAddressAssignStatus.ASSIGNED, IPAddressAssignStatus.RESERVED):
OperateHistoryManager().add(operate_type=OperateTypeEnum.ASSIGN_ADDRESS,
cidr=subnet.get(SubnetBuiltinAttributes.CIDR),
description=" | ".join(ips))
elif kwargs.get(IPAddressBuiltinAttributes.ASSIGN_STATUS) == IPAddressAssignStatus.UNASSIGNED:
OperateHistoryManager().add(operate_type=OperateTypeEnum.REVOKE_ADDRESS,
cidr=subnet.get(SubnetBuiltinAttributes.CIDR),
description=" | ".join(ips))

View File

@ -1,35 +0,0 @@
# -*- coding:utf-8 -*-
from api.lib.utils import BaseEnum
class IPAddressAssignStatus(BaseEnum):
    ASSIGNED = 0
    UNASSIGNED = 1
    RESERVED = 2


class OperateTypeEnum(BaseEnum):
    ADD_SCOPE = "0"
    UPDATE_SCOPE = "1"
    DELETE_SCOPE = "2"
    ADD_SUBNET = "3"
    UPDATE_SUBNET = "4"
    DELETE_SUBNET = "5"
    ASSIGN_ADDRESS = "6"
    REVOKE_ADDRESS = "7"


class SubnetBuiltinAttributes(BaseEnum):
    NAME = 'name'
    CIDR = 'cidr'
    HOSTS_COUNT = 'hosts_count'
    ASSIGN_COUNT = 'assign_count'
    USED_COUNT = 'used_count'
    FREE_COUNT = 'free_count'


class IPAddressBuiltinAttributes(BaseEnum):
    IP = 'ip'
    ASSIGN_STATUS = 'assign_status'  # enum: 0 - assigned 1 - unassigned 2 - reserved
    IS_USED = 'is_used'  # bool

View File

@ -1,61 +0,0 @@
# -*- coding:utf-8 -*-
from flask_login import current_user
from api.lib.cmdb.ipam.const import IPAddressBuiltinAttributes
from api.lib.mixin import DBMixin
from api.models.cmdb import IPAMOperationHistory
from api.models.cmdb import IPAMSubnetScan
from api.models.cmdb import IPAMSubnetScanHistory
class OperateHistoryManager(DBMixin):
cls = IPAMOperationHistory
def _can_add(self, **kwargs):
kwargs['uid'] = current_user.uid
return kwargs
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass
class ScanHistoryManager(DBMixin):
cls = IPAMSubnetScanHistory
def _can_add(self, **kwargs):
return kwargs
def add(self, **kwargs):
kwargs.pop('_key', None)
kwargs.pop('_secret', None)
ci_id = kwargs.pop('ci_id', None)
existed = self.cls.get_by(exec_id=kwargs['exec_id'], first=True, to_dict=False)
if existed is None:
self.cls.create(**kwargs)
else:
existed.update(**kwargs)
if kwargs.get('ips'):
from api.lib.cmdb.ipam.address import IpAddressManager
IpAddressManager().assign_ips(kwargs['ips'], ci_id, kwargs.get('cidr'),
**{IPAddressBuiltinAttributes.IS_USED: 1})
scan_rule = IPAMSubnetScan.get_by(ci_id=ci_id, first=True, to_dict=False)
if scan_rule is not None:
scan_rule.update(last_scan_time=kwargs.get('start_at'))
for i in self.cls.get_by(subnet_scan_id=kwargs.get('subnet_scan_id'), only_query=True).order_by(
self.cls.id.desc()).offset(100):
i.delete()
def _can_update(self, **kwargs):
pass
def _can_delete(self, **kwargs):
pass

View File

@ -1,104 +0,0 @@
# -*- coding:utf-8 -*-
import json
from flask import abort
from api.extensions import rd
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import IPAMSubnetScan
class Stats(object):
def __init__(self):
self.address_type = CITypeCache.get(BuiltinModelEnum.IPAM_ADDRESS) or abort(
404, ErrFormat.ipam_address_model_not_found.format(BuiltinModelEnum.IPAM_ADDRESS))
self.address_type_id = self.address_type.id
self.subnet_type = CITypeCache.get(BuiltinModelEnum.IPAM_SUBNET) or abort(
404, ErrFormat.ipam_address_model_not_found.format(BuiltinModelEnum.IPAM_ADDRESS))
self.subnet_type_id = self.subnet_type.id
def leaf_nodes(self, parent_id):
if str(parent_id) == '0': # all
ci_ids = [i.id for i in CI.get_by(type_id=self.subnet_type_id, to_dict=False)]
has_children_ci_ids = [i.first_ci_id for i in CIRelation.get_by(
only_query=True).join(CI, CIRelation.second_ci_id == CI.id).filter(
CIRelation.first_ci_id.in_(ci_ids)).filter(CI.type_id == self.subnet_type_id)]
return list(set(ci_ids) - set(has_children_ci_ids))
else:
_type = CIManager().get_by_id(parent_id)
if not _type:
return abort(404, ErrFormat.ipam_subnet_not_found)
key = [(str(parent_id), _type.type_id)]
result = []
while True:
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(
[i[0] for i in key], REDIS_PREFIX_CI_RELATION) or []]]
for idx, i in enumerate(res):
if (not i or list(i)[0][1] == self.address_type_id) and key[idx][1] == self.subnet_type_id:
result.append(int(key[idx][0]))
res = [j for i in res for j in i] # [(id, type_id)]
if not res:
return result
key = res
def statistic_subnets(self, subnet_ids):
if subnet_ids:
response, _, _, _, _, _ = SearchFromDB(
"_type:{}".format(self.subnet_type_id),
ci_ids=subnet_ids,
count=1000000,
parent_node_perm_passed=True,
).search()
else:
response = []
scans = IPAMSubnetScan.get_by(only_query=True).filter(IPAMSubnetScan.ci_id.in_(list(map(int, subnet_ids))))
id2scan = {i.ci_id: i for i in scans}
address_num, address_free_num, address_assign_num, address_used_num = 0, 0, 0, 0
for subnet in response:
address_num += (subnet.get('hosts_count') or 0)
address_free_num += (subnet.get('free_count') or 0)
address_assign_num += (subnet.get('assign_count') or 0)
address_used_num += (subnet.get('used_count') or 0)
if id2scan.get(subnet['_id']):
subnet['scan_enabled'] = id2scan[subnet['_id']].scan_enabled
subnet['last_scan_time'] = id2scan[subnet['_id']].last_scan_time
else:
subnet['scan_enabled'] = False
subnet['last_scan_time'] = None
return response, address_num, address_free_num, address_assign_num, address_used_num
def summary(self, parent_id):
subnet_ids = self.leaf_nodes(parent_id)
subnets, address_num, address_free_num, address_assign_num, address_used_num = (
self.statistic_subnets(subnet_ids))
return dict(subnet_num=len(subnets),
address_num=address_num,
address_free_num=address_free_num,
address_assign_num=address_assign_num,
address_unassign_num=address_num - address_assign_num,
address_used_num=address_used_num,
address_used_free_num=address_num - address_used_num,
subnets=subnets)
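A sketch of the IPAM statistics entry point above; passing parent_id=0 aggregates over all leaf subnets, any other id walks the relation tree down from that node:

summary = Stats().summary(parent_id=0)
# summary -> {'subnet_num': ..., 'address_num': ..., 'address_free_num': ...,
#             'address_assign_num': ..., 'address_unassign_num': ...,
#             'address_used_num': ..., 'address_used_free_num': ..., 'subnets': [...]}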

View File

@ -1,355 +0,0 @@
# -*- coding:utf-8 -*-
from collections import defaultdict
import datetime
import ipaddress
from flask import abort
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import BuiltinModelEnum
from api.lib.cmdb.ipam.const import OperateTypeEnum
from api.lib.cmdb.ipam.const import SubnetBuiltinAttributes
from api.lib.cmdb.ipam.history import OperateHistoryManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import IPAMSubnetScan
class SubnetManager(object):
def __init__(self):
self.ci_type = CITypeCache.get(BuiltinModelEnum.IPAM_SUBNET) or abort(
404, ErrFormat.ipam_subnet_model_not_found.format(BuiltinModelEnum.IPAM_SUBNET))
self.type_id = self.ci_type.id
def scan_rules(self, oneagent_id, last_update_at=None):
result = []
rules = IPAMSubnetScan.get_by(agent_id=oneagent_id, to_dict=True)
ci_ids = [i['ci_id'] for i in rules]
if ci_ids:
response, _, _, _, _, _ = SearchFromDB("_type:{}".format(self.type_id),
ci_ids=list(ci_ids),
count=1000000,
fl=[SubnetBuiltinAttributes.CIDR],
parent_node_perm_passed=True).search()
id2ci = {i['_id']: i for i in response}
for rule in rules:
if rule['ci_id'] in id2ci:
rule[SubnetBuiltinAttributes.CIDR] = id2ci[rule['ci_id']][SubnetBuiltinAttributes.CIDR]
result.append(rule)
new_last_update_at = ""
for i in result:
__last_update_at = max([i['rule_updated_at'] or "", i['created_at'] or ""])
if new_last_update_at < __last_update_at:
new_last_update_at = __last_update_at
if not last_update_at or new_last_update_at > last_update_at:
return result, new_last_update_at
else:
return [], new_last_update_at
@staticmethod
def get_hosts(cidr):
try:
return list(map(str, ipaddress.ip_network(cidr).hosts()))
except ValueError:
return []
def get_by_id(self, subnet_id):
response, _, _, _, _, _ = SearchFromDB("_type:{}".format(self.type_id),
ci_ids=[subnet_id],
parent_node_perm_passed=True).search()
scan_rule = IPAMSubnetScan.get_by(ci_id=subnet_id, first=True, to_dict=True)
if scan_rule and response:
scan_rule.update(response[0])
return scan_rule
def tree_view(self):
scope = CITypeCache.get(BuiltinModelEnum.IPAM_SCOPE)
ci_types = scope and [scope.id, self.type_id] or [self.type_id]
relations = defaultdict(set)
ids = set()
has_parent_ids = set()
for i in CIRelation.get_by(only_query=True).join(
CI, CI.id == CIRelation.first_ci_id).filter(CI.type_id.in_(ci_types)):
relations[i.first_ci_id].add(i.second_ci_id)
ids.add(i.first_ci_id)
ids.add(i.second_ci_id)
has_parent_ids.add(i.second_ci_id)
for i in CIRelation.get_by(only_query=True).join(
CI, CI.id == CIRelation.second_ci_id).filter(CI.type_id.in_(ci_types)):
relations[i.first_ci_id].add(i.second_ci_id)
ids.add(i.first_ci_id)
ids.add(i.second_ci_id)
has_parent_ids.add(i.second_ci_id)
for i in CI.get_by(only_query=True).filter(CI.type_id.in_(ci_types)):
ids.add(i.id)
for _id in ids:
if _id not in has_parent_ids:
relations[None].add(_id)
type2name = dict()
type2name[self.type_id] = AttributeCache.get(self.ci_type.show_id or self.ci_type.unique_id).name
fl = [type2name[self.type_id]]
if scope:
type2name[scope.id] = AttributeCache.get(scope.show_id or scope.unique_id).name
fl.append(type2name[scope.id])
response, _, _, _, _, _ = SearchFromDB("_type:({})".format(";".join(map(str, ci_types))),
ci_ids=list(ids),
count=1000000,
fl=list(set(fl + [SubnetBuiltinAttributes.CIDR])),
parent_node_perm_passed=True).search()
id2ci = {i['_id']: i for i in response}
def _build_tree(_tree, parent_id=None):
tree = []
for child_id in _tree.get(parent_id, []):
children = sorted(_build_tree(_tree, child_id), key=lambda x: x['_id'])
if not id2ci.get(child_id):
continue
tree.append({'children': children, **id2ci[child_id]})
return tree
result = sorted(_build_tree(relations), key=lambda x: x['_id'])
return result, type2name
@staticmethod
def _is_valid_cidr(cidr):
try:
cidr = ipaddress.ip_network(cidr)
if not (8 <= cidr.prefixlen <= 31):
raise ValueError
return str(cidr)
except ValueError:
return abort(400, ErrFormat.ipam_cidr_invalid_notation.format(cidr))
def _check_root_node_is_overlapping(self, cidr, _id=None):
none_root_nodes = [i.id for i in CI.get_by(only_query=True).join(
CIRelation, CIRelation.second_ci_id == CI.id).filter(CI.type_id == self.type_id)]
all_nodes = [i.id for i in CI.get_by(type_id=self.type_id, to_dict=False, fl=['id'])]
root_nodes = set(all_nodes) - set(none_root_nodes) - set(_id and [_id] or [])
response, _, _, _, _, _ = SearchFromDB("_type:{}".format(self.type_id),
ci_ids=list(root_nodes),
count=1000000,
parent_node_perm_passed=True).search()
cur_subnet = ipaddress.ip_network(cidr)
for item in response:
if item['_id'] == _id:
continue
if cur_subnet.overlaps(ipaddress.ip_network(item.get(SubnetBuiltinAttributes.CIDR))):
return abort(400, ErrFormat.ipam_subnet_overlapped.format(cidr, item.get(SubnetBuiltinAttributes.CIDR)))
return cidr
def _check_child_node_is_overlapping(self, parent_id, cidr, _id=None):
child_nodes = [i.second_ci_id for i in CIRelation.get_by(
first_ci_id=parent_id, to_dict=False, fl=['second_ci_id']) if i.second_ci_id != _id]
if not child_nodes:
return
response, _, _, _, _, _ = SearchFromDB("_type:{}".format(self.type_id),
ci_ids=list(child_nodes),
count=1000000,
parent_node_perm_passed=True).search()
cur_subnet = ipaddress.ip_network(cidr)
for item in response:
if item['_id'] == _id:
continue
if cur_subnet.overlaps(ipaddress.ip_network(item.get(SubnetBuiltinAttributes.CIDR))):
return abort(400, ErrFormat.ipam_subnet_overlapped.format(cidr, item.get(SubnetBuiltinAttributes.CIDR)))
def validate_cidr(self, parent_id, cidr, _id=None):
cidr = self._is_valid_cidr(cidr)
if not parent_id:
return self._check_root_node_is_overlapping(cidr, _id)
parent_subnet = CIManager().get_ci_by_id(parent_id, need_children=False)
if parent_subnet['ci_type'] == BuiltinModelEnum.IPAM_SUBNET:
if parent_subnet.get(SubnetBuiltinAttributes.CIDR):
prefix = int(cidr.split('/')[1])
if int(parent_subnet[SubnetBuiltinAttributes.CIDR].split('/')[1]) >= prefix:
return abort(400, ErrFormat.ipam_subnet_prefix_length_invalid.format(prefix))
valid_subnets = [str(i) for i in
ipaddress.ip_network(parent_subnet[SubnetBuiltinAttributes.CIDR]).subnets(
new_prefix=prefix)]
if cidr not in valid_subnets:
return abort(400, ErrFormat.ipam_cidr_invalid_subnet.format(cidr, valid_subnets))
else:
return abort(400, ErrFormat.ipam_parent_subnet_node_cidr_cannot_empty)
self._check_child_node_is_overlapping(parent_id, cidr, _id)
return cidr
def _add_subnet(self, cidr, **kwargs):
kwargs[SubnetBuiltinAttributes.HOSTS_COUNT] = len(list(ipaddress.ip_network(cidr).hosts()))
kwargs[SubnetBuiltinAttributes.USED_COUNT] = 0
kwargs[SubnetBuiltinAttributes.ASSIGN_COUNT] = 0
kwargs[SubnetBuiltinAttributes.FREE_COUNT] = kwargs[SubnetBuiltinAttributes.HOSTS_COUNT]
return CIManager().add(self.type_id, cidr=cidr, **kwargs)
@staticmethod
def _add_scan_rule(ci_id, agent_id, cron, scan_enabled=True):
IPAMSubnetScan.create(ci_id=ci_id, agent_id=agent_id, cron=cron, scan_enabled=scan_enabled)
@staticmethod
def _add_relation(parent_id, child_id):
if not parent_id or not child_id:
return
CIRelationManager().add(parent_id, child_id, valid=False)
def add(self, cidr, parent_id, agent_id, cron, scan_enabled=True, **kwargs):
cidr = self.validate_cidr(parent_id, cidr)
ci_id = self._add_subnet(cidr, **kwargs)
self._add_scan_rule(ci_id, agent_id, cron, scan_enabled)
self._add_relation(parent_id, ci_id)
OperateHistoryManager().add(operate_type=OperateTypeEnum.ADD_SUBNET,
cidr=cidr,
description=cidr)
return ci_id
@staticmethod
def _update_subnet(_id, **kwargs):
return CIManager().update(_id, **kwargs)
@staticmethod
def _update_scan_rule(ci_id, agent_id, cron, scan_enabled=True):
existed = IPAMSubnetScan.get_by(ci_id=ci_id, first=True, to_dict=False)
if existed is not None:
existed.update(ci_id=ci_id, agent_id=agent_id, cron=cron, scan_enabled=scan_enabled,
rule_updated_at=datetime.datetime.now())
else:
IPAMSubnetScan.create(ci_id=ci_id, agent_id=agent_id, cron=cron, scan_enabled=scan_enabled)
def update(self, _id, **kwargs):
kwargs[SubnetBuiltinAttributes.CIDR] = self.validate_cidr(kwargs.pop('parent_id', None),
kwargs.get(SubnetBuiltinAttributes.CIDR), _id)
agent_id = kwargs.pop('agent_id', None)
cron = kwargs.pop('cron', None)
scan_enabled = kwargs.pop('scan_enabled', True)
cur = CIManager.get_ci_by_id(_id, need_children=False)
self._update_subnet(_id, **kwargs)
self._update_scan_rule(_id, agent_id, cron, scan_enabled)
OperateHistoryManager().add(operate_type=OperateTypeEnum.UPDATE_SUBNET,
cidr=cur.get(SubnetBuiltinAttributes.CIDR),
description="{} -> {}".format(cur.get(SubnetBuiltinAttributes.CIDR),
kwargs.get(SubnetBuiltinAttributes.CIDR)))
return _id
@classmethod
def delete(cls, _id):
if CIRelation.get_by(only_query=True).filter(CIRelation.first_ci_id == _id).first():
return abort(400, ErrFormat.ipam_subnet_cannot_delete)
existed = IPAMSubnetScan.get_by(ci_id=_id, first=True, to_dict=False)
existed and existed.delete()
delete_ci_ids = []
for i in CIRelation.get_by(first_ci_id=_id, to_dict=False):
delete_ci_ids.append(i.second_ci_id)
i.delete()
cur = CIManager.get_ci_by_id(_id, need_children=False)
CIManager().delete(_id)
OperateHistoryManager().add(operate_type=OperateTypeEnum.DELETE_SUBNET,
cidr=cur.get(SubnetBuiltinAttributes.CIDR),
description=cur.get(SubnetBuiltinAttributes.CIDR))
# batch_delete_ci.apply_async(args=(delete_ci_ids,))
return _id
class SubnetScopeManager(object):
def __init__(self):
self.ci_type = CITypeCache.get(BuiltinModelEnum.IPAM_SCOPE)
not self.ci_type and abort(400, ErrFormat.ipam_subnet_model_not_found.format(
BuiltinModelEnum.IPAM_SCOPE))
self.type_id = self.ci_type.id
def _add_scope(self, name):
return CIManager().add(self.type_id, name=name)
@staticmethod
def _add_relation(parent_id, child_id):
if not parent_id or not child_id:
return
CIRelationManager().add(parent_id, child_id, valid=False)
def add(self, parent_id, name):
ci_id = self._add_scope(name)
self._add_relation(parent_id, ci_id)
OperateHistoryManager().add(operate_type=OperateTypeEnum.ADD_SCOPE,
description=name)
return ci_id
@staticmethod
def _update_scope(_id, name):
return CIManager().update(_id, name=name)
def update(self, _id, name):
cur = CIManager.get_ci_by_id(_id, need_children=False)
res = self._update_scope(_id, name)
OperateHistoryManager().add(operate_type=OperateTypeEnum.UPDATE_SCOPE,
description="{} -> {}".format(cur.get('name'), name))
return res
@staticmethod
def delete(_id):
if CIRelation.get_by(first_ci_id=_id, first=True, to_dict=False):
return abort(400, ErrFormat.ipam_scope_cannot_delete)
cur = CIManager.get_ci_by_id(_id, need_children=False)
CIManager().delete(_id)
OperateHistoryManager().add(operate_type=OperateTypeEnum.DELETE_SCOPE,
description=cur.get('name'))
return _id
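A usage sketch for the subnet lifecycle above; the CIDR, agent id and cron expression are hypothetical. add() runs validate_cidr first (notation, prefix length 8-31, parent containment, sibling overlap), then creates the CI, the scan rule and the parent relation:

sm = SubnetManager()
subnet_id = sm.add("192.168.1.0/24", parent_id=None, agent_id="agent-01", cron="0 */2 * * *")
sm.update(subnet_id, parent_id=None, cidr="192.168.2.0/24", agent_id="agent-01", cron="0 */4 * * *")
sm.delete(subnet_id)   # refused with ipam_subnet_cannot_delete while child nodes still hang below it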

View File

@ -1,15 +1,12 @@
# -*- coding:utf-8 -*-
import copy
import functools
import redis_lock
from flask import abort
from flask import current_app
from flask import request
from flask_login import current_user
from api.extensions import db
from api.extensions import rd
from api.lib.cmdb.const import BUILTIN_ATTRIBUTES
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.mixin import DBMixin
@ -27,7 +24,7 @@ class CIFilterPermsCRUD(DBMixin):
result = {}
for i in res:
if i['attr_filter']:
i['attr_filter'] = i['attr_filter'].split(',') + list(BUILTIN_ATTRIBUTES.keys())
i['attr_filter'] = i['attr_filter'].split(',')
if i['rid'] not in result:
result[i['rid']] = i
@ -43,11 +40,6 @@ class CIFilterPermsCRUD(DBMixin):
result[i['rid']]['ci_filter'] = ""
result[i['rid']]['ci_filter'] += (i['ci_filter'] or "")
if i['id_filter']:
if not result[i['rid']]['id_filter']:
result[i['rid']]['id_filter'] = {}
result[i['rid']]['id_filter'].update(i['id_filter'] or {})
return result
def get_by_ids(self, _ids, type_id=None):
@ -62,7 +54,7 @@ class CIFilterPermsCRUD(DBMixin):
result = {}
for i in res:
if i['attr_filter']:
i['attr_filter'] = i['attr_filter'].split(',') + list(BUILTIN_ATTRIBUTES.keys())
i['attr_filter'] = i['attr_filter'].split(',')
if i['type_id'] not in result:
result[i['type_id']] = i
@ -78,11 +70,6 @@ class CIFilterPermsCRUD(DBMixin):
result[i['type_id']]['ci_filter'] = ""
result[i['type_id']]['ci_filter'] += (i['ci_filter'] or "")
if i['id_filter']:
if not result[i['type_id']]['id_filter']:
result[i['type_id']]['id_filter'] = {}
result[i['type_id']]['id_filter'].update(i['id_filter'] or {})
return result
@classmethod
@ -95,54 +82,6 @@ class CIFilterPermsCRUD(DBMixin):
type2filter_perms = cls().get_by_ids(list(map(int, [i['name'] for i in res2])), type_id=type_id)
return type2filter_perms.get(type_id, {}).get('attr_filter') or []
def _revoke_children(self, rid, id_filter, rebuild=True):
items = self.cls.get_by(rid=rid, ci_filter=None, attr_filter=None, to_dict=False)
for item in items:
changed, item_id_filter = False, copy.deepcopy(item.id_filter)
for prefix in id_filter:
for k, v in copy.deepcopy((item.id_filter or {})).items():
if k.startswith(prefix) and k != prefix:
item_id_filter.pop(k)
changed = True
if not item_id_filter and current_app.config.get('USE_ACL'):
item.soft_delete(commit=False)
ACLManager().del_resource(str(item.id), ResourceTypeEnum.CI_FILTER, rebuild=rebuild)
elif changed:
item.update(id_filter=item_id_filter, commit=False)
db.session.commit()
def _revoke_parent(self, rid, parent_path, rebuild=True):
parent_path = [i for i in parent_path.split(',') if i] or []
revoke_nodes = [','.join(parent_path[:i]) for i in range(len(parent_path), 0, -1)]
for node_path in revoke_nodes:
delete_item, can_deleted = None, True
items = self.cls.get_by(rid=rid, ci_filter=None, attr_filter=None, to_dict=False)
for item in items:
if node_path in item.id_filter:
delete_item = item
if any(filter(lambda x: x.startswith(node_path) and x != node_path, item.id_filter.keys())):
can_deleted = False
break
if can_deleted and delete_item:
id_filter = copy.deepcopy(delete_item.id_filter)
id_filter.pop(node_path)
delete_item = delete_item.update(id_filter=id_filter, filter_none=False)
if current_app.config.get('USE_ACL') and not id_filter:
ACLManager().del_resource(str(delete_item.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
delete_item.soft_delete()
items.remove(delete_item)
if rebuild:
from api.tasks.acl import role_rebuild
from api.lib.perm.acl.const import ACL_QUEUE
from api.lib.perm.acl.cache import AppCache
role_rebuild.apply_async(args=(rid, AppCache.get('cmdb').id), queue=ACL_QUEUE)
def _can_add(self, **kwargs):
ci_filter = kwargs.get('ci_filter')
attr_filter = kwargs.get('attr_filter') or ""
@ -163,67 +102,34 @@ class CIFilterPermsCRUD(DBMixin):
def add(self, **kwargs):
kwargs = self._can_add(**kwargs) or kwargs
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid']), expire=10):
request_id_filter = {}
if kwargs.get('id_filter'):
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
ci_filter=None,
attr_filter=None,
first=True, to_dict=False)
for _id, v in (kwargs.get('id_filter') or {}).items():
key = ",".join(([v['parent_path']] if v.get('parent_path') else []) + [str(_id)])
request_id_filter[key] = v['name']
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
first=True, to_dict=False)
if obj is not None:
obj = obj.update(filter_none=False, **kwargs)
if not obj.attr_filter and not obj.ci_filter:
if current_app.config.get('USE_ACL'):
ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
else:
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
id_filter=None,
first=True, to_dict=False)
is_recursive = kwargs.pop('is_recursive', 0)
if obj is not None:
if obj.id_filter and isinstance(kwargs.get('id_filter'), dict):
obj_id_filter = copy.deepcopy(obj.id_filter)
for k, v in request_id_filter.items():
obj_id_filter[k] = v
kwargs['id_filter'] = obj_id_filter
obj = obj.update(filter_none=False, **kwargs)
if not obj.attr_filter and not obj.ci_filter and not obj.id_filter:
if current_app.config.get('USE_ACL'):
ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
obj.soft_delete()
if not is_recursive and request_id_filter:
self._revoke_children(obj.rid, request_id_filter, rebuild=False)
obj.soft_delete()
else:
if not kwargs.get('ci_filter') and not kwargs.get('attr_filter'):
return
else:
if not kwargs.get('ci_filter') and not kwargs.get('attr_filter') and not kwargs.get('id_filter'):
return
obj = self.cls.create(**kwargs)
if request_id_filter:
kwargs['id_filter'] = request_id_filter
if current_app.config.get('USE_ACL'):
try:
ACLManager().add_resource(obj.id, ResourceTypeEnum.CI_FILTER)
except:
pass
ACLManager().grant_resource_to_role_by_rid(obj.id,
kwargs.get('rid'),
ResourceTypeEnum.CI_FILTER)
obj = self.cls.create(**kwargs)
if current_app.config.get('USE_ACL'): # new resource
try:
ACLManager().add_resource(obj.id, ResourceTypeEnum.CI_FILTER)
except:
pass
ACLManager().grant_resource_to_role_by_rid(obj.id,
kwargs.get('rid'),
ResourceTypeEnum.CI_FILTER)
return obj
return obj
def _can_update(self, **kwargs):
pass
@ -232,84 +138,19 @@ class CIFilterPermsCRUD(DBMixin):
pass
def delete(self, **kwargs):
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid']), expire=10):
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
id_filter=None,
first=True, to_dict=False)
if obj is not None:
resource = None
if current_app.config.get('USE_ACL'):
resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
obj.soft_delete()
return resource
def delete2(self, **kwargs):
with redis_lock.Lock(rd.r, 'CMDB_FILTER_{}_{}'.format(kwargs['type_id'], kwargs['rid']), expire=10):
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
ci_filter=None,
attr_filter=None,
first=True, to_dict=False)
request_id_filter = {}
for _id, v in (kwargs.get('id_filter') or {}).items():
key = ",".join(([v['parent_path']] if v.get('parent_path') else []) + [str(_id)])
request_id_filter[key] = v['name']
obj = self.cls.get_by(type_id=kwargs.get('type_id'),
rid=kwargs.get('rid'),
first=True, to_dict=False)
if obj is not None:
resource = None
if obj is not None:
if current_app.config.get('USE_ACL'):
resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER)
id_filter = {}
for k, v in copy.deepcopy(obj.id_filter or {}).items(): # important
if k not in request_id_filter:
id_filter[k] = v
if not id_filter and current_app.config.get('USE_ACL'):
resource = ACLManager().del_resource(str(obj.id), ResourceTypeEnum.CI_FILTER, rebuild=False)
obj.soft_delete()
db.session.commit()
else:
obj.update(id_filter=id_filter)
self._revoke_children(kwargs.get('rid'), request_id_filter, rebuild=False)
self._revoke_parent(kwargs.get('rid'), kwargs.get('parent_path'))
obj.soft_delete()
return resource
def delete_id_filter_by_ci_id(self, ci_id):
items = self.cls.get_by(ci_filter=None, attr_filter=None, to_dict=False)
rebuild_roles = set()
for item in items:
id_filter = copy.deepcopy(item.id_filter)
changed = False
for node_path in item.id_filter:
if str(ci_id) in node_path:
id_filter.pop(node_path)
changed = True
if changed:
rebuild_roles.add(item.rid)
if not id_filter:
item.soft_delete(commit=False)
else:
item.update(id_filter=id_filter, commit=False)
db.session.commit()
if rebuild_roles:
from api.tasks.acl import role_rebuild
from api.lib.perm.acl.const import ACL_QUEUE
from api.lib.perm.acl.cache import AppCache
for rid in rebuild_roles:
role_rebuild.apply_async(args=(rid, AppCache.get('cmdb').id), queue=ACL_QUEUE)
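For reference, both the grant (add) and revoke (delete2) paths above flatten the request's id_filter into keys of the form "<parent_path>,<ci_id>" mapped to a display name; a hypothetical payload is flattened like this:

payload = {'105': {'parent_path': '7,23', 'name': 'web-01'}}
request_id_filter = {}
for _id, v in payload.items():
    key = ",".join(([v['parent_path']] if v.get('parent_path') else []) + [str(_id)])
    request_id_filter[key] = v['name']
# request_id_filter == {'7,23,105': 'web-01'}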
def has_perm_for_ci(arg_name, resource_type, perm, callback=None, app=None):
def decorator_has_perm(func):

View File

@ -1,8 +1,8 @@
# -*- coding:utf-8 -*-
from collections import defaultdict
import copy
import six
import toposort
from flask import abort
@ -14,22 +14,16 @@ from api.lib.cmdb.attribute import AttributeManager
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributesCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.cache import CMDBCounterCache
from api.lib.cmdb.ci_type import CITypeAttributeManager
from api.lib.cmdb.const import BUILTIN_ATTRIBUTES
from api.lib.cmdb.const import ConstraintEnum
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.const import SysComputedAttributes
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.exception import AbortException
from api.lib.perm.acl.acl import ACLManager
from api.models.cmdb import CITypeGroup
from api.models.cmdb import CITypeGroupItem
from api.models.cmdb import CITypeAttribute
from api.models.cmdb import CITypeRelation
from api.models.cmdb import PreferenceCITypeOrder
from api.models.cmdb import PreferenceRelationView
from api.models.cmdb import PreferenceSearchOption
from api.models.cmdb import PreferenceShowAttributes
@ -44,48 +38,15 @@ class PreferenceManager(object):
@staticmethod
def get_types(instance=False, tree=False):
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, to_dict=False), key=lambda x: x.order)
type2group = {}
for i in db.session.query(CITypeGroupItem, CITypeGroup).join(
CITypeGroup, CITypeGroup.id == CITypeGroupItem.group_id).filter(
CITypeGroup.deleted.is_(False)).filter(CITypeGroupItem.deleted.is_(False)):
type2group[i.CITypeGroupItem.type_id] = i.CITypeGroup.to_dict()
types = db.session.query(PreferenceShowAttributes.type_id).filter(
PreferenceShowAttributes.uid == current_user.uid).filter(
PreferenceShowAttributes.deleted.is_(False)).group_by(
PreferenceShowAttributes.type_id).all() if instance else []
types = sorted(types, key=lambda x: {i.type_id: idx for idx, i in enumerate(
ci_type_order) if not i.is_tree}.get(x.type_id, 1))
group_types = []
other_types = []
group2idx = {}
type_ids = set()
for ci_type in types:
type_id = ci_type.type_id
type_ids.add(type_id)
type_dict = CITypeCache.get(type_id).to_dict()
if type_id not in type2group:
other_types.append(type_dict)
else:
group = type2group[type_id]
if group['id'] not in group2idx:
group_types.append(type2group[type_id])
group2idx[group['id']] = len(group_types) - 1
group_types[group2idx[group['id']]].setdefault('ci_types', []).append(type_dict)
if other_types:
group_types.append(dict(ci_types=other_types))
tree_types = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=False) if tree else []
tree_types = sorted(tree_types, key=lambda x: {i.type_id: idx for idx, i in enumerate(
ci_type_order) if i.is_tree}.get(x.type_id, 1))
type_ids = set([i.type_id for i in types + tree_types])
tree_types = [CITypeCache.get(_type.type_id).to_dict() for _type in tree_types]
for _type in tree_types:
type_ids.add(_type['id'])
return dict(group_types=group_types, tree_types=tree_types, type_ids=list(type_ids))
return [CITypeCache.get(type_id).to_dict() for type_id in type_ids]
@staticmethod
def get_types2(instance=False, tree=False):
@ -98,83 +59,67 @@ class PreferenceManager(object):
:param tree:
:return:
"""
result = dict(self=dict(instance=[], tree=[], type_id2subs_time=dict()))
result.update(CMDBCounterCache.get_sub_counter())
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, to_dict=False), key=lambda x: x.order)
result = dict(self=dict(instance=[], tree=[], type_id2subs_time=dict()),
type_id2users=dict())
if instance:
types = db.session.query(PreferenceShowAttributes.type_id,
PreferenceShowAttributes.uid, PreferenceShowAttributes.created_at).filter(
PreferenceShowAttributes.deleted.is_(False)).filter(
PreferenceShowAttributes.uid == current_user.uid).group_by(
PreferenceShowAttributes.deleted.is_(False)).group_by(
PreferenceShowAttributes.uid, PreferenceShowAttributes.type_id)
for i in types:
result['self']['instance'].append(i.type_id)
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
result['self']['type_id2subs_time'][i.type_id] = i.created_at
if i.uid == current_user.uid:
result['self']['instance'].append(i.type_id)
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
result['self']['type_id2subs_time'][i.type_id] = i.created_at
instance_order = [i.type_id for i in ci_type_order if not i.is_tree]
if len(instance_order) == len(result['self']['instance']):
result['self']['instance'] = instance_order
result['type_id2users'].setdefault(i.type_id, []).append(i.uid)
if tree:
types = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=False)
types = PreferenceTreeView.get_by(to_dict=False)
for i in types:
result['self']['tree'].append(i.type_id)
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
result['self']['type_id2subs_time'][i.type_id] = i.created_at
if i.uid == current_user.uid:
result['self']['tree'].append(i.type_id)
if str(i.created_at) > str(result['self']['type_id2subs_time'].get(i.type_id, "")):
result['self']['type_id2subs_time'][i.type_id] = i.created_at
tree_order = [i.type_id for i in ci_type_order if i.is_tree]
if len(tree_order) == len(result['self']['tree']):
result['self']['tree'] = tree_order
result['type_id2users'].setdefault(i.type_id, [])
if i.uid not in result['type_id2users'][i.type_id]:
result['type_id2users'][i.type_id].append(i.uid)
return result
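For orientation, the subscription summary assembled by get_types2 roughly has the following shape; all ids and timestamps below are illustrative, and CMDBCounterCache.get_sub_counter() merges additional counters into the same dict:
example_result = {
    "self": {
        "instance": [7, 12],   # type ids the current user subscribes to in the list view
        "tree": [7],           # type ids the current user subscribes to as tree views
        "type_id2subs_time": {7: "2024-01-03 10:00:00", 12: "2024-01-02 09:30:00"},
    },
    "type_id2users": {7: [1, 5], 12: [1]},   # present in the variant that tracks all subscribers
}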
@staticmethod
def get_show_attributes(type_id):
_type = CITypeCache.get(type_id) or abort(404, ErrFormat.ci_type_not_found)
type_id = _type and _type.id
if not isinstance(type_id, six.integer_types):
_type = CITypeCache.get(type_id)
type_id = _type and _type.id
attrs = PreferenceShowAttributes.get_by(uid=current_user.uid, type_id=type_id, to_dict=False)
attrs = db.session.query(PreferenceShowAttributes, CITypeAttribute.order).join(
CITypeAttribute, CITypeAttribute.attr_id == PreferenceShowAttributes.attr_id).filter(
PreferenceShowAttributes.uid == current_user.uid).filter(
PreferenceShowAttributes.type_id == type_id).filter(
PreferenceShowAttributes.deleted.is_(False)).filter(CITypeAttribute.deleted.is_(False)).filter(
CITypeAttribute.type_id == type_id).all()
result = []
for i in sorted(attrs, key=lambda x: x.order):
if i.attr_id:
item = i.attr.to_dict()
elif i.builtin_attr:
item = dict(name=i.builtin_attr, alias=BUILTIN_ATTRIBUTES[i.builtin_attr])
else:
item = dict(name="", alias="")
item.update(dict(is_fixed=i.is_fixed))
for i in sorted(attrs, key=lambda x: x.PreferenceShowAttributes.order):
item = i.PreferenceShowAttributes.attr.to_dict()
item.update(dict(is_fixed=i.PreferenceShowAttributes.is_fixed))
result.append(item)
is_subscribed = True
if not attrs:
result = CITypeAttributeManager.get_attributes_by_type_id(type_id,
choice_web_hook_parse=False,
choice_other_parse=False)
result = [i for i in result if i['default_show']]
for i in BUILTIN_ATTRIBUTES:
result.append(dict(name=i, alias=BUILTIN_ATTRIBUTES[i]))
attrs = db.session.query(CITypeAttribute).filter(
CITypeAttribute.type_id == type_id).filter(
CITypeAttribute.deleted.is_(False)).filter(
CITypeAttribute.default_show.is_(True)).order_by(CITypeAttribute.order)
result = [i.attr.to_dict() for i in attrs]
is_subscribed = False
for i in result:
if i.get("is_choice"):
if i["is_choice"]:
i.update(dict(choice_value=AttributeManager.get_choice_values(
i["id"], i["value_type"], i.get("choice_web_hook"), i.get("choice_other"))))
if (_type.name in SysComputedAttributes.type2attr and
i['name'] in SysComputedAttributes.type2attr[_type.name]):
i['sys_computed'] = True
else:
i['sys_computed'] = False
i["id"], i["value_type"], i["choice_web_hook"], i.get("choice_other"))))
return is_subscribed, result
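A hedged usage sketch of get_show_attributes: it returns whether the user has an explicit column subscription plus the attribute dicts to render; the type id below is hypothetical.
is_subscribed, columns = PreferenceManager.get_show_attributes(7)
if not is_subscribed:
    pass  # columns fell back to the model's default_show attributes, as built above
column_names = [c.get("name") or c.get("alias") for c in columns]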
@ -186,52 +131,29 @@ class PreferenceManager(object):
_attr, is_fixed = x
else:
_attr, is_fixed = x, False
if _attr in BUILTIN_ATTRIBUTES:
attr = None
builtin_attr = _attr
else:
attr = AttributeCache.get(_attr) or abort(
404, ErrFormat.attribute_not_found.format("id={}".format(_attr)))
builtin_attr = None
attr = AttributeCache.get(_attr) or abort(404, ErrFormat.attribute_not_found.format("id={}".format(_attr)))
existed = PreferenceShowAttributes.get_by(type_id=type_id,
uid=current_user.uid,
attr_id=attr and attr.id,
builtin_attr=builtin_attr,
attr_id=attr.id,
first=True,
to_dict=False)
if existed is None:
PreferenceShowAttributes.create(type_id=type_id,
uid=current_user.uid,
attr_id=attr and attr.id,
builtin_attr=builtin_attr,
attr_id=attr.id,
order=order,
is_fixed=is_fixed)
else:
existed.update(order=order, is_fixed=is_fixed)
attr_dict = {(int(i[0]) if i[0].isdigit() else i[0]) if isinstance(i, list) else
(int(i) if i.isdigit() else i): j for i, j in attr_order}
attr_dict = {int(i[0]) if isinstance(i, list) else int(i): j for i, j in attr_order}
for i in existed_all:
if (i.attr_id and i.attr_id not in attr_dict) or (i.builtin_attr and i.builtin_attr not in attr_dict):
if i.attr_id not in attr_dict:
i.soft_delete()
if not existed_all and attr_order:
cls.add_ci_type_order_item(type_id, is_tree=False)
elif not PreferenceShowAttributes.get_by(type_id=type_id, uid=current_user.uid, to_dict=False):
cls.delete_ci_type_order_item(type_id, is_tree=False)
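The attr_dict comprehension above normalizes the submitted ordering: each entry pairs an attribute key with its is_fixed flag, digit-string keys are cast to int so they match attr_id, and built-in field names stay as strings. A simplified sketch (it drops the nested-list branch; the keys and the _updated_at built-in name are hypothetical):
attr_order = [("3", True), ("17", False), ("_updated_at", False)]   # (attribute key, is_fixed)
attr_dict = {int(i) if i.isdigit() else i: j for i, j in attr_order}
# -> {3: True, 17: False, "_updated_at": False}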
@staticmethod
def get_tree_view():
ci_type_order = sorted(PreferenceCITypeOrder.get_by(uid=current_user.uid, is_tree=True, to_dict=False),
key=lambda x: x.order)
res = PreferenceTreeView.get_by(uid=current_user.uid, to_dict=True)
if ci_type_order:
res = sorted(res, key=lambda x: {ii.type_id: idx for idx, ii in enumerate(
ci_type_order)}.get(x['type_id'], 1))
for item in res:
if item["levels"]:
ci_type = CITypeCache.get(item['type_id']).to_dict()
@ -250,8 +172,8 @@ class PreferenceManager(object):
return res
@classmethod
def create_or_update_tree_view(cls, type_id, levels):
@staticmethod
def create_or_update_tree_view(type_id, levels):
attrs = CITypeAttributesCache.get(type_id)
for idx, i in enumerate(levels):
for attr in attrs:
@ -263,12 +185,9 @@ class PreferenceManager(object):
if existed is not None:
if not levels:
existed.soft_delete()
cls.delete_ci_type_order_item(type_id, is_tree=True)
return existed
return existed.update(levels=levels)
elif levels:
cls.add_ci_type_order_item(type_id, is_tree=True)
return PreferenceTreeView.create(levels=levels, type_id=type_id, uid=current_user.uid)
@staticmethod
@ -287,14 +206,12 @@ class PreferenceManager(object):
else:
views = _views
view2cr_ids = defaultdict(list)
name2view = dict()
view2cr_ids = dict()
result = dict()
name2id = list()
for view in views:
view2cr_ids[view['name']].extend(view['cr_ids'])
view2cr_ids.setdefault(view['name'], []).extend(view['cr_ids'])
name2id.append([view['name'], view['id']])
name2view[view['name']] = view
id2type = dict()
for view_name in view2cr_ids:
@ -338,8 +255,6 @@ class PreferenceManager(object):
topo_flatten=topo_flatten,
level2constraint=level2constraint,
leaf=leaf,
option=name2view[view_name]['option'],
is_public=name2view[view_name]['is_public'],
leaf2show_types=leaf2show_types,
node2show_types=node2show_types,
show_types=[CITypeCache.get(j).to_dict()
@ -347,26 +262,18 @@ class PreferenceManager(object):
for type_id in id2type:
id2type[type_id] = CITypeCache.get(type_id).to_dict()
id2type[type_id]['unique_name'] = AttributeCache.get(id2type[type_id]['unique_id']).name
if id2type[type_id]['show_id']:
show_attr = AttributeCache.get(id2type[type_id]['show_id'])
id2type[type_id]['show_name'] = show_attr and show_attr.name
return result, id2type, sorted(name2id, key=lambda x: x[1])
@classmethod
def create_or_update_relation_view(cls, name=None, cr_ids=None, _id=None, is_public=False, option=None):
def create_or_update_relation_view(cls, name, cr_ids, is_public=False):
if not cr_ids:
return abort(400, ErrFormat.preference_relation_view_node_required)
if _id is None:
existed = PreferenceRelationView.get_by(name=name, to_dict=False, first=True)
else:
existed = PreferenceRelationView.get_by_id(_id)
existed = PreferenceRelationView.get_by(name=name, to_dict=False, first=True)
current_app.logger.debug(existed)
if existed is None:
PreferenceRelationView.create(name=name, cr_ids=cr_ids, uid=current_user.uid,
is_public=is_public, option=option)
PreferenceRelationView.create(name=name, cr_ids=cr_ids, uid=current_user.uid, is_public=is_public)
if current_app.config.get("USE_ACL"):
ACLManager().add_resource(name, ResourceTypeEnum.RELATION_VIEW)
@ -374,11 +281,6 @@ class PreferenceManager(object):
RoleEnum.CMDB_READ_ALL,
ResourceTypeEnum.RELATION_VIEW,
permissions=[PermEnum.READ])
else:
if existed.name != name and current_app.config.get("USE_ACL"):
ACLManager().update_resource(existed.name, name, ResourceTypeEnum.RELATION_VIEW)
existed.update(name=name, cr_ids=cr_ids, is_public=is_public, option=option)
return cls.get_relation_view()
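A hedged usage sketch of the relation-view upsert above; the view name and cr_ids (CITypeRelation ids forming the path) are made up, and the ACL resource/grant only happens when USE_ACL is enabled:
PreferenceManager.create_or_update_relation_view(
    name="App -> Server",   # hypothetical view name
    cr_ids=[3, 8],          # hypothetical CITypeRelation ids
    is_public=True,
)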
@ -407,22 +309,14 @@ class PreferenceManager(object):
def add_search_option(**kwargs):
kwargs['uid'] = current_user.uid
if kwargs['name'] in ('__recent__', '__favor__', '__relation_favor__'):
if kwargs['name'] == '__recent__':
for i in PreferenceSearchOption.get_by(
only_query=True, name=kwargs['name'], uid=current_user.uid).order_by(
PreferenceSearchOption.id.desc()).offset(20):
i.delete()
else:
existed = PreferenceSearchOption.get_by(uid=current_user.uid,
name=kwargs.get('name'),
prv_id=kwargs.get('prv_id'),
ptv_id=kwargs.get('ptv_id'),
type_id=kwargs.get('type_id'),
)
if existed:
return abort(400, ErrFormat.preference_search_option_exists)
existed = PreferenceSearchOption.get_by(uid=current_user.uid,
name=kwargs.get('name'),
prv_id=kwargs.get('prv_id'),
ptv_id=kwargs.get('ptv_id'),
type_id=kwargs.get('type_id'),
)
if existed:
return abort(400, ErrFormat.preference_search_option_exists)
return PreferenceSearchOption.create(**kwargs)
@ -462,9 +356,6 @@ class PreferenceManager(object):
for i in PreferenceTreeView.get_by(type_id=type_id, uid=uid, to_dict=False):
i.soft_delete()
for i in PreferenceCITypeOrder.get_by(type_id=type_id, uid=uid, to_dict=False):
i.soft_delete()
@staticmethod
def can_edit_relation(parent_id, child_id):
views = PreferenceRelationView.get_by(to_dict=False)
@ -490,36 +381,3 @@ class PreferenceManager(object):
return False
return True
@staticmethod
def add_ci_type_order_item(type_id, is_tree=False):
max_order = PreferenceCITypeOrder.get_by(
uid=current_user.uid, is_tree=is_tree, only_query=True).order_by(PreferenceCITypeOrder.order.desc()).first()
order = (max_order and max_order.order + 1) or 1
PreferenceCITypeOrder.create(type_id=type_id, is_tree=is_tree, uid=current_user.uid, order=order)
@staticmethod
def delete_ci_type_order_item(type_id, is_tree=False):
existed = PreferenceCITypeOrder.get_by(uid=current_user.uid, type_id=type_id, is_tree=is_tree,
first=True, to_dict=False)
existed and existed.soft_delete()
@staticmethod
def upsert_ci_type_order(type_ids, is_tree=False):
for idx, type_id in enumerate(type_ids):
order = idx + 1
existed = PreferenceCITypeOrder.get_by(uid=current_user.uid, type_id=type_id, is_tree=is_tree,
to_dict=False, first=True)
if existed is not None:
existed.update(order=order, flush=True)
else:
PreferenceCITypeOrder.create(uid=current_user.uid, type_id=type_id, is_tree=is_tree, order=order,
flush=True)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.error("upsert citype order failed: {}".format(e))
return abort(400, ErrFormat.unknown_error)


View File

@ -16,9 +16,8 @@ class ErrFormat(CommonErrFormat):
argument_file_not_found = _l("The file doesn't seem to be uploaded") # 文件似乎并未上传
attribute_not_found = _l("Attribute {} does not exist!") # 属性 {} 不存在!
# 该属性是模型的唯一标识,不能被删除!
attribute_is_unique_id = _l(
"This attribute is the unique identifier of the model and cannot be deleted!")
"This attribute is the unique identifier of the model and cannot be deleted!") # 该属性是模型的唯一标识,不能被删除!
attribute_is_ref_by_type = _l(
"This attribute is referenced by model {} and cannot be deleted!") # 该属性被模型 {} 引用, 不能删除!
attribute_value_type_cannot_change = _l(
@ -36,7 +35,7 @@ class ErrFormat(CommonErrFormat):
"Only creators and administrators are allowed to delete attributes!") # 目前只允许 属性创建人、管理员 删除属性!
# 属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type
attribute_name_cannot_be_builtin = _l(
"Attribute field names cannot be built-in fields: id, _id, ci_id, type, _type, ci_type, ticket_id")
"Attribute field names cannot be built-in fields: id, _id, ci_id, type, _type, ci_type")
attribute_choice_other_invalid = _l(
"Predefined value: Other model request parameters are illegal!") # 预定义值: 其他模型请求参数不合法!
@ -45,8 +44,6 @@ class ErrFormat(CommonErrFormat):
unique_value_not_found = _l("The model's primary key {} does not exist!") # 模型的主键 {} 不存在!
unique_key_required = _l("Primary key {} is missing") # 主键字段 {} 缺失
ci_is_already_existed = _l("CI already exists!") # CI 已经存在!
ci_reference_not_found = _l("{}: CI reference {} does not exist!") # {}: CI引用 {} 不存在!
ci_reference_invalid = _l("{}: CI reference {} is illegal!") # {}, CI引用 {} 不合法!
relation_constraint = _l("Relationship constraint: {}, verification failed") # 关系约束: {}, 校验失败
# 多对多关系 限制: 模型 {} <-> {} 已经存在多对多关系!
m2m_relation_constraint = _l(
@ -63,11 +60,6 @@ class ErrFormat(CommonErrFormat):
only_owner_can_delete = _l("Only the creator can delete it!") # 只有创建人才能删除它!
ci_exists_and_cannot_delete_type = _l(
"The model cannot be deleted because the CI already exists") # 因为CI已经存在不能删除模型
ci_exists_and_cannot_delete_inheritance = _l(
"The inheritance cannot be deleted because the CI already exists") # 因为CI已经存在不能删除继承关系
ci_type_inheritance_cannot_delete = _l("The model is inherited and cannot be deleted") # 该模型被继承, 不能删除
ci_type_referenced_cannot_delete = _l(
"The model is referenced by attribute {} and cannot be deleted") # 该模型被属性 {} 引用, 不能删除
# 因为关系视图 {} 引用了该模型,不能删除模型
ci_relation_view_exists_and_cannot_delete_type = _l(
@ -84,8 +76,6 @@ class ErrFormat(CommonErrFormat):
unique_constraint_invalid = _l("Uniquely constrained attributes cannot be JSON and multi-valued")
ci_type_trigger_duplicate = _l("Duplicated trigger") # 重复的触发器
ci_type_trigger_not_found = _l("Trigger {} does not exist") # 触发器 {} 不存在
ci_type_reconciliation_duplicate = _l("Duplicated reconciliation rule") # 重复的校验规则
ci_type_reconciliation_not_found = _l("Reconciliation rule {} does not exist") # 规则 {} 不存在
record_not_found = _l("Operation record {} does not exist") # 操作记录 {} 不存在
cannot_delete_unique = _l("Unique identifier cannot be deleted") # 不能删除唯一标识
@ -104,7 +94,7 @@ class ErrFormat(CommonErrFormat):
# 属性 {} 的值必须是唯一的, 当前值 {} 已存在
attribute_value_unique_required = _l("The value of attribute {} must be unique, {} already exists")
attribute_value_required = _l("Attribute {} value must exist") # 属性 {} 值必须存在
attribute_value_out_of_range = _l("Out of range value, the maximum value is 2147483647")
# 新增或者修改属性值未知错误: {}
attribute_value_unknown_error = _l("Unknown error when adding or modifying attribute value: {}")
@ -130,8 +120,7 @@ class ErrFormat(CommonErrFormat):
adr_default_ref_once = _l("The default auto-discovery rule is already referenced by model {}!")
# unique_key方法必须返回非空字符串!
adr_unique_key_required = _l("The unique_key method must return a non-empty string!")
# attributes方法必须返回的是list
adr_plugin_attributes_list_required = _l("The attributes method must return a list")
adr_plugin_attributes_list_required = _l("The attributes method must return a list") # attributes方法必须返回的是list
# attributes方法返回的list不能为空!
adr_plugin_attributes_list_no_empty = _l("The list returned by the attributes method cannot be empty!")
# 只有管理员才可以定义执行机器为: 所有节点!
@ -147,32 +136,3 @@ class ErrFormat(CommonErrFormat):
password_save_failed = _l("Failed to save password: {}") # 保存密码失败: {}
password_load_failed = _l("Failed to get password: {}") # 获取密码失败: {}
cron_time_format_invalid = _l("Scheduling time format error") # 调度时间格式错误
reconciliation_title = _l("CMDB data reconciliation results") # CMDB数据合规检查结果
reconciliation_body = _l("Number of {} illegal: {}") # "{} 不合规数: {}"
topology_exists = _l("Topology view {} already exists") # 拓扑视图 {} 已经存在
topology_group_exists = _l("Topology group {} already exists") # 拓扑视图分组 {} 已经存在
# 因为该分组下定义了拓扑视图,不能删除
topo_view_exists_cannot_delete_group = _l("The group cannot be deleted because the topology view already exists")
relation_path_search_src_target_required = _l("Both the source model and the target model must be selected")
builtin_type_cannot_update_name = _l("The names of built-in models cannot be changed")
# # IPAM
ipam_subnet_model_not_found = _l("The subnet model {} does not exist")
ipam_address_model_not_found = _l("The IP Address model {} does not exist")
ipam_cidr_invalid_notation = _l("CIDR {} is an invalid notation")
ipam_cidr_invalid_subnet = _l("Invalid CIDR: {}, available subnets: {}")
ipam_subnet_prefix_length_invalid = _l("Invalid subnet prefix length: {}")
ipam_parent_subnet_node_cidr_cannot_empty = _l("The parent node CIDR is required")
ipam_subnet_overlapped = _l("{} and {} overlap")
ipam_subnet_cannot_delete = _l("Cannot delete because child nodes exist")
ipam_subnet_not_found = _l("Subnet is not found")
ipam_scope_cannot_delete = _l("Cannot delete because child nodes exist")
# # DCIM
dcim_builtin_model_not_found = _l("The dcim model {} does not exist")
dcim_rack_u_slot_invalid = _l("Irregularities in Rack Units")
dcim_rack_u_count_invalid = _l("The device's position is greater than the rack unit height")

View File

@ -16,13 +16,10 @@ def search(query=None,
ret_key=RetKey.NAME,
count=1,
sort=None,
excludes=None,
use_id_filter=False,
use_ci_filter=True):
excludes=None):
if current_app.config.get("USE_ES"):
s = SearchFromES(query, fl, facet, page, ret_key, count, sort)
else:
s = SearchFromDB(query, fl, facet, page, ret_key, count, sort, excludes=excludes,
use_id_filter=use_id_filter, use_ci_filter=use_ci_filter)
s = SearchFromDB(query, fl, facet, page, ret_key, count, sort, excludes=excludes)
return s
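A hedged usage sketch of the factory above: it hands back a SearchFromES or SearchFromDB instance depending on USE_ES, and callers unpack the same six-tuple either way; the query expression and field names are illustrative.
s = search("_type:server,hostname:web*", fl=["hostname", "ip"], count=25)
response, counter, total, page, numfound, facet = s.search()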

View File

@ -56,13 +56,13 @@ QUERY_CI_BY_ATTR_NAME = """
SELECT {0}.ci_id
FROM {0}
WHERE {0}.attr_id={1:d}
AND ({0}.value {2})
AND {0}.value {2}
"""
QUERY_CI_BY_ID = """
SELECT c_cis.id as ci_id
FROM c_cis
WHERE c_cis.id {}
WHERE c_cis.id={}
"""
QUERY_CI_BY_TYPE = """
@ -107,12 +107,3 @@ FROM
WHERE c_value_index_datetime.value LIKE "{0}") AS {1}
GROUP BY {1}.ci_id
"""
QUERY_CI_BY_NO_ATTR_IN = """
SELECT *
FROM
(SELECT c_value_index_texts.ci_id
FROM c_value_index_texts
WHERE c_value_index_texts.value in ({0})) AS {1}
GROUP BY {1}.ci_id
"""

View File

@ -4,9 +4,8 @@
from __future__ import unicode_literals
import copy
import six
import time
from flask import abort
from flask import current_app
from flask_login import current_user
from jinja2 import Template
@ -16,7 +15,6 @@ from api.extensions import db
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.const import BUILTIN_ATTRIBUTES
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RetKey
@ -28,7 +26,6 @@ from api.lib.cmdb.search.ci.db.query_sql import FACET_QUERY
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_ATTR_NAME
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_ID
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_NO_ATTR
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_NO_ATTR_IN
from api.lib.cmdb.search.ci.db.query_sql import QUERY_CI_BY_TYPE
from api.lib.cmdb.search.ci.db.query_sql import QUERY_UNION_CI_ATTRIBUTE_IS_NULL
from api.lib.cmdb.utils import TableMap
@ -47,11 +44,7 @@ class Search(object):
count=1,
sort=None,
ci_ids=None,
excludes=None,
parent_node_perm_passed=False,
use_id_filter=False,
use_ci_filter=True,
only_ids=False):
excludes=None):
self.orig_query = query
self.fl = fl or []
self.excludes = excludes or []
@ -61,20 +54,12 @@ class Search(object):
self.count = count
self.sort = sort
self.ci_ids = ci_ids or []
self.raw_ci_ids = copy.deepcopy(self.ci_ids)
self.query_sql = ""
self.type_id_list = []
self.only_type_query = False
self.parent_node_perm_passed = parent_node_perm_passed
self.use_id_filter = use_id_filter
self.use_ci_filter = use_ci_filter
self.only_ids = only_ids
self.multi_type_has_ci_filter = False
self.valid_type_names = []
self.type2filter_perms = dict()
self.is_app_admin = is_app_admin('cmdb') or current_user.username == "worker"
self.is_app_admin = self.is_app_admin or (not self.use_ci_filter and not self.use_id_filter)
@staticmethod
def _operator_proc(key):
@ -108,90 +93,52 @@ class Search(object):
else:
raise SearchError(ErrFormat.attribute_not_found.format(key))
def _type_query_handler(self, v, queries, is_sub=False):
def _type_query_handler(self, v, queries):
new_v = v[1:-1].split(";") if v.startswith("(") and v.endswith(")") else [v]
type_num = len(new_v)
type_id_list = []
for _v in new_v:
ci_type = CITypeCache.get(_v)
if type_num == 1 and not self.sort and ci_type and ci_type.default_order_attr:
if len(new_v) == 1 and not self.sort and ci_type and ci_type.default_order_attr:
self.sort = ci_type.default_order_attr
if ci_type is not None:
if self.valid_type_names == "ALL" or ci_type.name in self.valid_type_names:
if not is_sub:
self.type_id_list.append(str(ci_type.id))
type_id_list.append(str(ci_type.id))
if ci_type.id in self.type2filter_perms and not is_sub:
self.type_id_list.append(str(ci_type.id))
if ci_type.id in self.type2filter_perms:
ci_filter = self.type2filter_perms[ci_type.id].get('ci_filter')
if ci_filter and self.use_ci_filter and not self.use_id_filter:
if ci_filter:
sub = []
ci_filter = Template(ci_filter).render(user=current_user)
for i in ci_filter.split(','):
if type_num == 1:
if i.startswith("~") and not sub:
queries.append(i)
else:
sub.append(i)
if i.startswith("~") and not sub:
queries.append(i)
else:
sub.append(i)
if sub:
if type_num == 1:
queries.append(dict(operator="&", queries=sub))
else:
if str(ci_type.id) in self.type_id_list:
self.type_id_list.remove(str(ci_type.id))
type_id_list.remove(str(ci_type.id))
sub.extend([i for i in queries[1:] if isinstance(i, (six.string_types, list))])
queries.append(dict(operator="&", queries=sub))
sub.insert(0, "_type:{}".format(ci_type.id))
queries.append(dict(operator="|", queries=sub))
self.multi_type_has_ci_filter = True
if self.type2filter_perms[ci_type.id].get('attr_filter'):
if type_num == 1:
if not self.fl:
self.fl = set(self.type2filter_perms[ci_type.id]['attr_filter'])
else:
fl = set(self.fl) & set(self.type2filter_perms[ci_type.id]['attr_filter'])
not fl and abort(400, ErrFormat.ci_filter_perm_attr_no_permission.format(self.fl))
self.fl = fl
if not self.fl:
self.fl = set(self.type2filter_perms[ci_type.id]['attr_filter'])
else:
self.fl = self.fl or {}
if not self.fl or isinstance(self.fl, dict):
self.fl[ci_type.id] = set(self.type2filter_perms[ci_type.id]['attr_filter'])
if self.type2filter_perms[ci_type.id].get('id_filter') and self.use_id_filter:
if not self.raw_ci_ids:
self.ci_ids = list(self.type2filter_perms[ci_type.id]['id_filter'].keys())
if self.use_id_filter and not self.ci_ids and not self.is_app_admin:
self.raw_ci_ids = [0]
self.fl = set(self.fl) & set(self.type2filter_perms[ci_type.id]['attr_filter'])
else:
raise SearchError(ErrFormat.no_permission.format(ci_type.alias, PermEnum.READ))
else:
raise SearchError(ErrFormat.ci_type_not_found2.format(_v))
if type_num != len(self.type_id_list) and queries and queries[0].startswith('_type') and not is_sub:
queries[0] = "_type:({})".format(";".join(self.type_id_list))
if type_id_list:
type_ids = ",".join(type_id_list)
if self.type_id_list:
type_ids = ",".join(self.type_id_list)
_query_sql = QUERY_CI_BY_TYPE.format(type_ids)
if self.only_type_query or self.multi_type_has_ci_filter:
if self.only_type_query:
return _query_sql
elif type_num > 1: # there must be instance-level access control
return "select c_cis.id as ci_id from c_cis where c_cis.id=0"
else:
return ""
return ""
@staticmethod
def _id_query_handler(v):
if ";" in v:
return QUERY_CI_BY_ID.format("in {}".format(v.replace(';', ',')))
else:
return QUERY_CI_BY_ID.format("= {}".format(v))
return QUERY_CI_BY_ID.format(v)
@staticmethod
def _in_query_handler(attr, v, is_not):
@ -205,7 +152,6 @@ class Search(object):
"NOT LIKE" if is_not else "LIKE",
_v.replace("*", "%")) for _v in new_v])
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, in_query)
return _query_sql
@staticmethod
@ -221,7 +167,6 @@ class Search(object):
"NOT BETWEEN" if is_not else "BETWEEN",
start.replace("*", "%"), end.replace("*", "%"))
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, range_query)
return _query_sql
@staticmethod
@ -238,7 +183,6 @@ class Search(object):
comparison_query = "{0} '{1}'".format(v[0], v[1:].replace("*", "%"))
_query_sql = QUERY_CI_BY_ATTR_NAME.format(table_name, attr.id, comparison_query)
return _query_sql
@staticmethod
@ -250,7 +194,6 @@ class Search(object):
elif field.startswith("-"):
field = field[1:]
sort_type = "DESC"
return field, sort_type
def __sort_by_id(self, sort_type, query_sql):
@ -260,7 +203,7 @@ class Search(object):
return ret_sql.format(query_sql, "ORDER BY B.ci_id {1} LIMIT {0:d}, {2};".format(
(self.page - 1) * self.count, sort_type, self.count))
elif self.type_id_list and not self.multi_type_has_ci_filter:
elif self.type_id_list:
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({0}) ".format(
@ -285,7 +228,7 @@ class Search(object):
def __sort_by_type(self, sort_type, query_sql):
ret_sql = "SELECT SQL_CALC_FOUND_ROWS DISTINCT B.ci_id FROM ({0}) AS B {1}"
if self.type_id_list and not self.multi_type_has_ci_filter:
if self.type_id_list:
self.query_sql = "SELECT B.ci_id FROM ({0}) AS B {1}".format(
query_sql,
"INNER JOIN c_cis on c_cis.id=B.ci_id WHERE c_cis.type_id IN ({0}) ".format(
@ -309,23 +252,16 @@ class Search(object):
(self.page - 1) * self.count, sort_type, self.count))
def __sort_by_field(self, field, sort_type, query_sql):
if field not in BUILTIN_ATTRIBUTES:
attr = AttributeCache.get(field)
attr_id = attr.id
attr = AttributeCache.get(field)
attr_id = attr.id
table_name = TableMap(attr=attr).table_name
_v_query_sql = """SELECT {0}.ci_id, {1}.value
FROM ({2}) AS {0} INNER JOIN {1} ON {1}.ci_id = {0}.ci_id
WHERE {1}.attr_id = {3}""".format("ALIAS", table_name, query_sql, attr_id)
new_table = _v_query_sql
table_name = TableMap(attr=attr).table_name
_v_query_sql = """SELECT ALIAS.ci_id, {0}.value
FROM ({1}) AS ALIAS INNER JOIN {0} ON {0}.ci_id = ALIAS.ci_id
WHERE {0}.attr_id = {2}""".format(table_name, query_sql, attr_id)
new_table = _v_query_sql
else:
_v_query_sql = """SELECT c_cis.id AS ci_id, c_cis.{0} AS value
FROM c_cis INNER JOIN ({1}) AS ALIAS ON ALIAS.ci_id = c_cis.id""".format(
field[1:], query_sql)
new_table = _v_query_sql
if self.only_type_query or not self.type_id_list or self.multi_type_has_ci_filter:
if self.only_type_query or not self.type_id_list:
return ("SELECT SQL_CALC_FOUND_ROWS DISTINCT C.ci_id FROM ({0}) AS C ORDER BY C.value {2} "
"LIMIT {1:d}, {3};".format(new_table, (self.page - 1) * self.count, sort_type, self.count))
@ -363,9 +299,7 @@ class Search(object):
INNER JOIN ({2}) as {3} USING(ci_id)""".format(query_sql, alias, _query_sql, alias + "A")
elif operator == "|" or operator == "|~":
query_sql = "SELECT * FROM ({0}) as {1} UNION ALL SELECT * FROM ({2}) as {3}".format(query_sql, alias,
_query_sql,
alias + "A")
query_sql = "SELECT * FROM ({0}) as {1} UNION ALL ({2})".format(query_sql, alias, _query_sql)
elif operator == "~":
query_sql = """SELECT * FROM ({0}) as {1} LEFT JOIN ({2}) as {3} USING(ci_id)
@ -388,11 +322,6 @@ class Search(object):
return numfound, res
def __get_type2filter_perms(self):
res2 = ACLManager('cmdb').get_resources(ResourceTypeEnum.CI_FILTER)
if res2:
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
def __get_types_has_read(self):
"""
:return: _type:(type1;type2)
@ -402,23 +331,14 @@ class Search(object):
self.valid_type_names = {i['name'] for i in res if PermEnum.READ in i['permissions']}
self.__get_type2filter_perms()
for type_id in self.type2filter_perms:
ci_type = CITypeCache.get(type_id)
if ci_type:
if self.type2filter_perms[type_id].get('id_filter'):
if self.use_id_filter:
self.valid_type_names.add(ci_type.name)
elif self.type2filter_perms[type_id].get('ci_filter'):
if self.use_ci_filter:
self.valid_type_names.add(ci_type.name)
else:
self.valid_type_names.add(ci_type.name)
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
if res2:
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
return "_type:({})".format(";".join(self.valid_type_names))
def __confirm_type_first(self, queries):
has_type = False
result = []
@ -437,14 +357,11 @@ class Search(object):
if not q.startswith("("):
raise SearchError(ErrFormat.ci_search_Parentheses_invalid)
if ":" not in q: # multi-line search
result.append(q[1:-1].split(';'))
else:
operator, q = self._operator_proc(q)
if q.endswith(")"):
result.append(dict(operator=operator, queries=[q[1:-1]]))
operator, q = self._operator_proc(q)
if q.endswith(")"):
result.append(dict(operator=operator, queries=[q[1:-1]]))
sub = dict(operator=operator, queries=[q[1:]])
sub = dict(operator=operator, queries=[q[1:]])
elif q.endswith(")") and sub:
sub['queries'].append(q[:-1])
result.append(copy.deepcopy(sub))
@ -454,10 +371,8 @@ class Search(object):
else:
result.append(q)
if self.parent_node_perm_passed:
self.__get_type2filter_perms()
self.valid_type_names = "ALL"
elif result and not has_type and not self.is_app_admin:
_is_app_admin = is_app_admin('cmdb') or current_user.username == "worker"
if result and not has_type and not _is_app_admin:
type_q = self.__get_types_has_read()
if id_query:
ci = CIManager.get_by_id(id_query)
@ -466,21 +381,23 @@ class Search(object):
result.insert(0, "_type:{}".format(ci.type_id))
else:
result.insert(0, type_q)
elif self.is_app_admin:
elif _is_app_admin:
self.valid_type_names = "ALL"
else:
self.__get_types_has_read()
current_app.logger.warning(result)
return result
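The net effect of __confirm_type_first is that a non-admin query without an explicit _type clause gets one prepended, restricted to the models the user can read. A hedged illustration (model names are hypothetical):
queries = ["hostname:web*"]                       # user query without a _type clause
# after __confirm_type_first for a non-admin with READ on server and vm only, roughly:
queries = ["_type:(server;vm)", "hostname:web*"]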
def __query_by_attr(self, q, queries, alias, is_sub=False):
def __query_by_attr(self, q, queries, alias):
k = q.split(":")[0].strip()
v = "\:".join(q.split(":")[1:]).strip()
v = v.replace("'", "\\'")
v = v.replace('"', '\\"')
field, field_type, operator, attr = self._attr_name_proc(k)
if field == "_type":
_query_sql = self._type_query_handler(v, queries, is_sub)
_query_sql = self._type_query_handler(v, queries)
elif field == "_id":
_query_sql = self._id_query_handler(v)
@ -494,9 +411,6 @@ class Search(object):
if field_type == ValueTypeEnum.DATE and len(v) == 10:
v = "{} 00:00:00".format(v)
if field_type == ValueTypeEnum.BOOL and "*" not in str(v):
v = str(int(v in current_app.config.get('BOOL_TRUE')))
# in query
if v.startswith("(") and v.endswith(")"):
_query_sql = self._in_query_handler(attr, v, is_not)
@ -527,36 +441,26 @@ class Search(object):
return alias, _query_sql, operator
def __query_build_by_field(self, queries, is_first=True, only_type_query_special=True, alias='A', operator='&',
is_sub=False):
def __query_build_by_field(self, queries, is_first=True, only_type_query_special=True, alias='A', operator='&'):
query_sql = ""
for q in queries:
# current_app.logger.debug(q)
_query_sql = ""
if isinstance(q, dict):
if len(q['queries']) == 1 and ";" in q['queries'][0]:
values = q['queries'][0].split(";")
in_values = ",".join("'{0}'".format(v) for v in values)
_query_sql = QUERY_CI_BY_NO_ATTR_IN.format(in_values, alias)
operator = q['operator']
else:
alias, _query_sql, operator = self.__query_build_by_field(q['queries'], True, True, alias,
is_sub=True)
operator = q['operator']
alias, _query_sql, operator = self.__query_build_by_field(q['queries'], True, True, alias)
current_app.logger.info(_query_sql)
current_app.logger.info((operator, is_first, alias))
operator = q['operator']
elif ":" in q and not q.startswith("*"):
alias, _query_sql, operator = self.__query_by_attr(q, queries, alias, is_sub)
alias, _query_sql, operator = self.__query_by_attr(q, queries, alias)
elif q == "*":
continue
elif q:
if not isinstance(q, list):
q = q.replace("'", "\\'")
q = q.replace('"', '\\"')
q = q.replace("*", "%").replace('\\n', '%')
_query_sql = QUERY_CI_BY_NO_ATTR.format(q, alias)
else:
_query_sql = QUERY_CI_BY_NO_ATTR_IN.format(",".join("'{0}'".format(v) for v in q), alias)
q = q.replace("'", "\\'")
q = q.replace('"', '\\"')
q = q.replace("*", "%").replace('\\n', '%')
_query_sql = QUERY_CI_BY_NO_ATTR.format(q, alias)
if is_first and _query_sql and not self.only_type_query:
query_sql = "SELECT * FROM ({0}) AS {1}".format(_query_sql, alias)
@ -575,7 +479,7 @@ class Search(object):
def _filter_ids(self, query_sql):
if self.ci_ids:
return "SELECT * FROM ({0}) AS IN_QUERY WHERE IN_QUERY.ci_id IN ({1})".format(
query_sql, ",".join(list(set(map(str, self.ci_ids)))))
query_sql, ",".join(list(map(str, self.ci_ids))))
return query_sql
@ -600,15 +504,13 @@ class Search(object):
queries = handle_arg_list(self.orig_query)
queries = self._extra_handle_query_expr(queries)
queries = self.__confirm_type_first(queries)
current_app.logger.debug(queries)
_, query_sql, _ = self.__query_build_by_field(queries)
s = time.time()
if query_sql:
query_sql = self._filter_ids(query_sql)
if self.raw_ci_ids and not self.ci_ids:
return 0, []
self.query_sql = query_sql
# current_app.logger.debug(query_sql)
numfound, res = self._execute_sql(query_sql)
@ -637,22 +539,17 @@ class Search(object):
return facet_result
def _fl_build(self):
if isinstance(self.fl, list):
_fl = list()
for f in self.fl:
k, _, _, _ = self._attr_name_proc(f)
if k:
_fl.append(k)
_fl = list()
for f in self.fl:
k, _, _, _ = self._attr_name_proc(f)
if k:
_fl.append(k)
return _fl
else:
return self.fl
return _fl
def search(self):
numfound, ci_ids = self._query_build_raw()
ci_ids = list(map(str, ci_ids))
if self.only_ids:
return ci_ids
_fl = self._fl_build()
@ -665,8 +562,6 @@ class Search(object):
if ci_ids:
response = CIManager.get_cis_by_ids(ci_ids, ret_key=self.ret_key, fields=_fl, excludes=self.excludes)
for res in response:
if not res:
continue
ci_type = res.get("ci_type")
if ci_type not in counter.keys():
counter[ci_type] = 0
@ -674,8 +569,3 @@ class Search(object):
total = len(response)
return response, counter, total, self.page, numfound, facet
def get_ci_ids(self):
_, ci_ids = self._query_build_raw()
return ci_ids

View File

@ -1,40 +1,25 @@
# -*- coding:utf-8 -*-
from collections import Counter
from collections import defaultdict
import copy
import json
import networkx as nx
import sys
from collections import Counter
from flask import abort
from flask import current_app
from flask_login import current_user
from api.extensions import rd
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import ConstraintEnum
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search.ci.db.search import Search as SearchFromDB
from api.lib.cmdb.search.ci.es.search import Search as SearchFromES
from api.lib.cmdb.utils import TableMap
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.models.cmdb import CI
from api.models.cmdb import CITypeRelation
from api.models.cmdb import RelationType
from api.models.cmdb import CIRelation
class Search(object):
def __init__(self, root_id=None,
def __init__(self, root_id,
level=None,
query=None,
fl=None,
@ -44,9 +29,7 @@ class Search(object):
sort=None,
reverse=False,
ancestor_ids=None,
descendant_ids=None,
has_m2m=None,
root_parent_path=None):
has_m2m=None):
self.orig_query = query
self.fl = fl
self.facet_field = facet_field
@ -63,8 +46,6 @@ class Search(object):
level[0] if isinstance(level, list) and level else level)
self.ancestor_ids = ancestor_ids
self.descendant_ids = descendant_ids
self.root_parent_path = root_parent_path or []
self.has_m2m = has_m2m or False
if not self.has_m2m:
if self.ancestor_ids:
@ -75,23 +56,27 @@ class Search(object):
if _l < int(level) and c == ConstraintEnum.Many2Many:
self.has_m2m = True
self.type2filter_perms = {}
self.is_app_admin = is_app_admin('cmdb') or current_user.username == "worker"
def _get_ids(self, ids):
if self.level[-1] == 1 and len(ids) == 1:
if self.ancestor_ids is None:
return [i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0], to_dict=False)]
else:
seconds = {i.second_ci_id for i in CIRelation.get_by(first_ci_id=ids[0],
ancestor_ids=self.ancestor_ids,
to_dict=False)}
return list(seconds)
merge_ids = []
key = []
_tmp = []
for level in range(1, sorted(self.level)[-1] + 1):
if len(self.descendant_ids or []) >= level and self.type2filter_perms.get(self.descendant_ids[level - 1]):
id_filter_limit, _ = self._get_ci_filter(self.type2filter_perms[self.descendant_ids[level - 1]])
else:
id_filter_limit = {}
if not self.has_m2m:
key, prefix = list(map(str, ids)), REDIS_PREFIX_CI_RELATION
_tmp = map(lambda x: json.loads(x).keys(),
filter(lambda x: x is not None, rd.get(ids, REDIS_PREFIX_CI_RELATION) or []))
ids = [j for i in _tmp for j in i]
key, prefix = ids, REDIS_PREFIX_CI_RELATION
else:
if not self.ancestor_ids:
@ -107,16 +92,12 @@ class Search(object):
key = list(set(["{},{}".format(i, j) for idx, i in enumerate(key) for j in _tmp[idx]]))
prefix = REDIS_PREFIX_CI_RELATION2
if not key or id_filter_limit is None:
_tmp = list(map(lambda x: json.loads(x).keys() if x else [], rd.get(key, prefix) or []))
ids = [j for i in _tmp for j in i]
if not key:
return []
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
_tmp = [[i[0] for i in x if (not id_filter_limit or (
key[idx] not in id_filter_limit or int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
ids = [j for i in _tmp for j in i]
if level in self.level:
merge_ids.extend(ids)
@ -139,28 +120,7 @@ class Search(object):
return merge_ids
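The relation walk above reads Redis entries keyed by ci id (REDIS_PREFIX_CI_RELATION) whose values are JSON objects mapping child ci id to child type id; with many-to-many constraints the key becomes an "ancestor_ids,ci_id" path under REDIS_PREFIX_CI_RELATION2. A hedged sketch of one decoding round (values are made up):
import json
raw = '{"101": 7, "102": 7, "203": 9}'   # hypothetical cached value for ci id 42
children = json.loads(raw)               # child ci id -> child type id
child_ids = list(children.keys())        # ["101", "102", "203"], fed into the next level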
def _has_read_perm_from_parent_nodes(self):
self.root_parent_path = list(map(str, self.root_parent_path))
if str(self.root_id).isdigit() and str(self.root_id) not in self.root_parent_path:
self.root_parent_path.append(str(self.root_id))
self.root_parent_path = set(self.root_parent_path)
if self.is_app_admin:
self.type2filter_perms = {}
return True
res = ACLManager().get_resources(ResourceTypeEnum.CI_FILTER) or {}
self.type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res]))) or {}
for _, filters in self.type2filter_perms.items():
if set((filters.get('id_filter') or {}).keys()) & self.root_parent_path:
return True
return True
def search(self, only_ids=False):
use_ci_filter = len(self.descendant_ids or []) == self.level[0] - 1
parent_node_perm_passed = not self.is_app_admin and self._has_read_perm_from_parent_nodes()
def search(self):
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
cis = [CI.get_by_id(_id) or abort(404, ErrFormat.ci_not_found.format("id={}".format(_id))) for _id in ids]
@ -201,106 +161,42 @@ class Search(object):
page=self.page,
count=self.count,
sort=self.sort,
ci_ids=merge_ids,
parent_node_perm_passed=parent_node_perm_passed,
use_ci_filter=use_ci_filter,
only_ids=only_ids).search()
ci_ids=merge_ids).search()
def _get_ci_filter(self, filter_perms, ci_filters=None):
ci_filters = ci_filters or []
if ci_filters:
result = {}
for item in ci_filters:
res = SearchFromDB('_type:{},{}'.format(item['type_id'], item['ci_filter']),
count=sys.maxsize, parent_node_perm_passed=True).get_ci_ids()
if res:
result[item['type_id']] = set(res)
return {}, result if result else None
result = dict()
if filter_perms.get('id_filter'):
for k in filter_perms['id_filter']:
node_path = k.split(',')
if len(node_path) == 1:
result[int(node_path[0])] = 1
elif not self.has_m2m:
result.setdefault(node_path[-2], set()).add(int(node_path[-1]))
else:
result.setdefault(','.join(node_path[:-1]), set()).add(int(node_path[-1]))
if result:
return result, None
else:
return None, None
return {}, None
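The id_filter branch above turns node-path keys into per-parent allow sets: a single-element path marks a root that is fully allowed, while longer paths whitelist the last id under its parent. A hedged illustration with made-up paths (non-m2m case):
id_filter = {"12": None, "12,34": None, "12,34,56": None}   # keys are node paths
result = {}
for k in id_filter:
    node_path = k.split(",")
    if len(node_path) == 1:
        result[int(node_path[0])] = 1
    else:
        result.setdefault(node_path[-2], set()).add(int(node_path[-1]))
# -> {12: 1, "12": {34}, "34": {56}}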
def statistics(self, type_ids, need_filter=True):
def statistics(self, type_ids):
self.level = int(self.level)
acl = ACLManager('cmdb')
type2filter_perms = dict()
if not self.is_app_admin:
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
if res2:
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
_tmp, tmp_res = [], []
_tmp = []
level2ids = {}
for lv in range(1, self.level + 1):
level2ids[lv] = []
if need_filter:
id_filter_limit, ci_filter_limit = None, None
if len(self.descendant_ids or []) >= lv and type2filter_perms.get(self.descendant_ids[lv - 1]):
id_filter_limit, _ = self._get_ci_filter(type2filter_perms[self.descendant_ids[lv - 1]])
elif type_ids and self.level == lv:
ci_filters = [type2filter_perms[type_id] for type_id in type_ids if type_id in type2filter_perms]
if ci_filters:
id_filter_limit, ci_filter_limit = self._get_ci_filter({}, ci_filters=ci_filters)
else:
id_filter_limit = {}
else:
id_filter_limit = {}
else:
id_filter_limit, ci_filter_limit = {}, {}
if lv == 1:
if not self.has_m2m:
key, prefix = [str(i) for i in ids], REDIS_PREFIX_CI_RELATION
key, prefix = ids, REDIS_PREFIX_CI_RELATION
else:
key = ["{},{}".format(self.ancestor_ids, _id) for _id in ids]
if not self.ancestor_ids:
key, prefix = [str(i) for i in ids], REDIS_PREFIX_CI_RELATION
key, prefix = ids, REDIS_PREFIX_CI_RELATION
else:
key = ["{},{}".format(self.ancestor_ids, _id) for _id in ids]
prefix = REDIS_PREFIX_CI_RELATION2
level2ids[lv] = [[i] for i in key]
if not key or id_filter_limit is None:
_tmp = [[]] * len(ids)
if not key:
_tmp = []
continue
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
_tmp = []
if type_ids and lv == self.level:
_tmp = [[i for i in x if i[1] in type_ids and
(not id_filter_limit or (key[idx] not in id_filter_limit or
int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
_tmp = list(map(lambda x: [i for i in x if i[1] in type_ids],
(map(lambda x: list(json.loads(x).items()),
[i or '{}' for i in rd.get(key, prefix) or []]))))
else:
_tmp = [[i for i in x if (not id_filter_limit or (key[idx] not in id_filter_limit or
int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
if ci_filter_limit:
_tmp = [[j for j in i if j[1] not in ci_filter_limit or int(j[0]) in ci_filter_limit[j[1]]]
for i in _tmp]
_tmp = list(map(lambda x: list(json.loads(x).items()),
[i or '{}' for i in rd.get(key, prefix) or []]))
else:
for idx, item in enumerate(_tmp):
if item:
if not self.has_m2m:
@ -312,28 +208,19 @@ class Search(object):
level2ids[lv].append(key)
if key:
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
if type_ids and lv == self.level:
tmp_res = [[i for i in x if i[1] in type_ids and
(not id_filter_limit or (
key[idx] not in id_filter_limit or
int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
__tmp = map(lambda x: [(_id, type_id) for _id, type_id in json.loads(x).items()
if type_id in type_ids],
filter(lambda x: x is not None,
rd.get(key, prefix) or []))
else:
tmp_res = [[i for i in x if (not id_filter_limit or (
key[idx] not in id_filter_limit or
int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in
enumerate(res)]
if ci_filter_limit:
tmp_res = [[j for j in i if j[1] not in ci_filter_limit or
int(j[0]) in ci_filter_limit[j[1]]] for i in tmp_res]
__tmp = map(lambda x: list(json.loads(x).items()),
filter(lambda x: x is not None,
rd.get(key, prefix) or []))
else:
tmp_res = []
__tmp = []
if tmp_res:
_tmp[idx] = [j for i in tmp_res for j in i]
_tmp[idx] = [j for i in __tmp for j in i]
else:
_tmp[idx] = []
level2ids[lv].append([])
@ -344,251 +231,3 @@ class Search(object):
detail={str(_id): dict(Counter([i[1] for i in _tmp[idx]]).items()) for idx, _id in enumerate(ids)})
return result
def search_full(self, type_ids):
def _get_id2name(_type_id):
ci_type = CITypeCache.get(_type_id)
attr = AttributeCache.get(ci_type.unique_id)
value_table = TableMap(attr=attr).table
serializer = ValueTypeMap.serialize[attr.value_type]
unique_value = {i.ci_id: serializer(i.value) for i in value_table.get_by(attr_id=attr.id, to_dict=False)}
attr = AttributeCache.get(ci_type.show_id)
if attr:
value_table = TableMap(attr=attr).table
serializer = ValueTypeMap.serialize[attr.value_type]
show_value = {i.ci_id: serializer(i.value) for i in value_table.get_by(attr_id=attr.id, to_dict=False)}
else:
show_value = unique_value
return show_value, unique_value
self.level = int(self.level)
acl = ACLManager('cmdb')
type2filter_perms = dict()
if not self.is_app_admin:
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
if res2:
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
ids = [self.root_id] if not isinstance(self.root_id, list) else self.root_id
level_ids = [str(i) for i in ids]
result = []
id2children = {}
id2name = _get_id2name(type_ids[0])
for i in level_ids:
item = dict(id=int(i),
type_id=type_ids[0],
isLeaf=False,
title=id2name[0].get(int(i)),
uniqueValue=id2name[1].get(int(i)),
children=[])
result.append(item)
id2children[str(i)] = item['children']
for lv in range(1, self.level):
type_id = type_ids[lv]
if len(type_ids or []) >= lv and type2filter_perms.get(type_id):
id_filter_limit, _ = self._get_ci_filter(type2filter_perms[type_id])
else:
id_filter_limit = {}
if self.has_m2m and lv != 1:
key, prefix = [i for i in level_ids], REDIS_PREFIX_CI_RELATION2
else:
key, prefix = [i.split(',')[-1] for i in level_ids], REDIS_PREFIX_CI_RELATION
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
res = [[i for i in x if i[1] == type_id and (not id_filter_limit or (key[idx] not in id_filter_limit or
int(i[0]) in id_filter_limit[key[idx]]) or
int(i[0]) in id_filter_limit)] for idx, x in enumerate(res)]
_level_ids = []
id2name = _get_id2name(type_id)
for idx, node_path in enumerate(level_ids):
for child_id, _ in (res[idx] or []):
item = dict(id=int(child_id),
type_id=type_id,
isLeaf=True if lv == self.level - 1 else False,
title=id2name[0].get(int(child_id)),
uniqueValue=id2name[1].get(int(child_id)),
children=[])
id2children[node_path].append(item)
_node_path = "{},{}".format(node_path, child_id)
_level_ids.append(_node_path)
id2children[_node_path] = item['children']
level_ids = _level_ids
return result
@staticmethod
def _get_src_ids(src):
q = src.get('q') or ''
if not q.startswith('_type:'):
q = "_type:{},{}".format(src['type_id'], q)
return SearchFromDB(q, use_ci_filter=True, only_ids=True, count=100000).search()
@staticmethod
def _filter_target_ids(target_ids, type_ids, q):
if not q.startswith('_type:'):
q = "_type:({}),{}".format(";".join(map(str, type_ids)), q)
ci_ids = SearchFromDB(q, ci_ids=target_ids, use_ci_filter=True, only_ids=True, count=100000).search()
cis = CI.get_by(fl=['id', 'type_id'], only_query=True).filter(CI.id.in_(ci_ids))
return [(str(i.id), i.type_id) for i in cis]
@staticmethod
def _path2level(src_type_id, target_type_ids, path):
if not src_type_id or not target_type_ids:
return abort(400, ErrFormat.relation_path_search_src_target_required)
graph = nx.DiGraph()
graph.add_edges_from([(n, _path[idx + 1]) for _path in path for idx, n in enumerate(_path[:-1])])
relation_types = defaultdict(dict)
level2type = defaultdict(set)
type2show_key = dict()
for _path in path:
for idx, node in enumerate(_path[1:]):
level2type[idx + 1].add(node)
src = CITypeCache.get(_path[idx])
target = CITypeCache.get(node)
relation_type = RelationType.get_by(only_query=True).join(
CITypeRelation, CITypeRelation.relation_type_id == RelationType.id).filter(
CITypeRelation.parent_id == src.id).filter(CITypeRelation.child_id == target.id).first()
relation_types[src.alias].update({target.alias: relation_type.name})
if src.id not in type2show_key:
type2show_key[src.id] = AttributeCache.get(src.show_id or src.unique_id).name
if target.id not in type2show_key:
type2show_key[target.id] = AttributeCache.get(target.show_id or target.unique_id).name
nodes = graph.nodes()
return level2type, list(nodes), relation_types, type2show_key
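A hedged sketch of the bookkeeping _path2level does with networkx: each hop of a type-id path becomes a directed edge, and level2type records which model ids are allowed at each depth. The type ids below are made up.
import networkx as nx
path = [[1, 2, 3], [1, 4, 3]]   # hypothetical type-id paths, source -> ... -> target
graph = nx.DiGraph()
graph.add_edges_from([(n, p[idx + 1]) for p in path for idx, n in enumerate(p[:-1])])
level2type = {}
for p in path:
    for idx, node in enumerate(p[1:]):
        level2type.setdefault(idx + 1, set()).add(node)
# level2type -> {1: {2, 4}, 2: {3}}; graph.nodes() -> 1, 2, 3, 4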
def _build_graph(self, source_ids, source_type_id, level2type, target_type_ids, acl):
type2filter_perms = dict()
if not self.is_app_admin:
res2 = acl.get_resources(ResourceTypeEnum.CI_FILTER)
if res2:
type2filter_perms = CIFilterPermsCRUD().get_by_ids(list(map(int, [i['name'] for i in res2])))
target_type_ids = set(target_type_ids)
graph = nx.DiGraph()
target_ids = []
key = [(str(i), source_type_id) for i in source_ids]
graph.add_nodes_from(key)
for level in level2type:
filter_type_ids = level2type[level]
id_filter_limit = dict()
for _type_id in filter_type_ids:
if type2filter_perms.get(_type_id):
_id_filter_limit, _ = self._get_ci_filter(type2filter_perms[_type_id])
id_filter_limit.update(_id_filter_limit)
has_target = filter_type_ids & target_type_ids
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get([i[0] for i in key],
REDIS_PREFIX_CI_RELATION) or []]]
_key = []
for idx, _id in enumerate(key):
valid_targets = [i for i in res[idx] if i[1] in filter_type_ids and
(not id_filter_limit or int(i[0]) in id_filter_limit)]
_key.extend(valid_targets)
graph.add_edges_from(zip([_id] * len(valid_targets), valid_targets))
if has_target:
target_ids.extend([j[0] for i in res for j in i if j[1] in target_type_ids])
key = copy.deepcopy(_key)
return graph, target_ids
@staticmethod
def _find_paths(graph, source_ids, source_type_id, target_ids, valid_path, max_depth=6):
paths = []
for source_id in source_ids:
_paths = nx.all_simple_paths(graph,
source=(source_id, source_type_id),
target=target_ids,
cutoff=max_depth)
for __path in _paths:
if tuple([i[1] for i in __path]) in valid_path:
paths.append([i[0] for i in __path])
return paths
@staticmethod
def _wrap_path_result(paths, types, valid_path, target_types, type2show_key):
ci_ids = [j for i in paths for j in i]
response, _, _, _, _, _ = SearchFromDB("_type:({})".format(";".join(map(str, types))),
use_ci_filter=False,
ci_ids=list(map(int, ci_ids)),
count=1000000).search()
id2ci = {str(i.get('_id')): i if i['_type'] in target_types else {
type2show_key[i['_type']]: i[type2show_key[i['_type']]],
"ci_type_alias": i["ci_type_alias"],
"_type": i["_type"],
} for i in response}
result = defaultdict(list)
counter = defaultdict(int)
for path in paths:
key = "-".join([id2ci.get(i, {}).get('ci_type_alias') or '' for i in path])
if tuple([id2ci.get(i, {}).get('_type') for i in path]) in valid_path:
counter[key] += 1
result[key].append(path)
return result, counter, id2ci
def search_by_path(self, source, target, path):
"""
:param source: {type_id: id, q: expr}
:param target: {type_ids: [id], q: expr}
:param path: [source_type_id, ..., target_type_id], use type id
:return:
"""
acl = ACLManager('cmdb')
if not self.is_app_admin:
res = {i['name'] for i in acl.get_resources(ResourceTypeEnum.CI_TYPE)}
for type_id in (source.get('type_id') and [source['type_id']] or []) + (target.get('type_ids') or []):
_type = CITypeCache.get(type_id)
if _type and _type.name not in res:
return abort(403, ErrFormat.no_permission.format(_type.alias, PermEnum.READ))
target['type_ids'] = [i[-1] for i in path]
level2type, types, relation_types, type2show_key = self._path2level(
source.get('type_id'), target.get('type_ids'), path)
if not level2type:
return [], {}, 0, self.page, 0, {}, {}
source_ids = self._get_src_ids(source)
graph, target_ids = self._build_graph(source_ids, source['type_id'], level2type, target['type_ids'], acl)
target_ids = self._filter_target_ids(target_ids, target['type_ids'], target.get('q') or '')
paths = self._find_paths(graph,
source_ids,
source['type_id'],
set(target_ids),
{tuple(i): 1 for i in path})
numfound = len(paths)
paths = paths[(self.page - 1) * self.count:self.page * self.count]
response, counter, id2ci = self._wrap_path_result(paths,
types,
{tuple(i): 1 for i in path},
set(target.get('type_ids') or []),
type2show_key)
return response, counter, len(paths), self.page, numfound, id2ci, relation_types, type2show_key
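A hedged usage sketch matching the docstring above, assuming the newer constructor where root_id is optional; the type ids and query expressions are hypothetical.
s = Search()
(response, counter, paths_count, page,
 numfound, id2ci, relation_types, type2show_key) = s.search_by_path(
    source={"type_id": 1, "q": "name:payment*"},   # source model and optional filter
    target={"type_ids": [3], "q": ""},             # target model(s)
    path=[[1, 2, 3]],                              # type-id chain from source to target
)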

View File

@ -1,251 +0,0 @@
# -*- coding:utf-8 -*-
import json
from flask import abort
from flask import current_app
from flask_login import current_user
from werkzeug.exceptions import BadRequest
from api.extensions import rd
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import is_app_admin
from api.models.cmdb import TopologyView
from api.models.cmdb import TopologyViewGroup
class TopologyViewManager(object):
group_cls = TopologyViewGroup
cls = TopologyView
@classmethod
def get_name_by_id(cls, _id):
res = cls.cls.get_by_id(_id)
return res and res.name
def get_view_by_id(self, _id):
res = self.cls.get_by_id(_id)
return res and res.to_dict() or {}
@classmethod
def add_group(cls, name, order):
if order is None:
cur_max_order = cls.group_cls.get_by(only_query=True).order_by(cls.group_cls.order.desc()).first()
cur_max_order = cur_max_order and cur_max_order.order or 0
order = cur_max_order + 1
cls.group_cls.get_by(name=name, first=True, to_dict=False) and abort(
400, ErrFormat.topology_group_exists.format(name))
return cls.group_cls.create(name=name, order=order)
def update_group(self, group_id, name, view_ids):
existed = self.group_cls.get_by_id(group_id) or abort(404, ErrFormat.not_found)
if name is not None and name != existed.name:
existed.update(name=name)
for idx, view_id in enumerate(view_ids):
view = self.cls.get_by_id(view_id)
if view is not None:
view.update(group_id=group_id, order=idx)
return existed.to_dict()
@classmethod
def delete_group(cls, _id):
existed = cls.group_cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
if cls.cls.get_by(group_id=_id, first=True):
return abort(400, ErrFormat.topo_view_exists_cannot_delete_group)
existed.soft_delete()
@classmethod
def group_order(cls, group_ids):
for idx, group_id in enumerate(group_ids):
group = cls.group_cls.get_by_id(group_id)
group.update(order=idx + 1)
@classmethod
def add(cls, name, group_id, option, order=None, **kwargs):
cls.cls.get_by(name=name, first=True) and abort(400, ErrFormat.topology_exists.format(name))
if order is None:
cur_max_order = cls.cls.get_by(group_id=group_id, only_query=True).order_by(
cls.cls.order.desc()).first()
cur_max_order = cur_max_order and cur_max_order.order or 0
order = cur_max_order + 1
inst = cls.cls.create(name=name, group_id=group_id, option=option, order=order, **kwargs).to_dict()
if current_app.config.get('USE_ACL'):
try:
ACLManager().add_resource(name, ResourceTypeEnum.TOPOLOGY_VIEW)
except BadRequest:
pass
ACLManager().grant_resource_to_role(name,
current_user.username,
ResourceTypeEnum.TOPOLOGY_VIEW)
return inst
@classmethod
def update(cls, _id, **kwargs):
existed = cls.cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
existed_name = existed.name
inst = existed.update(filter_none=False, **kwargs).to_dict()
if current_app.config.get('USE_ACL') and existed_name != kwargs.get('name') and kwargs.get('name'):
try:
ACLManager().update_resource(existed_name, kwargs['name'], ResourceTypeEnum.TOPOLOGY_VIEW)
except BadRequest:
pass
return inst
@classmethod
def delete(cls, _id):
existed = cls.cls.get_by_id(_id) or abort(404, ErrFormat.not_found)
existed.soft_delete()
if current_app.config.get("USE_ACL"):
ACLManager().del_resource(existed.name, ResourceTypeEnum.TOPOLOGY_VIEW)
@classmethod
def group_inner_order(cls, _ids):
for idx, _id in enumerate(_ids):
topology = cls.cls.get_by_id(_id)
topology.update(order=idx + 1)
@classmethod
def get_all(cls):
resources = None
if current_app.config.get('USE_ACL') and not is_app_admin('cmdb'):
resources = set([i.get('name') for i in ACLManager().get_resources(ResourceTypeEnum.TOPOLOGY_VIEW)])
groups = cls.group_cls.get_by(to_dict=True)
groups = sorted(groups, key=lambda x: x['order'])
group2pos = {group['id']: idx for idx, group in enumerate(groups)}
topo_views = sorted(cls.cls.get_by(to_dict=True), key=lambda x: x['order'])
other_group = dict(views=[])
for view in topo_views:
if resources is not None and view['name'] not in resources:
continue
if view['group_id']:
groups[group2pos[view['group_id']]].setdefault('views', []).append(view)
else:
other_group['views'].append(view)
if other_group['views']:
groups.append(other_group)
return groups
@staticmethod
def relation_from_ci_type(type_id):
nodes, edges = CITypeRelationManager.get_relations_by_type_id(type_id)
return dict(nodes=nodes, edges=edges)
def topology_view(self, view_id=None, preview=None):
if view_id is not None:
view = self.cls.get_by_id(view_id) or abort(404, ErrFormat.not_found)
central_node_type, central_node_instances, path = (view.central_node_type,
view.central_node_instances, view.path)
else:
central_node_type = preview.get('central_node_type')
central_node_instances = preview.get('central_node_instances')
path = preview.get('path')
nodes, links = [], []
_type = CITypeCache.get(central_node_type)
if not _type:
return dict(nodes=nodes, links=links)
type2meta = {_type.id: _type.icon}
root_ids = []
show_key = AttributeCache.get(_type.show_id or _type.unique_id)
q = (central_node_instances[2:] if central_node_instances.startswith('q=') else
central_node_instances)
s = ci_search(q, fl=['_id', show_key.name], use_id_filter=False, use_ci_filter=False, count=1000000)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
current_app.logger.info(e)
return dict(nodes=nodes, links=links)
for i in response:
root_ids.append(i['_id'])
nodes.append(dict(id=str(i['_id']), name=i[show_key.name], type_id=central_node_type))
if not root_ids:
return dict(nodes=nodes, links=links)
prefix = REDIS_PREFIX_CI_RELATION
key = list(map(str, root_ids))
id2node = {}
for level in sorted([i for i in path.keys() if int(i) > 0]):
type_ids = {int(i) for i in path[level]}
res = [json.loads(x).items() for x in [i or '{}' for i in rd.get(key, prefix) or []]]
new_key = []
for idx, from_id in enumerate(key):
for to_id, type_id in res[idx]:
if type_id in type_ids:
links.append({'from': from_id, 'to': to_id})
id2node[to_id] = {'id': to_id, 'type_id': type_id}
new_key.append(to_id)
if type_id not in type2meta:
type2meta[type_id] = CITypeCache.get(type_id).icon
key = new_key
ci_ids = list(map(int, root_ids))
for level in sorted([i for i in path.keys() if int(i) < 0]):
type_ids = {int(i) for i in path[level]}
res = CIRelationManager.get_parent_ids(ci_ids)
_ci_ids = []
for to_id in res:
for from_id, type_id in res[to_id]:
if type_id in type_ids:
from_id, to_id = str(from_id), str(to_id)
links.append({'from': from_id, 'to': to_id})
id2node[from_id] = {'id': str(from_id), 'type_id': type_id}
_ci_ids.append(from_id)
if type_id not in type2meta:
type2meta[type_id] = CITypeCache.get(type_id).icon
ci_ids = _ci_ids
fl = set()
type_ids = {t for lv in path if lv != '0' for t in path[lv]}
type2show = {}
for type_id in type_ids:
ci_type = CITypeCache.get(type_id)
if ci_type:
attr = AttributeCache.get(ci_type.show_id or ci_type.unique_id)
if attr:
fl.add(attr.name)
type2show[type_id] = attr.name
if id2node:
s = ci_search("_id:({})".format(';'.join(id2node.keys())), fl=list(fl),
use_id_filter=False, use_ci_filter=False, count=1000000)
try:
response, _, _, _, _, _ = s.search()
except SearchError:
return dict(nodes=nodes, links=links)
for i in response:
id2node[str(i['_id'])]['name'] = i[type2show[str(i['_type'])]]
nodes.extend(id2node.values())
return dict(nodes=nodes, links=links, type2meta=type2meta)
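For orientation, a minimal sketch of the `preview` payload this method walks and the shape it returns; only the field names come from the code above, the concrete ids and values are hypothetical.
preview = {
    "central_node_type": 5,                    # CIType id of the central node
    "central_node_instances": "q=name:web*",   # CI search query, optional "q=" prefix
    "path": {
        "1": ["7"],    # positive levels: child CIType ids, walked via the Redis relation cache
        "-1": ["3"],   # negative levels: parent CIType ids, via CIRelationManager.get_parent_ids
    },
}
# Result shape:
# {"nodes": [{"id": "101", "name": "web-01", "type_id": 5}, ...],
#  "links": [{"from": "101", "to": "202"}, ...],
#  "type2meta": {5: "icon-host", ...}}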

View File

@ -7,52 +7,25 @@ import json
import re
import six
from flask import current_app
import api.models.cmdb as model
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.resp_format import ErrFormat
TIME_RE = re.compile(r'(?:[01]\d|2[0-3]):[0-5]\d:[0-5]\d')
class ValueDeserializeError(Exception):
pass
def string2int(x):
v = int(float(x))
if v > 2147483647:
raise ValueDeserializeError(ErrFormat.attribute_value_out_of_range)
return v
return int(float(x))
def str2date(x):
def str2datetime(x):
try:
return datetime.datetime.strptime(x, "%Y-%m-%d").date()
except ValueError:
pass
try:
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date()
except ValueError:
pass
def str2datetime(x):
x = x.replace('T', ' ')
x = x.replace('Z', '')
try:
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")
except ValueError:
pass
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M")
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")
class ValueTypeMap(object):
@ -62,9 +35,8 @@ class ValueTypeMap(object):
ValueTypeEnum.TEXT: lambda x: x,
ValueTypeEnum.TIME: lambda x: TIME_RE.findall(x)[0],
ValueTypeEnum.DATETIME: str2datetime,
ValueTypeEnum.DATE: str2date,
ValueTypeEnum.DATE: str2datetime,
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
}
serialize = {
@ -75,7 +47,6 @@ class ValueTypeMap(object):
ValueTypeEnum.DATE: lambda x: x.strftime("%Y-%m-%d") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.DATETIME: lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if not isinstance(x, six.string_types) else x,
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
}
serialize2 = {
@ -86,7 +57,6 @@ class ValueTypeMap(object):
ValueTypeEnum.DATE: lambda x: (x.decode() if not isinstance(x, six.string_types) else x).split()[0],
ValueTypeEnum.DATETIME: lambda x: x.decode() if not isinstance(x, six.string_types) else x,
ValueTypeEnum.JSON: lambda x: json.loads(x) if isinstance(x, six.string_types) and x else x,
ValueTypeEnum.BOOL: lambda x: x in current_app.config.get('BOOL_TRUE'),
}
choice = {
@ -108,7 +78,6 @@ class ValueTypeMap(object):
'index_{0}'.format(ValueTypeEnum.TIME): model.CIIndexValueText,
'index_{0}'.format(ValueTypeEnum.FLOAT): model.CIIndexValueFloat,
'index_{0}'.format(ValueTypeEnum.JSON): model.CIValueJson,
'index_{0}'.format(ValueTypeEnum.BOOL): model.CIIndexValueInteger,
}
table_name = {
@ -121,7 +90,6 @@ class ValueTypeMap(object):
'index_{0}'.format(ValueTypeEnum.TIME): 'c_value_index_texts',
'index_{0}'.format(ValueTypeEnum.FLOAT): 'c_value_index_floats',
'index_{0}'.format(ValueTypeEnum.JSON): 'c_value_json',
'index_{0}'.format(ValueTypeEnum.BOOL): 'c_value_index_integers',
}
es_type = {

View File

@ -3,18 +3,16 @@
from __future__ import unicode_literals
import importlib.util
import copy
import jinja2
import imp
import os
import re
import tempfile
import jinja2
from flask import abort
from flask import current_app
from jinja2schema import infer
from jinja2schema import to_json_schema
from werkzeug.exceptions import BadRequest
from api.extensions import db
from api.lib.cmdb.attribute import AttributeManager
@ -25,7 +23,6 @@ from api.lib.cmdb.const import ValueTypeEnum
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.utils import TableMap
from api.lib.cmdb.utils import ValueDeserializeError
from api.lib.cmdb.utils import ValueTypeMap
from api.lib.utils import handle_arg_list
from api.models.cmdb import CI
@ -47,7 +44,7 @@ class AttributeValueManager(object):
"""
return AttributeCache.get(key)
def get_attr_values(self, fields, ci_id, ret_key="name", unique_key=None, use_master=False, enum_map=None):
def get_attr_values(self, fields, ci_id, ret_key="name", unique_key=None, use_master=False):
"""
:param fields:
@ -55,7 +52,6 @@ class AttributeValueManager(object):
:param ret_key: It can be name or alias
:param unique_key: primary attribute
:param use_master: Only for master-slave read-write separation
:param enum_map:
:return:
"""
res = dict()
@ -77,12 +73,6 @@ class AttributeValueManager(object):
else:
res[field_name] = ValueTypeMap.serialize[attr.value_type](rs[0].value) if rs else None
if enum_map and field_name in enum_map:
if attr.is_list:
res[field_name] = [enum_map[field_name].get(i, i) for i in res[field_name]]
else:
res[field_name] = enum_map[field_name].get(res[field_name], res[field_name])
if unique_key is not None and attr.id == unique_key.id and rs:
res['unique'] = unique_key.name
res['unique_alias'] = unique_key.alias
@ -90,20 +80,16 @@ class AttributeValueManager(object):
return res
@staticmethod
def _deserialize_value(alias, value_type, value):
def _deserialize_value(value_type, value):
if not value:
return value
deserialize = ValueTypeMap.deserialize[value_type]
try:
v = deserialize(value)
if value_type in (ValueTypeEnum.DATE, ValueTypeEnum.DATETIME):
return str(v)
return v
except ValueDeserializeError as e:
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, e))
except ValueError:
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, value))
return abort(400, ErrFormat.attribute_value_invalid.format(value))
@staticmethod
def _check_is_choice(attr, value_type, value):
@ -131,33 +117,19 @@ class AttributeValueManager(object):
if type_attr and type_attr.is_required and not value and value != 0:
return abort(400, ErrFormat.attribute_value_required.format(attr.alias))
@staticmethod
def check_re(expr, alias, value):
if not re.compile(expr).match(str(value)):
return abort(400, ErrFormat.attribute_value_invalid2.format(alias, value))
def _validate(self, attr, value, value_table, ci=None, type_id=None, ci_id=None, type_attr=None):
ci = ci or {}
v = self._deserialize_value(attr.value_type, value)
def _validate(self, attr, value, value_table, ci=None, type_id=None, ci_id=None, type_attr=None, unique_name=None):
if not attr.is_reference:
ci = ci or {}
v = self._deserialize_value(attr.alias, attr.value_type, value)
attr.is_choice and value and self._check_is_choice(attr, attr.value_type, v)
else:
v = value or None
(attr.is_unique or attr.name == unique_name) and self._check_is_unique(
attr.is_choice and value and self._check_is_choice(attr, attr.value_type, v)
attr.is_unique and self._check_is_unique(
value_table, attr, ci and ci.id or ci_id, ci and ci.type_id or type_id, v)
self._check_is_required(ci and ci.type_id or type_id, attr, v, type_attr=type_attr)
if attr.is_reference:
return v
if v == "" and attr.value_type not in (ValueTypeEnum.TEXT,):
v = None
if attr.re_check and value:
self.check_re(attr.re_check, attr.alias, value)
return v
@staticmethod
@ -165,10 +137,9 @@ class AttributeValueManager(object):
return AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id)
@staticmethod
def write_change2(changed, record_id=None, ticket_id=None):
def write_change2(changed, record_id=None):
for ci_id, attr_id, operate_type, old, new, type_id in changed:
record_id = AttributeHistoryManger.add(record_id, ci_id, [(attr_id, operate_type, old, new)], type_id,
ticket_id=ticket_id,
commit=False, flush=False)
try:
db.session.commit()
@ -198,11 +169,11 @@ class AttributeValueManager(object):
try:
path = script_f.name
name = os.path.basename(path)[:-3]
dir_name, name = os.path.dirname(path), os.path.basename(path)[:-3]
spec = importlib.util.spec_from_file_location(name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
fp, path, desc = imp.find_module(name, [dir_name])
mod = imp.load_module(name, fp, path, desc)
if hasattr(mod, 'computed'):
return mod.computed()
@ -237,10 +208,7 @@ class AttributeValueManager(object):
if computed_value is not None:
ci_dict[attr['name']] = computed_value
def valid_attr_value(self, ci_dict, type_id, ci_id, name2attr,
alias2attr=None,
ci_attr2type_attr=None,
unique_name=None):
def valid_attr_value(self, ci_dict, type_id, ci_id, name2attr, alias2attr=None, ci_attr2type_attr=None):
key2attr = dict()
alias2attr = alias2attr or {}
ci_attr2type_attr = ci_attr2type_attr or {}
@ -253,29 +221,17 @@ class AttributeValueManager(object):
try:
if attr.is_list:
if isinstance(value, dict):
if value.get('op') == "delete":
value['v'] = [ValueTypeMap.serialize[attr.value_type](
self._deserialize_value(attr.alias, attr.value_type, i))
for i in handle_arg_list(value['v'])]
continue
_value = value.get('v') or []
else:
_value = value
value_list = [self._validate(attr, i, value_table, ci=None, type_id=type_id, ci_id=ci_id,
type_attr=ci_attr2type_attr.get(attr.id))
for i in handle_arg_list(_value)]
ci_dict[key] = value_list if not isinstance(value, dict) else dict(op=value.get('op'), v=value_list)
for i in handle_arg_list(value)]
ci_dict[key] = value_list
if not value_list:
self._check_is_required(type_id, attr, '')
else:
value = self._validate(attr, value, value_table, ci=None, type_id=type_id, ci_id=ci_id,
type_attr=ci_attr2type_attr.get(attr.id),
unique_name=unique_name)
type_attr=ci_attr2type_attr.get(attr.id))
ci_dict[key] = value
except BadRequest as e:
raise
except Exception as e:
current_app.logger.warning(str(e))
@ -284,17 +240,15 @@ class AttributeValueManager(object):
return key2attr
def create_or_update_attr_value(self, ci, ci_dict, key2attr, ticket_id=None):
def create_or_update_attr_value(self, ci, ci_dict, key2attr):
"""
add or update attribute value, then write history
:param ci: instance object
:param ci_dict: attribute dict
:param key2attr: attr key to attr
:param ticket_id:
:return:
"""
changed = []
has_dynamic = False
for key, value in ci_dict.items():
attr = key2attr.get(key)
if not attr:
@ -303,90 +257,46 @@ class AttributeValueManager(object):
if attr.is_list:
existed_attrs = value_table.get_by(attr_id=attr.id, ci_id=ci.id, to_dict=False)
existed_values = [(ValueTypeMap.serialize[attr.value_type](i.value) if
i.value or i.value == 0 else i.value) for i in existed_attrs]
existed_values = [i.value for i in existed_attrs]
added = set(value) - set(existed_values)
deleted = set(existed_values) - set(value)
for v in added:
value_table.create(ci_id=ci.id, attr_id=attr.id, value=v, flush=False, commit=False)
changed.append((ci.id, attr.id, OperateType.ADD, None, v, ci.type_id))
if isinstance(value, dict):
if value.get('op') == "add":
for v in (value.get('v') or []):
if v not in existed_values:
value_table.create(ci_id=ci.id, attr_id=attr.id, value=v, flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.ADD, None, v, ci.type_id))
else:
has_dynamic = True
elif value.get('op') == "delete":
for v in (value.get('v') or []):
if v in existed_values:
existed_attrs[existed_values.index(v)].delete(flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.DELETE, v, None, ci.type_id))
else:
has_dynamic = True
else:
# Find the first position at which the new list diverges from the existing values
min_len = min(len(value), len(existed_values))
index = 0
while index < min_len:
if value[index] != existed_values[index]:
break
index += 1
# Delete first and then add to ensure id sorting
for idx in range(index, len(existed_attrs)):
existed_attr = existed_attrs[idx]
existed_attr.delete(flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.DELETE, existed_values[idx], None, ci.type_id))
else:
has_dynamic = True
for idx in range(index, len(value)):
value_table.create(ci_id=ci.id, attr_id=attr.id, value=value[idx], flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.ADD, None, value[idx], ci.type_id))
else:
has_dynamic = True
for v in deleted:
existed_attr = existed_attrs[existed_values.index(v)]
existed_attr.delete(flush=False, commit=False)
changed.append((ci.id, attr.id, OperateType.DELETE, v, None, ci.type_id))
else:
existed_attr = value_table.get_by(attr_id=attr.id, ci_id=ci.id, first=True, to_dict=False)
existed_value = existed_attr and existed_attr.value
existed_value = (ValueTypeMap.serialize[attr.value_type](existed_value) if
existed_value or existed_value == 0 else existed_value)
if existed_value is None and value is not None:
value_table.create(ci_id=ci.id, attr_id=attr.id, value=value, flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.ADD, None, value, ci.type_id))
else:
has_dynamic = True
changed.append((ci.id, attr.id, OperateType.ADD, None, value, ci.type_id))
else:
if existed_value != value and existed_attr:
if existed_value != value:
if value is None:
existed_attr.delete(flush=False, commit=False)
else:
existed_attr.update(value=value, flush=False, commit=False)
if not attr.is_dynamic:
changed.append((ci.id, attr.id, OperateType.UPDATE, existed_value, value, ci.type_id))
else:
has_dynamic = True
changed.append((ci.id, attr.id, OperateType.UPDATE, existed_value, value, ci.type_id))
if changed or has_dynamic:
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.warning(str(e))
return abort(400, ErrFormat.attribute_value_unknown_error.format(e.args[0]))
try:
db.session.commit()
except Exception as e:
db.session.rollback()
current_app.logger.warning(str(e))
return abort(400, ErrFormat.attribute_value_unknown_error.format(e.args[0]))
return self.write_change2(changed, ticket_id=ticket_id), has_dynamic
else:
return None, has_dynamic
return self.write_change2(changed)
@staticmethod
def delete_attr_value(attr_id, ci_id, commit=True):
def delete_attr_value(attr_id, ci_id):
attr = AttributeCache.get(attr_id)
if attr is not None:
value_table = TableMap(attr=attr).table
for item in value_table.get_by(attr_id=attr.id, ci_id=ci_id, to_dict=False):
item.delete(commit=commit)
item.delete()
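One variant of the multi-value branch in create_or_update_attr_value above compares the new list against the stored rows position by position, deletes from the first differing index onward and then re-adds the tail, so row ids keep the list order. A standalone sketch of that comparison (plain prints stand in for the delete/create calls):
def diff_list_values(existed_values, new_values):
    index = 0
    min_len = min(len(new_values), len(existed_values))
    while index < min_len:                       # find the first position that changed
        if new_values[index] != existed_values[index]:
            break
        index += 1
    for v in existed_values[index:]:             # delete first ...
        print("delete", v)
    for v in new_values[index:]:                 # ... then add, to keep id ordering
        print("add", v)

diff_list_values(["a", "b", "c"], ["a", "x"])    # delete b, delete c, add x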

View File

@ -10,11 +10,6 @@ from api.lib.perm.acl.role import RoleCRUD, RoleRelationCRUD
from api.lib.perm.acl.user import UserCRUD
def validate_app(app_id):
app = AppCache.get(app_id)
return app.id if app else None
class ACLManager(object):
def __init__(self, app_name='acl', uid=None):
self.log = current_app.logger
@ -138,8 +133,7 @@ class ACLManager(object):
numfound, res = ResourceCRUD.search(q, u, self.validate_app().id, rt_id, page, page_size)
return res
@staticmethod
def grant_resource(rid, resource_id, perms):
def grant_resource(self, rid, resource_id, perms):
PermissionCRUD.grant(rid, perms, resource_id=resource_id, group_id=None)
@staticmethod
@ -147,7 +141,3 @@ class ACLManager(object):
rt = AppCRUD.add(**payload)
return rt.to_dict()
def role_has_perms(self, rid, resource_name, resource_type_name, perm):
app_id = validate_app(self.app_name)
return RoleCRUD.has_permission(rid, resource_name, resource_type_name, app_id, perm)

View File

@ -35,32 +35,3 @@ AuthCommonConfigAutoRedirect = 'auto_redirect'
class TestType(BaseEnum):
Connect = 'connect'
Login = 'login'
MIMEExtMap = {
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
'application/msword': '.doc',
'application/vnd.ms-word.document.macroEnabled.12': '.docm',
'application/vnd.ms-excel': '.xls',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
'application/vnd.ms-excel.sheet.macroEnabled.12': '.xlsm',
'application/vnd.ms-powerpoint': '.ppt',
'application/vnd.openxmlformats-officedocument.presentationml.presentation': '.pptx',
'application/vnd.ms-powerpoint.presentation.macroEnabled.12': '.pptm',
'application/zip': '.zip',
'application/x-7z-compressed': '.7z',
'application/json': '.json',
'application/pdf': '.pdf',
'image/png': '.png',
'image/bmp': '.bmp',
'image/prs.btif': '.btif',
'image/gif': '.gif',
'image/jpeg': '.jpg',
'image/tiff': '.tif',
'image/vnd.microsoft.icon': '.ico',
'image/webp': '.webp',
'image/svg+xml': '.svg',
'image/vnd.adobe.photoshop': '.psd',
'text/plain': '.txt',
'text/csv': '.csv',
}
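A hedged one-liner showing how a MIME-to-extension table like this is normally consumed (the content type is illustrative):
ext = MIMEExtMap.get("application/pdf", "")   # -> ".pdf"; unknown types fall back to ''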

View File

@ -1,39 +0,0 @@
import functools
from flask import abort, session, current_app
from api.lib.common_setting.acl import ACLManager
from api.lib.common_setting.resp_format import ErrFormat
from api.lib.perm.acl.acl import is_app_admin
def perms_role_required(app_name, resource_type_name, resource_name, perm, role_name=None):
def decorator_perms_role_required(func):
@functools.wraps(func)
def wrapper_required(*args, **kwargs):
acl = ACLManager(app_name)
has_perms = False
try:
has_perms = acl.role_has_perms(session["acl"]['rid'], resource_name, resource_type_name, perm)
except Exception as e:
current_app.logger.error(f"acl role_has_perms err: {e}")
# resource_type does not exist, fall back to the role check
if role_name:
if role_name not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin(app_name):
abort(403, ErrFormat.role_required.format(role_name))
return func(*args, **kwargs)
else:
abort(403, ErrFormat.resource_no_permission.format(resource_name, perm))
if not has_perms:
if role_name:
if role_name not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin(app_name):
abort(403, ErrFormat.role_required.format(role_name))
else:
abort(403, ErrFormat.resource_no_permission.format(resource_name, perm))
return func(*args, **kwargs)
return wrapper_required
return decorator_perms_role_required
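A hedged usage sketch for the decorator above; the app, resource type, resource and role names are placeholders rather than configuration taken from this repo:
@perms_role_required("backend", "操作权限", "公司信息", "read", role_name="admin")
def company_info_view():
    return {"status": "ok"}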

View File

@ -470,58 +470,8 @@ class EditDepartmentInACL(object):
return f"edit_department_name_in_acl, rid: {d_rid}, success"
@classmethod
def remove_from_old_department_role(cls, e_list, acl):
result = []
for employee in e_list:
employee_acl_rid = employee.get('e_acl_rid')
if employee_acl_rid == 0:
result.append("employee_acl_rid == 0")
continue
cls.remove_single_employee_from_old_department(acl, employee, result)
@staticmethod
def remove_single_employee_from_old_department(acl, employee, result):
from api.models.acl import Role
old_department = DepartmentCRUD.get_department_by_id(employee.get('department_id'), False)
if not old_department:
return False
old_role = Role.get_by(first=True, name=old_department.department_name, app_id=None)
old_d_rid_in_acl = old_role.get('id') if old_role else 0
if old_d_rid_in_acl == 0:
return False
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
payload = {
'app_id': 'acl',
'parent_id': d_acl_rid,
}
try:
acl.remove_user_from_role(employee.get('e_acl_rid'), payload)
current_app.logger.info(f"remove {employee.get('e_acl_rid')} from {d_acl_rid}")
except Exception as e:
err = f"remove_user_from_role e_acl_rid: {employee.get('e_acl_rid')}, parent_id: {d_acl_rid}, err: {e}"
result.append(err)
return True
@staticmethod
def add_employee_to_new_department(acl, employee_acl_rid, new_department_acl_rid, result):
payload = {
'app_id': 'acl',
'child_ids': [employee_acl_rid],
}
try:
acl.add_user_to_role(new_department_acl_rid, payload)
current_app.logger.info(f"add {employee_acl_rid} to {new_department_acl_rid}")
except Exception as e:
result.append(
f"add_user_to_role employee_acl_rid: {employee_acl_rid}, parent_id: {new_department_acl_rid}, \
err: {e}")
@classmethod
def edit_employee_department_in_acl(cls, e_list: list, new_d_id: int, op_uid: int):
def edit_employee_department_in_acl(e_list: list, new_d_id: int, op_uid: int):
result = []
new_department = DepartmentCRUD.get_department_by_id(new_d_id, False)
if not new_department:
@ -531,11 +481,7 @@ class EditDepartmentInACL(object):
from api.models.acl import Role
new_role = Role.get_by(first=True, name=new_department.department_name, app_id=None)
new_d_rid_in_acl = new_role.get('id') if new_role else 0
acl = ACLManager('acl', str(op_uid))
if new_d_rid_in_acl == 0:
# only remove from old department role
cls.remove_from_old_department_role(e_list, acl)
return
if new_d_rid_in_acl != new_department.acl_rid:
@ -545,15 +491,43 @@ class EditDepartmentInACL(object):
new_department_acl_rid = new_department.acl_rid if new_d_rid_in_acl == new_department.acl_rid else \
new_d_rid_in_acl
acl = ACLManager('acl', str(op_uid))
for employee in e_list:
old_department = DepartmentCRUD.get_department_by_id(employee.get('department_id'), False)
if not old_department:
continue
employee_acl_rid = employee.get('e_acl_rid')
if employee_acl_rid == 0:
result.append("employee_acl_rid == 0")
result.append(f"employee_acl_rid == 0")
continue
cls.remove_single_employee_from_old_department(acl, employee, result)
old_role = Role.get_by(first=True, name=old_department.department_name, app_id=None)
old_d_rid_in_acl = old_role.get('id') if old_role else 0
if old_d_rid_in_acl == 0:
return
if old_d_rid_in_acl != old_department.acl_rid:
old_department.update(
acl_rid=old_d_rid_in_acl
)
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
payload = {
'app_id': 'acl',
'parent_id': d_acl_rid,
}
try:
acl.remove_user_from_role(employee_acl_rid, payload)
except Exception as e:
result.append(
f"remove_user_from_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
# add the employee to the new department
cls.add_employee_to_new_department(acl, employee_acl_rid, new_department_acl_rid, result)
payload = {
'app_id': 'acl',
'child_ids': [employee_acl_rid],
}
try:
acl.add_user_to_role(new_department_acl_rid, payload)
except Exception as e:
result.append(
f"add_user_to_role employee_acl_rid: {employee_acl_rid}, parent_id: {d_acl_rid}, err: {e}")
return result

View File

@ -4,7 +4,7 @@ import traceback
from datetime import datetime
import requests
from flask import abort, current_app
from flask import abort
from flask_login import current_user
from sqlalchemy import or_, literal_column, func, not_, and_
from werkzeug.datastructures import MultiDict
@ -16,7 +16,7 @@ from wtforms import validators
from api.extensions import db
from api.lib.common_setting.acl import ACLManager
from api.lib.common_setting.const import OperatorType
from api.lib.perm.acl.const import ACL_QUEUE
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.common_setting.resp_format import ErrFormat
from api.models.common_setting import Employee, Department
@ -141,7 +141,7 @@ class EmployeeCRUD(object):
def add(**kwargs):
try:
res = CreateEmployee().create_single(**kwargs)
refresh_employee_acl_info.apply_async(args=(res.employee_id,), queue=ACL_QUEUE)
refresh_employee_acl_info.apply_async(args=(), queue=CMDB_QUEUE)
return res
except Exception as e:
abort(400, str(e))
@ -171,7 +171,7 @@ class EmployeeCRUD(object):
if len(e_list) > 0:
edit_employee_department_in_acl.apply_async(
args=(e_list, new_department_id, current_user.uid),
queue=ACL_QUEUE
queue=CMDB_QUEUE
)
return existed
@ -478,7 +478,7 @@ class EmployeeCRUD(object):
Employee.deleted == 0,
Employee.block == block,
]
if isinstance(department_id, list):
if type(department_id) == list:
if len(department_id) == 0:
return []
else:
@ -577,6 +577,7 @@ class EmployeeCRUD(object):
@staticmethod
def import_employee(employee_list):
res = CreateEmployee().batch_create(employee_list)
refresh_employee_acl_info.apply_async(args=(), queue=CMDB_QUEUE)
return res
@staticmethod
@ -702,7 +703,6 @@ class EmployeeCRUD(object):
try:
last_login = datetime.strptime(last_login, '%Y-%m-%d %H:%M:%S')
except Exception as e:
current_app.logger.error(f"strptime {last_login} err: {e}")
last_login = datetime.now()
else:
last_login = datetime.now()
@ -713,7 +713,6 @@ class EmployeeCRUD(object):
)
return last_login
except Exception as e:
current_app.logger.error(f"update last_login err: {e}")
return
@ -789,11 +788,9 @@ class CreateEmployee(object):
if existed:
return existed
res = Employee.create(
return Employee.create(
**kwargs
)
refresh_employee_acl_info.apply_async(args=(res.employee_id,), queue=ACL_QUEUE)
return res
@staticmethod
def get_department_by_name(d_name):
@ -900,75 +897,3 @@ class EmployeeUpdateByUidForm(Form):
avatar = StringField(validators=[])
sex = StringField(validators=[])
mobile = StringField(validators=[])
class GrantEmployeeACLPerm(object):
"""
Grant ACL Permission After Create New Employee
"""
def __init__(self, acl=None):
self.perms_by_create_resources_type = ['read', 'grant', 'delete', 'update']
self.perms_by_common_grant = ['read']
self.resource_name_list = ['公司信息', '公司架构', '通知设置']
self.acl = acl if acl else self.check_app('backend')
self.resources_types = self.acl.get_all_resources_types()
self.resources_type = self.get_resources_type()
self.resource_list = self.acl.get_resource_by_type(None, None, self.resources_type['id'])
@staticmethod
def check_app(app_name):
acl = ACLManager(app_name)
payload = dict(
name=app_name,
description=app_name
)
app = acl.validate_app()
if not app:
acl.create_app(payload)
return acl
def get_resources_type(self):
results = list(filter(lambda t: t['name'] == '操作权限', self.resources_types['groups']))
if len(results) == 0:
payload = dict(
app_id=self.acl.app_name,
name='操作权限',
description='',
perms=self.perms_by_create_resources_type
)
resource_type = self.acl.create_resources_type(payload)
else:
resource_type = results[0]
resource_type_id = resource_type['id']
existed_perms = self.resources_types.get('id2perms', {}).get(resource_type_id, [])
existed_perms = [p['name'] for p in existed_perms]
new_perms = []
for perm in self.perms_by_create_resources_type:
if perm not in existed_perms:
new_perms.append(perm)
if len(new_perms) > 0:
resource_type['perms'] = existed_perms + new_perms
self.acl.update_resources_type(resource_type_id, resource_type)
return resource_type
def grant(self, rid_list):
[self.grant_by_rid(rid) for rid in rid_list if rid > 0]
def grant_by_rid(self, rid, is_admin=False):
for name in self.resource_name_list:
resource = list(filter(lambda r: r['name'] == name, self.resource_list))
if len(resource) == 0:
payload = dict(
type_id=self.resources_type['id'],
app_id=self.acl.app_name,
name=name,
)
resource = self.acl.create_resource(payload)
else:
resource = resource[0]
perms = self.perms_by_create_resources_type if is_admin else self.perms_by_common_grant
self.acl.grant_resource(rid, resource['id'], perms)
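A hedged sketch of driving GrantEmployeeACLPerm (the rids are illustrative):
helper = GrantEmployeeACLPerm()           # ensures the 'backend' app and the resource type exist
helper.grant([101, 102])                  # plain employees get the read-only grant
helper.grant_by_rid(103, is_admin=True)   # admins also receive grant/delete/update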

View File

@ -2,7 +2,7 @@ import requests
from api.lib.common_setting.const import BotNameMap
from api.lib.common_setting.resp_format import ErrFormat
from api.models.common_setting import NoticeConfig
from api.models.common_setting import CompanyInfo, NoticeConfig
from wtforms import Form
from wtforms import StringField
from wtforms import validators

View File

@ -80,5 +80,3 @@ class ErrFormat(CommonErrFormat):
ldap_test_username_required = _l("LDAP test username required") # LDAP测试用户名必填
company_wide = _l("Company wide") # 全公司
resource_no_permission = _l("No permission to access resource {}, perm {} ") # 没有权限访问 {} 资源的 {} 权限"

View File

@ -1,68 +0,0 @@
class OperationPermission(object):
def __init__(self, resource_perms):
for _r in resource_perms:
setattr(self, _r['page'], _r['page'])
for _p in _r['perms']:
setattr(self, _p, _p)
class BaseApp(object):
resource_type_name = 'OperationPermission'
all_resource_perms = []
def __init__(self):
self.admin_name = None
self.roles = []
self.app_name = 'acl'
self.require_create_resource_type = self.resource_type_name
self.extra_create_resource_type_list = []
self.op = None
@staticmethod
def format_role(role_name, role_type, acl_rid, resource_perms, description=''):
return dict(
role_name=role_name,
role_type=role_type,
acl_rid=acl_rid,
description=description,
resource_perms=resource_perms,
)
class CMDBApp(BaseApp):
all_resource_perms = [
{"page": "Big_Screen", "page_cn": "大屏", "perms": ["read"]},
{"page": "Dashboard", "page_cn": "仪表盘", "perms": ["read"]},
{"page": "Resource_Search", "page_cn": "资源搜索", "perms": ["read"]},
{"page": "Auto_Discovery_Pool", "page_cn": "自动发现池", "perms": ["read"]},
{"page": "My_Subscriptions", "page_cn": "我的订阅", "perms": ["read"]},
{"page": "Bulk_Import", "page_cn": "批量导入", "perms": ["read"]},
{"page": "Model_Configuration", "page_cn": "模型配置",
"perms": ["read", "create_CIType", "create_CIType_group", "update_CIType_group",
"delete_CIType_group", "download_CIType"]},
{"page": "Backend_Management", "page_cn": "后台管理", "perms": ["read"]},
{"page": "Customized_Dashboard", "page_cn": "定制仪表盘", "perms": ["read"]},
{"page": "Service_Tree_Definition", "page_cn": "服务树定义", "perms": ["read"]},
{"page": "Model_Relationships", "page_cn": "模型关系", "perms": ["read"]},
{"page": "Operation_Audit", "page_cn": "操作审计", "perms": ["read"]},
{"page": "Relationship_Types", "page_cn": "关系类型", "perms": ["read"]},
{"page": "Auto_Discovery", "page_cn": "自动发现",
"perms": ["read", "create_plugin", "update_plugin", "delete_plugin"]
},
{"page": "TopologyView", "page_cn": "拓扑视图",
"perms": ["read", "create_topology_group", "update_topology_group", "delete_topology_group",
"create_topology_view"],
},
{"page": "IPAM", "page_cn": "IPAM", "perms": ["read"]},
{"page": "DCIM", "page_cn": "数据中心", "perms": ["read"]},
]
def __init__(self):
super().__init__()
self.admin_name = 'cmdb_admin'
self.app_name = 'cmdb'
self.op = OperationPermission(self.all_resource_perms)
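A small sketch of reading the registry defined above; every page and permission name is exposed as an attribute whose value is its own name:
cmdb_app = CMDBApp()
cmdb_app.op.TopologyView            # -> "TopologyView"
cmdb_app.op.create_topology_view    # -> "create_topology_view"
pages = [p["page"] for p in CMDBApp.all_resource_perms]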

View File

@ -1,10 +1,5 @@
# -*- coding:utf-8 -*-
from datetime import datetime
from flask import current_app
from sqlalchemy import inspect, text
from sqlalchemy.dialects.mysql import ENUM
from api.extensions import db
def get_cur_time_str(split_flag='-'):
@ -28,115 +23,3 @@ class BaseEnum(object):
if not attr.startswith("_") and not callable(getattr(cls, attr))
}
return cls._ALL_
class CheckNewColumn(object):
def __init__(self):
self.engine = db.get_engine()
self.inspector = inspect(self.engine)
self.table_names = self.inspector.get_table_names()
@staticmethod
def get_model_by_table_name(_table_name):
registry = getattr(db.Model, 'registry', None)
class_registry = getattr(registry, '_class_registry', None)
for _model in class_registry.values():
if hasattr(_model, '__tablename__') and _model.__tablename__ == _table_name:
return _model
return None
def run(self):
for table_name in self.table_names:
self.check_by_table(table_name)
def check_by_table(self, table_name):
existed_columns = self.inspector.get_columns(table_name)
enum_columns = []
existed_column_name_list = []
for c in existed_columns:
if isinstance(c['type'], ENUM):
enum_columns.append(c['name'])
existed_column_name_list.append(c['name'])
model = self.get_model_by_table_name(table_name)
if model is None:
return
model_columns = getattr(getattr(getattr(model, '__table__'), 'columns'), '_all_columns')
for column in model_columns:
if column.name not in existed_column_name_list:
add_res = self.add_new_column(table_name, column)
if not add_res:
continue
current_app.logger.info(f"add new column [{column.name}] in table [{table_name}] success.")
if column.name in enum_columns:
enum_columns.remove(column.name)
self.add_new_index(table_name, column)
if len(enum_columns) > 0:
self.check_enum_column(enum_columns, existed_columns, model_columns, table_name)
def add_new_column(self, target_table_name, new_column):
try:
column_type = new_column.type.compile(self.engine.dialect)
default_value = new_column.default.arg if new_column.default else None
sql = "ALTER TABLE " + target_table_name + " ADD COLUMN " + f"`{new_column.name}`" + " " + column_type
if new_column.comment:
sql += f" comment '{new_column.comment}'"
if column_type == 'JSON':
pass
elif default_value:
if column_type.startswith('VAR') or column_type.startswith('Text'):
if default_value is None or len(default_value) == 0:
pass
else:
sql += f" DEFAULT {default_value}"
sql = text(sql)
db.session.execute(sql)
return True
except Exception as e:
err = f"add_new_column [{new_column.name}] to table [{target_table_name}] err: {e}"
current_app.logger.error(err)
return False
@staticmethod
def add_new_index(target_table_name, new_column):
try:
if new_column.index:
index_name = f"{target_table_name}_{new_column.name}"
sql = "CREATE INDEX " + f"{index_name}" + " ON " + target_table_name + " (" + new_column.name + ")"
db.session.execute(sql)
current_app.logger.info(f"add new index [{index_name}] in table [{target_table_name}] success.")
return True
except Exception as e:
err = f"add_new_index [{new_column.name}] to table [{target_table_name}] err: {e}"
current_app.logger.error(err)
return False
@staticmethod
def check_enum_column(enum_columns, existed_columns, model_columns, table_name):
for column_name in enum_columns:
try:
enum_column = list(filter(lambda x: x['name'] == column_name, existed_columns))[0]
old_enum_value = enum_column.get('type', {}).enums
target_column = list(filter(lambda x: x.name == column_name, model_columns))[0]
new_enum_value = target_column.type.enums
if set(old_enum_value) == set(new_enum_value):
continue
enum_values_str = ','.join(["'{}'".format(value) for value in new_enum_value])
sql = f"ALTER TABLE {table_name} MODIFY COLUMN" + f"`{column_name}`" + f" enum({enum_values_str})"
db.session.execute(sql)
current_app.logger.info(
f"modify column [{column_name}] ENUM: {new_enum_value} in table [{table_name}] success.")
except Exception as e:
current_app.logger.error(
f"modify column ENUM [{column_name}] in table [{table_name}] err: {e}")

View File

@ -1,7 +1,7 @@
# -*- coding:utf-8 -*-
from flask import current_app
from flask import abort
from sqlalchemy import func
from api.extensions import db
@ -13,41 +13,25 @@ class DBMixin(object):
cls = None
@classmethod
def search(cls, page, page_size, fl=None, only_query=False, reverse=False, count_query=False,
last_size=None, **kwargs):
def search(cls, page, page_size, fl=None, only_query=False, reverse=False, count_query=False, **kwargs):
page = get_page(page)
page_size = get_page_size(page_size)
if fl is None:
query = db.session.query(cls.cls)
query = db.session.query(cls.cls).filter(cls.cls.deleted.is_(False))
else:
query = db.session.query(*[getattr(cls.cls, i) for i in fl])
query = db.session.query(*[getattr(cls.cls, i) for i in fl]).filter(cls.cls.deleted.is_(False))
_query = None
if count_query:
_query = db.session.query(func.count(cls.cls.id))
if hasattr(cls.cls, 'deleted'):
query = query.filter(cls.cls.deleted.is_(False))
if _query:
_query = _query.filter(cls.cls.deleted.is_(False))
_query = db.session.query(func.count(cls.cls.id)).filter(cls.cls.deleted.is_(False))
for k in kwargs:
if hasattr(cls.cls, k):
if isinstance(kwargs[k], list):
query = query.filter(getattr(cls.cls, k).in_(kwargs[k]))
if count_query:
_query = _query.filter(getattr(cls.cls, k).in_(kwargs[k]))
else:
if "*" in str(kwargs[k]):
query = query.filter(getattr(cls.cls, k).ilike(kwargs[k].replace('*', '%')))
if count_query:
_query = _query.filter(getattr(cls.cls, k).ilike(kwargs[k].replace('*', '%')))
else:
query = query.filter(getattr(cls.cls, k) == kwargs[k])
if count_query:
_query = _query.filter(getattr(cls.cls, k) == kwargs[k])
query = query.filter(getattr(cls.cls, k) == kwargs[k])
if count_query:
_query = _query.filter(getattr(cls.cls, k) == kwargs[k])
if reverse in current_app.config.get('BOOL_TRUE'):
if reverse:
query = query.order_by(cls.cls.id.desc())
if only_query and not count_query:
@ -56,15 +40,14 @@ class DBMixin(object):
return _query, query
numfound = query.count()
if not last_size:
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
for i in query.offset((page - 1) * page_size).limit(page_size)]
else:
offset = numfound - last_size
if offset < 0:
offset = 0
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
for i in query.offset(offset).limit(last_size)]
return numfound, [i.to_dict() if fl is None else getattr(i, '_asdict')()
for i in query.offset((page - 1) * page_size).limit(page_size)]
def _must_be_required(self, _id):
existed = self.cls.get_by_id(_id)
existed or abort(404, "Factor [{}] does not exist".format(_id))
return existed
def _can_add(self, **kwargs):
raise NotImplementedError
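A hedged sketch of a concrete manager built on DBMixin; MyModel is a placeholder SQLAlchemy model with `id` and `deleted` columns:
class MyModelCRUD(DBMixin):
    cls = MyModel

    def _can_add(self, **kwargs):
        return kwargs

# "*" in a value becomes a SQL ILIKE pattern; reverse orders by id descending
numfound, rows = MyModelCRUD.search(page=1, page_size=20, name="web*", reverse=True)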

View File

@ -58,7 +58,7 @@ def _request_messenger(subject, body, tos, sender, payload):
def notify_send(subject, body, methods, tos, payload=None):
payload = payload or {}
payload = {k: '' if v is None else v for k, v in payload.items()}
payload = {k: v or '' for k, v in payload.items()}
subject = Template(subject).render(payload)
body = Template(body).render(payload)
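A quick sketch of the payload normalization above; note the behavioral difference between the two variants shown: `v or ''` also blanks falsy values such as 0, while the explicit None check keeps them (values are illustrative):
from jinja2 import Template

payload = {"host": "web-01", "owner": None, "count": 0}
payload = {k: '' if v is None else v for k, v in payload.items()}   # count stays 0
Template("{{ host }} has {{ count }} alerts {{ owner }}").render(payload)
# -> 'web-01 has 0 alerts '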

View File

@ -6,7 +6,7 @@ from functools import wraps
from flask import abort
from flask import request
from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.cache import AppCache, AppAccessTokenCache
from api.lib.perm.acl.resp_format import ErrFormat

View File

@ -148,16 +148,16 @@ class ACLManager(object):
if group:
PermissionCRUD.revoke(rid, permissions, group_id=group.id, rebuild=rebuild)
def del_resource(self, name, resource_type_name=None, rebuild=True):
def del_resource(self, name, resource_type_name=None):
resource = self._get_resource(name, resource_type_name)
if resource:
return ResourceCRUD.delete(resource.id, rebuild=rebuild)
return ResourceCRUD.delete(resource.id)
def has_permission(self, resource_name, resource_type, perm, resource_id=None, rid=None):
def has_permission(self, resource_name, resource_type, perm, resource_id=None):
if is_app_admin(self.app_id):
return True
role = self._get_role(current_user.username) if rid is None else RoleCache.get(rid)
role = self._get_role(current_user.username)
role or abort(404, ErrFormat.role_not_found.format(current_user.username))

View File

@ -376,7 +376,7 @@ class AuditCRUD(object):
origin=origin, current=current, extra=extra, source=source.value)
@classmethod
def add_login_log(cls, username, is_ok, description, _id=None, logout_at=None, ip=None, browser=None):
def add_login_log(cls, username, is_ok, description, _id=None, logout_at=None):
if _id is not None:
existed = AuditLoginLog.get_by_id(_id)
if existed is not None:
@ -387,10 +387,8 @@ class AuditCRUD(object):
is_ok=is_ok,
description=description,
logout_at=logout_at,
ip=(ip or request.headers.get('X-Forwarded-For') or
request.headers.get('X-Real-IP') or request.remote_addr or '').split(',')[0],
browser=browser or request.headers.get('User-Agent'),
channel=request.values.get('channel', 'web'),
ip=request.headers.get('X-Real-IP') or request.remote_addr,
browser=request.headers.get('User-Agent'),
)
if logout_at is None:

View File

@ -2,12 +2,10 @@
import msgpack
import redis_lock
from api.extensions import cache
from api.extensions import db
from api.extensions import rd
from api.lib.decorator import flush_db
from api.lib.utils import Lock
from api.models.acl import App
from api.models.acl import Permission
from api.models.acl import Resource
@ -138,14 +136,14 @@ class HasResourceRoleCache(object):
@classmethod
def add(cls, rid, app_id):
with redis_lock.Lock(rd.r, 'HasResourceRoleCache', expire=10):
with Lock('HasResourceRoleCache'):
c = cls.get(app_id)
c[rid] = 1
cache.set(cls.PREFIX_KEY.format(app_id), c, timeout=0)
@classmethod
def remove(cls, rid, app_id):
with redis_lock.Lock(rd.r, 'HasResourceRoleCache', expire=10):
with Lock('HasResourceRoleCache'):
c = cls.get(app_id)
c.pop(rid, None)
cache.set(cls.PREFIX_KEY.format(app_id), c, timeout=0)
@ -158,10 +156,9 @@ class RoleRelationCache(object):
PREFIX_RESOURCES2 = "RoleRelationResources2::id::{0}::AppId::{1}"
@classmethod
def get_parent_ids(cls, rid, app_id, force=False):
def get_parent_ids(cls, rid, app_id):
parent_ids = cache.get(cls.PREFIX_PARENT.format(rid, app_id))
if not parent_ids or force:
db.session.commit()
if not parent_ids:
from api.lib.perm.acl.role import RoleRelationCRUD
parent_ids = RoleRelationCRUD.get_parent_ids(rid, app_id)
cache.set(cls.PREFIX_PARENT.format(rid, app_id), parent_ids, timeout=0)
@ -169,10 +166,9 @@ class RoleRelationCache(object):
return parent_ids
@classmethod
def get_child_ids(cls, rid, app_id, force=False):
def get_child_ids(cls, rid, app_id):
child_ids = cache.get(cls.PREFIX_CHILDREN.format(rid, app_id))
if not child_ids or force:
db.session.commit()
if not child_ids:
from api.lib.perm.acl.role import RoleRelationCRUD
child_ids = RoleRelationCRUD.get_child_ids(rid, app_id)
cache.set(cls.PREFIX_CHILDREN.format(rid, app_id), child_ids, timeout=0)
@ -180,16 +176,14 @@ class RoleRelationCache(object):
return child_ids
@classmethod
def get_resources(cls, rid, app_id, force=False):
def get_resources(cls, rid, app_id):
"""
:param rid:
:param app_id:
:param force:
:return: {id2perms: {resource_id: [perm,]}, group2perms: {group_id: [perm, ]}}
"""
resources = cache.get(cls.PREFIX_RESOURCES.format(rid, app_id))
if not resources or force:
db.session.commit()
if not resources:
from api.lib.perm.acl.role import RoleCRUD
resources = RoleCRUD.get_resources(rid, app_id)
if resources['id2perms'] or resources['group2perms']:
@ -198,10 +192,9 @@ class RoleRelationCache(object):
return resources or {}
@classmethod
def get_resources2(cls, rid, app_id, force=False):
def get_resources2(cls, rid, app_id):
r_g = cache.get(cls.PREFIX_RESOURCES2.format(rid, app_id))
if not r_g or force:
db.session.commit()
if not r_g:
res = cls.get_resources(rid, app_id)
id2perms = res['id2perms']
group2perms = res['group2perms']
@ -230,28 +223,22 @@ class RoleRelationCache(object):
@classmethod
@flush_db
def rebuild(cls, rid, app_id):
if app_id is None:
app_ids = [None] + [i.id for i in App.get_by(to_dict=False)]
cls.clean(rid, app_id)
cls.get_parent_ids(rid, app_id)
cls.get_child_ids(rid, app_id)
resources = cls.get_resources(rid, app_id)
if resources.get('id2perms') or resources.get('group2perms'):
HasResourceRoleCache.add(rid, app_id)
else:
app_ids = [app_id]
for _app_id in app_ids:
cls.clean(rid, _app_id)
cls.get_parent_ids(rid, _app_id, force=True)
cls.get_child_ids(rid, _app_id, force=True)
resources = cls.get_resources(rid, _app_id, force=True)
if resources.get('id2perms') or resources.get('group2perms'):
HasResourceRoleCache.add(rid, _app_id)
else:
HasResourceRoleCache.remove(rid, _app_id)
cls.get_resources2(rid, _app_id, force=True)
HasResourceRoleCache.remove(rid, app_id)
cls.get_resources2(rid, app_id)
@classmethod
@flush_db
def rebuild2(cls, rid, app_id):
cache.delete(cls.PREFIX_RESOURCES2.format(rid, app_id))
cls.get_resources2(rid, app_id, force=True)
cls.get_resources2(rid, app_id)
@classmethod
def clean(cls, rid, app_id):

View File

@ -71,7 +71,7 @@ class PermissionCRUD(object):
@classmethod
def get_all2(cls, resource_name, resource_type_name, app_id):
rt = ResourceType.get_by(name=resource_type_name, app_id=app_id, first=True, to_dict=False)
rt = ResourceType.get_by(name=resource_type_name, first=True, to_dict=False)
rt or abort(404, ErrFormat.resource_type_not_found.format(resource_type_name))
r = Resource.get_by(name=resource_name, resource_type_id=rt.id, app_id=app_id, first=True, to_dict=False)
@ -79,8 +79,7 @@ class PermissionCRUD(object):
return r and cls.get_all(r.id)
@staticmethod
def grant(rid, perms, resource_id=None, group_id=None, rebuild=True,
source=AuditOperateSource.acl, force_update=False):
def grant(rid, perms, resource_id=None, group_id=None, rebuild=True, source=AuditOperateSource.acl):
app_id = None
rt_id = None
@ -107,23 +106,8 @@ class PermissionCRUD(object):
if not perms:
perms = [i.get('name') for i in ResourceTypeCRUD.get_perms(group.resource_type_id)]
if force_update:
revoke_role_permissions = []
existed_perms = RolePermission.get_by(rid=rid,
app_id=app_id,
group_id=group_id,
resource_id=resource_id,
to_dict=False)
for role_perm in existed_perms:
perm = PermissionCache.get(role_perm.perm_id, rt_id)
if perm and perm.name not in perms:
role_perm.soft_delete()
revoke_role_permissions.append(role_perm)
AuditCRUD.add_permission_log(app_id, AuditOperateType.revoke, rid, rt_id,
revoke_role_permissions, source=source)
_role_permissions = []
for _perm in set(perms):
perm = PermissionCache.get(_perm, rt_id)
if not perm:
@ -290,14 +274,12 @@ class PermissionCRUD(object):
perm2resource.setdefault(_perm, []).append(resource_id)
for _perm in perm2resource:
perm = PermissionCache.get(_perm, resource_type_id)
if perm is None:
continue
exists = RolePermission.get_by(rid=rid,
app_id=app_id,
perm_id=perm.id,
__func_in___key_resource_id=perm2resource[_perm],
to_dict=False)
for existed in exists:
existeds = RolePermission.get_by(rid=rid,
app_id=app_id,
perm_id=perm.id,
__func_in___key_resource_id=perm2resource[_perm],
to_dict=False)
for existed in existeds:
existed.deleted = True
existed.deleted_at = datetime.datetime.now()
db.session.add(existed)

View File

@ -2,6 +2,7 @@
from flask import abort
from flask import current_app
from api.extensions import db
from api.lib.perm.acl.audit import AuditCRUD
@ -126,18 +127,11 @@ class ResourceTypeCRUD(object):
existed_ids = [i.id for i in existed]
current_ids = []
rebuild_rids = set()
for i in existed:
if i.name not in perms:
i.soft_delete(commit=False)
for rp in RolePermission.get_by(perm_id=i.id, to_dict=False):
rp.soft_delete(commit=False)
rebuild_rids.add((rp.app_id, rp.rid))
i.soft_delete()
else:
current_ids.append(i.id)
db.session.commit()
for _app_id, _rid in rebuild_rids:
role_rebuild.apply_async(args=(_rid, _app_id), queue=ACL_QUEUE)
for i in perms:
if i not in existed_names:
@ -315,12 +309,9 @@ class ResourceCRUD(object):
return resource
@staticmethod
def delete(_id, rebuild=True, app_id=None):
def delete(_id):
resource = Resource.get_by_id(_id) or abort(404, ErrFormat.resource_not_found.format("id={}".format(_id)))
if app_id is not None and resource.app_id != app_id:
return abort(404, ErrFormat.resource_not_found.format("id={}".format(_id)))
origin = resource.to_dict()
resource.soft_delete()
@ -331,9 +322,8 @@ class ResourceCRUD(object):
i.soft_delete()
rebuilds.append((i.rid, i.app_id))
if rebuild:
for rid, app_id in set(rebuilds):
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
for rid, app_id in set(rebuilds):
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
AuditCRUD.add_resource_log(resource.app_id, AuditOperateType.delete,
AuditScope.resource, resource.id, origin, {}, {})

View File

@ -1,13 +1,14 @@
# -*- coding:utf-8 -*-
import redis_lock
import time
import six
from flask import abort
from flask import current_app
from sqlalchemy import or_
from api.extensions import db
from api.extensions import rd
from api.lib.perm.acl.app import AppCRUD
from api.lib.perm.acl.audit import AuditCRUD, AuditOperateType, AuditScope
from api.lib.perm.acl.cache import AppCache
@ -61,9 +62,7 @@ class RoleRelationCRUD(object):
id2parents = {}
for i in res:
parent = RoleCache.get(i.parent_id)
if parent:
id2parents.setdefault(rid2uid.get(i.child_id, i.child_id), []).append(parent.to_dict())
id2parents.setdefault(rid2uid.get(i.child_id, i.child_id), []).append(RoleCache.get(i.parent_id).to_dict())
return id2parents
@ -142,27 +141,24 @@ class RoleRelationCRUD(object):
@classmethod
def add(cls, role, parent_id, child_ids, app_id):
with redis_lock.Lock(rd.r, "ROLE_RELATION_ADD", expire=10):
db.session.commit()
result = []
for child_id in child_ids:
existed = RoleRelation.get_by(parent_id=parent_id, child_id=child_id, app_id=app_id)
if existed:
continue
result = []
for child_id in child_ids:
existed = RoleRelation.get_by(parent_id=parent_id, child_id=child_id, app_id=app_id)
if existed:
continue
RoleRelationCache.clean(parent_id, app_id)
RoleRelationCache.clean(child_id, app_id)
if parent_id in cls.recursive_child_ids(child_id, app_id):
return abort(400, ErrFormat.inheritance_dead_loop)
if parent_id in cls.recursive_child_ids(child_id, app_id):
return abort(400, ErrFormat.inheritance_dead_loop)
result.append(RoleRelation.create(parent_id=parent_id, child_id=child_id, app_id=app_id).to_dict())
if app_id is None:
for app in AppCRUD.get_all():
if app.name != "acl":
RoleRelationCache.clean(child_id, app.id)
RoleRelationCache.clean(parent_id, app_id)
RoleRelationCache.clean(child_id, app_id)
if app_id is None:
for app in AppCRUD.get_all():
if app.name != "acl":
RoleRelationCache.clean(child_id, app.id)
result.append(RoleRelation.create(parent_id=parent_id, child_id=child_id, app_id=app_id).to_dict())
AuditCRUD.add_role_log(app_id, AuditOperateType.role_relation_add,
AuditScope.role_relation, role.id, {}, {},
@ -376,16 +372,16 @@ class RoleCRUD(object):
resource_type_id = resource_type and resource_type.id
result = dict(resources=dict(), groups=dict())
# s = time.time()
s = time.time()
parent_ids = RoleRelationCRUD.recursive_parent_ids(rid, app_id)
# current_app.logger.info('parent ids {0}: {1}'.format(parent_ids, time.time() - s))
current_app.logger.info('parent ids {0}: {1}'.format(parent_ids, time.time() - s))
for parent_id in parent_ids:
_resources, _groups = cls._extend_resources(parent_id, resource_type_id, app_id)
# current_app.logger.info('middle1: {0}'.format(time.time() - s))
current_app.logger.info('middle1: {0}'.format(time.time() - s))
_merge(result['resources'], _resources)
# current_app.logger.info('middle2: {0}'.format(time.time() - s))
# current_app.logger.info(len(_groups))
current_app.logger.info('middle2: {0}'.format(time.time() - s))
current_app.logger.info(len(_groups))
if not group_flat:
_merge(result['groups'], _groups)
else:
@ -396,7 +392,7 @@ class RoleCRUD(object):
item.setdefault('permissions', [])
item['permissions'] = list(set(item['permissions'] + _groups[rg_id]['permissions']))
result['resources'][item['id']] = item
# current_app.logger.info('End: {0}'.format(time.time() - s))
current_app.logger.info('End: {0}'.format(time.time() - s))
result['resources'] = list(result['resources'].values())
result['groups'] = list(result['groups'].values())

View File

@ -51,12 +51,12 @@ def _auth_with_key():
user, authenticated = User.query.authenticate_with_key(key, secret, req_args, path)
if user and authenticated:
login_user(user)
# reset_session(user)
reset_session(user)
return True
role, authenticated = Role.query.authenticate_with_key(key, secret, req_args, path)
if role and authenticated:
# reset_session(None, role=role.name)
reset_session(None, role=role.name)
return True
return False

View File

@ -194,7 +194,7 @@ def validate(ticket):
def _parse_tag(string, tag):
"""
Used for parsing xml. Search string for the first occurrence of
Used for parsing xml. Search string for the first occurence of
<tag>.....</tag> and return text (stripped of leading and tailing
whitespace) between tags. Return "" if tag not found.
"""

View File

@ -29,6 +29,6 @@ class CommonErrFormat(object):
role_required = _l("Role {} can only operate!") # 角色 {} 才能操作!
user_not_found = _l("User {} does not exist") # 用户 {} 不存在
no_permission = _l("For resource: {}, you do not have {} permission!") # 您没有资源: {} 的{}权限!
no_permission = _l("You do not have {} permission for resource: {}!") # 您没有资源: {} 的{}权限!
no_permission2 = _l("You do not have permission to operate!") # 您没有操作权限!
no_permission_only_owner = _l("Only the creator or administrator has permission!") # 只有创建人或者管理员才有权限!

View File

@ -1,15 +1,19 @@
import json
import os
import secrets
import sys
import threading
from base64 import b64decode, b64encode
from Cryptodome.Protocol.SecretSharing import Shamir
from colorama import Back, Fore, Style, init as colorama_init
from colorama import Back
from colorama import Fore
from colorama import Style
from colorama import init as colorama_init
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers import algorithms
from cryptography.hazmat.primitives.ciphers import modes
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from flask import current_app
@ -23,17 +27,11 @@ backend_encrypt_key_name = "encrypt_key"
backend_root_key_salt_name = "root_key_salt"
backend_encrypt_key_salt_name = "encrypt_key_salt"
backend_seal_key = "seal_status"
success = "success"
seal_status = True
secrets_encrypt_key = ""
secrets_root_key = ""
def string_to_bytes(value):
if not value:
return ""
if isinstance(value, bytes):
return value
if sys.version_info.major == 2:
@ -46,8 +44,6 @@ def string_to_bytes(value):
class Backend:
def __init__(self, backend=None):
self.backend = backend
# cache is a redis object
self.cache = backend.cache
def get(self, key):
return self.backend.get(key)
@ -58,33 +54,23 @@ class Backend:
def update(self, key, value):
return self.backend.update(key, value)
def get_shares(self, key):
return self.backend.get_shares(key)
def set_shares(self, key, value):
return self.backend.set_shares(key, value)
class KeyManage:
def __init__(self, trigger=None, backend=None):
self.trigger = trigger
self.backend = backend
self.share_key = "cmdb::secret::secrets_share"
if backend:
self.backend = Backend(backend)
def init_app(self, app, backend=None):
if (sys.argv[0].endswith("gunicorn") or
(len(sys.argv) > 1 and sys.argv[1] in ("run", "cmdb-password-data-migrate"))):
self.backend = backend
threading.Thread(target=self.watch_root_key, args=(app,), daemon=True).start()
self.trigger = app.config.get("INNER_TRIGGER_TOKEN")
if not self.trigger:
return
self.backend = backend
resp = self.auto_unseal()
self.print_response(resp)
@ -138,8 +124,6 @@ class KeyManage:
return new_shares
def is_valid_root_key(self, root_key):
if not root_key:
return False
root_key_hash, ok = self.hash_root_key(root_key)
if not ok:
return root_key_hash, ok
@ -151,42 +135,35 @@ class KeyManage:
else:
return "", True
def auth_root_secret(self, root_key, app):
with app.app_context():
msg, ok = self.is_valid_root_key(root_key)
if not ok:
return {
"message": msg,
"status": "failed"
}
def auth_root_secret(self, root_key):
msg, ok = self.is_valid_root_key(root_key)
if not ok:
return {
"message": msg,
"status": "failed"
}
encrypt_key_aes = self.backend.get(backend_encrypt_key_name)
if not encrypt_key_aes:
return {
"message": "encrypt key is empty",
"status": "failed"
}
encrypt_key_aes = self.backend.get(backend_encrypt_key_name)
if not encrypt_key_aes:
return {
"message": "encrypt key is empty",
"status": "failed"
}
secret_encrypt_key, ok = InnerCrypt.aes_decrypt(string_to_bytes(root_key), encrypt_key_aes)
secrets_encrypt_key, ok = InnerCrypt.aes_decrypt(string_to_bytes(root_key), encrypt_key_aes)
if ok:
msg, ok = self.backend.update(backend_seal_key, "open")
if ok:
msg, ok = self.backend.update(backend_seal_key, "open")
if ok:
global secrets_encrypt_key, secrets_root_key
secrets_encrypt_key = secret_encrypt_key
secrets_root_key = root_key
self.backend.cache.set(self.share_key, json.dumps([]))
return {"message": success, "status": success}
return {"message": msg, "status": "failed"}
else:
return {
"message": secret_encrypt_key,
"status": "failed"
}
def parse_shares(self, shares, app):
if len(shares) >= global_key_threshold:
recovered_secret = Shamir.combine(shares[:global_key_threshold], False)
return self.auth_root_secret(b64encode(recovered_secret), app)
current_app.config["secrets_encrypt_key"] = secrets_encrypt_key
current_app.config["secrets_root_key"] = root_key
current_app.config["secrets_shares"] = []
return {"message": success, "status": success}
return {"message": msg, "status": "failed"}
else:
return {
"message": secrets_encrypt_key,
"status": "failed"
}
def unseal(self, key):
if not self.is_seal():
@ -198,12 +175,14 @@ class KeyManage:
try:
t = [i for i in b64decode(key)]
v = (int("".join([chr(i) for i in t[-2:]])), bytes(t[:-2]))
shares = self.backend.get_shares(self.share_key)
shares = current_app.config.get("secrets_shares", [])
if v not in shares:
shares.append(v)
self.set_shares(shares)
current_app.config["secrets_shares"] = shares
if len(shares) >= global_key_threshold:
return self.parse_shares(shares, current_app)
recovered_secret = Shamir.combine(shares[:global_key_threshold], False)
return self.auth_root_secret(b64encode(recovered_secret))
else:
return {
"message": "waiting for inputting other unseal key {0}/{1}".format(len(shares),
@ -263,11 +242,8 @@ class KeyManage:
msg, ok = self.backend.add(backend_seal_key, "open")
if not ok:
return {"message": msg, "status": "failed"}, False
global secrets_encrypt_key, secrets_root_key
secrets_encrypt_key = encrypt_key
secrets_root_key = root_key
current_app.config["secrets_root_key"] = root_key
current_app.config["secrets_encrypt_key"] = encrypt_key
self.print_token(shares, root_token=root_key)
return {"message": "OK",
@ -290,7 +266,7 @@ class KeyManage:
}
# TODO
elif len(self.trigger.strip()) == 24:
res = self.auth_root_secret(self.trigger.encode(), current_app)
res = self.auth_root_secret(self.trigger.encode())
if res.get("status") == success:
return {
"message": success,
@ -322,31 +298,22 @@ class KeyManage:
"message": msg,
"status": "failed",
}
self.clear()
self.backend.cache.publish(self.share_key, "clear")
current_app.config["secrets_root_key"] = ''
current_app.config["secrets_encrypt_key"] = ''
return {
"message": success,
"status": success
}
@staticmethod
def clear():
global secrets_encrypt_key, secrets_root_key
secrets_encrypt_key = ''
secrets_root_key = ''
def is_seal(self):
"""
If there is no initialization or the root key is inconsistent, it is considered to be in a sealed state..
If there is no initialization or the root key is inconsistent, it is considered to be in a sealed state.
:return:
"""
# secrets_root_key = current_app.config.get("secrets_root_key")
if not secrets_root_key:
return True
secrets_root_key = current_app.config.get("secrets_root_key")
msg, ok = self.is_valid_root_key(secrets_root_key)
if not ok:
return True
return True
status = self.backend.get(backend_seal_key)
return status == "block"
@ -382,53 +349,22 @@ class KeyManage:
}
print(status_colors.get(status, Fore.GREEN), message, Style.RESET_ALL)
def set_shares(self, values):
new_value = list()
for v in values:
new_value.append((v[0], b64encode(v[1]).decode("utf-8")))
self.backend.cache.publish(self.share_key, json.dumps(new_value))
self.backend.cache.set(self.share_key, json.dumps(new_value))
def watch_root_key(self, app):
pubsub = self.backend.cache.pubsub()
pubsub.subscribe(self.share_key)
new_value = set()
for message in pubsub.listen():
if message["type"] == "message":
if message["data"] == b"clear":
self.clear()
continue
try:
value = json.loads(message["data"].decode("utf-8"))
for v in value:
new_value.add((v[0], b64decode(v[1])))
except Exception as e:
return []
if len(new_value) >= global_key_threshold:
self.parse_shares(list(new_value), app)
new_value = set()
class InnerCrypt:
def __init__(self):
self.encrypt_key = b64decode(secrets_encrypt_key)
# self.encrypt_key = b64decode(secrets_encrypt_key, "".encode("utf-8"))
secrets_encrypt_key = current_app.config.get("secrets_encrypt_key", "")
self.encrypt_key = b64decode(secrets_encrypt_key.encode("utf-8"))
def encrypt(self, plaintext):
"""
the encrypt method currently supports AES only
"""
if not self.encrypt_key:
return ValueError("secret is disabled, please seal firstly"), False
return self.aes_encrypt(self.encrypt_key, plaintext)
def decrypt(self, ciphertext):
"""
the decrypt method currently supports AES only
"""
if not self.encrypt_key:
return ValueError("secret is disabled, please seal firstly"), False
return self.aes_decrypt(self.encrypt_key, ciphertext)
@classmethod
@ -445,7 +381,6 @@ class InnerCrypt:
return b64encode(iv + ciphertext).decode("utf-8"), True
except Exception as e:
return str(e), False
@classmethod
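Note: the seal/unseal flow in KeyManage above rests on Shamir secret sharing — the root key is split into shares and recombined once global_key_threshold of them have been entered. A minimal sketch of that round trip using pycryptodome's Shamir (assumed to be the implementation behind the combine call above; the threshold and share count are illustrative):

# Minimal sketch (not the project's exact flow) of the Shamir split/combine
# round trip behind seal/unseal; threshold/total are assumed values.
from base64 import b64encode

from Crypto.Protocol.SecretSharing import Shamir
from Crypto.Random import get_random_bytes

threshold, total = 3, 5
root_secret = get_random_bytes(16)                     # Shamir here operates on 16-byte secrets

shares = Shamir.split(threshold, total, root_secret)   # [(index, share_bytes), ...]
recovered = Shamir.combine(shares[:threshold])         # any `threshold` shares recover it
assert recovered == root_secret

print(b64encode(recovered).decode())                   # auth_root_secret() above expects base64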

View File

@ -1,13 +1,8 @@
import base64
import json
from api.models.cmdb import InnerKV
from api.extensions import rd
class InnerKVManger(object):
def __init__(self):
self.cache = rd.r
pass
@classmethod
@ -38,26 +33,3 @@ class InnerKVManger(object):
return "success", True
return "update failed", True
@classmethod
def get_shares(cls, key):
new_value = list()
v = rd.get_str(key)
if not v:
return new_value
try:
value = json.loads(v.decode("utf-8"))
for v in value:
new_value.append((v[0], base64.b64decode(v[1])))
except Exception as e:
return []
return new_value
@classmethod
def set_shares(cls, key, value):
new_value = list()
for v in value:
new_value.append((v[0], base64.b64encode(v[1]).decode("utf-8")))
rd.set_str(key, json.dumps(new_value))
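Note: get_shares/set_shares above persist the unseal shares in Redis as JSON, base64-encoding the raw share bytes on write and decoding them on read. A small round-trip sketch of that serialization (the share values are made up):

# Round-trip sketch of the (index, bytes) share serialization used above.
import base64
import json

shares = [(1, b"\x01" * 16), (2, b"\x02" * 16)]                       # made-up shares
encoded = json.dumps([(i, base64.b64encode(s).decode("utf-8")) for i, s in shares])
decoded = [(i, base64.b64decode(s)) for i, s in json.loads(encoded)]
assert decoded == shares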

View File

@ -1,6 +1,9 @@
# -*- coding:utf-8 -*-
import base64
import sys
import time
from typing import Set
import elasticsearch
import redis
@ -116,23 +119,6 @@ class RedisHandler(object):
except Exception as e:
current_app.logger.error("delete redis key error, {0}".format(str(e)))
def set_str(self, key, value, expired=None):
try:
if expired:
self.r.setex(key, expired, value)
else:
self.r.set(key, value)
except Exception as e:
current_app.logger.error("set redis error, {0}".format(str(e)))
def get_str(self, key):
try:
value = self.r.get(key)
except Exception as e:
current_app.logger.error("get redis error, {0}".format(str(e)))
return
return value
class ESHandler(object):
def __init__(self, flask_app=None):
@ -227,6 +213,52 @@ class ESHandler(object):
return 0, [], {}
class Lock(object):
def __init__(self, name, timeout=10, app=None, need_lock=True):
self.lock_key = name
self.need_lock = need_lock
self.timeout = timeout
if not app:
app = current_app
self.app = app
try:
self.redis = redis.Redis(host=self.app.config.get('CACHE_REDIS_HOST'),
port=self.app.config.get('CACHE_REDIS_PORT'),
password=self.app.config.get('CACHE_REDIS_PASSWORD'))
except:
self.app.logger.error("cannot connect redis")
raise Exception("cannot connect redis")
def lock(self, timeout=None):
if not timeout:
timeout = self.timeout
retry = 0
while retry < 100:
timestamp = time.time() + timeout + 1
_lock = self.redis.setnx(self.lock_key, timestamp)
if _lock == 1 or (
time.time() > float(self.redis.get(self.lock_key) or sys.maxsize) and
time.time() > float(self.redis.getset(self.lock_key, timestamp) or sys.maxsize)):
break
else:
retry += 1
time.sleep(0.6)
if retry >= 100:
raise Exception("get lock failed...")
def release(self):
if time.time() < float(self.redis.get(self.lock_key)):
self.redis.delete(self.lock_key)
def __enter__(self):
if self.need_lock:
self.lock()
def __exit__(self, exc_type, exc_val, exc_tb):
if self.need_lock:
self.release()
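Note: the Lock class above is a simple Redis spin lock — SETNX stores an expiry timestamp as the value, and GETSET lets a waiter take over a lock whose holder has expired. An illustrative usage sketch (the resource name and function are made up; the import path matches the one used by the cmdb tasks later in this diff):

# Illustrative usage of the Lock context manager defined above; the lock spins
# until acquired and is released on exit if it has not already expired.
from api.lib.utils import Lock  # same import path the cmdb tasks in this diff use

def update_relation_cache(parent_id):
    with Lock("CIRelation_{}".format(parent_id), timeout=10):
        pass  # critical section: read-modify-write of the cached relation data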
class AESCrypto(object):
BLOCK_SIZE = 16 # Bytes
pad = lambda s: s + ((AESCrypto.BLOCK_SIZE - len(s) % AESCrypto.BLOCK_SIZE) *

View File

@ -88,11 +88,11 @@ def webhook_request(webhook, payload):
params = webhook.get('parameters') or None
if isinstance(params, dict):
params = json.loads(Template(json.dumps(params)).render(payload).encode('utf-8'))
params = json.loads(Template(json.dumps(params)).render(payload))
headers = json.loads(Template(json.dumps(webhook.get('headers') or {})).render(payload))
data = Template(json.dumps(webhook.get('body', ''))).render(payload).encode('utf-8')
data = Template(json.dumps(webhook.get('body', ''))).render(payload)
auth = _wrap_auth(**webhook.get('authorization', {}))
if (webhook.get('authorization', {}).get("type") or '').lower() == 'oauth2.0':
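Note: webhook_request above renders parameters, headers and body as templates against the CI payload before issuing the HTTP call. A small sketch of that JSON-dump, render, JSON-load pattern (assuming Template here is jinja2's; the webhook dict and payload are made up):

# Sketch of the templating pattern used above with invented data.
import json

from jinja2 import Template

webhook = {"parameters": {"host": "{{hostname}}"},
           "body": {"msg": "{{hostname}} changed"}}
payload = {"hostname": "web-01"}

params = json.loads(Template(json.dumps(webhook["parameters"])).render(payload))
data = Template(json.dumps(webhook["body"])).render(payload)
assert params == {"host": "web-01"}
assert json.loads(data) == {"msg": "web-01 changed"}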

View File

@ -105,8 +105,8 @@ class User(CRUDModel, SoftDeleteMixin):
_password = db.Column("password", db.String(80))
key = db.Column(db.String(32), nullable=False)
secret = db.Column(db.String(32), nullable=False)
date_joined = db.Column(db.DateTime, default=datetime.now)
last_login = db.Column(db.DateTime, default=datetime.now)
date_joined = db.Column(db.DateTime, default=datetime.utcnow)
last_login = db.Column(db.DateTime, default=datetime.utcnow)
block = db.Column(db.Boolean, default=False)
has_logined = db.Column(db.Boolean, default=False)
wx_id = db.Column(db.String(32))
@ -356,7 +356,7 @@ class AuditLoginLog(Model2):
__tablename__ = "acl_audit_login_logs"
username = db.Column(db.String(64), index=True)
channel = db.Column(db.Enum('web', 'api', 'ssh'), default="web")
channel = db.Column(db.Enum('web', 'api'), default="web")
ip = db.Column(db.String(15))
browser = db.Column(db.String(256))
description = db.Column(db.String(128))
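Note: the date_joined/last_login hunk above switches the column default between datetime.now and datetime.utcnow; in both cases the callable itself is passed, so SQLAlchemy evaluates it per INSERT. A short illustrative sketch of that distinction (the model is hypothetical):

# Hypothetical model showing the difference between passing the callable and
# calling it: the former yields a fresh timestamp per row, the latter freezes
# one value at import time for every row.
from datetime import datetime

from api.extensions import db  # same db object as the models above

class ExampleLog(db.Model):
    __tablename__ = "example_logs"
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow)      # evaluated per INSERT
    # created_at = db.Column(db.DateTime, default=datetime.utcnow())  # frozen at import time (avoid)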

View File

@ -2,6 +2,7 @@
import datetime
from sqlalchemy.dialects.mysql import DOUBLE
from api.extensions import db
@ -10,7 +11,6 @@ from api.lib.cmdb.const import CIStatusEnum
from api.lib.cmdb.const import CITypeOperateType
from api.lib.cmdb.const import ConstraintEnum
from api.lib.cmdb.const import OperateType
from api.lib.cmdb.const import RelationSourceEnum
from api.lib.cmdb.const import ValueTypeEnum
from api.lib.database import Model
from api.lib.database import Model2
@ -46,31 +46,17 @@ class CIType(Model):
name = db.Column(db.String(32), nullable=False)
alias = db.Column(db.String(32), nullable=False)
unique_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"), nullable=False)
show_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
enabled = db.Column(db.Boolean, default=True, nullable=False)
is_attached = db.Column(db.Boolean, default=False, nullable=False)
icon = db.Column(db.Text)
order = db.Column(db.SmallInteger, default=0, nullable=False)
default_order_attr = db.Column(db.String(33))
unique_key = db.relationship("Attribute", backref="c_ci_types.unique_id",
primaryjoin="Attribute.id==CIType.unique_id", foreign_keys=[unique_id])
show_key = db.relationship("Attribute", backref="c_ci_types.show_id",
primaryjoin="Attribute.id==CIType.show_id", foreign_keys=[show_id])
unique_key = db.relationship("Attribute", backref="c_ci_types.unique_id")
uid = db.Column(db.Integer, index=True)
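Note: one side of the CIType hunk above declares two relationships (unique_key, show_key) against the same Attribute table, which is why it spells out primaryjoin and foreign_keys. A minimal sketch of why that disambiguation is needed (class and table names are made up):

# Made-up pair of models with two foreign keys to the same target table; without
# explicit primaryjoin/foreign_keys SQLAlchemy cannot decide which FK backs which
# relationship and raises AmbiguousForeignKeysError.
from api.extensions import db  # same db object as above

class AttrDemo(db.Model):
    __tablename__ = "attrs_demo"
    id = db.Column(db.Integer, primary_key=True)

class TypeDemo(db.Model):
    __tablename__ = "types_demo"
    id = db.Column(db.Integer, primary_key=True)
    unique_id = db.Column(db.Integer, db.ForeignKey("attrs_demo.id"), nullable=False)
    show_id = db.Column(db.Integer, db.ForeignKey("attrs_demo.id"))

    unique_key = db.relationship("AttrDemo", primaryjoin="AttrDemo.id==TypeDemo.unique_id",
                                 foreign_keys=[unique_id])
    show_key = db.relationship("AttrDemo", primaryjoin="AttrDemo.id==TypeDemo.show_id",
                               foreign_keys=[show_id])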
class CITypeInheritance(Model):
__tablename__ = "c_ci_type_inheritance"
parent_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
child_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
parent = db.relationship("CIType", primaryjoin="CIType.id==CITypeInheritance.parent_id")
child = db.relationship("CIType", primaryjoin="CIType.id==CITypeInheritance.child_id")
class CITypeRelation(Model):
__tablename__ = "c_ci_type_relations"
@ -79,12 +65,6 @@ class CITypeRelation(Model):
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
constraint = db.Column(db.Enum(*ConstraintEnum.all()), default=ConstraintEnum.One2Many)
parent_attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id")) # CMDB > 2.4.5: deprecated
child_attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id")) # CMDB > 2.4.5: deprecated
parent_attr_ids = db.Column(db.JSON) # [parent_attr_id, ]
child_attr_ids = db.Column(db.JSON) # [child_attr_id, ]
parent = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.parent_id")
child = db.relationship("CIType", primaryjoin="CIType.id==CITypeRelation.child_id")
relation_type = db.relationship("RelationType", backref="c_ci_type_relations.relation_type_id")
@ -104,11 +84,6 @@ class Attribute(Model):
is_link = db.Column(db.Boolean, default=False)
is_password = db.Column(db.Boolean, default=False)
is_sortable = db.Column(db.Boolean, default=False)
is_dynamic = db.Column(db.Boolean, default=False)
is_bool = db.Column(db.Boolean, default=False)
is_reference = db.Column(db.Boolean, default=False)
reference_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
default = db.Column(db.JSON) # {"default": None}
@ -119,8 +94,6 @@ class Attribute(Model):
_choice_web_hook = db.Column('choice_web_hook', db.JSON)
choice_other = db.Column(db.JSON)
re_check = db.Column(db.Text)
uid = db.Column(db.Integer, index=True)
option = db.Column(db.JSON)
@ -217,26 +190,6 @@ class CITriggerHistory(Model):
webhook = db.Column(db.Text)
class TopologyViewGroup(Model):
__tablename__ = 'c_topology_view_groups'
name = db.Column(db.String(64), index=True)
order = db.Column(db.Integer, default=0)
class TopologyView(Model):
__tablename__ = 'c_topology_views'
name = db.Column(db.String(64), index=True)
group_id = db.Column(db.Integer, db.ForeignKey('c_topology_view_groups.id'))
category = db.Column(db.String(32))
central_node_type = db.Column(db.Integer)
central_node_instances = db.Column(db.Text)
path = db.Column(db.JSON)
order = db.Column(db.Integer, default=0)
option = db.Column(db.JSON)
class CITypeUniqueConstraint(Model):
__tablename__ = "c_c_t_u_c"
@ -253,7 +206,6 @@ class CI(Model):
status = db.Column(db.Enum(*CIStatusEnum.all(), name="status"))
heartbeat = db.Column(db.DateTime, default=lambda: datetime.datetime.now())
is_auto_discovery = db.Column('a', db.Boolean, default=False)
updated_by = db.Column(db.String(64))
ci_type = db.relationship("CIType", backref="c_cis.type_id")
@ -265,7 +217,6 @@ class CIRelation(Model):
second_ci_id = db.Column(db.Integer, db.ForeignKey("c_cis.id"), nullable=False)
relation_type_id = db.Column(db.Integer, db.ForeignKey("c_relation_types.id"), nullable=False)
more = db.Column(db.Integer, db.ForeignKey("c_cis.id"))
source = db.Column(db.Enum(*RelationSourceEnum.all()), name="source")
ancestor_ids = db.Column(db.String(128), index=True)
@ -462,7 +413,6 @@ class CITypeHistory(Model):
attr_id = db.Column(db.Integer)
trigger_id = db.Column(db.Integer)
rc_id = db.Column(db.Integer)
unique_constraint_id = db.Column(db.Integer)
uid = db.Column(db.Integer, index=True)
@ -476,7 +426,6 @@ class PreferenceShowAttributes(Model):
uid = db.Column(db.Integer, index=True, nullable=False)
type_id = db.Column(db.Integer, db.ForeignKey("c_ci_types.id"), nullable=False)
attr_id = db.Column(db.Integer, db.ForeignKey("c_attributes.id"))
builtin_attr = db.Column(db.String(256), nullable=True)
order = db.Column(db.SmallInteger, default=0)
is_fixed = db.Column(db.Boolean, default=False)
@ -499,7 +448,6 @@ class PreferenceRelationView(Model):
name = db.Column(db.String(64), index=True, nullable=False)
cr_ids = db.Column(db.JSON) # [{parent_id: x, child_id: y}]
is_public = db.Column(db.Boolean, default=False)
option = db.Column(db.JSON)
class PreferenceSearchOption(Model):
@ -516,15 +464,6 @@ class PreferenceSearchOption(Model):
option = db.Column(db.JSON)
class PreferenceCITypeOrder(Model):
__tablename__ = "c_pcto"
uid = db.Column(db.Integer, index=True, nullable=False)
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
order = db.Column(db.SmallInteger, default=0)
is_tree = db.Column(db.Boolean, default=False) # True is tree view, False is resource view
# custom
class CustomDashboard(Model):
__tablename__ = "c_c_d"
@ -536,7 +475,6 @@ class CustomDashboard(Model):
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'))
builtin_attr = db.Column(db.String(256), nullable=True)
level = db.Column(db.Integer)
options = db.Column(db.JSON)
@ -574,28 +512,18 @@ class AutoDiscoveryCIType(Model):
attributes = db.Column(db.JSON) # {ad_key: cmdb_key}
relation = db.Column(db.JSON) # [{ad_key: {type_id: x, attr_id: x}}], CMDB > 2.4.5: deprecated
relation = db.Column(db.JSON) # [{ad_key: {type_id: x, attr_id: x}}]
auto_accept = db.Column(db.Boolean, default=False)
agent_id = db.Column(db.String(8), index=True)
query_expr = db.Column(db.Text)
interval = db.Column(db.Integer) # seconds, > 2.4.5: deprecated
interval = db.Column(db.Integer) # seconds
cron = db.Column(db.String(128))
extra_option = db.Column(db.JSON)
uid = db.Column(db.Integer, index=True)
enabled = db.Column(db.Boolean, default=True)
class AutoDiscoveryCITypeRelation(Model):
__tablename__ = "c_ad_ci_type_relations"
ad_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
ad_key = db.Column(db.String(128))
peer_type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'), nullable=False)
peer_attr_id = db.Column(db.Integer, db.ForeignKey('c_attributes.id'), nullable=False)
class AutoDiscoveryCI(Model):
@ -613,45 +541,6 @@ class AutoDiscoveryCI(Model):
accept_time = db.Column(db.DateTime)
class AutoDiscoveryRuleSyncHistory(Model2):
__tablename__ = "c_ad_rule_sync_histories"
adt_id = db.Column(db.Integer, db.ForeignKey('c_ad_ci_types.id'))
oneagent_id = db.Column(db.String(8))
oneagent_name = db.Column(db.String(64))
sync_at = db.Column(db.DateTime, default=datetime.datetime.now())
class AutoDiscoveryExecHistory(Model2):
__tablename__ = "c_ad_exec_histories"
type_id = db.Column(db.Integer, index=True)
stdout = db.Column(db.Text)
class AutoDiscoveryCounter(Model2):
__tablename__ = "c_ad_counter"
type_id = db.Column(db.Integer, index=True)
rule_count = db.Column(db.Integer, default=0)
exec_target_count = db.Column(db.Integer, default=0)
instance_count = db.Column(db.Integer, default=0)
accept_count = db.Column(db.Integer, default=0)
this_month_count = db.Column(db.Integer, default=0)
this_week_count = db.Column(db.Integer, default=0)
last_month_count = db.Column(db.Integer, default=0)
last_week_count = db.Column(db.Integer, default=0)
class AutoDiscoveryAccount(Model):
__tablename__ = "c_ad_accounts"
uid = db.Column(db.Integer, index=True)
name = db.Column(db.String(64))
adr_id = db.Column(db.Integer, db.ForeignKey('c_ad_rules.id'))
config = db.Column(db.JSON)
class CIFilterPerms(Model):
__tablename__ = "c_ci_filter_perms"
@ -659,7 +548,6 @@ class CIFilterPerms(Model):
type_id = db.Column(db.Integer, db.ForeignKey('c_ci_types.id'))
ci_filter = db.Column(db.Text)
attr_filter = db.Column(db.Text)
id_filter = db.Column(db.JSON) # {node_path: unique_value}
rid = db.Column(db.Integer, index=True)
@ -669,52 +557,3 @@ class InnerKV(Model):
key = db.Column(db.String(128), index=True)
value = db.Column(db.Text)
class IPAMSubnetScan(Model):
__tablename__ = "c_ipam_subnet_scans"
ci_id = db.Column(db.Integer, index=True, nullable=False)
scan_enabled = db.Column(db.Boolean, default=True)
rule_updated_at = db.Column(db.DateTime)
last_scan_time = db.Column(db.DateTime)
# scan rules
agent_id = db.Column(db.String(8), index=True)
cron = db.Column(db.String(128))
class IPAMSubnetScanHistory(Model2):
__tablename__ = "c_ipam_subnet_scan_histories"
subnet_scan_id = db.Column(db.Integer, index=True)
exec_id = db.Column(db.String(64), index=True)
cidr = db.Column(db.String(18), index=True)
start_at = db.Column(db.DateTime)
end_at = db.Column(db.DateTime)
status = db.Column(db.Integer, default=0) # 0 is ok
stdout = db.Column(db.Text)
ip_num = db.Column(db.Integer)
ips = db.Column(db.JSON) # keep only the last 10 records
class IPAMOperationHistory(Model2):
__tablename__ = "c_ipam_operation_histories"
from api.lib.cmdb.ipam.const import OperateTypeEnum
uid = db.Column(db.Integer, index=True)
cidr = db.Column(db.String(18), index=True)
operate_type = db.Column(db.Enum(*OperateTypeEnum.all()))
description = db.Column(db.Text)
class DCIMOperationHistory(Model2):
__tablename__ = "c_dcim_operation_histories"
from api.lib.cmdb.dcim.const import OperateTypeEnum
uid = db.Column(db.Integer, index=True)
rack_id = db.Column(db.Integer, index=True)
ci_id = db.Column(db.Integer, index=True)
operate_type = db.Column(db.Enum(*OperateTypeEnum.all()))

View File

@ -3,13 +3,12 @@
import json
import re
import redis_lock
from celery_once import QueueOnce
from flask import current_app
from werkzeug.exceptions import BadRequest
from werkzeug.exceptions import NotFound
from api.extensions import celery
from api.extensions import rd
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
from api.lib.perm.acl.audit import AuditCRUD
@ -26,14 +25,14 @@ from api.models.acl import Role
from api.models.acl import Trigger
@celery.task(name="acl.role_rebuild", queue=ACL_QUEUE, )
@celery.task(name="acl.role_rebuild",
queue=ACL_QUEUE,)
@flush_db
@reconnect_db
def role_rebuild(rids, app_id):
rids = rids if isinstance(rids, list) else [rids]
for rid in rids:
with redis_lock.Lock(rd.r, "ROLE_REBUILD_{}_{}".format(rid, app_id), expire=10):
RoleRelationCache.rebuild(rid, app_id)
RoleRelationCache.rebuild(rid, app_id)
current_app.logger.info("Role {0} App {1} rebuild..........".format(rids, app_id))

View File

@ -1,11 +1,10 @@
# -*- coding:utf-8 -*-
import datetime
import json
import redis_lock
import time
from flask import current_app
from flask import has_request_context
from flask_login import login_user
import api.lib.cmdb.ci
@ -13,23 +12,16 @@ from api.extensions import celery
from api.extensions import db
from api.extensions import es
from api.extensions import rd
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeAttributesCache
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION2
from api.lib.cmdb.const import RelationSourceEnum
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.utils import TableMap
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
from api.lib.perm.acl.cache import UserCache
from api.lib.utils import Lock
from api.lib.utils import handle_arg_list
from api.models.cmdb import Attribute
from api.models.cmdb import AutoDiscoveryCI
from api.models.cmdb import AutoDiscoveryCIType
from api.models.cmdb import AutoDiscoveryCITypeRelation
from api.models.cmdb import CI
from api.models.cmdb import CIRelation
from api.models.cmdb import CITypeAttribute
@ -40,8 +32,8 @@ from api.models.cmdb import CITypeAttribute
@reconnect_db
def ci_cache(ci_id, operate_type, record_id):
from api.lib.cmdb.ci import CITriggerManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.ci_type import CITypeAttributeManager
time.sleep(0.01)
m = api.lib.cmdb.ci.CIManager()
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
@ -54,37 +46,18 @@ def ci_cache(ci_id, operate_type, record_id):
current_app.logger.info("{0} flush..........".format(ci_id))
if operate_type:
if not has_request_context():
current_app.test_request_context().push()
login_user(UserCache.get('worker'))
current_app.test_request_context().push()
login_user(UserCache.get('worker'))
_, enum_map = CITypeAttributeManager.get_attr_names_label_enum(ci_dict.get('_type'))
payload = dict()
for k, v in ci_dict.items():
if k in enum_map:
if isinstance(v, list):
payload[k] = [enum_map[k].get(i, i) for i in v]
else:
payload[k] = enum_map[k].get(v, v)
else:
payload[k] = v
CITriggerManager.fire(operate_type, payload, record_id)
ci_dict and CIRelationManager.build_by_attribute(ci_dict)
@celery.task(name="cmdb.rebuild_relation_for_attribute_changed", queue=CMDB_QUEUE)
@reconnect_db
def rebuild_relation_for_attribute_changed(ci_type_relation, uid):
from api.lib.cmdb.ci import CIRelationManager
CIRelationManager.rebuild_all_by_attribute(ci_type_relation, uid)
CITriggerManager.fire(operate_type, ci_dict, record_id)
@celery.task(name="cmdb.batch_ci_cache", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def batch_ci_cache(ci_ids, ): # only for attribute change index
time.sleep(1)
for ci_id in ci_ids:
m = api.lib.cmdb.ci.CIManager()
ci_dict = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)
@ -99,7 +72,7 @@ def batch_ci_cache(ci_ids, ): # only for attribute change index
@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
@reconnect_db
def ci_delete(ci_id, type_id):
def ci_delete(ci_id):
current_app.logger.info(ci_id)
if current_app.config.get("USE_ES"):
@ -107,28 +80,9 @@ def ci_delete(ci_id, type_id):
else:
rd.delete(ci_id, REDIS_PREFIX_CI)
instance = AutoDiscoveryCI.get_by(ci_id=ci_id, to_dict=False, first=True)
if instance is not None:
adt = AutoDiscoveryCIType.get_by_id(instance.adt_id)
if adt:
adt.update(updated_at=datetime.datetime.now())
instance.delete()
for attr in Attribute.get_by(reference_type_id=type_id, to_dict=False):
table = TableMap(attr=attr).table
for i in getattr(table, 'get_by')(attr_id=attr.id, value=ci_id, to_dict=False):
i.delete()
ci_cache(i.ci_id, None, None)
current_app.logger.info("{0} delete..........".format(ci_id))
@celery.task(name="cmdb.delete_id_filter", queue=CMDB_QUEUE)
@reconnect_db
def delete_id_filter(ci_id):
CIFilterPermsCRUD().delete_id_filter_by_ci_id(ci_id)
@celery.task(name="cmdb.ci_delete_trigger", queue=CMDB_QUEUE)
@reconnect_db
def ci_delete_trigger(trigger, operate_type, ci_dict):
@ -145,18 +99,19 @@ def ci_delete_trigger(trigger, operate_type, ci_dict):
@flush_db
@reconnect_db
def ci_relation_cache(parent_id, child_id, ancestor_ids):
with redis_lock.Lock(rd.r, "CIRelation_{}".format(parent_id), expire=10):
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else {}
with Lock("CIRelation_{}".format(parent_id)):
if ancestor_ids is None:
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else {}
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
first=True, to_dict=False)
if str(child_id) not in children:
children[str(child_id)] = cr.second_ci.type_id
cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, ancestor_ids=ancestor_ids,
first=True, to_dict=False)
if str(child_id) not in children:
children[str(child_id)] = cr.second_ci.type_id
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
if ancestor_ids is not None:
else:
key = "{},{}".format(ancestor_ids, parent_id)
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
grandson = json.loads(grandson) if grandson is not None else {}
@ -186,9 +141,8 @@ def ci_relation_add(parent_dict, child_id, uid):
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search
if not has_request_context():
current_app.test_request_context().push()
login_user(UserCache.get(uid))
current_app.test_request_context().push()
login_user(UserCache.get(uid))
for parent in parent_dict:
parent_ci_type_name, _attr_name = parent.strip()[1:].split('.', 1)
@ -210,7 +164,7 @@ def ci_relation_add(parent_dict, child_id, uid):
for ci in response:
try:
CIRelationManager.add(ci['_id'], child_id)
ci_relation_cache(ci['_id'], child_id, None)
ci_relation_cache(ci['_id'], child_id)
except Exception as e:
current_app.logger.warning(e)
finally:
@ -223,16 +177,17 @@ def ci_relation_add(parent_dict, child_id, uid):
@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
@reconnect_db
def ci_relation_delete(parent_id, child_id, ancestor_ids):
with redis_lock.Lock(rd.r, "CIRelation_{}".format(parent_id), expire=10):
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else {}
with Lock("CIRelation_{}".format(parent_id)):
if ancestor_ids is None:
children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
children = json.loads(children) if children is not None else {}
if str(child_id) in children:
children.pop(str(child_id))
if str(child_id) in children:
children.pop(str(child_id))
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)
if ancestor_ids is not None:
else:
key = "{},{}".format(ancestor_ids, parent_id)
grandson = rd.get([key], REDIS_PREFIX_CI_RELATION2)[0]
grandson = json.loads(grandson) if grandson is not None else {}
@ -275,124 +230,11 @@ def ci_type_attribute_order_rebuild(type_id, uid):
def calc_computed_attribute(attr_id, uid):
from api.lib.cmdb.ci import CIManager
if not has_request_context():
current_app.test_request_context().push()
login_user(UserCache.get(uid))
current_app.test_request_context().push()
login_user(UserCache.get(uid))
cim = CIManager()
for i in CITypeAttribute.get_by(attr_id=attr_id, to_dict=False):
cis = CI.get_by(type_id=i.type_id, to_dict=False)
for ci in cis:
cim.update(ci.id, {})
@celery.task(name="cmdb.write_ad_rule_sync_history", queue=CMDB_QUEUE)
@reconnect_db
def write_ad_rule_sync_history(rules, oneagent_id, oneagent_name, sync_at):
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleSyncHistoryCRUD
for rule in rules:
AutoDiscoveryRuleSyncHistoryCRUD().upsert(adt_id=rule['id'],
oneagent_id=oneagent_id,
oneagent_name=oneagent_name,
sync_at=sync_at,
commit=False)
try:
db.session.commit()
except Exception as e:
current_app.logger.error("write auto discovery rule sync history failed: {}".format(e))
db.session.rollback()
@celery.task(name="cmdb.build_relations_for_ad_accept", queue=CMDB_QUEUE)
@reconnect_db
def build_relations_for_ad_accept(adc, ci_id, ad_key2attr):
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
current_app.test_request_context().push()
login_user(UserCache.get('worker'))
relation_ads = AutoDiscoveryCITypeRelation.get_by(ad_type_id=adc['type_id'], to_dict=False)
for r_adt in relation_ads:
ad_key = r_adt.ad_key
if not adc['instance'].get(ad_key):
continue
ad_key_values = [adc['instance'].get(ad_key)] if not isinstance(
adc['instance'].get(ad_key), list) else adc['instance'].get(ad_key)
for ad_key_value in ad_key_values:
query = "_type:{},{}:{}".format(r_adt.peer_type_id, r_adt.peer_attr_id, ad_key_value)
s = ci_search(query, use_ci_filter=False, count=1000000)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
current_app.logger.error("build_relations_for_ad_accept failed: {}".format(e))
return
for relation_ci in response:
relation_ci_id = relation_ci['_id']
try:
CIRelationManager.add(ci_id, relation_ci_id,
valid=False,
source=RelationSourceEnum.AUTO_DISCOVERY)
except:
try:
CIRelationManager.add(relation_ci_id, ci_id,
valid=False,
source=RelationSourceEnum.AUTO_DISCOVERY)
except:
pass
# build relations in reverse
relation_ads = AutoDiscoveryCITypeRelation.get_by(peer_type_id=adc['type_id'], to_dict=False)
attr2ad_key = {v: k for k, v in ad_key2attr.items()}
for r_adt in relation_ads:
attr = AttributeCache.get(r_adt.peer_attr_id)
ad_key = attr2ad_key.get(attr and attr.name)
if not ad_key:
continue
ad_value = adc['instance'].get(ad_key)
peer_ad_key = r_adt.ad_key
peer_instances = AutoDiscoveryCI.get_by(type_id=r_adt.ad_type_id, to_dict=False)
for peer_instance in peer_instances:
peer_ad_values = peer_instance.instance.get(peer_ad_key)
peer_ad_values = [peer_ad_values] if not isinstance(peer_ad_values, list) else peer_ad_values
if ad_value in peer_ad_values and peer_instance.ci_id:
try:
CIRelationManager.add(peer_instance.ci_id, ci_id,
valid=False,
source=RelationSourceEnum.AUTO_DISCOVERY)
except:
try:
CIRelationManager.add(ci_id, peer_instance.ci_id,
valid=False,
source=RelationSourceEnum.AUTO_DISCOVERY)
except:
pass
@celery.task(name="cmdb.dcim_calc_u_free_count", queue=CMDB_QUEUE)
@reconnect_db
def dcim_calc_u_free_count():
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.dcim.rack import RackManager
from api.lib.cmdb.dcim.const import RackBuiltinAttributes
if not has_request_context():
current_app.test_request_context().push()
login_user(UserCache.get('worker'))
try:
rack_m = RackManager()
except Exception:
return
racks = CI.get_by(type_id=rack_m.type_id, to_dict=False)
for rack in racks:
payload = {RackBuiltinAttributes.FREE_U_COUNT: rack_m.calc_u_free_count(rack.id)}
CIManager().update(rack.id, **payload)
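Note: the tasks in this file share one skeleton — a Celery task pinned to a queue, wrapped in flush_db/reconnect_db, that pushes a Flask request context and logs in a service user before calling code that relies on current_user. A hedged sketch of that skeleton (the task name and body are illustrative):

# Illustrative Celery task skeleton following the pattern above; "cmdb.example"
# is a made-up task name.
from flask import current_app, has_request_context
from flask_login import login_user

from api.extensions import celery
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
from api.lib.perm.acl.cache import UserCache


@celery.task(name="cmdb.example", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def example(ci_id):
    if not has_request_context():
        current_app.test_request_context().push()
        login_user(UserCache.get('worker'))
    current_app.logger.info("processing CI {0}".format(ci_id))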

View File

@ -3,14 +3,14 @@ from flask import current_app
from api.extensions import celery
from api.lib.common_setting.acl import ACLManager
from api.lib.perm.acl.const import ACL_QUEUE
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.common_setting.resp_format import ErrFormat
from api.models.common_setting import Department, Employee
from api.lib.decorator import flush_db
from api.lib.decorator import reconnect_db
@celery.task(name="common_setting.edit_employee_department_in_acl", queue=ACL_QUEUE)
@celery.task(name="common_setting.edit_employee_department_in_acl", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
@ -49,20 +49,21 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
continue
old_d_rid_in_acl = role_map.get(old_department.department_name, 0)
if old_d_rid_in_acl > 0:
if old_d_rid_in_acl != old_department.acl_rid:
old_department.update(
acl_rid=old_d_rid_in_acl
)
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
payload = {
'app_id': 'acl',
'parent_id': d_acl_rid,
}
try:
acl.remove_user_from_role(employee_acl_rid, payload)
except Exception as e:
result.append(ErrFormat.acl_remove_user_from_role_failed.format(str(e)))
if old_d_rid_in_acl == 0:
return
if old_d_rid_in_acl != old_department.acl_rid:
old_department.update(
acl_rid=old_d_rid_in_acl
)
d_acl_rid = old_department.acl_rid if old_d_rid_in_acl == old_department.acl_rid else old_d_rid_in_acl
payload = {
'app_id': 'acl',
'parent_id': d_acl_rid,
}
try:
acl.remove_user_from_role(employee_acl_rid, payload)
except Exception as e:
result.append(ErrFormat.acl_remove_user_from_role_failed.format(str(e)))
payload = {
'app_id': 'acl',
@ -76,10 +77,10 @@ def edit_employee_department_in_acl(e_list, new_d_id, op_uid):
return result
@celery.task(name="common_setting.refresh_employee_acl_info", queue=ACL_QUEUE)
@celery.task(name="common_setting.refresh_employee_acl_info", queue=CMDB_QUEUE)
@flush_db
@reconnect_db
def refresh_employee_acl_info(current_employee_id=None):
def refresh_employee_acl_info():
acl = ACLManager('acl')
role_map = {role['name']: role for role in acl.get_all_roles()}
@ -89,12 +90,8 @@ def refresh_employee_acl_info(current_employee_id=None):
query = Employee.query.filter(*criterion).order_by(
Employee.created_at.desc()
)
current_employee_rid = 0
for em in query.all():
if current_employee_id and em.employee_id == current_employee_id:
current_employee_rid = em.acl_rid if em.acl_rid else 0
if em.acl_uid and em.acl_rid:
continue
role = role_map.get(em.username, None)
@ -108,9 +105,6 @@ def refresh_employee_acl_info(current_employee_id=None):
if not em.acl_rid:
params['acl_rid'] = role.get('id', 0)
if current_employee_id and em.employee_id == current_employee_id:
current_employee_rid = params['acl_rid'] if params.get('acl_rid', 0) else 0
try:
em.update(**params)
current_app.logger.info(
@ -119,12 +113,3 @@ def refresh_employee_acl_info(current_employee_id=None):
except Exception as e:
current_app.logger.error(str(e))
continue
if current_employee_rid and current_employee_rid > 0:
try:
from api.lib.common_setting.employee import GrantEmployeeACLPerm
GrantEmployeeACLPerm().grant_by_rid(current_employee_rid, False)
current_app.logger.info(f"GrantEmployeeACLPerm success, current_employee_rid: {current_employee_rid}")
except Exception as e:
current_app.logger.error(str(e))

View File

@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PROJECT VERSION\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
"POT-Creation-Date: 2024-11-26 18:54+0800\n"
"POT-Creation-Date: 2024-01-03 11:39+0800\n"
"PO-Revision-Date: 2023-12-25 20:21+0800\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language: zh\n"
@ -16,7 +16,7 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.16.0\n"
"Generated-By: Babel 2.14.0\n"
#: api/lib/resp_format.py:7
msgid "unauthorized"
@ -81,7 +81,7 @@ msgid "User {} does not exist"
msgstr "用户 {} 不存在"
#: api/lib/resp_format.py:32
msgid "For resource: {}, you do not have {} permission!"
msgid "You do not have {} permission for resource: {}!"
msgstr "您没有资源: {} 的{}权限!"
#: api/lib/resp_format.py:33
@ -92,14 +92,6 @@ msgstr "您没有操作权限!"
msgid "Only the creator or administrator has permission!"
msgstr "只有创建人或者管理员才有权限!"
#: api/lib/cmdb/const.py:133
msgid "Update Time"
msgstr "更新时间"
#: api/lib/cmdb/const.py:134
msgid "Updated By"
msgstr "更新人"
#: api/lib/cmdb/resp_format.py:9
msgid "CI Model"
msgstr "模型配置"
@ -177,8 +169,8 @@ msgstr "目前只允许 属性创建人、管理员 删除属性!"
#: api/lib/cmdb/resp_format.py:37
msgid ""
"Attribute field names cannot be built-in fields: id, _id, ci_id, type, "
"_type, ci_type, ticket_id"
msgstr "属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type, ci_type, ticket_id"
"_type, ci_type"
msgstr "属性字段名不能是内置字段: id, _id, ci_id, type, _type, ci_type"
#: api/lib/cmdb/resp_format.py:39
msgid "Predefined value: Other model request parameters are illegal!"
@ -205,357 +197,245 @@ msgid "CI already exists!"
msgstr "CI 已经存在!"
#: api/lib/cmdb/resp_format.py:47
msgid "{}: CI reference {} does not exist!"
msgstr "{}: CI引用 {} 不存在!"
#: api/lib/cmdb/resp_format.py:48
msgid "{}: CI reference {} is illegal!"
msgstr "{}, CI引用 {} 不合法!"
#: api/lib/cmdb/resp_format.py:49
msgid "Relationship constraint: {}, verification failed"
msgstr "关系约束: {}, 校验失败"
#: api/lib/cmdb/resp_format.py:51
#: api/lib/cmdb/resp_format.py:49
msgid ""
"Many-to-many relationship constraint: Model {} <-> {} already has a many-"
"to-many relationship!"
msgstr "多对多关系 限制: 模型 {} <-> {} 已经存在多对多关系!"
#: api/lib/cmdb/resp_format.py:54
#: api/lib/cmdb/resp_format.py:52
msgid "CI relationship: {} does not exist"
msgstr "CI关系: {} 不存在"
#: api/lib/cmdb/resp_format.py:57
#: api/lib/cmdb/resp_format.py:55
msgid "In search expressions, not supported before parentheses: or, not"
msgstr "搜索表达式里小括号前不支持: 或、非"
#: api/lib/cmdb/resp_format.py:59
#: api/lib/cmdb/resp_format.py:57
msgid "Model {} does not exist"
msgstr "模型 {} 不存在"
#: api/lib/cmdb/resp_format.py:60
#: api/lib/cmdb/resp_format.py:58
msgid "Model {} already exists"
msgstr "模型 {} 已经存在"
#: api/lib/cmdb/resp_format.py:61
#: api/lib/cmdb/resp_format.py:59
msgid "The primary key is undefined or has been deleted"
msgstr "主键未定义或者已被删除"
#: api/lib/cmdb/resp_format.py:62
#: api/lib/cmdb/resp_format.py:60
msgid "Only the creator can delete it!"
msgstr "只有创建人才能删除它!"
#: api/lib/cmdb/resp_format.py:63
#: api/lib/cmdb/resp_format.py:61
msgid "The model cannot be deleted because the CI already exists"
msgstr "因为CI已经存在不能删除模型"
#: api/lib/cmdb/resp_format.py:65
msgid "The inheritance cannot be deleted because the CI already exists"
msgstr "因为CI已经存在不能删除继承关系"
#: api/lib/cmdb/resp_format.py:67
msgid "The model is inherited and cannot be deleted"
msgstr "该模型被继承, 不能删除"
#: api/lib/cmdb/resp_format.py:68
msgid "The model is referenced by attribute {} and cannot be deleted"
msgstr "该模型被属性 {} 引用, 不能删除"
#: api/lib/cmdb/resp_format.py:72
msgid ""
"The model cannot be deleted because the model is referenced by the "
"relational view {}"
msgstr "因为关系视图 {} 引用了该模型,不能删除模型"
#: api/lib/cmdb/resp_format.py:74
#: api/lib/cmdb/resp_format.py:67
msgid "Model group {} does not exist"
msgstr "模型分组 {} 不存在"
#: api/lib/cmdb/resp_format.py:75
#: api/lib/cmdb/resp_format.py:68
msgid "Model group {} already exists"
msgstr "模型分组 {} 已经存在"
#: api/lib/cmdb/resp_format.py:76
#: api/lib/cmdb/resp_format.py:69
msgid "Model relationship {} does not exist"
msgstr "模型关系 {} 不存在"
#: api/lib/cmdb/resp_format.py:77
#: api/lib/cmdb/resp_format.py:70
msgid "Attribute group {} already exists"
msgstr "属性分组 {} 已存在"
#: api/lib/cmdb/resp_format.py:78
#: api/lib/cmdb/resp_format.py:71
msgid "Attribute group {} does not exist"
msgstr "属性分组 {} 不存在"
#: api/lib/cmdb/resp_format.py:80
#: api/lib/cmdb/resp_format.py:73
msgid "Attribute group <{0}> - attribute <{1}> does not exist"
msgstr "属性组<{0}> - 属性<{1}> 不存在"
#: api/lib/cmdb/resp_format.py:81
#: api/lib/cmdb/resp_format.py:74
msgid "The unique constraint already exists!"
msgstr "唯一约束已经存在!"
#: api/lib/cmdb/resp_format.py:83
#: api/lib/cmdb/resp_format.py:76
msgid "Uniquely constrained attributes cannot be JSON and multi-valued"
msgstr "唯一约束的属性不能是 JSON 和 多值"
#: api/lib/cmdb/resp_format.py:84
#: api/lib/cmdb/resp_format.py:77
msgid "Duplicated trigger"
msgstr "重复的触发器"
#: api/lib/cmdb/resp_format.py:85
#: api/lib/cmdb/resp_format.py:78
msgid "Trigger {} does not exist"
msgstr "触发器 {} 不存在"
#: api/lib/cmdb/resp_format.py:86
msgid "Duplicated reconciliation rule"
msgstr ""
#: api/lib/cmdb/resp_format.py:87
msgid "Reconciliation rule {} does not exist"
msgstr "关系类型 {} 不存在"
#: api/lib/cmdb/resp_format.py:89
#: api/lib/cmdb/resp_format.py:80
msgid "Operation record {} does not exist"
msgstr "操作记录 {} 不存在"
#: api/lib/cmdb/resp_format.py:90
#: api/lib/cmdb/resp_format.py:81
msgid "Unique identifier cannot be deleted"
msgstr "不能删除唯一标识"
#: api/lib/cmdb/resp_format.py:91
#: api/lib/cmdb/resp_format.py:82
msgid "Cannot delete default sorted attributes"
msgstr "不能删除默认排序的属性"
#: api/lib/cmdb/resp_format.py:93
#: api/lib/cmdb/resp_format.py:84
msgid "No node selected"
msgstr "没有选择节点"
#: api/lib/cmdb/resp_format.py:94
#: api/lib/cmdb/resp_format.py:85
msgid "This search option does not exist!"
msgstr "该搜索选项不存在!"
#: api/lib/cmdb/resp_format.py:95
#: api/lib/cmdb/resp_format.py:86
msgid "This search option has a duplicate name!"
msgstr "该搜索选项命名重复!"
#: api/lib/cmdb/resp_format.py:97
#: api/lib/cmdb/resp_format.py:88
msgid "Relationship type {} already exists"
msgstr "关系类型 {} 已经存在"
#: api/lib/cmdb/resp_format.py:98
#: api/lib/cmdb/resp_format.py:89
msgid "Relationship type {} does not exist"
msgstr "关系类型 {} 不存在"
#: api/lib/cmdb/resp_format.py:100
#: api/lib/cmdb/resp_format.py:91
msgid "Invalid attribute value: {}"
msgstr "无效的属性值: {}"
#: api/lib/cmdb/resp_format.py:101
#: api/lib/cmdb/resp_format.py:92
msgid "{} Invalid value: {}"
msgstr "{} 无效的值: {}"
msgstr "无效的值: {}"
#: api/lib/cmdb/resp_format.py:102
#: api/lib/cmdb/resp_format.py:93
msgid "{} is not in the predefined values"
msgstr "{} 不在预定义值里"
#: api/lib/cmdb/resp_format.py:104
#: api/lib/cmdb/resp_format.py:95
msgid "The value of attribute {} must be unique, {} already exists"
msgstr "属性 {} 的值必须是唯一的, 当前值 {} 已存在"
#: api/lib/cmdb/resp_format.py:105
#: api/lib/cmdb/resp_format.py:96
msgid "Attribute {} value must exist"
msgstr "属性 {} 值必须存在"
#: api/lib/cmdb/resp_format.py:106
msgid "Out of range value, the maximum value is 2147483647"
msgstr "超过最大值限制, 最大值是2147483647"
#: api/lib/cmdb/resp_format.py:108
#: api/lib/cmdb/resp_format.py:99
msgid "Unknown error when adding or modifying attribute value: {}"
msgstr "新增或者修改属性值未知错误: {}"
#: api/lib/cmdb/resp_format.py:110
#: api/lib/cmdb/resp_format.py:101
msgid "Duplicate custom name"
msgstr "订制名重复"
#: api/lib/cmdb/resp_format.py:112
#: api/lib/cmdb/resp_format.py:103
msgid "Number of models exceeds limit: {}"
msgstr "模型数超过限制: {}"
#: api/lib/cmdb/resp_format.py:113
#: api/lib/cmdb/resp_format.py:104
msgid "The number of CIs exceeds the limit: {}"
msgstr "CI数超过限制: {}"
#: api/lib/cmdb/resp_format.py:115
#: api/lib/cmdb/resp_format.py:106
msgid "Auto-discovery rule: {} already exists!"
msgstr "自动发现规则: {} 已经存在!"
#: api/lib/cmdb/resp_format.py:116
#: api/lib/cmdb/resp_format.py:107
msgid "Auto-discovery rule: {} does not exist!"
msgstr "自动发现规则: {} 不存在!"
#: api/lib/cmdb/resp_format.py:118
#: api/lib/cmdb/resp_format.py:109
msgid "This auto-discovery rule is referenced by the model and cannot be deleted!"
msgstr "该自动发现规则被模型引用, 不能删除!"
#: api/lib/cmdb/resp_format.py:120
#: api/lib/cmdb/resp_format.py:111
msgid "The application of auto-discovery rules cannot be defined repeatedly!"
msgstr "自动发现规则的应用不能重复定义!"
#: api/lib/cmdb/resp_format.py:121
#: api/lib/cmdb/resp_format.py:112
msgid "The auto-discovery you want to modify: {} does not exist!"
msgstr "您要修改的自动发现: {} 不存在!"
#: api/lib/cmdb/resp_format.py:122
#: api/lib/cmdb/resp_format.py:113
msgid "Attribute does not include unique identifier: {}"
msgstr "属性字段没有包括唯一标识: {}"
#: api/lib/cmdb/resp_format.py:123
#: api/lib/cmdb/resp_format.py:114
msgid "The auto-discovery instance does not exist!"
msgstr "自动发现的实例不存在!"
#: api/lib/cmdb/resp_format.py:124
#: api/lib/cmdb/resp_format.py:115
msgid "The model is not associated with this auto-discovery!"
msgstr "模型并未关联该自动发现!"
#: api/lib/cmdb/resp_format.py:125
#: api/lib/cmdb/resp_format.py:116
msgid "Only the creator can modify the Secret!"
msgstr "只有创建人才能修改Secret!"
#: api/lib/cmdb/resp_format.py:127
#: api/lib/cmdb/resp_format.py:118
msgid "This rule already has auto-discovery instances and cannot be deleted!"
msgstr "该规则已经有自动发现的实例, 不能被删除!"
#: api/lib/cmdb/resp_format.py:129
#: api/lib/cmdb/resp_format.py:120
msgid "The default auto-discovery rule is already referenced by model {}!"
msgstr "该默认的自动发现规则 已经被模型 {} 引用!"
#: api/lib/cmdb/resp_format.py:131
#: api/lib/cmdb/resp_format.py:122
msgid "The unique_key method must return a non-empty string!"
msgstr "unique_key方法必须返回非空字符串!"
#: api/lib/cmdb/resp_format.py:132
#: api/lib/cmdb/resp_format.py:123
msgid "The attributes method must return a list"
msgstr "attributes方法必须返回的是list"
#: api/lib/cmdb/resp_format.py:134
#: api/lib/cmdb/resp_format.py:125
msgid "The list returned by the attributes method cannot be empty!"
msgstr "attributes方法返回的list不能为空!"
#: api/lib/cmdb/resp_format.py:136
#: api/lib/cmdb/resp_format.py:127
msgid "Only administrators can define execution targets as: all nodes!"
msgstr "只有管理员才可以定义执行机器为: 所有节点!"
#: api/lib/cmdb/resp_format.py:137
#: api/lib/cmdb/resp_format.py:128
msgid "Execute targets permission check failed: {}"
msgstr "执行机器权限检查不通过: {}"
#: api/lib/cmdb/resp_format.py:139
#: api/lib/cmdb/resp_format.py:130
msgid "CI filter authorization must be named!"
msgstr "CI过滤授权 必须命名!"
#: api/lib/cmdb/resp_format.py:140
#: api/lib/cmdb/resp_format.py:131
msgid "CI filter authorization is currently not supported or query"
msgstr "CI过滤授权 暂时不支持 或 查询"
#: api/lib/cmdb/resp_format.py:143
#: api/lib/cmdb/resp_format.py:134
msgid "You do not have permission to operate attribute {}!"
msgstr "您没有属性 {} 的操作权限!"
#: api/lib/cmdb/resp_format.py:144
#: api/lib/cmdb/resp_format.py:135
msgid "You do not have permission to operate this CI!"
msgstr "您没有该CI的操作权限!"
#: api/lib/cmdb/resp_format.py:146
#: api/lib/cmdb/resp_format.py:137
msgid "Failed to save password: {}"
msgstr "保存密码失败: {}"
#: api/lib/cmdb/resp_format.py:147
#: api/lib/cmdb/resp_format.py:138
msgid "Failed to get password: {}"
msgstr "获取密码失败: {}"
#: api/lib/cmdb/resp_format.py:149
msgid "Scheduling time format error"
msgstr "{}格式错误,应该为:%Y-%m-%d %H:%M:%S"
#: api/lib/cmdb/resp_format.py:150
msgid "CMDB data reconciliation results"
msgstr "CMDB数据合规检查结果"
#: api/lib/cmdb/resp_format.py:151
msgid "Number of {} illegal: {}"
msgstr "{} 不合规数: {}"
#: api/lib/cmdb/resp_format.py:153
msgid "Topology view {} already exists"
msgstr "拓扑视图 {} 已经存在"
#: api/lib/cmdb/resp_format.py:154
msgid "Topology group {} already exists"
msgstr "拓扑视图分组 {} 已经存在"
#: api/lib/cmdb/resp_format.py:156
msgid "The group cannot be deleted because the topology view already exists"
msgstr "因为该分组下定义了拓扑视图,不能删除"
#: api/lib/cmdb/resp_format.py:158
msgid "Both the source model and the target model must be selected"
msgstr "源模型和目标模型不能为空!"
#: api/lib/cmdb/resp_format.py:160
msgid "The names of built-in models cannot be changed"
msgstr "内置模型的名字不能修改"
#: api/lib/cmdb/resp_format.py:162
msgid "The subnet model {} does not exist"
msgstr "子网模型 {} 不存在!"
#: api/lib/cmdb/resp_format.py:163
msgid "The IP Address model {} does not exist"
msgstr "IP地址模型 {} 不存在!"
#: api/lib/cmdb/resp_format.py:164
msgid "CIDR {} is an invalid notation"
msgstr "CIDR {} 写法不正确!"
#: api/lib/cmdb/resp_format.py:165
msgid "Invalid CIDR: {}, available subnets: {}"
msgstr "无效的CIDR: {}, 可用的子网: {}"
#: api/lib/cmdb/resp_format.py:166
msgid "Invalid subnet prefix length: {}"
msgstr "无效的子网前缀长度: {}"
#: api/lib/cmdb/resp_format.py:167
msgid "parent node cidr must be required"
msgstr "必须要有父节点"
#: api/lib/cmdb/resp_format.py:168
msgid "{} and {} overlap"
msgstr "{} 和 {} 有重叠"
#: api/lib/cmdb/resp_format.py:169 api/lib/cmdb/resp_format.py:171
msgid "Cannot delete because child nodes exist"
msgstr "因为子节点已经存在,不能删除"
#: api/lib/cmdb/resp_format.py:170
msgid "Subnet is not found"
msgstr "子网不存在"
#: api/lib/cmdb/resp_format.py:174
msgid "The dcim model {} does not exist"
msgstr "DCIM模型 {} 不存在!"
#: api/lib/cmdb/resp_format.py:175
msgid "Irregularities in Rack Units"
msgstr "机架U位异常!"
#: api/lib/cmdb/resp_format.py:176
msgid "The device's position is greater than the rack unit height"
msgstr "有设备的位置大于机柜的U数!"
#: api/lib/common_setting/resp_format.py:8
msgid "Company info already existed"
msgstr "公司信息已存在,无法创建!"
@ -812,10 +692,6 @@ msgstr "LDAP测试用户名必填"
msgid "Company wide"
msgstr "全公司"
#: api/lib/common_setting/resp_format.py:84
msgid "No permission to access resource {}, perm {} "
msgstr "您没有资源: {} 的 {} 权限"
#: api/lib/perm/acl/resp_format.py:9
msgid "login successful"
msgstr "登录成功"

View File

@ -1,6 +1,7 @@
# -*- coding:utf-8 -*-
import datetime
import jwt
import six
from flask import abort
@ -16,12 +17,10 @@ from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.audit import AuditCRUD
from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.cache import RoleCache
from api.lib.perm.acl.cache import User
from api.lib.perm.acl.cache import UserCache
from api.lib.perm.acl.resp_format import ErrFormat
from api.lib.perm.acl.role import RoleRelationCRUD
from api.lib.perm.auth import auth_abandoned
from api.lib.perm.auth import auth_with_app_token
from api.models.acl import Role
@ -39,9 +38,8 @@ class LoginView(APIView):
username = request.values.get("username") or request.values.get("email")
password = request.values.get("password")
_role = None
auth_with_ldap = request.values.get('auth_with_ldap', True)
config = AuthenticateDataCRUD(AuthenticateType.LDAP).get()
if (config.get('enabled') or config.get('enable')) and auth_with_ldap:
if config.get('enabled') or config.get('enable'):
from api.lib.perm.authentication.ldap import authenticate_with_ldap
user, authenticated = authenticate_with_ldap(username, password)
else:
@ -125,17 +123,10 @@ class AuthWithKeyView(APIView):
if not user.get('username'):
user['username'] = user.get('name')
result = dict(user=user,
authenticated=authenticated,
rid=role and role.id,
can_proxy=can_proxy)
if request.values.get('need_parentRoles') in current_app.config.get('BOOL_TRUE'):
app_id = AppCache.get(request.values.get('app_id'))
parent_ids = RoleRelationCRUD.recursive_parent_ids(role and role.id, app_id and app_id.id)
result['user']['parentRoles'] = [RoleCache.get(rid).name for rid in set(parent_ids) if RoleCache.get(rid)]
return self.jsonify(result)
return self.jsonify(user=user,
authenticated=authenticated,
rid=role and role.id,
can_proxy=can_proxy)
class AuthWithTokenView(APIView):
@ -192,8 +183,6 @@ class LogoutView(APIView):
def post(self):
logout_user()
AuditCRUD.add_login_log(None, None, None,
_id=session.get('LOGIN_ID') or request.values.get('LOGIN_ID'),
logout_at=datetime.datetime.now())
AuditCRUD.add_login_log(None, None, None, _id=session.get('LOGIN_ID'), logout_at=datetime.datetime.now())
self.jsonify(code=200)

View File

@ -11,7 +11,6 @@ from flask_login import current_user
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import AuditCRUD
from api.lib.perm.acl.acl import role_required
from api.lib.perm.acl.cache import AppCache
from api.lib.perm.acl.cache import UserCache
@ -49,13 +48,6 @@ class GetUserInfoView(APIView):
role=dict(permissions=user_info.get('parents')),
avatar=user_info.get('avatar'))
if request.values.get('channel'):
_id = AuditCRUD.add_login_log(name, True, ErrFormat.login_succeed,
ip=request.values.get('ip'),
browser=request.values.get('browser'))
session['LOGIN_ID'] = _id
result['LOGIN_ID'] = _id
current_app.logger.info("get user info for3: {}".format(result))
return self.jsonify(result=result)

View File

@ -1,36 +1,26 @@
# -*- coding:utf-8 -*-
import copy
import json
import uuid
from io import BytesIO
from flask import abort
from flask import current_app
from flask import request
from flask_login import current_user
from io import BytesIO
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryAccountCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCICRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCITypeCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCITypeRelationCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryComponentsManager
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryCounterCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryExecHistoryCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryHTTPManager
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoveryRuleSyncHistoryCRUD
from api.lib.cmdb.auto_discovery.auto_discovery import AutoDiscoverySNMPManager
from api.lib.cmdb.auto_discovery.const import DEFAULT_INNER
from api.lib.cmdb.auto_discovery.const import PRIVILEGED_USERS
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.auto_discovery.const import DEFAULT_HTTP
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.ipam.subnet import SubnetManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.cmdb.search.ci import search
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.exception import AbortException
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.utils import AESCrypto
from api.lib.utils import get_page
@ -47,19 +37,14 @@ class AutoDiscoveryRuleView(APIView):
rebuild = False
exists = {i['name'] for i in res}
for i in copy.deepcopy(DEFAULT_INNER):
for i in DEFAULT_HTTP:
if i['name'] not in exists:
i.pop('en', None)
AutoDiscoveryRuleCRUD().add(**i)
rebuild = True
if rebuild:
_, res = AutoDiscoveryRuleCRUD.search(page=1, page_size=100000, **request.values)
for i in res:
if i['type'] == 'http':
i['resources'] = AutoDiscoveryHTTPManager().get_resources(i['name'])
return self.jsonify(res)
@args_required("name", value_required=True)
@ -113,39 +98,24 @@ class AutoDiscoveryRuleTemplateFileView(APIView):
class AutoDiscoveryRuleHTTPView(APIView):
url_prefix = ("/adr/http/<string:name>/categories",
"/adr/http/<string:name>/attributes",
"/adr/http/<string:name>/mapping",
"/adr/snmp/<string:name>/attributes",
"/adr/components/<string:name>/attributes",)
url_prefix = ("/adr/http/<string:name>/categories", "/adr/http/<string:name>/attributes",
"/adr/snmp/<string:name>/attributes")
def get(self, name):
if "snmp" in request.url:
return self.jsonify(AutoDiscoverySNMPManager.get_attributes())
if "components" in request.url:
return self.jsonify(AutoDiscoveryComponentsManager.get_attributes(name))
if "attributes" in request.url:
resource = request.values.get('resource')
return self.jsonify(AutoDiscoveryHTTPManager.get_attributes(name, resource))
if "mapping" in request.url:
resource = request.values.get('resource')
return self.jsonify(AutoDiscoveryHTTPManager.get_mapping(name, resource))
category = request.values.get('category')
return self.jsonify(AutoDiscoveryHTTPManager.get_attributes(name, category))
return self.jsonify(AutoDiscoveryHTTPManager.get_categories(name))
class AutoDiscoveryCITypeView(APIView):
url_prefix = ("/adt/ci_types/<int:type_id>",
"/adt/ci_types/<int:type_id>/attributes",
"/adt/<int:adt_id>")
url_prefix = ("/adt/ci_types/<int:type_id>", "/adt/<int:adt_id>")
def get(self, type_id):
if "attributes" in request.url:
return self.jsonify(AutoDiscoveryCITypeCRUD.get_ad_attributes(type_id))
_, res = AutoDiscoveryCITypeCRUD.search(page=1, page_size=100000, type_id=type_id, **request.values)
for i in res:
if isinstance(i.get("extra_option"), dict) and i['extra_option'].get('secret'):
@ -153,11 +123,6 @@ class AutoDiscoveryCITypeView(APIView):
i['extra_option'].pop('secret', None)
else:
i['extra_option']['secret'] = AESCrypto.decrypt(i['extra_option']['secret'])
if isinstance(i.get("extra_option"), dict) and i['extra_option'].get('password'):
if not (current_user.username == "cmdb_agent" or current_user.uid == i['uid']):
i['extra_option'].pop('password', None)
else:
i['extra_option']['password'] = AESCrypto.decrypt(i['extra_option']['password'])
return self.jsonify(res)
@@ -181,27 +146,6 @@ class AutoDiscoveryCITypeView(APIView):
return self.jsonify(adt_id=adt_id)
class AutoDiscoveryCITypeRelationView(APIView):
url_prefix = ("/adt/ci_types/<int:type_id>/relations", "/adt/relations/<int:_id>")
def get(self, type_id):
_, res = AutoDiscoveryCITypeRelationCRUD.search(page=1, page_size=100000, ad_type_id=type_id, **request.values)
return self.jsonify(res)
@args_required("relations")
def post(self, type_id):
return self.jsonify(AutoDiscoveryCITypeRelationCRUD().upsert(type_id, request.values['relations']))
def put(self):
return self.post()
def delete(self, _id):
AutoDiscoveryCITypeRelationCRUD().delete(_id)
return self.jsonify(id=_id)
class AutoDiscoveryCIView(APIView):
url_prefix = ("/adc", "/adc/<int:adc_id>", "/adc/ci_types/<int:type_id>/attributes", "/adc/ci_types")
@@ -227,7 +171,6 @@ class AutoDiscoveryCIView(APIView):
@args_required("type_id")
@args_required("adt_id")
@args_required("instance")
@args_required("unique_value")
def post(self):
request.values.pop("_key", None)
request.values.pop("_secret", None)
@@ -270,134 +213,24 @@ class AutoDiscoveryRuleSyncView(APIView):
url_prefix = ("/adt/sync",)
def get(self):
if current_user.username not in PRIVILEGED_USERS:
if current_user.username not in ("cmdb_agent", "worker", "admin"):
return abort(403)
oneagent_name = request.values.get('oneagent_name')
oneagent_id = request.values.get('oneagent_id')
last_update_at = request.values.get('last_update_at')
response = []
if AttributeCache.get('oneagent_id'):
query = "oneagent_id:{}".format(oneagent_id)
s = ci_search(query)
try:
response, _, _, _, _, _ = s.search()
except SearchError as e:
import traceback
current_app.logger.error(traceback.format_exc())
return abort(400, str(e))
for res in response:
if res.get('{}_name'.format(res['ci_type'])) == oneagent_name or oneagent_name == res.get('oneagent_name'):
ci_id = res["_id"]
rules, last_update_at = AutoDiscoveryCITypeCRUD.get(ci_id, oneagent_id, oneagent_name, last_update_at)
return self.jsonify(rules=rules, last_update_at=last_update_at)
rules, last_update_at1 = AutoDiscoveryCITypeCRUD.get(None, oneagent_id, oneagent_name, last_update_at)
query = "{},oneagent_id:{}".format(oneagent_name, oneagent_id)
current_app.logger.info(query)
s = search(query)
try:
subnet_scan_rules, last_update_at2 = SubnetManager().scan_rules(oneagent_id, last_update_at)
except AbortException:
subnet_scan_rules, last_update_at2 = [], ""
response, _, _, _, _, _ = s.search()
except SearchError as e:
import traceback
current_app.logger.error(traceback.format_exc())
return abort(400, str(e))
return self.jsonify(rules=rules,
subnet_scan_rules=subnet_scan_rules,
last_update_at=max(last_update_at1 or "", last_update_at2 or ""))
ci_id = response and response[0]["_id"]
rules, last_update_at = AutoDiscoveryCITypeCRUD.get(ci_id, oneagent_id, last_update_at)
class AutoDiscoveryRuleSyncHistoryView(APIView):
url_prefix = ("/adt/<int:adt_id>/sync/histories",)
def get(self, adt_id):
page = get_page(request.values.pop('page', 1))
page_size = get_page_size(request.values.pop('page_size', None))
numfound, res = AutoDiscoveryRuleSyncHistoryCRUD.search(page=page,
page_size=page_size,
adt_id=adt_id,
**request.values)
return self.jsonify(page=page,
page_size=page_size,
numfound=numfound,
total=len(res),
result=res)
class AutoDiscoveryTestView(APIView):
url_prefix = ("/adt/<int:adt_id>/test", "/adt/test/<string:exec_id>/result")
def get(self, exec_id):
return self.jsonify(stdout="1\n2\n3", exec_id=exec_id)
def post(self, adt_id):
return self.jsonify(exec_id=uuid.uuid4().hex)
class AutoDiscoveryExecHistoryView(APIView):
url_prefix = ("/adc/exec/histories",)
@args_required('type_id')
def get(self):
page = get_page(request.values.pop('page', 1))
page_size = get_page_size(request.values.pop('page_size', None))
last_size = request.values.pop('last_size', None)
if last_size and last_size.isdigit():
last_size = int(last_size)
numfound, res = AutoDiscoveryExecHistoryCRUD.search(page=page,
page_size=page_size,
last_size=last_size,
**request.values)
return self.jsonify(page=page,
page_size=page_size,
numfound=numfound,
total=len(res),
result=res)
@args_required('type_id')
@args_required('stdout')
def post(self):
AutoDiscoveryExecHistoryCRUD().add(type_id=request.values.get('type_id'),
stdout=request.values.get('stdout'))
return self.jsonify(code=200)
class AutoDiscoveryCounterView(APIView):
url_prefix = ("/adc/counter",)
@args_required('type_id')
def get(self):
type_id = request.values.get('type_id')
return self.jsonify(AutoDiscoveryCounterCRUD().get(type_id))
class AutoDiscoveryAccountView(APIView):
url_prefix = ("/adr/accounts", "/adr/accounts/<int:account_id>")
@args_required('adr_id')
def get(self):
adr_id = request.values.get('adr_id')
return self.jsonify(AutoDiscoveryAccountCRUD().get(adr_id))
@args_required('adr_id')
@args_required('accounts', value_required=False)
def post(self):
AutoDiscoveryAccountCRUD().upsert(**request.values)
return self.jsonify(code=200)
@args_required('config')
def put(self, account_id):
res = AutoDiscoveryAccountCRUD().update(account_id, **request.values)
return self.jsonify(res.to_dict())
def delete(self, account_id):
AutoDiscoveryAccountCRUD().delete(account_id)
return self.jsonify(account_id=account_id)
return self.jsonify(rules=rules, last_update_at=last_update_at)

View File

@@ -11,12 +11,12 @@ from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.ci import CIRelationManager
from api.lib.cmdb.const import ExistPolicy
from api.lib.cmdb.const import ResourceTypeEnum, PermEnum
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RetKey
from api.lib.cmdb.perms import has_perm_for_ci
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci import search as ci_search
from api.lib.decorator import args_required
from api.lib.cmdb.search.ci import search
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.utils import get_page
from api.lib.utils import get_page_size
@@ -77,7 +77,6 @@ class CIView(APIView):
@has_perm_for_ci("ci_type", ResourceTypeEnum.CI, PermEnum.ADD, lambda x: CITypeCache.get(x))
def post(self):
ci_type = request.values.get("ci_type")
ticket_id = request.values.pop("ticket_id", None)
_no_attribute_policy = request.values.get("no_attribute_policy", ExistPolicy.IGNORE)
exist_policy = request.values.pop('exist_policy', None)
@@ -89,7 +88,6 @@
exist_policy=exist_policy or ExistPolicy.REJECT,
_no_attribute_policy=_no_attribute_policy,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
return self.jsonify(ci_id=ci_id)
@@ -98,7 +96,6 @@
def put(self, ci_id=None):
args = request.values
ci_type = args.get("ci_type")
ticket_id = request.values.pop("ticket_id", None)
_no_attribute_policy = args.get("no_attribute_policy", ExistPolicy.IGNORE)
ci_dict = self._wrap_ci_dict()
@@ -106,7 +103,6 @@
if ci_id is not None:
manager.update(ci_id,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
else:
request.values.pop('exist_policy', None)
@@ -114,7 +110,6 @@
exist_policy=ExistPolicy.REPLACE,
_no_attribute_policy=_no_attribute_policy,
_is_admin=request.values.pop('__is_admin', None) or False,
ticket_id=ticket_id,
**ci_dict)
return self.jsonify(ci_id=ci_id)
@@ -157,10 +152,9 @@ class CISearchView(APIView):
ret_key = RetKey.NAME
facet = handle_arg_list(request.values.get("facet", ""))
sort = request.values.get("sort")
use_id_filter = request.values.get("use_id_filter", False) in current_app.config.get('BOOL_TRUE')
start = time.time()
s = ci_search(query, fl, facet, page, ret_key, count, sort, excludes, use_id_filter=use_id_filter)
s = search(query, fl, facet, page, ret_key, count, sort, excludes)
try:
response, counter, total, page, numfound, facet = s.search()
except SearchError as e:
@@ -227,6 +221,7 @@ class CIHeartbeatView(APIView):
class CIFlushView(APIView):
url_prefix = ("/ci/flush", "/ci/<int:ci_id>/flush")
# @auth_abandoned
def get(self, ci_id=None):
from api.tasks.cmdb import ci_cache
from api.lib.cmdb.const import CMDB_QUEUE
@@ -255,24 +250,3 @@ class CIPasswordView(APIView):
def post(self, ci_id, attr_id):
return self.get(ci_id, attr_id)
class CIBaselineView(APIView):
url_prefix = ("/ci/baseline", "/ci/<int:ci_id>/baseline/rollback")
@args_required("before_date")
def get(self):
ci_ids = handle_arg_list(request.values.get('ci_ids'))
before_date = request.values.get('before_date')
return self.jsonify(CIManager().baseline(list(map(int, ci_ids)), before_date))
@args_required("before_date")
@has_perm_for_ci("ci_id", ResourceTypeEnum.CI, PermEnum.UPDATE, CIManager.get_type)
def post(self, ci_id):
if 'rollback' in request.url:
before_date = request.values.get('before_date')
return self.jsonify(**CIManager().rollback(ci_id, before_date))
return self.get(ci_id)

View File

@@ -2,6 +2,7 @@
import time
from flask import abort
from flask import current_app
from flask import request
@@ -12,6 +13,7 @@ from api.lib.cmdb.resp_format import ErrFormat
from api.lib.cmdb.search import SearchError
from api.lib.cmdb.search.ci_relation.search import Search
from api.lib.decorator import args_required
from api.lib.perm.auth import auth_abandoned
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
@@ -29,14 +31,11 @@ class CIRelationSearchView(APIView):
level: default is 1
facet: statistic
"""
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count") or request.values.get("page_size"))
root_id = request.values.get('root_id')
ancestor_ids = request.values.get('ancestor_ids') or None # only for many to many
root_parent_path = handle_arg_list(request.values.get('root_parent_path') or '')
descendant_ids = list(map(int, handle_arg_list(request.values.get('descendant_ids', []))))
level = list(map(int, handle_arg_list(request.values.get('level', '1'))))
query = request.values.get('q', "")
@@ -48,8 +47,7 @@
start = time.time()
s = Search(root_id, level, query, fl, facet, page, count, sort, reverse,
ancestor_ids=ancestor_ids, has_m2m=has_m2m, root_parent_path=root_parent_path,
descendant_ids=descendant_ids)
ancestor_ids=ancestor_ids, has_m2m=has_m2m)
try:
response, counter, total, page, numfound, facet = s.search()
except SearchError as e:
@@ -64,55 +62,19 @@
result=response)
class CIRelationSearchPathView(APIView):
url_prefix = ("/ci_relations/path/s", "/ci_relations/path/search")
@args_required("source", "target", "path")
def post(self):
"""@params: page: page number
page_size | count: page size
source: source CIType, e.g. {type_id: 1, q: `search expr`}
target: target CIType, e.g. {type_ids: [2], q: `search expr`}
path: Path from the Source CIType to the Target CIType, e.g. [1, ..., 2]
"""
page = get_page(request.values.get("page", 1))
count = get_page_size(request.values.get("count") or request.values.get("page_size"))
source = request.values.get("source")
target = request.values.get("target")
path = request.values.get("path")
s = Search(page=page, count=count)
try:
(response, counter, total, page, numfound, id2ci,
relation_types, type2show_key) = s.search_by_path(source, target, path)
except SearchError as e:
return abort(400, str(e))
return self.jsonify(numfound=numfound,
total=total,
page=page,
counter=counter,
paths=response,
id2ci=id2ci,
relation_types=relation_types,
type2show_key=type2show_key)
class CIRelationStatisticsView(APIView):
url_prefix = "/ci_relations/statistics"
@auth_abandoned
def get(self):
root_ids = list(map(int, handle_arg_list(request.values.get('root_ids'))))
level = request.values.get('level', 1)
type_ids = set(map(int, handle_arg_list(request.values.get('type_ids', []))))
ancestor_ids = request.values.get('ancestor_ids') or None # only for many to many
descendant_ids = list(map(int, handle_arg_list(request.values.get('descendant_ids', []))))
has_m2m = request.values.get("has_m2m") in current_app.config.get('BOOL_TRUE')
start = time.time()
s = Search(root_ids, level, ancestor_ids=ancestor_ids, descendant_ids=descendant_ids, has_m2m=has_m2m)
s = Search(root_ids, level, ancestor_ids=ancestor_ids, has_m2m=has_m2m)
try:
result = s.statistics(type_ids)
except SearchError as e:
@@ -122,26 +84,6 @@ class CIRelationStatisticsView(APIView):
return self.jsonify(result)
class CIRelationSearchFullView(APIView):
url_prefix = "/ci_relations/search/full"
def get(self):
root_ids = list(map(int, handle_arg_list(request.values.get('root_ids'))))
level = request.values.get('level', 1)
type_ids = list(map(int, handle_arg_list(request.values.get('type_ids', []))))
has_m2m = request.values.get("has_m2m") in current_app.config.get('BOOL_TRUE')
start = time.time()
s = Search(root_ids, level, has_m2m=has_m2m)
try:
result = s.search_full(type_ids)
except SearchError as e:
return abort(400, str(e))
current_app.logger.debug("search time is :{0}".format(time.time() - start))
return self.jsonify(result)
class GetSecondCIsView(APIView):
url_prefix = "/ci_relations/<int:first_ci_id>/second_cis"

View File

@@ -2,28 +2,26 @@
import json
from io import BytesIO
from flask import abort
from flask import current_app
from flask import request
from io import BytesIO
from flask import session
from api.lib.cmdb.cache import AttributeCache
from api.lib.cmdb.cache import CITypeCache
from api.lib.cmdb.ci import CITriggerManager
from api.lib.cmdb.ci_type import CITypeAttributeGroupManager
from api.lib.cmdb.ci_type import CITypeAttributeManager
from api.lib.cmdb.ci_type import CITypeGroupManager
from api.lib.cmdb.ci_type import CITypeInheritanceManager
from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.ci_type import CITypeTemplateManager
from api.lib.cmdb.ci_type import CITypeTriggerManager
from api.lib.cmdb.ci_type import CITypeUniqueConstraintManager
from api.lib.cmdb.const import PermEnum, ResourceTypeEnum
from api.lib.cmdb.const import PermEnum, ResourceTypeEnum, RoleEnum
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.preference import PreferenceManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import ACLManager
@@ -37,40 +35,17 @@ from api.lib.perm.auth import auth_with_app_token
from api.lib.utils import handle_arg_list
from api.resource import APIView
app_cli = CMDBApp()
class CITypeView(APIView):
url_prefix = ("/ci_types", "/ci_types/<int:type_id>", "/ci_types/<string:type_name>",
"/ci_types/icons")
url_prefix = ("/ci_types", "/ci_types/<int:type_id>", "/ci_types/<string:type_name>")
def get(self, type_id=None, type_name=None):
if request.url.endswith("icons"):
return self.jsonify(CITypeManager().get_icons())
q = request.args.get("type_name")
q = request.values.get("type_name")
type_ids = handle_arg_list(request.values.get("type_ids"))
type_ids = type_ids or (type_id and [type_id])
if type_ids:
ci_types = []
for _type_id in type_ids:
ci_type = CITypeCache.get(_type_id)
if ci_type is None:
return abort(404, ErrFormat.ci_type_not_found)
ci_type = ci_type.to_dict()
ci_type['parent_ids'] = CITypeInheritanceManager.get_parents(_type_id)
ci_type['show_name'] = ci_type.get('show_id') and AttributeCache.get(ci_type['show_id']).name
ci_type['unique_name'] = ci_type['unique_id'] and AttributeCache.get(ci_type['unique_id']).name
ci_types.append(ci_type)
if type_id is not None:
ci_types = [CITypeCache.get(type_id).to_dict()]
elif type_name is not None:
ci_type = CITypeCache.get(type_name)
if ci_type is not None:
ci_type = ci_type.to_dict()
ci_type['parent_ids'] = CITypeInheritanceManager.get_parents(ci_type['id'])
ci_types = [ci_type]
else:
ci_types = []
ci_types = [CITypeCache.get(type_name).to_dict()]
else:
ci_types = CITypeManager().get_ci_types(q)
count = len(ci_types)
@@ -78,7 +53,7 @@ class CITypeView(APIView):
return self.jsonify(numfound=count, ci_types=ci_types)
@args_required("name")
@args_validate(CITypeManager.cls, exclude_args=['parent_ids'])
@args_validate(CITypeManager.cls)
def post(self):
params = request.values
@@ -109,29 +84,10 @@
return self.jsonify(type_id=type_id)
class CITypeInheritanceView(APIView):
url_prefix = ("/ci_types/inheritance",)
@args_required("parent_ids")
@args_required("child_id")
@has_perm_from_args("child_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
def post(self):
CITypeInheritanceManager.add(request.values['parent_ids'], request.values['child_id'])
return self.jsonify(**request.values)
@args_required("parent_id")
@args_required("child_id")
@has_perm_from_args("child_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
def delete(self):
CITypeInheritanceManager.delete(request.values['parent_id'], request.values['child_id'])
return self.jsonify(**request.values)
class CITypeGroupView(APIView):
url_prefix = ("/ci_types/groups",
"/ci_types/groups/config",
"/ci_types/groups/order",
"/ci_types/groups/<int:gid>")
def get(self):
@@ -140,8 +96,7 @@ class CITypeGroupView(APIView):
return self.jsonify(CITypeGroupManager.get(need_other, config_required))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.create_CIType_group, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_required("name")
@args_validate(CITypeGroupManager.cls)
def post(self):
@@ -152,6 +107,15 @@
@args_validate(CITypeGroupManager.cls)
def put(self, gid=None):
if "/order" in request.url:
if RoleEnum.CONFIG not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin("cmdb"):
return abort(403, ErrFormat.role_required.format(RoleEnum.CONFIG))
group_ids = request.values.get('group_ids')
CITypeGroupManager.order(group_ids)
return self.jsonify(group_ids=group_ids)
name = request.values.get('name') or abort(400, ErrFormat.argument_value_required.format("name"))
type_ids = request.values.get('type_ids')
@@ -159,8 +123,7 @@
return self.jsonify(gid=gid)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.delete_CIType_group, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def delete(self, gid):
type_ids = request.values.get("type_ids")
CITypeGroupManager.delete(gid, type_ids)
@@ -168,18 +131,6 @@ class CITypeGroupView(APIView):
return self.jsonify(gid=gid)
class CITypeGroupOrderView(APIView):
url_prefix = "/ci_types/groups/order"
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.update_CIType_group, app_cli.admin_name)
def put(self):
group_ids = request.values.get('group_ids')
CITypeGroupManager.order(group_ids)
return self.jsonify(group_ids=group_ids)
class CITypeQueryView(APIView):
url_prefix = "/ci_types/query"
@@ -297,8 +248,8 @@ class CITypeAttributeTransferView(APIView):
@args_required('from')
@args_required('to')
def post(self, type_id):
_from = request.values.get('from') # {'attr_id': xx, 'group_id': xx, 'group_name': xx}
_to = request.values.get('to') # {'group_id': xx, 'group_name': xx, 'order': xxx}
_from = request.values.get('from') # {'attr_id': xx, 'group_id': xx}
_to = request.values.get('to') # {'group_id': xx, 'order': xxx}
CITypeAttributeManager.transfer(type_id, _from, _to)
@@ -311,8 +262,8 @@ class CITypeAttributeGroupTransferView(APIView):
@args_required('from')
@args_required('to')
def post(self, type_id):
_from = request.values.get('from') # group_id or group_name
_to = request.values.get('to') # group_id or group_name
_from = request.values.get('from') # group_id
_to = request.values.get('to') # group_id
CITypeAttributeGroupManager.transfer(type_id, _from, _to)
@@ -345,7 +296,7 @@ class CITypeAttributeGroupView(APIView):
attr_order = list(zip(attrs, orders))
group = CITypeAttributeGroupManager.create_or_update(type_id, name, attr_order, order)
current_app.logger.warning(group.id)
return self.jsonify(group_id=group.id)
@has_perm_from_args("type_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
@@ -359,27 +310,25 @@ class CITypeAttributeGroupView(APIView):
attr_order = list(zip(attrs, orders))
CITypeAttributeGroupManager.update(group_id, name, attr_order, order)
return self.jsonify(group_id=group_id)
@has_perm_from_args("type_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
def delete(self, group_id):
CITypeAttributeGroupManager.delete(group_id)
return self.jsonify(group_id=group_id)
class CITypeTemplateView(APIView):
url_prefix = ("/ci_types/template/import", "/ci_types/template/export")
url_prefix = ("/ci_types/template/import", "/ci_types/template/export", "/ci_types/<int:type_id>/template/export")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.download_CIType, app_cli.admin_name)
def get(self): # export
type_ids = list(map(int, handle_arg_list(request.values.get('type_ids')))) or None
return self.jsonify(dict(ci_type_template=CITypeTemplateManager.export_template(type_ids=type_ids)))
@role_required(RoleEnum.CONFIG)
def get(self, type_id=None): # export
if type_id is not None:
return self.jsonify(dict(ci_type_template=CITypeTemplateManager.export_template_by_type(type_id)))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.download_CIType, app_cli.admin_name)
return self.jsonify(dict(ci_type_template=CITypeTemplateManager.export_template()))
@role_required(RoleEnum.CONFIG)
def post(self): # import
tpt = request.values.get('ci_type_template') or {}
@@ -399,8 +348,7 @@ class CITypeCanDefineComputed(APIView):
class CITypeTemplateFileView(APIView):
url_prefix = ("/ci_types/template/import/file", "/ci_types/template/export/file")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.download_CIType, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def get(self): # export
tpt_json = CITypeTemplateManager.export_template()
tpt_json = dict(ci_type_template=tpt_json)
@@ -415,8 +363,7 @@ class CITypeTemplateFileView(APIView):
mimetype='application/json',
max_age=0)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Configuration,
app_cli.op.download_CIType, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def post(self): # import
f = request.files.get('file')
@@ -501,16 +448,6 @@ class CITypeTriggerView(APIView):
return self.jsonify(code=200)
class CITypeTriggerTestView(APIView):
url_prefix = ("/ci_types/<int:type_id>/triggers/<int:_id>/test_notify",)
@has_perm_from_args("type_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
def post(self, type_id, _id):
CITriggerManager().trigger_notify_test(type_id, _id)
return self.jsonify(code=200)
class CITypeGrantView(APIView):
url_prefix = "/ci_types/<int:type_id>/roles/<int:rid>/grant"
@@ -526,19 +463,18 @@ class CITypeGrantView(APIView):
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and not is_app_admin('cmdb'):
return abort(403, ErrFormat.no_permission.format(type_name, PermEnum.GRANT))
if perms and not request.values.get('id_filter'):
acl.grant_resource_to_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms, rebuild=False)
acl.grant_resource_to_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms, rebuild=False)
new_resource = None
if 'ci_filter' in request.values or 'attr_filter' in request.values or 'id_filter' in request.values:
new_resource = CIFilterPermsCRUD().add(type_id=type_id, rid=rid, **request.values)
if not new_resource:
if request.values.get('ci_filter') or request.values.get('attr_filter'):
CIFilterPermsCRUD().add(type_id=type_id, rid=rid, **request.values)
else:
from api.tasks.acl import role_rebuild
from api.lib.perm.acl.const import ACL_QUEUE
app_id = AppCache.get('cmdb').id
current_app.logger.info((rid, app_id))
role_rebuild.apply_async(args=(rid, app_id), queue=ACL_QUEUE)
current_app.logger.info('done')
return self.jsonify(code=200)
@@ -559,18 +495,10 @@ class CITypeRevokeView(APIView):
if not acl.has_permission(type_name, ResourceTypeEnum.CI_TYPE, PermEnum.GRANT) and not is_app_admin('cmdb'):
return abort(403, ErrFormat.no_permission.format(type_name, PermEnum.GRANT))
app_id = AppCache.get('cmdb').id
resource = None
if request.values.get('id_filter'):
CIFilterPermsCRUD().delete2(
type_id=type_id, rid=rid, id_filter=request.values['id_filter'],
parent_path=request.values.get('parent_path'))
return self.jsonify(type_id=type_id, rid=rid)
acl.revoke_resource_from_role_by_rid(type_name, rid, ResourceTypeEnum.CI_TYPE, perms, rebuild=False)
app_id = AppCache.get('cmdb').id
resource = None
if PermEnum.READ in perms or not perms:
resource = CIFilterPermsCRUD().delete(type_id=type_id, rid=rid)

View File

@@ -8,19 +8,16 @@ from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.ci_type import CITypeRelationManager
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.preference import PreferenceManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.perm.acl.acl import is_app_admin
from api.lib.utils import handle_arg_list
from api.lib.perm.acl.acl import role_required
from api.resource import APIView
app_cli = CMDBApp()
class GetChildrenView(APIView):
url_prefix = ("/ci_type_relations/<int:parent_id>/children",
@@ -41,38 +38,21 @@ class GetParentsView(APIView):
return self.jsonify(parents=CITypeRelationManager.get_parents(child_id))
class CITypeRelationPathView(APIView):
url_prefix = ("/ci_type_relations/path",)
@args_required("source_type_id", "target_type_ids")
def get(self):
source_type_id = request.values.get("source_type_id")
target_type_ids = handle_arg_list(request.values.get("target_type_ids"))
paths = CITypeRelationManager.find_path(source_type_id, target_type_ids)
return self.jsonify(paths=paths)
class CITypeRelationView(APIView):
url_prefix = ("/ci_type_relations", "/ci_type_relations/<int:parent_id>/<int:child_id>")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def get(self):
res, type2attributes = CITypeRelationManager.get()
res = CITypeRelationManager.get()
return self.jsonify(relations=res, type2attributes=type2attributes)
return self.jsonify(res)
@has_perm_from_args("parent_id", ResourceTypeEnum.CI, PermEnum.CONFIG, CITypeManager.get_name_by_id)
@args_required("relation_type_id")
def post(self, parent_id, child_id):
relation_type_id = request.values.get("relation_type_id")
constraint = request.values.get("constraint")
parent_attr_ids = request.values.get("parent_attr_ids")
child_attr_ids = request.values.get("child_attr_ids")
ctr_id = CITypeRelationManager.add(parent_id, child_id, relation_type_id, constraint,
parent_attr_ids, child_attr_ids)
ctr_id = CITypeRelationManager.add(parent_id, child_id, relation_type_id, constraint)
return self.jsonify(ctr_id=ctr_id)
@@ -86,8 +66,7 @@ class CITypeRelationView(APIView):
class CITypeRelationDelete2View(APIView):
url_prefix = "/ci_type_relations/<int:ctr_id>"
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Model_Relationships,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def delete(self, ctr_id):
CITypeRelationManager.delete(ctr_id)

View File

@@ -3,16 +3,14 @@
from flask import request
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.custom_dashboard import CustomDashboardManager
from api.lib.cmdb.custom_dashboard import SystemConfigManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import role_required
from api.resource import APIView
app_cli = CMDBApp()
class CustomDashboardApiView(APIView):
url_prefix = ("/custom_dashboard", "/custom_dashboard/<int:_id>", "/custom_dashboard/batch",
@@ -21,8 +19,7 @@ class CustomDashboardApiView(APIView):
def get(self):
return self.jsonify(CustomDashboardManager.get())
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Customized_Dashboard,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_validate(CustomDashboardManager.cls)
def post(self):
if request.url.endswith("/preview"):
@@ -35,8 +32,7 @@ class CustomDashboardApiView(APIView):
return self.jsonify(res)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Customized_Dashboard,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_validate(CustomDashboardManager.cls)
def put(self, _id=None):
if _id is not None:
@@ -51,8 +47,7 @@ class CustomDashboardApiView(APIView):
return self.jsonify(id2options=request.values.get('id2options'))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Customized_Dashboard,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def delete(self, _id):
CustomDashboardManager.delete(_id)
@@ -62,14 +57,12 @@
class SystemConfigApiView(APIView):
url_prefix = ("/system_config",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_required("name", value_required=True)
def get(self):
return self.jsonify(SystemConfigManager.get(request.values['name']))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_validate(SystemConfigManager.cls)
@args_required("name", value_required=True)
@args_required("option", value_required=True)
@@ -81,8 +74,7 @@ class SystemConfigApiView(APIView):
def put(self, _id=None):
return self.post()
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_required("name")
def delete(self):
CustomDashboardManager.delete(request.values['name'])

View File

@@ -1 +0,0 @@
# -*- coding:utf-8 -*-

View File

@@ -1,30 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.dcim.history import OperateHistoryManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
from api.resource import APIView
app_cli = CMDBApp()
class DCIMOperateHistoryView(APIView):
url_prefix = ("/dcim/history/operate",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def get(self):
page = get_page(request.values.pop("page", 1))
page_size = get_page_size(request.values.pop("page_size", None))
operate_type = handle_arg_list(request.values.pop('operate_type', []))
if operate_type:
request.values["operate_type"] = operate_type
numfound, result, id2ci, type2show_key = OperateHistoryManager.search(page, page_size, **request.values)
return self.jsonify(numfound=numfound, result=result, id2ci=id2ci, type2show_key=type2show_key)

View File

@@ -1,35 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.dcim.idc import IDCManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.resource import APIView
app_cli = CMDBApp()
class IDCView(APIView):
url_prefix = ("/dcim/idc", "/dcim/idc/<int:_id>")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def post(self):
parent_id = request.values.pop("parent_id")
return self.jsonify(ci_id=IDCManager().add(parent_id, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
IDCManager().update(_id, **request.values)
return self.jsonify(ci_id=_id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
IDCManager().delete(_id)
return self.jsonify(ci_id=_id)

View File

@@ -1,102 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.dcim.const import RackBuiltinAttributes
from api.lib.cmdb.dcim.rack import RackManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.resource import APIView
from api.tasks.cmdb import dcim_calc_u_free_count
app_cli = CMDBApp()
class RackView(APIView):
url_prefix = ("/dcim/rack", "/dcim/rack/<int:_id>")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
@args_required("parent_id")
def post(self):
parent_id = request.values.pop("parent_id")
return self.jsonify(ci_id=RackManager().add(parent_id, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
RackManager().update(_id, **request.values)
return self.jsonify(ci_id=_id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
RackManager().delete(_id)
return self.jsonify(ci_id=_id)
class RackDetailView(APIView):
url_prefix = ("/dcim/rack/<int:rack_id>/device/<int:device_id>",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
@args_required(RackBuiltinAttributes.U_START)
def post(self, rack_id, device_id):
u_start = request.values.pop(RackBuiltinAttributes.U_START)
u_count = request.values.get(RackBuiltinAttributes.U_COUNT)
RackManager().add_device(rack_id, device_id, u_start, u_count)
return self.jsonify(rack_id=rack_id, device_id=device_id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
@args_required("to_u_start")
def put(self, rack_id, device_id):
to_u_start = request.values.pop("to_u_start")
RackManager().move_device(rack_id, device_id, to_u_start)
return self.jsonify(rack_id=rack_id, device_id=device_id, to_u_start=to_u_start)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def delete(self, rack_id, device_id):
RackManager().remove_device(rack_id, device_id)
return self.jsonify(code=200)
class RackDeviceMigrateView(APIView):
url_prefix = ("/dcim/rack/<int:rack_id>/device/<int:device_id>/migrate",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
@args_required("to_rack_id")
@args_required("to_u_start")
def put(self, rack_id, device_id):
to_rack_id = request.values.pop("to_rack_id")
to_u_start = request.values.pop("to_u_start")
RackManager().migrate_device(rack_id, device_id, to_rack_id, to_u_start)
return self.jsonify(rack_id=rack_id,
device_id=device_id,
to_u_start=to_u_start,
to_rack_id=to_rack_id)
class RackCalcUFreeCountView(APIView):
url_prefix = ("/dcim/rack/calc_u_free_count",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def post(self):
dcim_calc_u_free_count.apply_async(queue=CMDB_QUEUE)
return self.jsonify(code=200)

View File

@@ -1,33 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.dcim.region import RegionManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.resource import APIView
app_cli = CMDBApp()
class RegionView(APIView):
url_prefix = ("/dcim/region", "/dcim/region/<int:_id>")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def post(self):
return self.jsonify(ci_id=RegionManager().add(**request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
RegionManager().update(_id, **request.values)
return self.jsonify(ci_id=_id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
RegionManager().delete(_id)
return self.jsonify(ci_id=_id)

View File

@@ -1,43 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.dcim.server_room import ServerRoomManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.resource import APIView
app_cli = CMDBApp()
class ServerRoomView(APIView):
url_prefix = ("/dcim/server_room", "/dcim/server_room/<int:_id>", "/dcim/server_room/<int:_id>/racks")
def get(self, _id):
q = request.values.get('q')
counter, result = ServerRoomManager.get_racks(_id, q)
return self.jsonify(counter=counter, result=result)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
@args_required("parent_id")
def post(self):
parent_id = request.values.pop("parent_id")
return self.jsonify(ci_id=ServerRoomManager().add(parent_id, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
ServerRoomManager().update(_id, **request.values)
return self.jsonify(ci_id=_id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
ServerRoomManager().delete(_id)
return self.jsonify(ci_id=_id)

View File

@@ -1,19 +0,0 @@
# -*- coding:utf-8 -*-
from api.lib.cmdb.dcim.tree_view import TreeViewManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.resource import APIView
app_cli = CMDBApp()
class DCIMTreeView(APIView):
url_prefix = "/dcim/tree_view"
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.DCIM,
app_cli.op.read, app_cli.admin_name)
def get(self):
result, type2name = TreeViewManager.get()
return self.jsonify(result=result, type2name=type2name)

View File

@@ -5,29 +5,28 @@ import datetime
from flask import abort
from flask import request
from flask import session
from api.lib.cmdb.ci import CIManager
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.history import AttributeHistoryManger
from api.lib.cmdb.history import CITriggerHistoryManager
from api.lib.cmdb.history import CITypeHistoryManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import role_required
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.resource import APIView
app_cli = CMDBApp()
class RecordView(APIView):
url_prefix = ("/history/records/attribute", "/history/records/relation")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Operation_Audit,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def get(self):
page = get_page(request.values.get("page", 1))
page_size = get_page_size(request.values.get("page_size"))
@@ -81,21 +80,18 @@ class CIHistoryView(APIView):
class CITriggerHistoryView(APIView):
url_prefix = ("/history/ci_triggers/<int:ci_id>",)
url_prefix = ("/history/ci_triggers/<int:ci_id>", "/history/ci_triggers")
@has_perm_from_args("ci_id", ResourceTypeEnum.CI, PermEnum.READ, CIManager.get_type_name)
def get(self, ci_id):
result = CITriggerHistoryManager.get_by_ci_id(ci_id)
def get(self, ci_id=None):
if ci_id is not None:
result = CITriggerHistoryManager.get_by_ci_id(ci_id)
return self.jsonify(result)
return self.jsonify(result)
if RoleEnum.CONFIG not in session.get("acl", {}).get("parentRoles", []) and not is_app_admin("cmdb"):
return abort(403, ErrFormat.role_required.format(RoleEnum.CONFIG))
class CIsTriggerHistoryView(APIView):
url_prefix = ("/history/ci_triggers",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Operation_Audit,
app_cli.op.read, app_cli.admin_name)
def get(self):
type_id = request.values.get("type_id")
trigger_id = request.values.get("trigger_id")
operate_type = request.values.get("operate_type")
@@ -119,8 +115,7 @@ class CIsTriggerHistoryView(APIView):
class CITypeHistoryView(APIView):
url_prefix = "/history/ci_types"
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Operation_Audit,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
def get(self):
type_id = request.values.get("type_id")
username = request.values.get("username")

View File

@@ -1 +0,0 @@
# -*- coding:utf-8 -*-

View File

@@ -1,39 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ipam.address import IpAddressManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.utils import handle_arg_list
from api.resource import APIView
app_cli = CMDBApp()
class IPAddressView(APIView):
url_prefix = ("/ipam/address",)
@args_required("parent_id")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def get(self):
parent_id = request.args.get("parent_id")
numfound, result = IpAddressManager.list_ip_address(parent_id)
return self.jsonify(numfound=numfound, result=result)
@args_required("ips")
@args_required("assign_status", value_required=False)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def post(self):
ips = handle_arg_list(request.values.pop("ips"))
parent_id = request.values.pop("parent_id", None)
cidr = request.values.pop("cidr", None)
IpAddressManager().assign_ips(ips, parent_id, cidr, **request.values)
return self.jsonify(code=200)

View File

@@ -1,53 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ipam.history import OperateHistoryManager
from api.lib.cmdb.ipam.history import ScanHistoryManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.utils import get_page
from api.lib.utils import get_page_size
from api.lib.utils import handle_arg_list
from api.resource import APIView
app_cli = CMDBApp()
class IPAMOperateHistoryView(APIView):
url_prefix = ("/ipam/history/operate",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def get(self):
page = get_page(request.values.pop("page", 1))
page_size = get_page_size(request.values.pop("page_size", None))
operate_type = handle_arg_list(request.values.pop('operate_type', []))
if operate_type:
request.values["operate_type"] = operate_type
numfound, result = OperateHistoryManager.search(page, page_size, **request.values)
return self.jsonify(numfound=numfound, result=result)
class IPAMScanHistoryView(APIView):
url_prefix = ("/ipam/history/scan",)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def get(self):
page = get_page(request.values.pop("page", 1))
page_size = get_page_size(request.values.pop("page_size", None))
numfound, result = ScanHistoryManager.search(page, page_size, **request.values)
return self.jsonify(numfound=numfound, result=result)
@args_required("exec_id")
def post(self):
ScanHistoryManager().add(**request.values)
return self.jsonify(code=200)

View File

@@ -1,24 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ipam.stats import Stats
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.resource import APIView
app_cli = CMDBApp()
class IPAMStatsView(APIView):
url_prefix = '/ipam/stats'
@args_required("parent_id")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def get(self):
parent_id = request.values.get("parent_id")
return self.jsonify(Stats().summary(parent_id))

View File

@@ -1,75 +0,0 @@
# -*- coding:utf-8 -*-
from flask import request
from api.lib.cmdb.ipam.subnet import SubnetManager
from api.lib.cmdb.ipam.subnet import SubnetScopeManager
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.resource import APIView
app_cli = CMDBApp()
class SubnetView(APIView):
url_prefix = ("/ipam/subnet", "/ipam/subnet/hosts", "/ipam/subnet/<int:_id>")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def get(self, _id=None):
if "hosts" in request.url:
return self.jsonify(SubnetManager.get_hosts(request.values.get('cidr')))
if _id is not None:
return self.jsonify(SubnetManager().get_by_id(_id))
result, type2name = SubnetManager().tree_view()
return self.jsonify(result=result, type2name=type2name)
@args_required("cidr")
@args_required("parent_id", value_required=False)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def post(self):
cidr = request.values.pop("cidr")
parent_id = request.values.pop("parent_id")
agent_id = request.values.pop("agent_id", None)
cron = request.values.pop("cron", None)
return self.jsonify(SubnetManager().add(cidr, parent_id, agent_id, cron, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
return self.jsonify(id=SubnetManager().update(_id, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
return self.jsonify(id=SubnetManager().delete(_id))
class SubnetScopeView(APIView):
url_prefix = ("/ipam/scope", "/ipam/scope/<int:_id>")
@args_required("parent_id", value_required=False)
@args_required("name")
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def post(self):
parent_id = request.values.pop("parent_id")
name = request.values.pop("name")
return self.jsonify(SubnetScopeManager().add(parent_id, name))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def put(self, _id):
return self.jsonify(id=SubnetScopeManager().update(_id, **request.values))
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.IPAM,
app_cli.op.read, app_cli.admin_name)
def delete(self, _id):
return self.jsonify(id=SubnetScopeManager.delete(_id))

View File

@@ -2,28 +2,25 @@
from flask import abort
from flask import current_app
from flask import request
from api.lib.cmdb.ci_type import CITypeManager
from api.lib.cmdb.const import PermEnum
from api.lib.cmdb.const import ResourceTypeEnum
from api.lib.cmdb.const import RoleEnum
from api.lib.cmdb.perms import CIFilterPermsCRUD
from api.lib.cmdb.preference import PreferenceManager
from api.lib.cmdb.resp_format import ErrFormat
from api.lib.common_setting.decorator import perms_role_required
from api.lib.common_setting.role_perm_base import CMDBApp
from api.lib.decorator import args_required
from api.lib.decorator import args_validate
from api.lib.perm.acl.acl import ACLManager
from api.lib.perm.acl.acl import has_perm_from_args
from api.lib.perm.acl.acl import is_app_admin
from api.lib.perm.acl.acl import role_required
from api.lib.perm.acl.acl import validate_permission
from api.lib.utils import handle_arg_list
from api.resource import APIView
app_cli = CMDBApp()
class PreferenceShowCITypesView(APIView):
url_prefix = ("/preference/ci_types", "/preference/ci_types2")
@@ -99,38 +96,29 @@ class PreferenceTreeApiView(APIView):
class PreferenceRelationApiView(APIView):
url_prefix = ("/preference/relation/view", "/preference/relation/view/<int:_id>")
url_prefix = "/preference/relation/view"
def get(self):
views, id2type, name2id = PreferenceManager.get_relation_view()
return self.jsonify(views=views, id2type=id2type, name2id=name2id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_required("name")
@args_required("cr_ids")
@args_validate(PreferenceManager.pref_rel_cls)
def post(self):
name = request.values.get("name")
is_public = request.values.get("is_public") in current_app.config.get('BOOL_TRUE')
cr_ids = request.values.get("cr_ids")
option = request.values.get("option") or None
views, id2type, name2id = PreferenceManager.create_or_update_relation_view(name, cr_ids, is_public=is_public,
option=option)
views, id2type, name2id = PreferenceManager.create_or_update_relation_view(name, cr_ids)
return self.jsonify(views=views, id2type=id2type, name2id=name2id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@args_required("name")
def put(self, _id):
views, id2type, name2id = PreferenceManager.create_or_update_relation_view(_id=_id, **request.values)
@role_required(RoleEnum.CONFIG)
def put(self):
return self.post()
return self.jsonify(views=views, id2type=id2type, name2id=name2id)
@perms_role_required(app_cli.app_name, app_cli.resource_type_name, app_cli.op.Service_Tree_Definition,
app_cli.op.read, app_cli.admin_name)
@role_required(RoleEnum.CONFIG)
@args_required("name")
def delete(self):
name = request.values.get("name")
@@ -199,15 +187,3 @@ class PreferenceRelationRevokeView(APIView):
acl.revoke_resource_from_role_by_rid(name, rid, ResourceTypeEnum.RELATION_VIEW, perms)
return self.jsonify(code=200)
class PreferenceCITypeOrderView(APIView):
url_prefix = ("/preference/ci_types/order",)
def post(self):
type_ids = request.values.get("type_ids")
is_tree = request.values.get("is_tree") in current_app.config.get('BOOL_TRUE')
PreferenceManager.upsert_ci_type_order(type_ids, is_tree)
return self.jsonify(type_ids=type_ids, is_tree=is_tree)

Some files were not shown because too many files have changed in this diff.