mirror of
https://github.com/jumpserver/jumpserver.git
synced 2025-12-15 16:42:34 +00:00
Compare commits
4 Commits
pr@dev@fea
...
v2.21.2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e56fc93a6e | ||
|
|
eb17183d97 | ||
|
|
b0057ecb9d | ||
|
|
a141a8d2c2 |
@@ -1,4 +1,5 @@
|
||||
.git
|
||||
logs/*
|
||||
data/*
|
||||
.github
|
||||
tmp/*
|
||||
@@ -6,8 +7,4 @@ django.db
|
||||
celerybeat.pid
|
||||
### Vagrant ###
|
||||
.vagrant/
|
||||
apps/xpack/.git
|
||||
.history/
|
||||
.idea
|
||||
.venv/
|
||||
.env
|
||||
apps/xpack/.git
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -0,0 +1,2 @@
|
||||
*.mmdb filter=lfs diff=lfs merge=lfs -text
|
||||
*.mo filter=lfs diff=lfs merge=lfs -text
|
||||
|
||||
10
.github/ISSUE_TEMPLATE/----.md
vendored
Normal file
10
.github/ISSUE_TEMPLATE/----.md
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
---
|
||||
name: 需求建议
|
||||
about: 提出针对本项目的想法和建议
|
||||
title: "[Feature] "
|
||||
labels: 类型:需求
|
||||
assignees: ibuler
|
||||
|
||||
---
|
||||
|
||||
**请描述您的需求或者改进建议.**
|
||||
72
.github/ISSUE_TEMPLATE/1_bug_report.yml
vendored
72
.github/ISSUE_TEMPLATE/1_bug_report.yml
vendored
@@ -1,72 +0,0 @@
|
||||
name: '🐛 Bug Report'
|
||||
description: 'Report an Bug'
|
||||
title: '[Bug] '
|
||||
labels: ['🐛 Bug']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: 'Product Version'
|
||||
description: The versions prior to v2.28 (inclusive) are no longer supported.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Product Edition'
|
||||
options:
|
||||
- label: 'Community Edition'
|
||||
- label: 'Enterprise Edition'
|
||||
- label: 'Enterprise Trial Edition'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Installation Method'
|
||||
options:
|
||||
- label: 'Online Installation (One-click command installation)'
|
||||
- label: 'Offline Package Installation'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: 'Source Code'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Environment Information'
|
||||
description: Please provide a clear and concise description outlining your environment information.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '🐛 Bug Description'
|
||||
description:
|
||||
Please provide a clear and concise description of the defect. If the issue is complex, please provide detailed explanations. <br/>
|
||||
Unclear descriptions will not be processed. Please ensure you provide enough detail and information to support replicating and fixing the defect.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Recurrence Steps'
|
||||
description: Please provide a clear and concise description outlining how to reproduce the issue.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Expected Behavior'
|
||||
description: Please provide a clear and concise description of what you expect to happen.
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Additional Information'
|
||||
description: Please add any additional background information about the issue here.
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Attempted Solutions'
|
||||
description: If you have already attempted to solve the issue, please list the solutions you have tried here.
|
||||
60
.github/ISSUE_TEMPLATE/2_question.yml
vendored
60
.github/ISSUE_TEMPLATE/2_question.yml
vendored
@@ -1,60 +0,0 @@
|
||||
name: '🤔 Question'
|
||||
description: 'Pose a question'
|
||||
title: '[Question] '
|
||||
labels: ['🤔 Question']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: 'Product Version'
|
||||
description: The versions prior to v2.28 (inclusive) are no longer supported.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Product Edition'
|
||||
options:
|
||||
- label: 'Community Edition'
|
||||
- label: 'Enterprise Edition'
|
||||
- label: 'Enterprise Trial Edition'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Installation Method'
|
||||
options:
|
||||
- label: 'Online Installation (One-click command installation)'
|
||||
- label: 'Offline Package Installation'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: 'Source Code'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Environment Information'
|
||||
description: Please provide a clear and concise description outlining your environment information.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '🤔 Question Description'
|
||||
description: |
|
||||
Please provide a clear and concise description of the defect. If the issue is complex, please provide detailed explanations. <br/>
|
||||
Unclear descriptions will not be processed.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Expected Behavior'
|
||||
description: Please provide a clear and concise description of what you expect to happen.
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Additional Information'
|
||||
description: Please add any additional background information about the issue here.
|
||||
56
.github/ISSUE_TEMPLATE/3_feature_request.yml
vendored
56
.github/ISSUE_TEMPLATE/3_feature_request.yml
vendored
@@ -1,56 +0,0 @@
|
||||
name: '⭐️ Feature Request'
|
||||
description: 'Suggest an idea'
|
||||
title: '[Feature] '
|
||||
labels: ['⭐️ Feature Request']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
- ibuler
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: 'Product Version'
|
||||
description: The versions prior to v2.28 (inclusive) are no longer supported.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Product Edition'
|
||||
options:
|
||||
- label: 'Community Edition'
|
||||
- label: 'Enterprise Edition'
|
||||
- label: 'Enterprise Trial Edition'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 'Installation Method'
|
||||
options:
|
||||
- label: 'Online Installation (One-click command installation)'
|
||||
- label: 'Offline Package Installation'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: 'Source Code'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '⭐️ Feature Description'
|
||||
description: |
|
||||
Please add a clear and concise description of the problem you aim to solve with this feature request.<br/>
|
||||
Unclear descriptions will not be processed.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Proposed Solution'
|
||||
description: Please provide a clear and concise description of the solution you desire.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 'Additional Information'
|
||||
description: Please add any additional background information about the issue here.
|
||||
72
.github/ISSUE_TEMPLATE/4_bug_report_cn.yml
vendored
72
.github/ISSUE_TEMPLATE/4_bug_report_cn.yml
vendored
@@ -1,72 +0,0 @@
|
||||
name: '🐛 反馈缺陷'
|
||||
description: '反馈一个缺陷'
|
||||
title: '[Bug] '
|
||||
labels: ['🐛 Bug']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: '产品版本'
|
||||
description: 不再支持 v2.28(含)之前的版本。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '版本类型'
|
||||
options:
|
||||
- label: '社区版'
|
||||
- label: '企业版'
|
||||
- label: '企业试用版'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '安装方式'
|
||||
options:
|
||||
- label: '在线安装 (一键命令安装)'
|
||||
- label: '离线包安装'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: '源码安装'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '环境信息'
|
||||
description: 请提供一个清晰且简洁的描述,说明你的环境信息。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '🐛 缺陷描述'
|
||||
description: |
|
||||
请提供一个清晰且简洁的缺陷描述,如果问题比较复杂,也请详细说明。<br/>
|
||||
针对不清晰的描述信息将不予处理。请确保提供足够的细节和信息,以支持对缺陷进行复现和修复。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '复现步骤'
|
||||
description: 请提供一个清晰且简洁的描述,说明如何复现问题。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '期望结果'
|
||||
description: 请提供一个清晰且简洁的描述,说明你期望发生什么。
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '补充信息'
|
||||
description: 在这里添加关于问题的任何其他背景信息。
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '尝试过的解决方案'
|
||||
description: 如果你已经尝试解决问题,请在此列出你尝试过的解决方案。
|
||||
61
.github/ISSUE_TEMPLATE/5_question_cn.yml
vendored
61
.github/ISSUE_TEMPLATE/5_question_cn.yml
vendored
@@ -1,61 +0,0 @@
|
||||
name: '🤔 问题咨询'
|
||||
description: '提出一个问题'
|
||||
title: '[Question] '
|
||||
labels: ['🤔 Question']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: '产品版本'
|
||||
description: 不再支持 v2.28(含)之前的版本。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '版本类型'
|
||||
options:
|
||||
- label: '社区版'
|
||||
- label: '企业版'
|
||||
- label: '企业试用版'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '安装方式'
|
||||
options:
|
||||
- label: '在线安装 (一键命令安装)'
|
||||
- label: '离线包安装'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: '源码安装'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '环境信息'
|
||||
description: 请在此详细描述你的环境信息,如操作系统、浏览器和部署架构等。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '🤔 问题描述'
|
||||
description: |
|
||||
请提供一个清晰且简洁的问题描述,如果问题比较复杂,也请详细说明。<br/>
|
||||
针对不清晰的描述信息将不予处理。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '期望结果'
|
||||
description: 请提供一个清晰且简洁的描述,说明你期望发生什么。
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '补充信息'
|
||||
description: 在这里添加关于问题的任何其他背景信息。
|
||||
|
||||
56
.github/ISSUE_TEMPLATE/6_feature_request_cn.yml
vendored
56
.github/ISSUE_TEMPLATE/6_feature_request_cn.yml
vendored
@@ -1,56 +0,0 @@
|
||||
name: '⭐️ 功能需求'
|
||||
description: '提出需求或建议'
|
||||
title: '[Feature] '
|
||||
labels: ['⭐️ Feature Request']
|
||||
assignees:
|
||||
- baijiangjie
|
||||
- ibuler
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: '产品版本'
|
||||
description: 不再支持 v2.28(含)之前的版本。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '版本类型'
|
||||
options:
|
||||
- label: '社区版'
|
||||
- label: '企业版'
|
||||
- label: '企业试用版'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: '安装方式'
|
||||
options:
|
||||
- label: '在线安装 (一键命令安装)'
|
||||
- label: '离线包安装'
|
||||
- label: 'All-in-One'
|
||||
- label: '1Panel'
|
||||
- label: 'Kubernetes'
|
||||
- label: '源码安装'
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '⭐️ 需求描述'
|
||||
description: |
|
||||
请添加一个清晰且简洁的问题描述,阐述你希望通过这个功能需求解决的问题。<br/>
|
||||
针对不清晰的描述信息将不予处理。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '解决方案'
|
||||
description: 请清晰且简洁地描述你想要的解决方案。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: '补充信息'
|
||||
description: 在这里添加关于问题的任何其他背景信息。
|
||||
22
.github/ISSUE_TEMPLATE/bug---.md
vendored
Normal file
22
.github/ISSUE_TEMPLATE/bug---.md
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
name: Bug 提交
|
||||
about: 提交产品缺陷帮助我们更好的改进
|
||||
title: "[Bug] "
|
||||
labels: 类型:bug
|
||||
assignees: wojiushixiaobai
|
||||
|
||||
---
|
||||
|
||||
**JumpServer 版本(v1.5.9以下不再支持)**
|
||||
|
||||
|
||||
**浏览器版本**
|
||||
|
||||
|
||||
**Bug 描述**
|
||||
|
||||
|
||||
**Bug 重现步骤(有截图更好)**
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
10
.github/ISSUE_TEMPLATE/question.md
vendored
Normal file
10
.github/ISSUE_TEMPLATE/question.md
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
---
|
||||
name: 问题咨询
|
||||
about: 提出针对本项目安装部署、使用及其他方面的相关问题
|
||||
title: "[Question] "
|
||||
labels: 类型:提问
|
||||
assignees: wojiushixiaobai
|
||||
|
||||
---
|
||||
|
||||
**请描述您的问题.**
|
||||
14
.github/dependabot.yml.bak
vendored
14
.github/dependabot.yml.bak
vendored
@@ -1,14 +0,0 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "uv"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "monday"
|
||||
time: "09:30"
|
||||
timezone: "Asia/Shanghai"
|
||||
target-branch: dev
|
||||
groups:
|
||||
python-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
3
.github/release-config.yml
vendored
3
.github/release-config.yml
vendored
@@ -41,5 +41,4 @@ version-resolver:
|
||||
default: patch
|
||||
template: |
|
||||
## 版本变化 What’s Changed
|
||||
$CHANGES
|
||||
|
||||
$CHANGES
|
||||
74
.github/workflows/build-base-image.yml
vendored
74
.github/workflows/build-base-image.yml
vendored
@@ -1,74 +0,0 @@
|
||||
name: Build and Push Base Image
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'dev'
|
||||
- 'v*'
|
||||
paths:
|
||||
- poetry.lock
|
||||
- pyproject.toml
|
||||
- Dockerfile-base
|
||||
- package.json
|
||||
- go.mod
|
||||
- yarn.lock
|
||||
- pom.xml
|
||||
- install_deps.sh
|
||||
- utils/clean_site_packages.sh
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
image: tonistiigi/binfmt:qemu-v7.0.0-28
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Extract date
|
||||
id: vars
|
||||
run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV
|
||||
|
||||
- name: Extract repository name
|
||||
id: repo
|
||||
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push multi-arch image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
file: Dockerfile-base
|
||||
tags: jumpserver/core-base:${{ env.IMAGE_TAG }}
|
||||
|
||||
- name: Update Dockerfile
|
||||
run: |
|
||||
sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile
|
||||
|
||||
- name: Commit changes
|
||||
run: |
|
||||
git config --global user.name 'github-actions[bot]'
|
||||
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
||||
git add Dockerfile
|
||||
git commit -m "perf: Update Dockerfile with new base image tag"
|
||||
git push origin ${{ github.event.pull_request.head.ref }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
31
.github/workflows/check-compilemessages.yml
vendored
31
.github/workflows/check-compilemessages.yml
vendored
@@ -1,31 +0,0 @@
|
||||
name: Check I18n files CompileMessages
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'dev'
|
||||
paths:
|
||||
- 'apps/i18n/core/**/*.po'
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
jobs:
|
||||
compile-messages-check:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and check compilemessages
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
platforms: linux/amd64
|
||||
push: false
|
||||
file: Dockerfile
|
||||
target: stage-build
|
||||
tags: jumpserver/core:stage-build
|
||||
24
.github/workflows/discord-release.yml
vendored
24
.github/workflows/discord-release.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Publish Release to Discord
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
send_discord_notification:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.event.release.tag_name, 'v4.')
|
||||
steps:
|
||||
- name: Send release notification to Discord
|
||||
env:
|
||||
WEBHOOK_URL: ${{ secrets.DISCORD_CHANGELOG_WEBHOOK }}
|
||||
run: |
|
||||
# 获取标签名称和 release body
|
||||
TAG_NAME="${{ github.event.release.tag_name }}"
|
||||
RELEASE_BODY="${{ github.event.release.body }}"
|
||||
|
||||
# 使用 jq 构建 JSON 数据,以确保安全传递
|
||||
JSON_PAYLOAD=$(jq -n --arg tag "# JumpServer $TAG_NAME Released! 🚀" --arg body "$RELEASE_BODY" '{content: "\($tag)\n\($body)"}')
|
||||
|
||||
# 使用 curl 发送 JSON 数据
|
||||
curl -X POST -H "Content-Type: application/json" -d "$JSON_PAYLOAD" "$WEBHOOK_URL"
|
||||
24
.github/workflows/docs-release.yml
vendored
24
.github/workflows/docs-release.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Auto update docs changelog
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
update_docs_changelog:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.event.release.tag_name, 'v4.')
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Update docs changelog
|
||||
env:
|
||||
TAG_NAME: ${{ github.event.release.tag_name }}
|
||||
DOCS_TOKEN: ${{ secrets.DOCS_TOKEN }}
|
||||
run: |
|
||||
git config --global user.name 'BaiJiangJie'
|
||||
git config --global user.email 'jiangjie.bai@fit2cloud.com'
|
||||
|
||||
git clone https://$DOCS_TOKEN@github.com/jumpservice/documentation.git
|
||||
cd documentation/utils
|
||||
bash update_changelog.sh
|
||||
4
.github/workflows/issue-close-require.yml
vendored
4
.github/workflows/issue-close-require.yml
vendored
@@ -12,9 +12,7 @@ jobs:
|
||||
uses: actions-cool/issues-helper@v2
|
||||
with:
|
||||
actions: 'close-issues'
|
||||
labels: '⏳ Pending feedback'
|
||||
labels: '状态:待反馈'
|
||||
inactive-day: 30
|
||||
body: |
|
||||
You haven't provided feedback for over 30 days.
|
||||
We will close this issue. If you have any further needs, you can reopen it or submit a new issue.
|
||||
您超过 30 天未反馈信息,我们将关闭该 issue,如有需求您可以重新打开或者提交新的 issue。
|
||||
|
||||
2
.github/workflows/issue-close.yml
vendored
2
.github/workflows/issue-close.yml
vendored
@@ -13,4 +13,4 @@ jobs:
|
||||
if: ${{ !github.event.issue.pull_request }}
|
||||
with:
|
||||
actions: 'remove-labels'
|
||||
labels: '🔔 Pending processing,⏳ Pending feedback'
|
||||
labels: '状态:待处理,状态:待反馈'
|
||||
39
.github/workflows/issue-comment.yml
vendored
39
.github/workflows/issue-comment.yml
vendored
@@ -13,53 +13,26 @@ jobs:
|
||||
uses: actions-cool/issues-helper@v2
|
||||
with:
|
||||
actions: 'add-labels'
|
||||
labels: '🔔 Pending processing'
|
||||
labels: '状态:待处理'
|
||||
|
||||
- name: Remove require reply label
|
||||
uses: actions-cool/issues-helper@v2
|
||||
with:
|
||||
actions: 'remove-labels'
|
||||
labels: '⏳ Pending feedback'
|
||||
labels: '状态:待反馈'
|
||||
|
||||
add-label-if-is-member:
|
||||
add-label-if-not-author:
|
||||
runs-on: ubuntu-latest
|
||||
if: (github.event.issue.user.id != github.event.comment.user.id) && !github.event.issue.pull_request && (github.event.issue.state == 'open')
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Get Organization name
|
||||
id: org_name
|
||||
run: echo "data=$(echo '${{ github.repository }}' | cut -d '/' -f 1)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Get Organization public members
|
||||
uses: octokit/request-action@v2.x
|
||||
id: members
|
||||
with:
|
||||
route: GET /orgs/${{ steps.org_name.outputs.data }}/public_members
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Process public members data
|
||||
# 将 members 中的数据转化为 login 字段的拼接字符串
|
||||
id: member_names
|
||||
run: echo "data=$(echo '${{ steps.members.outputs.data }}' | jq '[.[].login] | join(",")')" >> $GITHUB_OUTPUT
|
||||
|
||||
|
||||
- run: "echo members: '${{ steps.members.outputs.data }}'"
|
||||
- run: "echo member names: '${{ steps.member_names.outputs.data }}'"
|
||||
- run: "echo comment user: '${{ github.event.comment.user.login }}'"
|
||||
- run: "echo contains? : '${{ contains(steps.member_names.outputs.data, github.event.comment.user.login) }}'"
|
||||
|
||||
- name: Add require replay label
|
||||
if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
|
||||
uses: actions-cool/issues-helper@v2
|
||||
with:
|
||||
actions: 'add-labels'
|
||||
labels: '⏳ Pending feedback'
|
||||
labels: '状态:待反馈'
|
||||
|
||||
- name: Remove require handle label
|
||||
if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
|
||||
uses: actions-cool/issues-helper@v2
|
||||
with:
|
||||
actions: 'remove-labels'
|
||||
labels: '🔔 Pending processing'
|
||||
labels: '状态:待处理'
|
||||
|
||||
2
.github/workflows/issue-open.yml
vendored
2
.github/workflows/issue-open.yml
vendored
@@ -13,4 +13,4 @@ jobs:
|
||||
if: ${{ !github.event.issue.pull_request }}
|
||||
with:
|
||||
actions: 'add-labels'
|
||||
labels: '🔔 Pending processing'
|
||||
labels: '状态:待处理'
|
||||
63
.github/workflows/jms-build-test.yml.disabled
vendored
63
.github/workflows/jms-build-test.yml.disabled
vendored
@@ -1,63 +0,0 @@
|
||||
name: "Run Build Test"
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile*'
|
||||
- 'Dockerfile-*'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
component: [core]
|
||||
version: [v4]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Prepare Build
|
||||
run: |
|
||||
sed -i 's@^FROM registry.fit2cloud.com/jumpserver@FROM ghcr.io/jumpserver@g' Dockerfile-ee
|
||||
|
||||
- name: Build CE Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
file: Dockerfile
|
||||
tags: ghcr.io/jumpserver/${{ matrix.component }}:${{ matrix.version }}-ce
|
||||
platforms: linux/amd64
|
||||
build-args: |
|
||||
VERSION=${{ matrix.version }}
|
||||
APT_MIRROR=http://deb.debian.org
|
||||
PIP_MIRROR=https://pypi.org/simple
|
||||
outputs: type=image,oci-mediatypes=true,compression=zstd,compression-level=3,force-compression=true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build EE Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
file: Dockerfile-ee
|
||||
tags: ghcr.io/jumpserver/${{ matrix.component }}:${{ matrix.version }}
|
||||
platforms: linux/amd64
|
||||
build-args: |
|
||||
VERSION=${{ matrix.version }}
|
||||
APT_MIRROR=http://deb.debian.org
|
||||
PIP_MIRROR=https://pypi.org/simple
|
||||
outputs: type=image,oci-mediatypes=true,compression=zstd,compression-level=3,force-compression=true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
@@ -10,4 +10,3 @@ jobs:
|
||||
- uses: jumpserver/action-generic-handler@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
|
||||
I18N_TOKEN: ${{ secrets.I18N_TOKEN }}
|
||||
|
||||
18
.github/workflows/lgtm.yml
vendored
Normal file
18
.github/workflows/lgtm.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
name: Send LGTM reaction
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1.0.0
|
||||
- uses: micnncim/action-lgtm-reaction@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
trigger: '["^.?lgtm$"]'
|
||||
28
.github/workflows/llm-code-review.yml.bak
vendored
28
.github/workflows/llm-code-review.yml.bak
vendored
@@ -1,28 +0,0 @@
|
||||
name: LLM Code Review
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize]
|
||||
|
||||
jobs:
|
||||
llm-code-review:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: fit2cloud/LLM-CodeReview-Action@main
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.FIT2CLOUDRD_LLM_CODE_REVIEW_TOKEN }}
|
||||
OPENAI_API_KEY: ${{ secrets.ALIYUN_LLM_API_KEY }}
|
||||
LANGUAGE: English
|
||||
OPENAI_API_ENDPOINT: https://dashscope.aliyuncs.com/compatible-mode/v1
|
||||
MODEL: qwen2-1.5b-instruct
|
||||
PROMPT: "Please check the following code differences for any irregularities, potential issues, or optimization suggestions, and provide your answers in English."
|
||||
top_p: 1
|
||||
temperature: 1
|
||||
# max_tokens: 10000
|
||||
MAX_PATCH_LENGTH: 10000
|
||||
IGNORE_PATTERNS: "/node_modules,*.md,/dist,/.github"
|
||||
FILE_PATTERNS: "*.java,*.go,*.py,*.vue,*.ts,*.js,*.css,*.scss,*.html"
|
||||
2
.github/workflows/release-drafter.yml
vendored
2
.github/workflows/release-drafter.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
TAG=$(basename ${GITHUB_REF})
|
||||
VERSION=${TAG/v/}
|
||||
wget https://raw.githubusercontent.com/jumpserver/installer/master/quick_start.sh
|
||||
sed -i "s@VERSION=dev@VERSION=v${VERSION}@g" quick_start.sh
|
||||
sed -i "s@Version=.*@Version=v${VERSION}@g" quick_start.sh
|
||||
echo "::set-output name=TAG::$TAG"
|
||||
echo "::set-output name=VERSION::$VERSION"
|
||||
- name: Create Release
|
||||
|
||||
2
.github/workflows/sync-gitee.yml
vendored
2
.github/workflows/sync-gitee.yml
vendored
@@ -20,4 +20,4 @@ jobs:
|
||||
SSH_PRIVATE_KEY: ${{ secrets.GITEE_SSH_PRIVATE_KEY }}
|
||||
with:
|
||||
source-repo: 'git@github.com:jumpserver/jumpserver.git'
|
||||
destination-repo: 'git@gitee.com:fit2cloud-feizhiyun/JumpServer.git'
|
||||
destination-repo: 'git@gitee.com:jumpserver/jumpserver.git'
|
||||
|
||||
45
.github/workflows/translate-readme.yml
vendored
45
.github/workflows/translate-readme.yml
vendored
@@ -1,45 +0,0 @@
|
||||
name: Translate README
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
source_readme:
|
||||
description: "Source README"
|
||||
required: false
|
||||
default: "./readmes/README.en.md"
|
||||
target_langs:
|
||||
description: "Target Languages"
|
||||
required: false
|
||||
default: "zh-hans,zh-hant,ja,pt-br,es,ru"
|
||||
gen_dir_path:
|
||||
description: "Generate Dir Name"
|
||||
required: false
|
||||
default: "readmes/"
|
||||
push_branch:
|
||||
description: "Push Branch"
|
||||
required: false
|
||||
default: "pr@dev@translate_readme"
|
||||
prompt:
|
||||
description: "AI Translate Prompt"
|
||||
required: false
|
||||
default: ""
|
||||
|
||||
gpt_mode:
|
||||
description: "GPT Mode"
|
||||
required: false
|
||||
default: "gpt-4o-mini"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Auto Translate
|
||||
uses: jumpserver-dev/action-translate-readme@main
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
|
||||
OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
|
||||
GPT_MODE: ${{ github.event.inputs.gpt_mode }}
|
||||
SOURCE_README: ${{ github.event.inputs.source_readme }}
|
||||
TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
|
||||
PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
|
||||
GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}
|
||||
PROMPT: ${{ github.event.inputs.prompt }}
|
||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -16,7 +16,6 @@ dump.rdb
|
||||
.cache/
|
||||
.idea/
|
||||
.vscode/
|
||||
.fleet/
|
||||
db.sqlite3
|
||||
config.py
|
||||
config.yml
|
||||
@@ -32,20 +31,12 @@ media
|
||||
celerybeat.pid
|
||||
django.db
|
||||
celerybeat-schedule.db
|
||||
data/static
|
||||
docs/_build/
|
||||
xpack
|
||||
xpack.bak
|
||||
logs/*
|
||||
### Vagrant ###
|
||||
.vagrant/
|
||||
release/*
|
||||
releashe
|
||||
/apps/script.py
|
||||
data/*
|
||||
test.py
|
||||
.history/
|
||||
.test/
|
||||
*.mo
|
||||
apps.iml
|
||||
*.db
|
||||
*.mmdb
|
||||
*.ipdb
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
[settings]
|
||||
line_length=120
|
||||
known_first_party=common,users,assets,perms,authentication,jumpserver,notification,ops,orgs,rbac,settings,terminal,tickets
|
||||
|
||||
11
.prettierrc
11
.prettierrc
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"tabWidth": 4,
|
||||
"useTabs": false,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5",
|
||||
"bracketSpacing": true,
|
||||
"arrowParens": "avoid",
|
||||
"printWidth": 100,
|
||||
"endOfLine": "lf"
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
[MESSAGES CONTROL]
|
||||
disable=missing-module-docstring,missing-class-docstring,missing-function-docstring,too-many-ancestors
|
||||
@@ -126,4 +126,3 @@ enforcement ladder](https://github.com/mozilla/diversity).
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
# Contributing
|
||||
|
||||
As a contributor, you should agree that:
|
||||
|
||||
- The producer can adjust the open-source agreement to be more strict or relaxed as deemed necessary.
|
||||
- Your contributed code may be used for commercial purposes, including but not limited to its cloud business operations.
|
||||
|
||||
## Create pull request
|
||||
PR are always welcome, even if they only contain small fixes like typos or a few lines of code. If there will be a significant effort, please document it as an issue and get a discussion going before starting to work on it.
|
||||
|
||||
@@ -28,4 +23,3 @@ When reporting issues, always include:
|
||||
|
||||
Because the issues are open to the public, when submitting files, be sure to remove any sensitive information, e.g. user name, password, IP address, and company name. You can
|
||||
replace those parts with "REDACTED" or other strings like "****".
|
||||
|
||||
|
||||
120
Dockerfile
120
Dockerfile
@@ -1,69 +1,87 @@
|
||||
FROM jumpserver/core-base:20250827_025554 AS stage-build
|
||||
|
||||
# 编译代码
|
||||
FROM python:3.8-slim as stage-build
|
||||
MAINTAINER JumpServer Team <ibuler@qq.com>
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
ADD . .
|
||||
RUN cd utils && bash -ixeu build.sh
|
||||
|
||||
FROM python:3.8-slim
|
||||
ARG PIP_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_MIRROR=$PIP_MIRROR
|
||||
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
|
||||
ADD . .
|
||||
|
||||
RUN echo > /opt/jumpserver/config.yml \
|
||||
&& \
|
||||
if [ -n "${VERSION}" ]; then \
|
||||
sed -i "s@VERSION = .*@VERSION = '${VERSION}'@g" apps/jumpserver/const.py; \
|
||||
fi
|
||||
|
||||
RUN set -ex \
|
||||
&& export SECRET_KEY=$(head -c100 < /dev/urandom | base64 | tr -dc A-Za-z0-9 | head -c 48) \
|
||||
&& . /opt/py3/bin/activate \
|
||||
&& cd apps \
|
||||
&& python manage.py compilemessages
|
||||
|
||||
|
||||
FROM python:3.11-slim-bullseye
|
||||
ENV LANG=en_US.UTF-8 \
|
||||
PATH=/opt/py3/bin:$PATH
|
||||
ARG BUILD_DEPENDENCIES=" \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
libldap2-dev \
|
||||
libx11-dev"
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
sshpass"
|
||||
|
||||
ARG TOOLS=" \
|
||||
cron \
|
||||
ca-certificates \
|
||||
default-libmysqlclient-dev \
|
||||
openssh-client \
|
||||
sshpass \
|
||||
nmap \
|
||||
bubblewrap"
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iproute2 \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
wget"
|
||||
|
||||
ARG APT_MIRROR=http://deb.debian.org
|
||||
|
||||
RUN set -ex \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update > /dev/null \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& apt update \
|
||||
&& apt -y install ${BUILD_DEPENDENCIES} \
|
||||
&& apt -y install ${DEPENDENCIES} \
|
||||
&& apt -y install ${TOOLS} \
|
||||
&& localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
|
||||
&& cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& mkdir -p /root/.ssh/ \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null\n\tCiphers +aes128-cbc\n\tKexAlgorithms +diffie-hellman-group1-sha1\n\tHostKeyAlgorithms +ssh-rsa" > /root/.ssh/config \
|
||||
&& echo "no" | dpkg-reconfigure dash \
|
||||
&& apt-get clean all \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
|
||||
&& sed -i "s@# alias l@alias l@g" ~/.bashrc \
|
||||
&& echo "set mouse-=a" > ~/.vimrc \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& echo "0 3 * * * root find /tmp -type f -mtime +1 -size +1M -exec rm -f {} \; && date > /tmp/clean.log" > /etc/cron.d/cleanup_tmp \
|
||||
&& chmod 0644 /etc/cron.d/cleanup_tmp
|
||||
&& mv /bin/sh /bin/sh.bak \
|
||||
&& ln -s /bin/bash /bin/sh
|
||||
|
||||
COPY --from=stage-build /opt /opt
|
||||
COPY --from=stage-build /usr/local/bin /usr/local/bin
|
||||
COPY --from=stage-build /opt/jumpserver/apps/libs/ansible/ansible.cfg /etc/ansible/
|
||||
RUN mkdir -p /opt/jumpserver/oracle/ \
|
||||
&& wget https://download.jumpserver.org/public/instantclient-basiclite-linux.x64-21.1.0.0.0.tar \
|
||||
&& tar xf instantclient-basiclite-linux.x64-21.1.0.0.0.tar -C /opt/jumpserver/oracle/ \
|
||||
&& echo "/opt/jumpserver/oracle/instantclient_21_1" > /etc/ld.so.conf.d/oracle-instantclient.conf \
|
||||
&& ldconfig \
|
||||
&& rm -f instantclient-basiclite-linux.x64-21.1.0.0.0.tar
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
|
||||
|
||||
RUN echo > config.yml \
|
||||
&& pip install --upgrade pip==20.2.4 setuptools==49.6.0 wheel==0.34.2 -i ${PIP_MIRROR} \
|
||||
&& pip install --no-cache-dir $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install --no-cache-dir -r requirements/requirements.txt -i ${PIP_MIRROR} \
|
||||
&& rm -rf ~/.cache/pip
|
||||
|
||||
VOLUME /opt/jumpserver/data
|
||||
VOLUME /opt/jumpserver/logs
|
||||
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
||||
ENV LANG=zh_CN.UTF-8
|
||||
|
||||
EXPOSE 8070
|
||||
EXPOSE 8080
|
||||
|
||||
STOPSIGNAL SIGQUIT
|
||||
|
||||
CMD ["start", "all"]
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
FROM python:3.11-slim-bullseye
|
||||
ARG TARGETARCH
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
|
||||
# Install APT dependencies
|
||||
ARG DEPENDENCIES=" \
|
||||
ca-certificates \
|
||||
wget \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config \
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
gettext \
|
||||
libkrb5-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev"
|
||||
|
||||
ARG APT_MIRROR=http://deb.debian.org
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked,id=core \
|
||||
set -ex \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& apt-get update > /dev/null \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& echo "no" | dpkg-reconfigure dash
|
||||
|
||||
# Install bin tools
|
||||
ARG CHECK_VERSION=v1.0.4
|
||||
RUN set -ex \
|
||||
&& wget https://github.com/jumpserver-dev/healthcheck/releases/download/${CHECK_VERSION}/check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
|
||||
&& tar -xf check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
|
||||
&& mv check /usr/local/bin/ \
|
||||
&& chown root:root /usr/local/bin/check \
|
||||
&& chmod 755 /usr/local/bin/check \
|
||||
&& rm -f check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz
|
||||
|
||||
# Install Python dependencies
|
||||
WORKDIR /opt/jumpserver
|
||||
|
||||
ARG PIP_MIRROR=https://pypi.org/simple
|
||||
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
|
||||
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
|
||||
ENV LANG=en_US.UTF-8 \
|
||||
PATH=/opt/py3/bin:$PATH
|
||||
|
||||
ENV UV_LINK_MODE=copy
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
|
||||
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
|
||||
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
|
||||
set -ex \
|
||||
&& uv venv \
|
||||
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
|
||||
&& ln -sf $(pwd)/.venv /opt/py3 \
|
||||
&& bash utils/static_files.sh \
|
||||
&& bash clean_site_packages.sh
|
||||
@@ -1,32 +0,0 @@
|
||||
ARG VERSION=dev
|
||||
|
||||
FROM registry.fit2cloud.com/jumpserver/xpack:${VERSION} AS build-xpack
|
||||
FROM jumpserver/core:${VERSION}-ce
|
||||
|
||||
COPY --from=build-xpack /opt/xpack /opt/jumpserver/apps/xpack
|
||||
|
||||
ARG TOOLS=" \
|
||||
g++ \
|
||||
curl \
|
||||
iputils-ping \
|
||||
netcat-openbsd \
|
||||
nmap \
|
||||
telnet \
|
||||
vim \
|
||||
postgresql-client-13 \
|
||||
wget \
|
||||
poppler-utils"
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
&& apt-get clean all \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
|
||||
ARG PIP_MIRROR=https://pypi.org/simple
|
||||
|
||||
RUN set -ex \
|
||||
&& uv pip install -i${PIP_MIRROR} --group xpack \
|
||||
&& playwright install chromium --with-deps --only-shell
|
||||
3
LICENSE
3
LICENSE
@@ -671,5 +671,4 @@ into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
215
README.md
215
README.md
@@ -1,125 +1,134 @@
|
||||
<div align="center">
|
||||
<a name="readme-top"></a>
|
||||
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
|
||||
|
||||
## An open-source PAM platform (Bastion Host)
|
||||
<p align="center"><a href="https://jumpserver.org"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a></p>
|
||||
<h3 align="center">多云环境下更好用的堡垒机</h3>
|
||||
|
||||
[![][license-shield]][license-link]
|
||||
[![][docs-shield]][docs-link]
|
||||
[![][deepwiki-shield]][deepwiki-link]
|
||||
[![][discord-shield]][discord-link]
|
||||
[![][docker-shield]][docker-link]
|
||||
[![][github-release-shield]][github-release-link]
|
||||
[![][github-stars-shield]][github-stars-link]
|
||||
<p align="center">
|
||||
<a href="https://www.gnu.org/licenses/gpl-3.0.html"><img src="https://img.shields.io/github/license/jumpserver/jumpserver" alt="License: GPLv3"></a>
|
||||
<a href="https://shields.io/github/downloads/jumpserver/jumpserver/total"><img src="https://shields.io/github/downloads/jumpserver/jumpserver/total" alt=" release"></a>
|
||||
<a href="https://hub.docker.com/u/jumpserver"><img src="https://img.shields.io/docker/pulls/jumpserver/jms_all.svg" alt="Codacy"></a>
|
||||
<a href="https://github.com/jumpserver/jumpserver"><img src="https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square" alt="Stars"></a>
|
||||
</p>
|
||||
|
||||
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md) · [한국어](/readmes/README.ko.md)
|
||||
|
||||
</div>
|
||||
<br/>
|
||||
|
||||
## What is JumpServer?
|
||||
|
||||
JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
|
||||
--------------------------
|
||||
- [ENGLISH](https://github.com/jumpserver/jumpserver/blob/master/README_EN.md)
|
||||
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://www.jumpserver.com/images/jumpserver-arch-light.png">
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://www.jumpserver.com/images/jumpserver-arch-dark.png">
|
||||
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
|
||||
</picture>
|
||||
|
||||
JumpServer 是全球首款开源的堡垒机,使用 GPLv3 开源协议,是符合 4A 规范的运维安全审计系统。
|
||||
|
||||
JumpServer 使用 Python 开发,遵循 Web 2.0 规范,配备了业界领先的 Web Terminal 方案,交互界面美观、用户体验好。
|
||||
|
||||
JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向扩展,无资产数量及并发限制。
|
||||
|
||||
改变世界,从一点点开始 ...
|
||||
|
||||
> 如需进一步了解 JumpServer 开源项目,推荐阅读 [JumpServer 的初心和使命](https://mp.weixin.qq.com/s/S6q_2rP_9MwaVwyqLQnXzA)
|
||||
|
||||
### 特色优势
|
||||
|
||||
- 开源: 零门槛,线上快速获取和安装;
|
||||
- 分布式: 轻松支持大规模并发访问;
|
||||
- 无插件: 仅需浏览器,极致的 Web Terminal 使用体验;
|
||||
- 多云支持: 一套系统,同时管理不同云上面的资产;
|
||||
- 云端存储: 审计录像云端存储,永不丢失;
|
||||
- 多租户: 一套系统,多个子公司和部门同时使用;
|
||||
- 多应用支持: 数据库,Windows远程应用,Kubernetes。
|
||||
|
||||
### UI 展示
|
||||
|
||||

|
||||
|
||||
### 在线体验
|
||||
|
||||
- 环境地址:<https://demo.jumpserver.org/>
|
||||
|
||||
| :warning: 注意 |
|
||||
| :--------------------------- |
|
||||
| 该环境仅作体验目的使用,我们会定时清理、重置数据! |
|
||||
| 请勿修改体验环境用户的密码! |
|
||||
| 请勿在环境中添加业务生产环境地址、用户名密码等敏感信息! |
|
||||
|
||||
### 快速开始
|
||||
|
||||
- [极速安装](https://docs.jumpserver.org/zh/master/install/setup_by_fast/)
|
||||
- [完整文档](https://docs.jumpserver.org)
|
||||
- [演示视频](https://www.bilibili.com/video/BV1ZV41127GB)
|
||||
- [手动安装](https://github.com/jumpserver/installer)
|
||||
|
||||
### 组件项目
|
||||
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI 项目
|
||||
- [Luna](https://github.com/jumpserver/luna) JumpServer Web Terminal 项目
|
||||
- [KoKo](https://github.com/jumpserver/koko) JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco)
|
||||
- [Lion](https://github.com/jumpserver/lion-release) JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/)
|
||||
- [Clients](https://github.com/jumpserver/clients) JumpServer 客户端 项目
|
||||
- [Installer](https://github.com/jumpserver/installer) JumpServer 安装包 项目
|
||||
|
||||
### 社区
|
||||
|
||||
如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose) 或加入到我们的社区当中进行进一步交流沟通。
|
||||
|
||||
#### 微信交流群
|
||||
|
||||
<img src="https://download.jumpserver.org/images/wecom-group.jpeg" alt="微信群二维码" width="200"/>
|
||||
|
||||
### 贡献
|
||||
如果有你好的想法创意,或者帮助我们修复了 Bug, 欢迎提交 Pull Request
|
||||
|
||||
感谢以下贡献者,让 JumpServer 更加完善
|
||||
|
||||
<a href="https://github.com/jumpserver/jumpserver/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/jumpserver" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/koko/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/koko" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/lina/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/lina" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/luna/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/luna" />
|
||||
</a>
|
||||
|
||||
|
||||
## Quickstart
|
||||
|
||||
Prepare a clean Linux Server ( 64 bit, >= 4c8g )
|
||||
### 致谢
|
||||
- [Apache Guacamole](https://guacamole.apache.org/) Web页面连接 RDP, SSH, VNC协议设备,JumpServer 图形化组件 Lion 依赖
|
||||
- [OmniDB](https://omnidb.org/) Web页面连接使用数据库,JumpServer Web数据库依赖
|
||||
|
||||
```sh
|
||||
curl -sSL https://github.com/jumpserver/jumpserver/releases/latest/download/quick_start.sh | bash
|
||||
```
|
||||
|
||||
Access JumpServer in your browser at `http://your-jumpserver-ip/`
|
||||
- Username: `admin`
|
||||
- Password: `ChangeMe`
|
||||
### JumpServer 企业版
|
||||
- [申请企业版试用](https://jinshuju.net/f/kyOYpi)
|
||||
|
||||
[](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")
|
||||
### 案例研究
|
||||
|
||||
## Screenshots
|
||||
<table style="border-collapse: collapse; border: 1px solid black;">
|
||||
<tr>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/5bed98c6-cbe8-4073-9597-d53c69dc3957" alt="JumpServer K8s" /></td>
|
||||
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/b80ad654-548f-42bc-ba3d-c1cfdf1b46d6" alt="JumpServer DB" /></td>
|
||||
</tr>
|
||||
</table>
|
||||
- [JumpServer 堡垒机护航顺丰科技超大规模资产安全运维](https://blog.fit2cloud.com/?p=1147);
|
||||
- [JumpServer 堡垒机让“大智慧”的混合 IT 运维更智慧](https://blog.fit2cloud.com/?p=882);
|
||||
- [携程 JumpServer 堡垒机部署与运营实战](https://blog.fit2cloud.com/?p=851);
|
||||
- [小红书的JumpServer堡垒机大规模资产跨版本迁移之路](https://blog.fit2cloud.com/?p=516);
|
||||
- [JumpServer堡垒机助力中手游提升多云环境下安全运维能力](https://blog.fit2cloud.com/?p=732);
|
||||
- [中通快递:JumpServer主机安全运维实践](https://blog.fit2cloud.com/?p=708);
|
||||
- [东方明珠:JumpServer高效管控异构化、分布式云端资产](https://blog.fit2cloud.com/?p=687);
|
||||
- [江苏农信:JumpServer堡垒机助力行业云安全运维](https://blog.fit2cloud.com/?p=666)。
|
||||
|
||||
## Components
|
||||
### 安全说明
|
||||
|
||||
JumpServer consists of multiple key components, which collectively form the functional framework of JumpServer, providing users with comprehensive capabilities for operations management and security control.
|
||||
JumpServer是一款安全产品,请参考 [基本安全建议](https://docs.jumpserver.org/zh/master/install/install_security/) 部署安装.
|
||||
|
||||
| Project | Status | Description |
|
||||
|--------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------|
|
||||
| [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI |
|
||||
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal |
|
||||
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
|
||||
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
|
||||
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
|
||||
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
|
||||
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
|
||||
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
|
||||
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
|
||||
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
|
||||
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
|
||||
如果你发现安全问题,可以直接联系我们:
|
||||
|
||||
## Third-party projects
|
||||
- [jumpserver-grafana-dashboard](https://github.com/acerrah/jumpserver-grafana-dashboard) JumpServer with grafana dashboard
|
||||
- ibuler@fit2cloud.com
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
## Contributing
|
||||
### License & Copyright
|
||||
|
||||
Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.
|
||||
Copyright (c) 2014-2022 飞致云 FIT2CLOUD, All rights reserved.
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
|
||||
|
||||
Licensed under The GNU General Public License version 3 (GPLv3) (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
|
||||
Licensed under The GNU General Public License version 3 (GPLv3) (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
|
||||
|
||||
https://www.gnu.org/licenses/gpl-3.0.html
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
|
||||
<!-- JumpServer official link -->
|
||||
[docs-link]: https://jumpserver.com/docs
|
||||
[discord-link]: https://discord.com/invite/W6vYXmAQG2
|
||||
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
|
||||
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md
|
||||
|
||||
<!-- JumpServer Other link-->
|
||||
[license-link]: https://www.gnu.org/licenses/gpl-3.0.html
|
||||
[docker-link]: https://hub.docker.com/u/jumpserver
|
||||
[github-release-link]: https://github.com/jumpserver/jumpserver/releases/latest
|
||||
[github-stars-link]: https://github.com/jumpserver/jumpserver
|
||||
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues
|
||||
|
||||
<!-- Shield link-->
|
||||
[docs-shield]: https://img.shields.io/badge/documentation-148F76
|
||||
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
|
||||
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
|
||||
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
|
||||
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
|
||||
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
|
||||
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
|
||||
|
||||
95
README_EN.md
Normal file
95
README_EN.md
Normal file
@@ -0,0 +1,95 @@
|
||||
<p align="center"><a href="https://jumpserver.org"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a></p>
|
||||
<h3 align="center">Open Source Bastion Host</h3>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.gnu.org/licenses/gpl-3.0.html"><img src="https://img.shields.io/github/license/jumpserver/jumpserver" alt="License: GPLv3"></a>
|
||||
<a href="https://shields.io/github/downloads/jumpserver/jumpserver/total"><img src="https://shields.io/github/downloads/jumpserver/jumpserver/total" alt=" release"></a>
|
||||
<a href="https://hub.docker.com/u/jumpserver"><img src="https://img.shields.io/docker/pulls/jumpserver/jms_all.svg" alt="Codacy"></a>
|
||||
<a href="https://github.com/jumpserver/jumpserver"><img src="https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square" alt="Stars"></a>
|
||||
</p>
|
||||
|
||||
JumpServer is the world's first open-source Bastion Host and is licensed under the GPLv3. It is a 4A-compliant professional operation and maintenance security audit system.
|
||||
|
||||
JumpServer uses Python / Django for development, follows Web 2.0 specifications, and is equipped with an industry-leading Web Terminal solution that provides a beautiful user interface and great user experience
|
||||
|
||||
JumpServer adopts a distributed architecture to support multi-branch deployment across multiple cross-regional areas. The central node provides APIs, and login nodes are deployed in each branch. It can be scaled horizontally without concurrency restrictions.
|
||||
|
||||
Change the world by taking every little step
|
||||
|
||||
----
|
||||
### Advantages
|
||||
|
||||
- Open Source: fully transparent, free to access, and quick to install.
|
||||
- Distributed: supports large-scale concurrent access with ease.
|
||||
- No plugin required: all you need is a browser for the ultimate Web Terminal experience.
|
||||
- Multi-Cloud supported: a unified system to manage assets across different clouds at the same time.
|
||||
- Cloud storage: audit records are stored in the cloud. No more data loss!
|
||||
- Multi-Tenant system: multiple subsidiaries or departments can use the same system simultaneously.
|
||||
- Many applications supported: connect to databases, Windows remote applications, Kubernetes clusters, and more.
|
||||
|
||||
|
||||
### JumpServer Component Projects
|
||||
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI
|
||||
- [Luna](https://github.com/jumpserver/luna) JumpServer Web Terminal
|
||||
- [KoKo](https://github.com/jumpserver/koko) JumpServer Character Protocol Connector, replacing the original Python version [Coco](https://github.com/jumpserver/coco)
|
||||
- [Lion](https://github.com/jumpserver/lion-release) JumpServer Graphics Protocol Connector, relying on [Apache Guacamole](https://guacamole.apache.org/)
|
||||
|
||||
### Contribution
|
||||
If you have any good ideas or would like to help us fix bugs, please submit a Pull Request and accept our thanks :)
|
||||
|
||||
Thanks to the following contributors for making JumpServer better every day!
|
||||
|
||||
<a href="https://github.com/jumpserver/jumpserver/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/jumpserver" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/koko/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/koko" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/lina/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/lina" />
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/jumpserver/luna/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/luna" />
|
||||
</a>
|
||||
|
||||
### Thanks to
|
||||
- [Apache Guacamole](https://guacamole.apache.org/) Connects to RDP, SSH, and VNC devices from a web page. JumpServer's graphical connections depend on it.
|
||||
- [OmniDB](https://omnidb.org/) Connects to databases from a web page. JumpServer's web database feature depends on it.
|
||||
|
||||
|
||||
### JumpServer Enterprise Version
|
||||
- [Apply for it](https://jinshuju.net/f/kyOYpi)
|
||||
|
||||
### Case Study
|
||||
|
||||
- [JumpServer bastion host safeguards secure O&M for SF Technology's ultra-large-scale assets](https://blog.fit2cloud.com/?p=1147);
|
||||
- [JumpServer bastion host makes hybrid IT O&M at DZH (Great Wisdom) even smarter](https://blog.fit2cloud.com/?p=882);
|
||||
- [Hands-on deployment and operation of the JumpServer bastion host at Trip.com (Ctrip)](https://blog.fit2cloud.com/?p=851);
|
||||
- [Xiaohongshu's large-scale, cross-version asset migration with the JumpServer bastion host](https://blog.fit2cloud.com/?p=516);
|
||||
- [JumpServer bastion host helps CMGE strengthen secure O&M in multi-cloud environments](https://blog.fit2cloud.com/?p=732);
|
||||
- [ZTO Express: JumpServer host security O&M in practice](https://blog.fit2cloud.com/?p=708);
|
||||
- [Oriental Pearl: JumpServer efficiently manages heterogeneous, distributed cloud assets](https://blog.fit2cloud.com/?p=687);
|
||||
- [Jiangsu Rural Credit: JumpServer bastion host supports secure O&M on the industry cloud](https://blog.fit2cloud.com/?p=666).
|
||||
|
||||
### Security Notes
|
||||
|
||||
JumpServer is a security product. Please refer to [Basic Security Recommendations](https://docs.jumpserver.org/zh/master/install/install_security/) for deployment and installation.
|
||||
|
||||
If you find a security problem, please contact us directly:
|
||||
|
||||
- ibuler@fit2cloud.com
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
### License & Copyright
|
||||
Copyright (c) 2014-2022 FIT2CLOUD Tech, Inc., All rights reserved.
|
||||
|
||||
Licensed under The GNU General Public License version 3 (GPLv3) (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
|
||||
|
||||
https://www.gnu.org/licenses/gpl-3.0.html
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
|
||||
@@ -5,7 +5,8 @@ JumpServer is a growing security product; please refer to [Basic Security Recommendations
|
||||
If you find a security issue, please contact us directly and let us make the world better together:
|
||||
|
||||
- ibuler@fit2cloud.com
|
||||
- support@lxware.hk
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
|
||||
# Security Policy
|
||||
@@ -15,5 +16,5 @@ JumpServer is a security product, The installation and development should follow
|
||||
All security bugs should be reported to the contact as below:
|
||||
|
||||
- ibuler@fit2cloud.com
|
||||
- support@lxware.hk
|
||||
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
56
Vagrantfile
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
# The most common configuration options are documented and commented below.
|
||||
# For a complete reference, please see the online documentation at
|
||||
# https://docs.vagrantup.com.
|
||||
|
||||
# Every Vagrant development environment requires a box. You can search for
|
||||
# boxes at https://vagrantcloud.com/search.
|
||||
config.vm.box_check_update = false
|
||||
config.vm.box = "centos/7"
|
||||
config.vm.hostname = "jumpserver"
|
||||
config.vm.network "private_network", ip: "172.17.8.101"
|
||||
config.vm.provider "virtualbox" do |vb|
|
||||
vb.memory = "4096"
|
||||
vb.cpus = 2
|
||||
vb.name = "jumpserver"
|
||||
end
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", type: "rsync",
|
||||
rsync__verbose: true,
|
||||
rsync__exclude: ['.git*', 'node_modules*','*.log','*.box','Vagrantfile']
|
||||
|
||||
config.vm.provision "shell", inline: <<-SHELL
|
||||
## Configure Aliyun yum mirrors
|
||||
sudo curl -o /etc/yum.repos.d/CentOS-Base.repo http://mirrors.aliyun.com/repo/Centos-7.repo
|
||||
sudo sed -i -e '/mirrors.cloud.aliyuncs.com/d' -e '/mirrors.aliyuncs.com/d' /etc/yum.repos.d/CentOS-Base.repo
|
||||
sudo curl -o /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo
|
||||
sudo yum makecache
|
||||
|
||||
## Install dependency packages
|
||||
sudo yum install -y python36 python36-devel python36-pip \
|
||||
libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel sshpass \
|
||||
openldap-devel mariadb-devel mysql-devel libffi-devel \
|
||||
openssh-clients telnet openldap-clients gcc
|
||||
|
||||
## Configure the Aliyun pip mirror
|
||||
mkdir /home/vagrant/.pip
|
||||
cat << EOF | sudo tee -a /home/vagrant/.pip/pip.conf
|
||||
[global]
|
||||
timeout = 6000
|
||||
index-url = https://mirrors.aliyun.com/pypi/simple/
|
||||
|
||||
[install]
|
||||
use-mirrors = true
|
||||
mirrors = https://mirrors.aliyun.com/pypi/simple/
|
||||
trusted-host=mirrors.aliyun.com
|
||||
EOF
|
||||
|
||||
python3.6 -m venv /home/vagrant/venv
|
||||
source /home/vagrant/venv/bin/activate
|
||||
echo 'source /home/vagrant/venv/bin/activate' >> /home/vagrant/.bash_profile
|
||||
SHELL
|
||||
end
|
||||
@@ -1,2 +0,0 @@
|
||||
from .account import *
|
||||
from .automations import *
|
||||
@@ -1,6 +0,0 @@
|
||||
from .account import *
|
||||
from .application import *
|
||||
from .pam_dashboard import *
|
||||
from .task import *
|
||||
from .template import *
|
||||
from .virtual import *
|
||||
@@ -1,249 +0,0 @@
|
||||
from django.db import transaction
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.generics import ListAPIView, CreateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.status import HTTP_200_OK
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.const import ChangeSecretRecordStatusChoice
|
||||
from accounts.filters import AccountFilterSet, NodeFilterBackend
|
||||
from accounts.mixins import AccountRecordViewLogMixin
|
||||
from accounts.models import Account, ChangeSecretRecord
|
||||
from assets.models import Asset, Node
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
from common.api.mixin import ExtraFilterFieldsMixin
|
||||
from common.drf.filters import AttrRulesFilterBackend
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils import lazyproperty, get_logger
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
__all__ = [
|
||||
'AccountViewSet', 'AccountSecretsViewSet',
|
||||
'AccountHistoriesSecretAPI', 'AssetAccountBulkCreateApi',
|
||||
]
|
||||
|
||||
|
||||
class AccountViewSet(OrgBulkModelViewSet):
|
||||
model = Account
|
||||
search_fields = ('username', 'name', 'asset__name', 'asset__address', 'comment')
|
||||
extra_filter_backends = [AttrRulesFilterBackend, NodeFilterBackend]
|
||||
filterset_class = AccountFilterSet
|
||||
serializer_classes = {
|
||||
'default': serializers.AccountSerializer,
|
||||
'retrieve': serializers.AccountDetailSerializer,
|
||||
}
|
||||
rbac_perms = {
|
||||
'partial_update': ['accounts.change_account'],
|
||||
'su_from_accounts': 'accounts.view_account',
|
||||
'clear_secret': 'accounts.change_account',
|
||||
'move_to_assets': 'accounts.delete_account',
|
||||
'copy_to_assets': 'accounts.add_account',
|
||||
}
|
||||
export_as_zip = True
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
|
||||
if not asset_id:
|
||||
return queryset
|
||||
|
||||
asset = get_object_or_404(Asset, pk=asset_id)
|
||||
queryset = asset.all_accounts.all()
|
||||
return queryset
|
||||
|
||||
@action(methods=['get'], detail=False, url_path='su-from-accounts')
|
||||
def su_from_accounts(self, request, *args, **kwargs):
|
||||
account_id = request.query_params.get('account')
|
||||
asset_id = request.query_params.get('asset')
|
||||
|
||||
if account_id:
|
||||
account = get_object_or_404(Account, pk=account_id)
|
||||
accounts = account.get_su_from_accounts()
|
||||
elif asset_id:
|
||||
asset = get_object_or_404(Asset, pk=asset_id)
|
||||
accounts = asset.accounts.all()
|
||||
else:
|
||||
accounts = Account.objects.none()
|
||||
accounts = self.filter_queryset(accounts)
|
||||
serializer = serializers.AccountSerializer(accounts, many=True)
|
||||
return Response(data=serializer.data)
|
||||
|
||||
@action(
|
||||
methods=['post'], detail=False, url_path='username-suggestions',
|
||||
permission_classes=[IsValidUser]
|
||||
)
|
||||
def username_suggestions(self, request, *args, **kwargs):
|
||||
raw_asset_ids = request.data.get('assets', [])
|
||||
node_ids = request.data.get('nodes', [])
|
||||
username = request.data.get('username', '')
|
||||
|
||||
asset_ids = set(raw_asset_ids)
|
||||
|
||||
if node_ids:
|
||||
nodes = Node.objects.filter(id__in=node_ids)
|
||||
node_asset_qs = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
|
||||
asset_ids |= {str(u) for u in node_asset_qs}
|
||||
|
||||
if asset_ids:
|
||||
through = Asset.directory_services.through
|
||||
ds_qs = through.objects.filter(asset_id__in=asset_ids) \
|
||||
.values_list('directoryservice_id', flat=True)
|
||||
asset_ids |= {str(u) for u in ds_qs}
|
||||
accounts = Account.objects.filter(asset_id__in=list(asset_ids))
|
||||
else:
|
||||
accounts = Account.objects.all()
|
||||
|
||||
if username:
|
||||
accounts = accounts.filter(username__icontains=username)
|
||||
usernames = list(accounts.values_list('username', flat=True).distinct()[:10])
|
||||
usernames.sort()
|
||||
common = [i for i in usernames if i.lower() in ['root', 'admin', 'administrator']]
|
||||
others = [i for i in usernames if i not in common]
|
||||
usernames = common + others
|
||||
return Response(data=usernames)
|
||||
|
||||
@action(methods=['patch'], detail=False, url_path='clear-secret')
|
||||
def clear_secret(self, request, *args, **kwargs):
|
||||
account_ids = request.data.get('account_ids', [])
|
||||
self.model.objects.filter(id__in=account_ids).update(secret=None)
|
||||
return Response(status=HTTP_200_OK)
|
||||
|
||||
def _copy_or_move_to_assets(self, request, move=False):
|
||||
account = self.get_object()
|
||||
asset_ids = request.data.get('assets', [])
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
field_names = [
|
||||
'name', 'username', 'secret_type', 'secret',
|
||||
'privileged', 'is_active', 'source', 'source_id', 'comment'
|
||||
]
|
||||
account_data = {field: getattr(account, field) for field in field_names}
|
||||
|
||||
creation_results = {}
|
||||
success_count = 0
|
||||
|
||||
for asset in assets:
|
||||
account_data['asset'] = asset
|
||||
creation_results[asset] = {'state': 'created'}
|
||||
try:
|
||||
with transaction.atomic():
|
||||
self.model.objects.create(**account_data)
|
||||
success_count += 1
|
||||
except Exception as e:
|
||||
logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}')
|
||||
creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}
|
||||
|
||||
results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]
|
||||
|
||||
if move and success_count > 0:
|
||||
account.delete()
|
||||
|
||||
return Response(results, status=HTTP_200_OK)
|
||||
|
||||
@action(methods=['post'], detail=True, url_path='move-to-assets')
|
||||
def move_to_assets(self, request, *args, **kwargs):
|
||||
return self._copy_or_move_to_assets(request, move=True)
|
||||
|
||||
@action(methods=['post'], detail=True, url_path='copy-to-assets')
|
||||
def copy_to_assets(self, request, *args, **kwargs):
|
||||
return self._copy_or_move_to_assets(request, move=False)
|
||||
|
||||
|
||||
class AccountSecretsViewSet(AccountRecordViewLogMixin, AccountViewSet):
|
||||
"""
|
||||
A separate viewset, because it may need to export the secrets of all accounts
|
||||
"""
|
||||
serializer_classes = {
|
||||
'default': serializers.AccountSecretSerializer,
|
||||
}
|
||||
http_method_names = ['get', 'options']
|
||||
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
|
||||
rbac_perms = {
|
||||
'list': 'accounts.view_accountsecret',
|
||||
'retrieve': 'accounts.view_accountsecret',
|
||||
}
|
||||
|
||||
|
||||
class AssetAccountBulkCreateApi(CreateAPIView):
|
||||
serializer_class = serializers.AssetAccountBulkSerializer
|
||||
rbac_perms = {
|
||||
'POST': 'accounts.add_account',
|
||||
}
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
data = serializer.create(serializer.validated_data)
|
||||
serializer = serializers.AssetAccountBulkSerializerResultSerializer(data, many=True)
|
||||
return Response(data=serializer.data, status=HTTP_200_OK)
|
||||
|
||||
|
||||
class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixin, ListAPIView):
|
||||
model = Account.history.model
|
||||
serializer_class = serializers.AccountHistorySerializer
|
||||
http_method_names = ['get', 'options']
|
||||
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
|
||||
rbac_perms = {
|
||||
'GET': 'accounts.view_accountsecret',
|
||||
}
|
||||
queryset = Account.history.model.objects.none()
|
||||
|
||||
@lazyproperty
|
||||
def account(self) -> Account:
|
||||
return get_object_or_404(Account, pk=self.kwargs.get('pk'))
|
||||
|
||||
def get_object(self):
|
||||
return self.account
|
||||
|
||||
@lazyproperty
|
||||
def latest_history(self):
|
||||
return self.account.history.first()
|
||||
|
||||
@property
|
||||
def latest_change_secret_record(self) -> ChangeSecretRecord:
|
||||
return self.account.changesecretrecords.filter(
|
||||
status=ChangeSecretRecordStatusChoice.pending
|
||||
).order_by('-date_created').first()
|
||||
|
||||
@staticmethod
|
||||
def filter_spm_queryset(resource_ids, queryset):
|
||||
return queryset.filter(history_id__in=resource_ids)
|
||||
|
||||
def get_queryset(self):
|
||||
account = self.account
|
||||
histories = account.history.all()
|
||||
latest_history = self.latest_history
|
||||
if not latest_history:
|
||||
return histories
|
||||
if account.secret != latest_history.secret:
|
||||
return histories
|
||||
if account.secret_type != latest_history.secret_type:
|
||||
return histories
|
||||
histories = histories.exclude(history_id=latest_history.history_id)
|
||||
return histories
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
queryset = super().filter_queryset(queryset)
|
||||
queryset = list(queryset)
|
||||
latest_history = self.latest_history
|
||||
if not latest_history:
|
||||
return queryset
|
||||
|
||||
latest_change_secret_record = self.latest_change_secret_record
|
||||
if not latest_change_secret_record:
|
||||
return queryset
|
||||
|
||||
if latest_change_secret_record.date_created > latest_history.history_date:
|
||||
temp_history = self.model(
|
||||
secret=latest_change_secret_record.new_secret,
|
||||
secret_type=self.account.secret_type,
|
||||
version=latest_history.version,
|
||||
history_date=latest_change_secret_record.date_created,
|
||||
)
|
||||
queryset = [temp_history] + queryset
|
||||
|
||||
return queryset
|
||||
@@ -1,84 +0,0 @@
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _, get_language
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.models import IntegrationApplication
|
||||
from audits.models import IntegrationApplicationLog
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
from common.exceptions import JMSException
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils import get_request_ip
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
|
||||
|
||||
class IntegrationApplicationViewSet(OrgBulkModelViewSet):
|
||||
model = IntegrationApplication
|
||||
search_fields = ('name', 'comment')
|
||||
serializer_classes = {
|
||||
'default': serializers.IntegrationApplicationSerializer,
|
||||
'get_account_secret': serializers.IntegrationAccountSecretSerializer
|
||||
}
|
||||
rbac_perms = {
|
||||
'get_once_secret': 'accounts.change_integrationapplication',
|
||||
'get_account_secret': 'accounts.view_integrationapplication'
|
||||
}
|
||||
|
||||
def read_file(self, path):
|
||||
if os.path.exists(path):
|
||||
with open(path, 'r', encoding='utf-8') as file:
|
||||
return file.read()
|
||||
return ''
|
||||
|
||||
@action(
|
||||
['GET'], detail=False, url_path='sdks',
|
||||
permission_classes=[IsValidUser]
|
||||
)
|
||||
def get_sdks_info(self, request, *args, **kwargs):
|
||||
code_suffix_mapper = {
|
||||
'python': 'py',
|
||||
'java': 'java',
|
||||
'go': 'go',
|
||||
'node': 'js',
|
||||
'curl': 'sh',
|
||||
}
|
||||
sdk_language = request.query_params.get('language', 'python')
|
||||
sdk_path = os.path.join(settings.APPS_DIR, 'accounts', 'demos', sdk_language)
|
||||
readme_path = os.path.join(sdk_path, f'README.{get_language()}.md')
|
||||
demo_path = os.path.join(sdk_path, f'demo.{code_suffix_mapper[sdk_language]}')
|
||||
|
||||
readme_content = self.read_file(readme_path)
|
||||
demo_content = self.read_file(demo_path)
|
||||
|
||||
return Response(data={'readme': readme_content, 'code': demo_content})
|
||||
|
||||
@action(
|
||||
['GET'], detail=True, url_path='secret',
|
||||
permission_classes=[RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
|
||||
)
|
||||
def get_once_secret(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
return Response(data={'id': instance.id, 'secret': instance.secret})
|
||||
|
||||
@action(['GET'], detail=False, url_path='account-secret',
|
||||
permission_classes=[RBACPermission])
|
||||
def get_account_secret(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.query_params)
|
||||
if not serializer.is_valid():
|
||||
return Response({'error': serializer.errors}, status=400)
|
||||
|
||||
service = request.user
|
||||
account = service.get_account(**serializer.data)
|
||||
if not account:
|
||||
msg = _('Account not found')
|
||||
raise JMSException(code='Not found', detail='%s' % msg)
|
||||
asset = account.asset
|
||||
IntegrationApplicationLog.objects.create(
|
||||
remote_addr=get_request_ip(request), service=service.name, service_id=service.id,
|
||||
account=f'{account.name}({account.username})', asset=f'{asset.name}({asset.address})',
|
||||
)
|
||||
return Response(data={'id': request.user.id, 'secret': account.secret})
|
||||
@@ -1,130 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from collections import defaultdict
|
||||
|
||||
from django.db.models import Count, F, Q
|
||||
from django.http.response import JsonResponse
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import (
|
||||
Account, GatherAccountsAutomation,
|
||||
PushAccountAutomation, BackupAccountAutomation,
|
||||
AccountRisk, IntegrationApplication, ChangeSecretAutomation
|
||||
)
|
||||
from assets.const import AllTypes
|
||||
from common.utils.timezone import local_monday
|
||||
|
||||
__all__ = ['PamDashboardApi']
|
||||
|
||||
|
||||
class PamDashboardApi(APIView):
|
||||
http_method_names = ['get']
|
||||
rbac_perms = {
|
||||
'GET': 'rbac.view_pam',
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_type_to_accounts():
|
||||
result = Account.objects.annotate(type=F('asset__platform__type')) \
|
||||
.values('type').order_by('type').annotate(total=Count(1))
|
||||
all_types_dict = dict(AllTypes.choices())
|
||||
|
||||
return [
|
||||
{**i, 'label': all_types_dict.get(i['type'], i['type'])}
|
||||
for i in result
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_account_risk_data(_all, query_params):
|
||||
agg_map = {
|
||||
'total_long_time_no_login_accounts': ('long_time_no_login_count', Q(risk='long_time_no_login')),
|
||||
'total_new_found_accounts': ('new_found_count', Q(risk='new_found')),
|
||||
'total_groups_changed_accounts': ('groups_changed_count', Q(risk='groups_changed')),
|
||||
'total_sudoers_changed_accounts': ('sudoers_changed_count', Q(risk='sudoers_changed')),
|
||||
'total_authorized_keys_changed_accounts': (
|
||||
'authorized_keys_changed_count', Q(risk='authorized_keys_changed')),
|
||||
'total_account_deleted_accounts': ('account_deleted_count', Q(risk='account_deleted')),
|
||||
'total_password_expired_accounts': ('password_expired_count', Q(risk='password_expired')),
|
||||
'total_long_time_password_accounts': ('long_time_password_count', Q(risk='long_time_password')),
|
||||
'total_weak_password_accounts': ('weak_password_count', Q(risk='weak_password')),
|
||||
'total_leaked_password_accounts': ('leaked_password_count', Q(risk='leaked_password')),
|
||||
'total_repeated_password_accounts': ('repeated_password_count', Q(risk='repeated_password')),
|
||||
}
|
||||
|
||||
aggregations = {
|
||||
agg_key: Count('id', distinct=True, filter=agg_filter)
|
||||
for param_key, (agg_key, agg_filter) in agg_map.items()
|
||||
if _all or query_params.get(param_key)
|
||||
}
|
||||
|
||||
data = {}
|
||||
if aggregations:
|
||||
account_stats = AccountRisk.objects.aggregate(**aggregations)
|
||||
data = {param_key: account_stats.get(agg_key) for param_key, (agg_key, _) in agg_map.items() if
|
||||
agg_key in account_stats}
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def get_account_data(_all, query_params):
|
||||
agg_map = {
|
||||
'total_accounts': ('total_count', Count('id')),
|
||||
'total_privileged_accounts': ('privileged_count', Count('id', filter=Q(privileged=True))),
|
||||
'total_connectivity_ok_accounts': ('connectivity_ok_count', Count('id', filter=Q(connectivity='ok'))),
|
||||
'total_secret_reset_accounts': ('secret_reset_count', Count('id', filter=Q(secret_reset=True))),
|
||||
'total_valid_accounts': ('valid_count', Count('id', filter=Q(is_active=True))),
|
||||
'total_week_add_accounts': ('week_add_count', Count('id', filter=Q(date_created__gte=local_monday()))),
|
||||
}
|
||||
|
||||
aggregations = {
|
||||
agg_key: agg_expr
|
||||
for param_key, (agg_key, agg_expr) in agg_map.items()
|
||||
if _all or query_params.get(param_key)
|
||||
}
|
||||
|
||||
data = {}
|
||||
account_stats = Account.objects.aggregate(**aggregations)
|
||||
for param_key, (agg_key, __) in agg_map.items():
|
||||
if agg_key in account_stats:
|
||||
data[param_key] = account_stats[agg_key]
|
||||
|
||||
if _all or query_params.get('total_ordinary_accounts'):
|
||||
if 'total_count' in account_stats and 'privileged_count' in account_stats:
|
||||
data['total_ordinary_accounts'] = \
|
||||
account_stats['total_count'] - account_stats['privileged_count']
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def get_automation_counts(_all, query_params):
|
||||
automation_counts = defaultdict(int)
|
||||
automation_models = {
|
||||
'total_count_change_secret_automation': ChangeSecretAutomation,
|
||||
'total_count_gathered_account_automation': GatherAccountsAutomation,
|
||||
'total_count_push_account_automation': PushAccountAutomation,
|
||||
'total_count_backup_account_automation': BackupAccountAutomation,
|
||||
'total_count_integration_application': IntegrationApplication,
|
||||
}
|
||||
|
||||
for param_key, model in automation_models.items():
|
||||
if _all or query_params.get(param_key):
|
||||
automation_counts[param_key] = model.objects.count()
|
||||
|
||||
return automation_counts
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
query_params = self.request.query_params
|
||||
|
||||
_all = query_params.get('all')
|
||||
|
||||
data = {}
|
||||
data.update(self.get_account_data(_all, query_params))
|
||||
data.update(self.get_account_risk_data(_all, query_params))
|
||||
data.update(self.get_automation_counts(_all, query_params))
|
||||
|
||||
if _all or query_params.get('total_count_type_to_accounts'):
|
||||
data.update({
|
||||
'total_count_type_to_accounts': self.get_type_to_accounts(),
|
||||
})
|
||||
|
||||
return JsonResponse(data, status=200)
|
||||
@@ -1,63 +0,0 @@
|
||||
from django.db.models import Q
|
||||
from rest_framework.generics import CreateAPIView
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.models import Account
|
||||
from accounts.permissions import AccountTaskActionPermission
|
||||
from accounts.tasks import (
|
||||
remove_accounts_task, verify_accounts_connectivity_task, push_accounts_to_assets_task
|
||||
)
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
|
||||
__all__ = [
|
||||
'AccountsTaskCreateAPI',
|
||||
]
|
||||
|
||||
|
||||
class AccountsTaskCreateAPI(CreateAPIView):
|
||||
serializer_class = serializers.AccountTaskSerializer
|
||||
permission_classes = (AccountTaskActionPermission,)
|
||||
|
||||
def get_permissions(self):
|
||||
act = self.request.data.get('action')
|
||||
if act == 'remove':
|
||||
self.permission_classes = [
|
||||
AccountTaskActionPermission,
|
||||
UserConfirmation.require(ConfirmType.PASSWORD)
|
||||
]
|
||||
return super().get_permissions()
|
||||
|
||||
@staticmethod
|
||||
def get_account_ids(data, action):
|
||||
account_type = 'gather_accounts' if action == 'remove' else 'accounts'
|
||||
accounts = data.get(account_type, [])
|
||||
account_ids = [str(a.id) for a in accounts]
|
||||
|
||||
if action == 'remove':
|
||||
return account_ids
|
||||
|
||||
assets = data.get('assets', [])
|
||||
asset_ids = [str(a.id) for a in assets]
|
||||
ids = Account.objects.filter(
|
||||
Q(id__in=account_ids) | Q(asset_id__in=asset_ids)
|
||||
).distinct().values_list('id', flat=True)
|
||||
return [str(_id) for _id in ids]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
data = serializer.validated_data
|
||||
action = data['action']
|
||||
ids = self.get_account_ids(data, action)
|
||||
|
||||
if action == 'push':
|
||||
task = push_accounts_to_assets_task.delay(ids, data.get('params'))
|
||||
elif action == 'remove':
|
||||
task = remove_accounts_task.delay(ids)
|
||||
elif action == 'verify':
|
||||
task = verify_accounts_connectivity_task.delay(ids)
|
||||
else:
|
||||
raise ValueError(f"Invalid action: {action}")
|
||||
|
||||
data = getattr(serializer, '_data', {})
|
||||
data["task"] = task.id
|
||||
setattr(serializer, '_data', data)
|
||||
return task
|
||||
@@ -1,78 +0,0 @@
|
||||
from django_filters import rest_framework as drf_filters
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.mixins import AccountRecordViewLogMixin
|
||||
from accounts.models import AccountTemplate
|
||||
from accounts.tasks import template_sync_related_accounts
|
||||
from assets.const import Protocol
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
|
||||
|
||||
class AccountTemplateFilterSet(BaseFilterSet):
|
||||
protocols = drf_filters.CharFilter(method='filter_protocols')
|
||||
|
||||
class Meta:
|
||||
model = AccountTemplate
|
||||
fields = ('username', 'name')
|
||||
|
||||
@staticmethod
|
||||
def filter_protocols(queryset, name, value):
|
||||
secret_types = set()
|
||||
protocols = value.split(',')
|
||||
protocol_secret_type_map = Protocol.settings()
|
||||
for p in protocols:
|
||||
if p not in protocol_secret_type_map:
|
||||
continue
|
||||
_st = protocol_secret_type_map[p].get('secret_types', [])
|
||||
secret_types.update(_st)
|
||||
if not secret_types:
|
||||
secret_types = ['password']
|
||||
queryset = queryset.filter(secret_type__in=secret_types)
|
||||
return queryset
|
||||
|
||||
|
||||
class AccountTemplateViewSet(OrgBulkModelViewSet):
|
||||
model = AccountTemplate
|
||||
filterset_class = AccountTemplateFilterSet
|
||||
search_fields = ('username', 'name')
|
||||
serializer_classes = {
|
||||
'default': serializers.AccountTemplateSerializer,
|
||||
'retrieve': serializers.AccountDetailTemplateSerializer,
|
||||
}
|
||||
rbac_perms = {
|
||||
'su_from_account_templates': 'accounts.view_accounttemplate',
|
||||
'sync_related_accounts': 'accounts.change_account',
|
||||
}
|
||||
|
||||
@action(methods=['get'], detail=False, url_path='su-from-account-templates')
|
||||
def su_from_account_templates(self, request, *args, **kwargs):
|
||||
pk = request.query_params.get('template_id')
|
||||
templates = AccountTemplate.get_su_from_account_templates(pk)
|
||||
templates = self.filter_queryset(templates)
|
||||
serializer = self.get_serializer(templates, many=True)
|
||||
return Response(data=serializer.data)
|
||||
|
||||
@action(methods=['patch'], detail=True, url_path='sync-related-accounts')
|
||||
def sync_related_accounts(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
user_id = str(request.user.id)
|
||||
task = template_sync_related_accounts.delay(str(instance.id), user_id)
|
||||
return Response({'task': task.id}, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class AccountTemplateSecretsViewSet(AccountRecordViewLogMixin, AccountTemplateViewSet):
|
||||
serializer_classes = {
|
||||
'default': serializers.AccountTemplateSecretSerializer,
|
||||
}
|
||||
http_method_names = ['get', 'options']
|
||||
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
|
||||
rbac_perms = {
|
||||
'list': 'accounts.view_accounttemplatesecret',
|
||||
'retrieve': 'accounts.view_accounttemplatesecret',
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from accounts.models import VirtualAccount
|
||||
from accounts.serializers import VirtualAccountSerializer
|
||||
from common.utils import is_uuid
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
|
||||
|
||||
class VirtualAccountViewSet(OrgBulkModelViewSet):
|
||||
serializer_class = VirtualAccountSerializer
|
||||
search_fields = ('alias',)
|
||||
filterset_fields = ('alias',)
|
||||
|
||||
def get_queryset(self):
|
||||
if getattr(self, "swagger_fake_view", False):
|
||||
return VirtualAccount.objects.none()
|
||||
return VirtualAccount.get_or_init_queryset()
|
||||
|
||||
def get_object(self, ):
|
||||
pk = self.kwargs.get('pk')
|
||||
kwargs = {'pk': pk} if is_uuid(pk) else {'alias': pk}
|
||||
return get_object_or_404(VirtualAccount, **kwargs)
|
||||
@@ -1,7 +0,0 @@
|
||||
from .backup import *
|
||||
from .base import *
|
||||
from .change_secret import *
|
||||
from .change_secret_dashboard import *
|
||||
from .check_account import *
|
||||
from .gather_account import *
|
||||
from .push_account import *
|
||||
@@ -1,36 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from accounts import serializers
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.models import (
|
||||
BackupAccountAutomation
|
||||
)
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from .base import AutomationExecutionViewSet
|
||||
|
||||
__all__ = [
|
||||
'BackupAccountViewSet', 'BackupAccountExecutionViewSet'
|
||||
]
|
||||
|
||||
|
||||
class BackupAccountViewSet(OrgBulkModelViewSet):
|
||||
model = BackupAccountAutomation
|
||||
filterset_fields = ('name',)
|
||||
search_fields = filterset_fields
|
||||
serializer_class = serializers.BackupAccountSerializer
|
||||
|
||||
|
||||
class BackupAccountExecutionViewSet(AutomationExecutionViewSet):
|
||||
rbac_perms = (
|
||||
("list", "accounts.view_backupaccountexecution"),
|
||||
("retrieve", "accounts.view_backupaccountexecution"),
|
||||
("create", "accounts.add_backupaccountexecution"),
|
||||
("report", "accounts.view_backupaccountexecution"),
|
||||
)
|
||||
|
||||
tp = AutomationTypes.backup_account
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
queryset = queryset.filter(type=self.tp)
|
||||
return queryset
|
||||
@@ -1,131 +0,0 @@
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views.decorators.clickjacking import xframe_options_sameorigin
|
||||
from rest_framework import status, mixins, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts.filters import AutomationExecutionFilterSet
|
||||
from accounts.models import AutomationExecution
|
||||
from accounts.tasks import execute_account_automation_task
|
||||
from assets import serializers
|
||||
from assets.models import BaseAutomation
|
||||
from common.const.choices import Trigger
|
||||
from orgs.mixins import generics
|
||||
|
||||
__all__ = [
|
||||
'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
|
||||
'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
|
||||
'AutomationExecutionViewSet'
|
||||
]
|
||||
|
||||
|
||||
class AutomationAssetsListApi(generics.ListAPIView):
|
||||
model = BaseAutomation
|
||||
serializer_class = serializers.AutomationAssetsSerializer
|
||||
filterset_fields = ("name", "address")
|
||||
search_fields = filterset_fields
|
||||
|
||||
def get_object(self):
|
||||
pk = self.kwargs.get('pk')
|
||||
return get_object_or_404(self.model, pk=pk)
|
||||
|
||||
def get_queryset(self):
|
||||
instance = self.get_object()
|
||||
assets = instance.get_all_assets().only(
|
||||
*self.serializer_class.Meta.only_fields
|
||||
)
|
||||
return assets
|
||||
|
||||
|
||||
class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
||||
model = BaseAutomation
|
||||
queryset = BaseAutomation.objects.all()
|
||||
serializer_class = serializers.UpdateAssetSerializer
|
||||
http_method_names = ['patch']
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
serializer = self.serializer_class(data=request.data)
|
||||
|
||||
if not serializer.is_valid():
|
||||
return Response({'error': serializer.errors})
|
||||
|
||||
assets = serializer.validated_data.get('assets')
|
||||
if assets:
|
||||
instance.assets.remove(*tuple(assets))
|
||||
return Response({'msg': 'ok'})
|
||||
|
||||
|
||||
class AutomationAddAssetApi(generics.UpdateAPIView):
|
||||
model = BaseAutomation
|
||||
queryset = BaseAutomation.objects.all()
|
||||
serializer_class = serializers.UpdateAssetSerializer
|
||||
http_method_names = ['patch']
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
serializer = self.serializer_class(data=request.data)
|
||||
if serializer.is_valid():
|
||||
assets = serializer.validated_data.get('assets')
|
||||
if assets:
|
||||
instance.assets.add(*tuple(assets))
|
||||
return Response({"msg": "ok"})
|
||||
else:
|
||||
return Response({"error": serializer.errors})
|
||||
|
||||
|
||||
class AutomationNodeAddRemoveApi(generics.UpdateAPIView):
|
||||
model = BaseAutomation
|
||||
serializer_class = serializers.UpdateNodeSerializer
|
||||
http_method_names = ['patch']
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
action_params = ['add', 'remove']
|
||||
action = request.query_params.get('action')
|
||||
if action not in action_params:
|
||||
err_info = _("The parameter 'action' must be [{}]".format(','.join(action_params)))
|
||||
return Response({"error": err_info})
|
||||
|
||||
instance = self.get_object()
|
||||
serializer = self.serializer_class(data=request.data)
|
||||
if serializer.is_valid():
|
||||
nodes = serializer.validated_data.get('nodes')
|
||||
if nodes:
|
||||
# e.g. instance.nodes.add(*tuple(nodes))
|
||||
getattr(instance.nodes, action)(*tuple(nodes))
|
||||
return Response({"msg": "ok"})
|
||||
else:
|
||||
return Response({"error": serializer.errors})
|
||||
|
||||
|
||||
class AutomationExecutionViewSet(
|
||||
mixins.CreateModelMixin, mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin, viewsets.GenericViewSet
|
||||
):
|
||||
search_fields = ('trigger', 'automation__name')
|
||||
filterset_fields = ('trigger', 'automation_id', 'automation__name')
|
||||
filterset_class = AutomationExecutionFilterSet
|
||||
serializer_class = serializers.AutomationExecutionSerializer
|
||||
tp: str
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = AutomationExecution.objects.all()
|
||||
return queryset
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
automation = serializer.validated_data.get('automation')
|
||||
task = execute_account_automation_task.delay(
|
||||
pid=str(automation.pk), trigger=Trigger.manual, tp=self.tp
|
||||
)
|
||||
return Response({'task': task.id}, status=status.HTTP_201_CREATED)
|
||||
|
||||
@xframe_options_sameorigin
|
||||
@action(methods=['get'], detail=True, url_path='report')
|
||||
def report(self, request, *args, **kwargs):
|
||||
execution = self.get_object()
|
||||
report = execution.manager.gen_report()
|
||||
return HttpResponse(report)
|
||||
@@ -1,180 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.db.models import Max, Q, Subquery, OuterRef
|
||||
from rest_framework import status, mixins
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.const import (
|
||||
AutomationTypes, ChangeSecretRecordStatusChoice
|
||||
)
|
||||
from accounts.filters import ChangeSecretRecordFilterSet, ChangeSecretStatusFilterSet
|
||||
from accounts.models import ChangeSecretAutomation, ChangeSecretRecord, Account
|
||||
from accounts.tasks import execute_automation_record_task
|
||||
from accounts.utils import account_secret_task_status
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
from common.permissions import IsValidLicense
|
||||
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
from .base import (
|
||||
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
|
||||
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'ChangeSecretAutomationViewSet', 'ChangeSecretRecordViewSet',
|
||||
'ChangSecretExecutionViewSet', 'ChangSecretAssetsListApi',
|
||||
'ChangSecretRemoveAssetApi', 'ChangSecretAddAssetApi',
|
||||
'ChangSecretNodeAddRemoveApi', 'ChangeSecretStatusViewSet'
|
||||
]
|
||||
|
||||
|
||||
class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
|
||||
model = ChangeSecretAutomation
|
||||
permission_classes = [RBACPermission, IsValidLicense]
|
||||
filterset_fields = ('name', 'secret_type', 'secret_strategy')
|
||||
search_fields = filterset_fields
|
||||
serializer_class = serializers.ChangeSecretAutomationSerializer
|
||||
|
||||
|
||||
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
|
||||
filterset_class = ChangeSecretRecordFilterSet
|
||||
permission_classes = [RBACPermission, IsValidLicense]
|
||||
search_fields = ('asset__address', 'account__username')
|
||||
ordering_fields = ('date_finished',)
|
||||
tp = AutomationTypes.change_secret
|
||||
serializer_classes = {
|
||||
'default': serializers.ChangeSecretRecordSerializer,
|
||||
'secret': serializers.ChangeSecretRecordViewSecretSerializer,
|
||||
}
|
||||
rbac_perms = {
|
||||
'execute': 'accounts.add_changesecretexecution',
|
||||
'secret': 'accounts.view_changesecretrecord',
|
||||
'dashboard': 'accounts.view_changesecretrecord',
|
||||
'ignore_fail': 'accounts.view_changesecretrecord',
|
||||
}
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action == 'secret':
|
||||
self.permission_classes = [
|
||||
RBACPermission,
|
||||
UserConfirmation.require(ConfirmType.MFA)
|
||||
]
|
||||
return super().get_permissions()
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
queryset = super().filter_queryset(queryset)
|
||||
|
||||
if self.action == 'dashboard':
|
||||
return self.get_dashboard_queryset(queryset)
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def get_dashboard_queryset(queryset):
|
||||
recent_dates = queryset.values('account').annotate(
|
||||
max_date_finished=Max('date_finished')
|
||||
)
|
||||
|
||||
recent_success_accounts = queryset.filter(
|
||||
account=OuterRef('account'),
|
||||
date_finished=Subquery(
|
||||
recent_dates.filter(account=OuterRef('account')).values('max_date_finished')[:1]
|
||||
)
|
||||
).filter(Q(status=ChangeSecretRecordStatusChoice.success))
|
||||
|
||||
failed_records = queryset.filter(
|
||||
~Q(account__in=Subquery(recent_success_accounts.values('account'))),
|
||||
status=ChangeSecretRecordStatusChoice.failed,
|
||||
ignore_fail=False
|
||||
)
|
||||
return failed_records
|
||||
|
||||
def get_queryset(self):
|
||||
return ChangeSecretRecord.get_valid_records()
|
||||
|
||||
@action(methods=['post'], detail=False, url_path='execute')
|
||||
def execute(self, request, *args, **kwargs):
|
||||
record_ids = request.data.get('record_ids')
|
||||
records = self.get_queryset().filter(id__in=record_ids)
|
||||
if not records.exists():
|
||||
return Response(
|
||||
{'detail': 'No valid records found'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
record_ids = [str(_id) for _id in records.values_list('id', flat=True)]
|
||||
task = execute_automation_record_task.delay(record_ids, self.tp)
|
||||
return Response({'task': task.id}, status=status.HTTP_200_OK)
|
||||
|
||||
@action(methods=['get'], detail=True, url_path='secret')
|
||||
def secret(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
serializer = self.get_serializer(instance)
|
||||
return Response(serializer.data)
|
||||
|
||||
@action(methods=['get'], detail=False, url_path='dashboard')
|
||||
def dashboard(self, request, *args, **kwargs):
|
||||
return super().list(request, *args, **kwargs)
|
||||
|
||||
@action(methods=['patch'], detail=True, url_path='ignore-fail')
|
||||
def ignore_fail(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
instance.ignore_fail = True
|
||||
instance.save(update_fields=['ignore_fail'])
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ChangSecretExecutionViewSet(AutomationExecutionViewSet):
|
||||
rbac_perms = (
|
||||
("list", "accounts.view_changesecretexecution"),
|
||||
("retrieve", "accounts.view_changesecretexecution"),
|
||||
("create", "accounts.add_changesecretexecution"),
|
||||
("report", "accounts.view_changesecretexecution"),
|
||||
)
|
||||
permission_classes = [RBACPermission, IsValidLicense]
|
||||
tp = AutomationTypes.change_secret
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
queryset = queryset.filter(type=self.tp)
|
||||
return queryset
|
||||
|
||||
|
||||
class ChangSecretAssetsListApi(AutomationAssetsListApi):
|
||||
model = ChangeSecretAutomation
|
||||
|
||||
|
||||
class ChangSecretRemoveAssetApi(AutomationRemoveAssetApi):
|
||||
model = ChangeSecretAutomation
|
||||
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
|
||||
|
||||
|
||||
class ChangSecretAddAssetApi(AutomationAddAssetApi):
|
||||
model = ChangeSecretAutomation
|
||||
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
|
||||
|
||||
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
||||
model = ChangeSecretAutomation
|
||||
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
|
||||
|
||||
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
|
||||
perm_model = ChangeSecretAutomation
|
||||
filterset_class = ChangeSecretStatusFilterSet
|
||||
serializer_class = serializers.ChangeSecretAccountSerializer
|
||||
search_fields = ('username',)
|
||||
|
||||
permission_classes = [RBACPermission, IsValidLicense]
|
||||
http_method_names = ["get", "delete", "options"]
|
||||
|
||||
def get_queryset(self):
|
||||
account_ids = list(account_secret_task_status.account_ids)
|
||||
return Account.objects.filter(id__in=account_ids).select_related('asset')
|
||||
|
||||
def bulk_destroy(self, request, *args, **kwargs):
|
||||
account_ids = request.data.get('account_ids')
|
||||
if isinstance(account_ids, str):
|
||||
account_ids = [account_ids]
|
||||
for _id in account_ids:
|
||||
account_secret_task_status.clear(_id)
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
@@ -1,186 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from collections import defaultdict
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.http.response import JsonResponse
|
||||
from django.utils import timezone
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.const import AutomationTypes, ChangeSecretRecordStatusChoice
|
||||
from accounts.models import ChangeSecretAutomation, AutomationExecution, ChangeSecretRecord
|
||||
from assets.models import Node, Asset
|
||||
from common.const import Status
|
||||
from common.permissions import IsValidLicense
|
||||
from common.utils import lazyproperty
|
||||
from common.utils.timezone import local_zero_hour, local_now
|
||||
from ops.celery import app
|
||||
from rbac.permissions import RBACPermission
|
||||
|
||||
__all__ = ['ChangeSecretDashboardApi']
|
||||
|
||||
|
||||
class ChangeSecretDashboardApi(APIView):
|
||||
http_method_names = ['get']
|
||||
rbac_perms = {
|
||||
'GET': 'accounts.view_changesecretautomation',
|
||||
}
|
||||
permission_classes = [RBACPermission, IsValidLicense]
|
||||
tp = AutomationTypes.change_secret
|
||||
task_name = 'accounts.tasks.automation.execute_account_automation_task'
|
||||
ongoing_change_secret_cache_key = "ongoing_change_secret_cache_key"
|
||||
|
||||
@lazyproperty
|
||||
def days(self):
|
||||
count = self.request.query_params.get('days', 1)
|
||||
return int(count)
|
||||
|
||||
@property
|
||||
def days_to_datetime(self):
|
||||
if self.days == 1:
|
||||
return local_zero_hour()
|
||||
return local_now() - timezone.timedelta(days=self.days)
|
||||
|
||||
def get_queryset_date_filter(self, qs, query_field='date_updated'):
|
||||
return qs.filter(**{f'{query_field}__gte': self.days_to_datetime})
|
||||
|
||||
@lazyproperty
|
||||
def date_range_list(self):
|
||||
return [
|
||||
(local_now() - timezone.timedelta(days=i)).date()
|
||||
for i in range(self.days - 1, -1, -1)
|
||||
]
|
||||
|
||||
def filter_by_date_range(self, queryset, field_name):
|
||||
date_range_bounds = self.days_to_datetime.date(), (local_now() + timezone.timedelta(days=1)).date()
|
||||
return queryset.filter(**{f'{field_name}__range': date_range_bounds})
|
||||
|
||||
def calculate_daily_metrics(self, queryset, date_field):
|
||||
filtered_queryset = self.filter_by_date_range(queryset, date_field)
|
||||
results = filtered_queryset.values_list(date_field, 'status')
|
||||
|
||||
status_counts = defaultdict(lambda: defaultdict(int))
|
||||
|
||||
for date_finished, status in results:
|
||||
dt_local = timezone.localtime(date_finished)
|
||||
date_str = str(dt_local.date())
|
||||
if status == ChangeSecretRecordStatusChoice.failed:
|
||||
status_counts[date_str]['failed'] += 1
|
||||
elif status == ChangeSecretRecordStatusChoice.success:
|
||||
status_counts[date_str]['success'] += 1
|
||||
|
||||
metrics = defaultdict(list)
|
||||
for date in self.date_range_list:
|
||||
date_str = str(date)
|
||||
for status in ['success', 'failed']:
|
||||
metrics[status].append(status_counts[date_str].get(status, 0))
|
||||
|
||||
return metrics
|
||||
|
||||
def get_daily_success_and_failure_metrics(self):
|
||||
metrics = self.calculate_daily_metrics(self.change_secret_records_queryset, 'date_finished')
|
||||
return metrics.get('success', []), metrics.get('failed', [])
|
||||
|
||||
@lazyproperty
|
||||
def change_secrets_queryset(self):
|
||||
return ChangeSecretAutomation.objects.all()
|
||||
|
||||
@lazyproperty
|
||||
def change_secret_records_queryset(self):
|
||||
return ChangeSecretRecord.get_valid_records()
|
||||
|
||||
def get_change_secret_asset_queryset(self):
|
||||
qs = self.change_secrets_queryset
|
||||
node_ids = qs.values_list('nodes', flat=True).distinct()
|
||||
nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
|
||||
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
|
||||
direct_asset_ids = qs.values_list('assets', flat=True).distinct()
|
||||
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
|
||||
return Asset.objects.filter(id__in=asset_ids)
|
||||
|
||||
def get_filtered_counts(self, qs, field=None):
|
||||
if field is None:
|
||||
return qs.count()
|
||||
return self.get_queryset_date_filter(qs, field).count()
|
||||
|
||||
def get_status_counts(self, executions):
|
||||
executions = executions.filter(type=self.tp)
|
||||
total, failed, success = 0, 0, 0
|
||||
for status in executions.values_list('status', flat=True):
|
||||
total += 1
|
||||
if status in [Status.failed, Status.error]:
|
||||
failed += 1
|
||||
elif status == Status.success:
|
||||
success += 1
|
||||
|
||||
return {
|
||||
'total_count_change_secret_executions': total,
|
||||
'total_count_success_change_secret_executions': success,
|
||||
'total_count_failed_change_secret_executions': failed,
|
||||
}
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
query_params = self.request.query_params
|
||||
data = {}
|
||||
|
||||
_all = query_params.get('all')
|
||||
|
||||
if _all or query_params.get('total_count_change_secrets'):
|
||||
data['total_count_change_secrets'] = self.get_filtered_counts(
|
||||
self.change_secrets_queryset
|
||||
)
|
||||
|
||||
if _all or query_params.get('total_count_periodic_change_secrets'):
|
||||
data['total_count_periodic_change_secrets'] = self.get_filtered_counts(
|
||||
self.change_secrets_queryset.filter(is_periodic=True)
|
||||
)
|
||||
|
||||
if _all or query_params.get('total_count_change_secret_assets'):
|
||||
data['total_count_change_secret_assets'] = self.get_change_secret_asset_queryset().count()
|
||||
|
||||
if _all or query_params.get('total_count_change_secret_status'):
|
||||
executions = self.get_queryset_date_filter(AutomationExecution.objects.all(), 'date_start')
|
||||
data.update(self.get_status_counts(executions))
|
||||
|
||||
if _all or query_params.get('daily_success_and_failure_metrics'):
|
||||
success, failed = self.get_daily_success_and_failure_metrics()
|
||||
data.update({
|
||||
'dates_metrics_date': [date.strftime('%m-%d') for date in self.date_range_list] or ['0'],
|
||||
'dates_metrics_total_count_success': success,
|
||||
'dates_metrics_total_count_failed': failed,
|
||||
})
|
||||
|
||||
if _all or query_params.get('total_count_ongoing_change_secret'):
|
||||
ongoing_counts = cache.get(self.ongoing_change_secret_cache_key)
|
||||
if ongoing_counts is None:
|
||||
execution_ids = []
|
||||
inspect = app.control.inspect()
|
||||
try:
|
||||
active_tasks = inspect.active()
|
||||
except Exception:
|
||||
active_tasks = None
|
||||
if active_tasks:
|
||||
for tasks in active_tasks.values():
|
||||
for task in tasks:
|
||||
_id = task.get('id')
|
||||
name = task.get('name')
|
||||
tp = task.get('kwargs', {}).get('tp')
|
||||
if name == self.task_name and tp == self.tp:
|
||||
execution_ids.append(_id)
|
||||
|
||||
snapshots = AutomationExecution.objects.filter(id__in=execution_ids).values_list('snapshot', flat=True)
|
||||
|
||||
asset_ids = {asset for i in snapshots for asset in i.get('assets', [])}
|
||||
account_ids = {account for i in snapshots for account in i.get('accounts', [])}
|
||||
|
||||
ongoing_counts = (len(execution_ids), len(asset_ids), len(account_ids))
|
||||
data['total_count_ongoing_change_secret'] = ongoing_counts[0]
|
||||
data['total_count_ongoing_change_secret_assets'] = ongoing_counts[1]
|
||||
data['total_count_ongoing_change_secret_accounts'] = ongoing_counts[2]
|
||||
cache.set(self.ongoing_change_secret_cache_key, ongoing_counts, 60)
|
||||
else:
|
||||
data['total_count_ongoing_change_secret'] = ongoing_counts[0]
|
||||
data['total_count_ongoing_change_secret_assets'] = ongoing_counts[1]
|
||||
data['total_count_ongoing_change_secret_accounts'] = ongoing_counts[2]
|
||||
|
||||
return JsonResponse(data, status=200)
|
||||
@@ -1,165 +0,0 @@
# -*- coding: utf-8 -*-
#
from django.db.models import Q, Count
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework.decorators import action
from rest_framework.exceptions import MethodNotAllowed
from rest_framework.response import Response

from accounts import serializers
from accounts.const import AutomationTypes
from accounts.models import (
    CheckAccountAutomation,
    AccountRisk,
    RiskChoice,
    CheckAccountEngine,
    AutomationExecution,
)
from assets.models import Asset
from common.api import JMSModelViewSet
from common.permissions import IsValidLicense
from common.utils import many_get
from orgs.mixins.api import OrgBulkModelViewSet
from rbac.permissions import RBACPermission
from .base import AutomationExecutionViewSet
from ...filters import NodeFilterBackend
from ...risk_handlers import RiskHandler

__all__ = [
    "CheckAccountAutomationViewSet",
    "CheckAccountExecutionViewSet",
    "AccountRiskViewSet",
    "CheckAccountEngineViewSet",
]


class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
    model = CheckAccountAutomation
    filterset_fields = ("name",)
    search_fields = filterset_fields
    permission_classes = [RBACPermission, IsValidLicense]
    serializer_class = serializers.CheckAccountAutomationSerializer


class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
    rbac_perms = (
        ("list", "accounts.view_checkaccountexecution"),
        ("retrieve", "accounts.view_checkaccountexecution"),
        ("create", "accounts.add_checkaccountexecution"),
        ("adhoc", "accounts.add_checkaccountexecution"),
        ("report", "accounts.view_checkaccountexecution"),
    )
    ordering = ("-date_created",)
    tp = AutomationTypes.check_account
    permission_classes = [RBACPermission, IsValidLicense]

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(type=self.tp)
        return queryset

    @action(methods=["get"], detail=False, url_path="adhoc")
    def adhoc(self, request, *args, **kwargs):
        asset_id = request.query_params.get("asset_id")
        if not asset_id:
            return Response(status=400, data={"asset_id": "This field is required."})

        asset = get_object_or_404(Asset, pk=asset_id)
        name = "Check asset risk: {}".format(asset.name)
        execution = AutomationExecution()
        execution.snapshot = {
            "assets": [asset_id],
            "nodes": [],
            "type": AutomationTypes.check_account,
            "engines": "__all__",
            "name": name,
        }
        execution.save()
        execution.start()
        report = execution.manager.gen_report()
        return HttpResponse(report)


class AccountRiskViewSet(OrgBulkModelViewSet):
    model = AccountRisk
    search_fields = ["username", "asset__name"]
    filterset_fields = ("risk", "status", "asset_id")
    extra_filter_backends = [NodeFilterBackend]
    permission_classes = [RBACPermission, IsValidLicense]
    serializer_classes = {
        "default": serializers.AccountRiskSerializer,
        "assets": serializers.AssetRiskSerializer,
        "handle": serializers.HandleRiskSerializer,
    }
    ordering_fields = ("asset", "risk", "status", "username", "date_created")
    ordering = ("status", "asset", "date_created")
    rbac_perms = {
        "sync_accounts": "assets.add_accountrisk",
        "assets": "accounts.view_accountrisk",
        "handle": "accounts.change_accountrisk",
    }

    def update(self, request, *args, **kwargs):
        raise MethodNotAllowed("PUT")

    def create(self, request, *args, **kwargs):
        raise MethodNotAllowed("POST")

    @action(methods=["get"], detail=False, url_path="assets")
    def assets(self, request, *args, **kwargs):
        annotations = {
            f"{risk[0]}_count": Count("id", filter=Q(risk=risk[0]))
            for risk in RiskChoice.choices
        }
        queryset = (
            AccountRisk.objects.select_related(
                "asset", "asset__platform"
            )  # use select_related to optimize the asset and asset__platform lookups
            .values(
                "asset__id", "asset__name", "asset__address", "asset__platform__name"
            )  # select the required fields
            .annotate(risk_total=Count("id"))  # total number of risks
            .annotate(**annotations)  # count per risk using the annotations defined above
        )
        return self.get_paginated_response_from_queryset(queryset)

    @action(methods=["post"], detail=False, url_path="handle")
    def handle(self, request, *args, **kwargs):
        s = self.get_serializer(data=request.data)
        s.is_valid(raise_exception=True)

        asset, username, act, risk = many_get(
            s.validated_data, ("asset", "username", "action", "risk")
        )
        handler = RiskHandler(asset=asset, username=username, request=self.request)

        try:
            risk = handler.handle(act, risk)
            s = serializers.AccountRiskSerializer(instance=risk)
            return Response(data=s.data)
        except Exception as e:
            return Response(status=400, data=str(e))


class CheckAccountEngineViewSet(JMSModelViewSet):
    search_fields = ("name",)
    serializer_class = serializers.CheckAccountEngineSerializer
    permission_classes = [RBACPermission, IsValidLicense]
    perm_model = CheckAccountEngine
    http_method_names = ['get', 'options']

    def get_queryset(self):
        if getattr(self, "swagger_fake_view", False):
            return CheckAccountEngine.objects.none()

        return CheckAccountEngine.get_default_engines()

    def filter_queryset(self, queryset: list):
        search = self.request.GET.get('search')
        if search is not None:
            queryset = [
                item for item in queryset
                if search in item['name']
            ]
        return queryset
@@ -1,131 +0,0 @@
# -*- coding: utf-8 -*-
#
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.clickjacking import xframe_options_sameorigin
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response

from accounts import serializers
from accounts.const import AutomationTypes
from accounts.filters import GatheredAccountFilterSet, NodeFilterBackend
from accounts.models import GatherAccountsAutomation, AutomationExecution, Account
from accounts.models import GatheredAccount
from assets.models import Asset
from common.const import ConfirmOrIgnore
from common.utils.http import is_true
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet

__all__ = [
    "DiscoverAccountsAutomationViewSet",
    "DiscoverAccountsExecutionViewSet",
    "GatheredAccountViewSet",
]

from ...risk_handlers import RiskHandler


class DiscoverAccountsAutomationViewSet(OrgBulkModelViewSet):
    model = GatherAccountsAutomation
    filterset_fields = ("name",)
    search_fields = filterset_fields
    serializer_class = serializers.DiscoverAccountAutomationSerializer


class DiscoverAccountsExecutionViewSet(AutomationExecutionViewSet):
    rbac_perms = (
        ("list", "accounts.view_gatheraccountsexecution"),
        ("retrieve", "accounts.view_gatheraccountsexecution"),
        ("create", "accounts.add_gatheraccountsexecution"),
        ("adhoc", "accounts.add_gatheraccountsexecution"),
        ("report", "accounts.view_gatheraccountsexecution"),
    )

    tp = AutomationTypes.gather_accounts

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(type=self.tp)
        return queryset

    @xframe_options_sameorigin
    @action(methods=["get"], detail=False, url_path="adhoc")
    def adhoc(self, request, *args, **kwargs):
        asset_id = request.query_params.get("asset_id")
        if not asset_id:
            return Response(status=400, data={"asset_id": "This field is required."})

        asset = get_object_or_404(Asset, pk=asset_id)
        execution = AutomationExecution()
        execution.snapshot = {
            "assets": [asset_id],
            "nodes": [],
            "type": "gather_accounts",
            "is_sync_account": False,
            "check_risk": True,
            "name": "Adhoc gather accounts: {}".format(asset.name),
        }
        execution.save()
        execution.start()
        report = execution.manager.gen_report()
        return HttpResponse(report)


class GatheredAccountViewSet(OrgBulkModelViewSet):
    model = GatheredAccount
    search_fields = ("username",)
    filterset_class = GatheredAccountFilterSet
    extra_filter_backends = [NodeFilterBackend]
    ordering = ("status",)
    serializer_classes = {
        "default": serializers.DiscoverAccountSerializer,
        "status": serializers.DiscoverAccountActionSerializer,
        "details": serializers.DiscoverAccountDetailsSerializer
    }
    rbac_perms = {
        "status": "assets.change_gatheredaccount",
        "details": "assets.view_gatheredaccount"
    }

    @action(methods=["put"], detail=False, url_path="status")
    def status(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        validated_data = serializer.validated_data
        ids = validated_data.get('ids', [])
        new_status = validated_data.get('status')
        updated_instances = GatheredAccount.objects.filter(id__in=ids).select_related('asset')
        if new_status == ConfirmOrIgnore.confirmed:
            GatheredAccount.sync_accounts(updated_instances)
            updated_instances.update(present=True)
        updated_instances.update(status=new_status)
        return Response(status=status.HTTP_200_OK)

    def perform_destroy(self, instance):
        request = self.request
        params = request.query_params
        is_delete_remote = params.get("is_delete_remote")
        is_delete_account = params.get("is_delete_account")
        asset_id = params.get("asset")
        username = params.get("username")
        if is_true(is_delete_remote):
            self._delete_remote(asset_id, username)
        if is_true(is_delete_account):
            account = get_object_or_404(Account, username=username, asset_id=asset_id)
            account.delete()
        super().perform_destroy(instance)

    def _delete_remote(self, asset_id, username):
        asset = get_object_or_404(Asset, pk=asset_id)
        handler = RiskHandler(asset, username, request=self.request)
        handler.handle_delete_remote()

    @action(methods=["get"], detail=True, url_path="details")
    def details(self, request, *args, **kwargs):
        pk = kwargs.get('pk')
        account = get_object_or_404(GatheredAccount, pk=pk)
        serializer = self.get_serializer(account.detail)
        return Response(data=serializer.data)
@@ -1,72 +0,0 @@
# -*- coding: utf-8 -*-
#
from rest_framework import mixins

from accounts import serializers
from accounts.const import AutomationTypes
from accounts.filters import PushAccountRecordFilterSet
from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import (
    AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
    AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)

__all__ = [
    'PushAccountAutomationViewSet', 'PushAccountAssetsListApi', 'PushAccountRemoveAssetApi',
    'PushAccountAddAssetApi', 'PushAccountNodeAddRemoveApi', 'PushAccountExecutionViewSet',
    'PushAccountRecordViewSet'
]


class PushAccountAutomationViewSet(OrgBulkModelViewSet):
    model = PushAccountAutomation
    filterset_fields = ('name', 'secret_type', 'secret_strategy')
    search_fields = filterset_fields
    serializer_class = serializers.PushAccountAutomationSerializer


class PushAccountExecutionViewSet(AutomationExecutionViewSet):
    rbac_perms = (
        ("list", "accounts.view_pushaccountexecution"),
        ("retrieve", "accounts.view_pushaccountexecution"),
        ("create", "accounts.add_pushaccountexecution"),
        ("report", "accounts.view_pushaccountexecution"),
    )

    tp = AutomationTypes.push_account

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(type=self.tp)
        return queryset


class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
    filterset_class = PushAccountRecordFilterSet
    search_fields = ('asset__address', 'account__username')
    ordering_fields = ('date_finished',)
    tp = AutomationTypes.push_account
    serializer_classes = {
        'default': serializers.PushSecretRecordSerializer,
    }

    def get_queryset(self):
        return PushSecretRecord.get_valid_records()


class PushAccountAssetsListApi(AutomationAssetsListApi):
    model = PushAccountAutomation


class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateAssetSerializer

class PushAccountAddAssetApi(AutomationAddAssetApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateAssetSerializer

class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateNodeSerializer
@@ -1,11 +0,0 @@
from django.apps import AppConfig


class AccountsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'accounts'
    verbose_name = 'App Accounts'

    def ready(self):
        from . import signal_handlers  # noqa
        from . import tasks  # noqa
@@ -1,2 +0,0 @@
from .endpoint import ExecutionManager
from .methods import platform_automation_methods
@@ -1,291 +0,0 @@
import os
import time
from collections import defaultdict, OrderedDict

from django.conf import settings
from django.db.models import F
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from xlsxwriter import Workbook

from accounts.const import AccountBackupType
from accounts.models import BackupAccountAutomation, Account
from accounts.notifications import AccountBackupExecutionTaskMsg, AccountBackupByObjStorageExecutionTaskMsg
from accounts.serializers import AccountSecretSerializer
from assets.const import AllTypes
from common.const import Status
from common.utils.file import encrypt_and_compress_zip_file, zip_files
from common.utils.timezone import local_now_filename, local_now_display
from terminal.models.component.storage import ReplayStorage
from users.models import User

PATH = os.path.join(os.path.dirname(settings.BASE_DIR), 'tmp')
split_help_text = _('The account key will be split into two parts and sent')


class RecipientsNotFound(Exception):
    pass


class BaseAccountHandler:
    @classmethod
    def unpack_data(cls, serializer_data, data=None):
        if data is None:
            data = {}
        for k, v in serializer_data.items():
            if isinstance(v, OrderedDict):
                cls.unpack_data(v, data)
            else:
                if isinstance(v, dict):
                    v = v.get('label')
                elif v is None:
                    v = ''
                data[k] = v
        return data

    @classmethod
    def get_header_fields(cls, serializer: serializers.Serializer):
        try:
            exclude_backup_fields = getattr(serializer, 'Meta').exclude_backup_fields
        except AttributeError:
            exclude_backup_fields = []
        backup_fields = serializer.fields.keys()

        header_fields = {}
        for field in backup_fields:
            if field in exclude_backup_fields:
                continue

            v = serializer.fields[field]
            if isinstance(v, serializers.Serializer):
                _fields = cls.get_header_fields(v)
                header_fields.update(_fields)
            else:
                header_fields[field] = str(v.label)
        return header_fields

    @classmethod
    def create_row(cls, data, header_fields):
        data = cls.unpack_data(data)
        row_dict = {}
        for field, header_name in header_fields.items():
            row_dict[header_name] = str(data.get(field, field))
        return row_dict

    @classmethod
    def add_rows(cls, data, header_fields, sheet):
        data_map = defaultdict(list)
        for i in data:
            row = cls.create_row(i, header_fields)
            if sheet not in data_map:
                data_map[sheet].append(list(row.keys()))
            data_map[sheet].append(list(row.values()))
        return data_map


class AssetAccountHandler(BaseAccountHandler):
    @staticmethod
    def get_filename(name):
        filename = os.path.join(
            PATH, f'{name}-{local_now_filename()}-{time.time()}.xlsx'
        )
        return filename

    @staticmethod
    def handler_secret(data, section):
        for account_data in data:
            secret = account_data.get('secret')
            if not secret:
                continue
            length = len(secret)
            index = length // 2
            if section == "front":
                secret = secret[:index] + '*' * (length - index)
            elif section == "back":
                secret = '*' * (length - index) + secret[index:]
            account_data['secret'] = secret

    @classmethod
    def create_data_map(cls, accounts, section):
        data_map = defaultdict(list)

        if not accounts.exists():
            return data_map

        type_dict = {}
        for i in AllTypes.grouped_choices_to_objs():
            for j in i['children']:
                type_dict[j['value']] = j['display_name']

        header_fields = cls.get_header_fields(AccountSecretSerializer(accounts.first()))
        account_type_map = defaultdict(list)
        for account in accounts:
            account_type_map[account.type].append(account)

        data_map = {}
        for tp, _accounts in account_type_map.items():
            sheet_name = type_dict.get(tp, tp)
            data = AccountSecretSerializer(_accounts, many=True).data
            cls.handler_secret(data, section)
            data_map.update(cls.add_rows(data, header_fields, sheet_name))
        number_of_backup_accounts = _('Number of backup accounts')
        print('\033[33m- {}: {}\033[0m'.format(number_of_backup_accounts, accounts.count()))
        return data_map


class AccountBackupHandler:
    def __init__(self, manager, execution):
        self.manager = manager
        self.execution = execution
        self.name = self.execution.snapshot.get('name', '-')

    def get_accounts(self):
        # TODO: this query could be optimized by caching the category on the account,
        #  to avoid table joins when the data volume is large
        types = self.execution.snapshot.get('types', [])
        self.manager.summary['total_types'] = len(types)
        qs = Account.objects.filter(
            asset__platform__type__in=types
        ).annotate(type=F('asset__platform__type'))
        return qs

    def create_excel(self, section='complete'):
        hint = _('Generating asset related backup information files')
        print(
            f'\033[32m>>> {hint}\033[0m'
            ''
        )

        time_start = time.time()
        files = []
        accounts = self.get_accounts()
        self.manager.summary['total_accounts'] = accounts.count()
        data_map = AssetAccountHandler.create_data_map(accounts, section)
        if not data_map:
            return files

        filename = AssetAccountHandler.get_filename(self.name)

        wb = Workbook(filename)
        for sheet, data in data_map.items():
            ws = wb.add_worksheet(str(sheet))
            for row_index, row_data in enumerate(data):
                for col_index, col_data in enumerate(row_data):
                    ws.write_string(row_index, col_index, col_data)
        wb.close()
        files.append(filename)
        timedelta = round((time.time() - time_start), 2)
        time_cost = _('Duration')
        file_created = _('Backup file creation completed')
        print('{}: {} {}s'.format(file_created, time_cost, timedelta))
        return files

    def send_backup_mail(self, files, recipients):
        if not files:
            return
        recipients = User.objects.filter(id__in=list(recipients))
        msg = _("Start sending backup emails")
        print(
            f'\033[32m>>> {msg}\033[0m'
            ''
        )
        name = self.name
        for user in recipients:
            if not user.secret_key:
                attachment_list = []
            else:
                attachment = os.path.join(PATH, f'{name}-{local_now_filename()}-{time.time()}.zip')
                encrypt_and_compress_zip_file(attachment, user.secret_key, files)
                attachment_list = [attachment]
            AccountBackupExecutionTaskMsg(name, user).publish(attachment_list)

        for file in files:
            os.remove(file)

    def send_backup_obj_storage(self, files, recipients, password):
        if not files:
            return
        recipients = ReplayStorage.objects.filter(id__in=list(recipients))
        print(
            '\033[32m>>> 📃 ---> sftp \033[0m'
            ''
        )
        name = self.name
        encrypt_file = _('Encrypting files using encryption password')
        for rec in recipients:
            attachment = os.path.join(PATH, f'{name}-{local_now_filename()}-{time.time()}.zip')
            if password:
                print(f'\033[32m>>> {encrypt_file}\033[0m')
                encrypt_and_compress_zip_file(attachment, password, files)
            else:
                zip_files(attachment, files)
            attachment_list = attachment
            AccountBackupByObjStorageExecutionTaskMsg(name, rec).publish(attachment_list)
            file_sent_to = _('The backup file will be sent to')
            print('{}: {}({})'.format(file_sent_to, rec.name, rec.id))
        for file in files:
            os.remove(file)

    def _run(self):
        try:
            backup_type = self.execution.snapshot.get('backup_type', AccountBackupType.email)
            if backup_type == AccountBackupType.email:
                self.backup_by_email()
            elif backup_type == AccountBackupType.object_storage:
                self.backup_by_obj_storage()
        except Exception as e:
            error = str(e)
            print(f'\033[31m>>> {error}\033[0m')
            self.execution.status = Status.error
            self.execution.summary['error'] = error

    def backup_by_obj_storage(self):
        object_id = self.execution.snapshot.get('id')
        zip_encrypt_password = BackupAccountAutomation.objects.get(id=object_id).zip_encrypt_password
        obj_recipients_part_one = self.execution.snapshot.get('obj_recipients_part_one', [])
        obj_recipients_part_two = self.execution.snapshot.get('obj_recipients_part_two', [])
        no_assigned_sftp_server = _('The backup task has no assigned sftp server')
        if not obj_recipients_part_one and not obj_recipients_part_two:
            print(
                '\n'
                f'\033[31m>>> {no_assigned_sftp_server}\033[0m'
                ''
            )
            raise RecipientsNotFound('Not Found Recipients')
        if obj_recipients_part_one and obj_recipients_part_two:
            print(f'\033[32m>>> {split_help_text}\033[0m')
            files = self.create_excel(section='front')
            self.send_backup_obj_storage(files, obj_recipients_part_one, zip_encrypt_password)

            files = self.create_excel(section='back')
            self.send_backup_obj_storage(files, obj_recipients_part_two, zip_encrypt_password)
        else:
            recipients = obj_recipients_part_one or obj_recipients_part_two
            files = self.create_excel()
            self.send_backup_obj_storage(files, recipients, zip_encrypt_password)

    def backup_by_email(self):
        warn_text = _('The backup task has no assigned recipient')
        recipients_part_one = self.execution.snapshot.get('recipients_part_one', [])
        recipients_part_two = self.execution.snapshot.get('recipients_part_two', [])
        if not recipients_part_one and not recipients_part_two:
            print(
                '\n'
                f'\033[31m>>> {warn_text}\033[0m'
                ''
            )
            return
        if recipients_part_one and recipients_part_two:
            print(f'\033[32m>>> {split_help_text}\033[0m')
            files = self.create_excel(section='front')
            self.send_backup_mail(files, recipients_part_one)

            files = self.create_excel(section='back')
            self.send_backup_mail(files, recipients_part_two)
        else:
            recipients = recipients_part_one or recipients_part_two
            files = self.create_excel()
            self.send_backup_mail(files, recipients)

    def run(self):
        print('{}: {}'.format(_('Plan start'), local_now_display()))
        self._run()
@@ -1,30 +0,0 @@
# -*- coding: utf-8 -*-
#

from django.utils.translation import gettext_lazy as _

from assets.automations.base.manager import BaseManager
from common.utils.timezone import local_now_display
from .handlers import AccountBackupHandler


class AccountBackupManager(BaseManager):
    def do_run(self):
        execution = self.execution
        account_backup_execution_being_executed = _('The account backup plan is being executed')
        print(f'\033[33m# {account_backup_execution_being_executed}\033[0m')
        handler = AccountBackupHandler(self, execution)
        handler.run()

    def send_report_if_need(self):
        pass

    def print_summary(self):
        print('\n\n' + '-' * 80)
        plan_execution_end = _('Plan execution end')
        print('{} {}\n'.format(plan_execution_end, local_now_display()))
        time_cost = _('Duration')
        print('{}: {}s'.format(time_cost, self.duration))

    def get_report_template(self):
        return "accounts/backup_account_report.html"
@@ -1,14 +0,0 @@
## all connection vars
hostname asset_name=name asset_type=type asset_primary_protocol=ssh asset_primary_port=22 asset_protocols=[]

## local connection
hostname ansible_connection=local

## local connection with gateway
hostname ansible_connection=ssh ansible_user=gateway.username ansible_port=gateway.port ansible_host=gateway.host ansible_ssh_private_key_file=gateway.key

## ssh connection for windows
hostname ansible_connection=ssh ansible_shell_type=powershell/cmd ansible_user=windows.username ansible_port=windows.port ansible_host=windows.host ansible_ssh_private_key_file=windows.key

## ssh connection
hostname ansible_user=user ansible_password=pass ansible_host=host ansible_port=port ansible_ssh_private_key_file=key ssh_args="-o StrictHostKeyChecking=no"
@@ -1,233 +0,0 @@
|
||||
from copy import deepcopy
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.automations.methods import platform_automation_methods
|
||||
from accounts.const import SSHKeyStrategy, SecretStrategy, SecretType, ChangeSecretRecordStatusChoice, \
|
||||
ChangeSecretAccountStatus
|
||||
from accounts.models import BaseAccountQuerySet
|
||||
from accounts.utils import SecretGenerator, account_secret_task_status
|
||||
from assets.automations.base.manager import BasePlaybookManager
|
||||
from assets.const import HostTypes
|
||||
from common.db.utils import safe_atomic_db_connection
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class AccountBasePlaybookManager(BasePlaybookManager):
|
||||
template_path = ''
|
||||
|
||||
@property
|
||||
def platform_automation_methods(self):
|
||||
return platform_automation_methods
|
||||
|
||||
|
||||
class BaseChangeSecretPushManager(AccountBasePlaybookManager):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.secret_type = self.execution.snapshot.get('secret_type')
|
||||
self.secret_strategy = self.execution.snapshot.get(
|
||||
'secret_strategy', SecretStrategy.custom
|
||||
)
|
||||
self.ssh_key_change_strategy = self.execution.snapshot.get(
|
||||
'ssh_key_change_strategy', SSHKeyStrategy.set_jms
|
||||
)
|
||||
self.account_ids = self.execution.snapshot['accounts']
|
||||
self.record_map = self.execution.snapshot.get('record_map', {}) # 这个是某个失败的记录重试
|
||||
self.name_record_mapper = {} # 做个映射,方便后面处理
|
||||
|
||||
def gen_account_inventory(self, account, asset, h, path_dir):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_ssh_params(self, secret, secret_type):
|
||||
kwargs = {}
|
||||
if secret_type != SecretType.SSH_KEY:
|
||||
return kwargs
|
||||
kwargs['strategy'] = self.ssh_key_change_strategy
|
||||
kwargs['exclusive'] = 'yes' if kwargs['strategy'] == SSHKeyStrategy.set else 'no'
|
||||
|
||||
if kwargs['strategy'] == SSHKeyStrategy.set_jms:
|
||||
kwargs['regexp'] = '.*{}$'.format(secret.split()[2].strip())
|
||||
return kwargs
|
||||
|
||||
def get_secret(self, account):
|
||||
if self.secret_strategy == SecretStrategy.custom:
|
||||
new_secret = self.execution.snapshot['secret']
|
||||
else:
|
||||
generator = SecretGenerator(
|
||||
self.secret_strategy, self.secret_type,
|
||||
self.execution.snapshot.get('password_rules')
|
||||
)
|
||||
new_secret = generator.get_secret()
|
||||
return new_secret
|
||||
|
||||
def get_accounts(self, privilege_account) -> BaseAccountQuerySet | None:
|
||||
if not privilege_account:
|
||||
print('Not privilege account')
|
||||
return
|
||||
|
||||
asset = privilege_account.asset
|
||||
accounts = asset.all_accounts.all()
|
||||
accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)
|
||||
|
||||
if self.secret_type:
|
||||
accounts = accounts.filter(secret_type=self.secret_type)
|
||||
|
||||
if settings.CHANGE_AUTH_PLAN_SECURE_MODE_ENABLED:
|
||||
accounts = accounts.filter(privileged=False).exclude(
|
||||
username__in=['root', 'administrator', privilege_account.username]
|
||||
)
|
||||
return accounts
|
||||
|
||||
def handle_ssh_secret(self, secret_type, new_secret, path_dir):
|
||||
private_key_path = None
|
||||
if secret_type == SecretType.SSH_KEY:
|
||||
private_key_path = self.generate_private_key_path(new_secret, path_dir)
|
||||
new_secret = self.generate_public_key(new_secret)
|
||||
return new_secret, private_key_path
|
||||
|
||||
def gen_inventory(self, h, account, new_secret, private_key_path, asset):
|
||||
secret_type = account.secret_type
|
||||
h['ssh_params'].update(self.get_ssh_params(new_secret, secret_type))
|
||||
h['account'] = {
|
||||
'name': account.name,
|
||||
'username': account.username,
|
||||
'full_username': account.full_username,
|
||||
'secret_type': secret_type,
|
||||
'secret': account.escape_jinja2_syntax(new_secret),
|
||||
'private_key_path': private_key_path,
|
||||
'become': account.get_ansible_become_auth(),
|
||||
}
|
||||
if asset.platform.type == 'oracle':
|
||||
h['account']['mode'] = 'sysdba' if account.privileged else None
|
||||
return h
|
||||
|
||||
def add_extra_params(self, host, **kwargs):
|
||||
host['ssh_params'] = {}
|
||||
return host
|
||||
|
||||
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
|
||||
host = super().host_callback(
|
||||
host, asset=asset, account=account, automation=automation,
|
||||
path_dir=path_dir, **kwargs
|
||||
)
|
||||
if host.get('error'):
|
||||
return host
|
||||
|
||||
host = self.add_extra_params(host, automation=automation)
|
||||
accounts = self.get_accounts(account)
|
||||
existing_ids = set(map(str, accounts.values_list('id', flat=True)))
|
||||
missing_ids = set(map(str, self.account_ids)) - existing_ids
|
||||
|
||||
for account_id in missing_ids:
|
||||
self.clear_account_queue_status(account_id)
|
||||
|
||||
error_msg = _("No pending accounts found")
|
||||
if not accounts:
|
||||
print(f'{asset}: {error_msg}')
|
||||
return []
|
||||
|
||||
if asset.type == HostTypes.WINDOWS:
|
||||
accounts = accounts.filter(secret_type=SecretType.PASSWORD)
|
||||
|
||||
inventory_hosts = []
|
||||
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
|
||||
print(f'Windows {asset} does not support ssh key push')
|
||||
return inventory_hosts
|
||||
|
||||
for account in accounts:
|
||||
h = deepcopy(host)
|
||||
h['name'] += '(' + account.username + ')' # To distinguish different accounts
|
||||
|
||||
account_status = account_secret_task_status.get_status(account.id)
|
||||
if account_status == ChangeSecretAccountStatus.PROCESSING:
|
||||
h['error'] = f'Account is already being processed, skipping: {account}'
|
||||
inventory_hosts.append(h)
|
||||
continue
|
||||
|
||||
try:
|
||||
h, record = self.gen_account_inventory(account, asset, h, path_dir)
|
||||
h['check_conn_after_change'] = record.execution.snapshot.get('check_conn_after_change', True)
|
||||
account_secret_task_status.set_status(
|
||||
account.id,
|
||||
ChangeSecretAccountStatus.PROCESSING,
|
||||
metadata={'execution_id': self.execution.id}
|
||||
)
|
||||
except Exception as e:
|
||||
h['error'] = str(e)
|
||||
self.clear_account_queue_status(account.id)
|
||||
|
||||
inventory_hosts.append(h)
|
||||
|
||||
return inventory_hosts
|
||||
|
||||
@staticmethod
|
||||
def save_record(record):
|
||||
record.save(update_fields=['error', 'status', 'date_finished'])
|
||||
|
||||
@staticmethod
|
||||
def clear_account_queue_status(account_id):
|
||||
account_secret_task_status.clear(account_id)
|
||||
|
||||
def on_host_success(self, host, result):
|
||||
record = self.name_record_mapper.get(host)
|
||||
if not record:
|
||||
return
|
||||
record.status = ChangeSecretRecordStatusChoice.success.value
|
||||
record.date_finished = timezone.now()
|
||||
|
||||
account = record.account
|
||||
if not account:
|
||||
print("Account not found, deleted ?")
|
||||
return
|
||||
|
||||
account.secret = getattr(record, 'new_secret', account.secret)
|
||||
account.date_updated = timezone.now()
|
||||
account.date_change_secret = timezone.now()
|
||||
account.change_secret_status = ChangeSecretRecordStatusChoice.success
|
||||
|
||||
self.summary['ok_accounts'] += 1
|
||||
self.result['ok_accounts'].append(
|
||||
{
|
||||
"asset": str(account.asset),
|
||||
"username": account.username,
|
||||
}
|
||||
)
|
||||
super().on_host_success(host, result)
|
||||
|
||||
with safe_atomic_db_connection():
|
||||
account.save(update_fields=['secret', 'date_updated', 'date_change_secret', 'change_secret_status'])
|
||||
self.save_record(record)
|
||||
self.clear_account_queue_status(account.id)
|
||||
|
||||
def on_host_error(self, host, error, result):
|
||||
record = self.name_record_mapper.get(host)
|
||||
if not record:
|
||||
return
|
||||
record.status = ChangeSecretRecordStatusChoice.failed.value
|
||||
record.date_finished = timezone.now()
|
||||
record.error = error
|
||||
account = record.account
|
||||
if not account:
|
||||
print("Account not found, deleted ?")
|
||||
return
|
||||
account.date_updated = timezone.now()
|
||||
account.date_change_secret = timezone.now()
|
||||
account.change_secret_status = ChangeSecretRecordStatusChoice.failed
|
||||
|
||||
self.summary['fail_accounts'] += 1
|
||||
self.result['fail_accounts'].append(
|
||||
{
|
||||
"asset": str(record.asset),
|
||||
"username": record.account.username,
|
||||
}
|
||||
)
|
||||
super().on_host_error(host, error, result)
|
||||
|
||||
with safe_atomic_db_connection():
|
||||
account.save(update_fields=['change_secret_status', 'date_change_secret', 'date_updated'])
|
||||
self.save_record(record)
|
||||
self.clear_account_queue_status(account.id)
|
||||
@@ -1,67 +0,0 @@
|
||||
- hosts: custom
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_connection: local
|
||||
ansible_become: false
|
||||
|
||||
tasks:
|
||||
- name: Test privileged account (paramiko)
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
login_private_key_path: "{{ jms_account.private_key_path }}"
|
||||
become: "{{ jms_custom_become | default(False) }}"
|
||||
become_method: "{{ jms_custom_become_method | default('su') }}"
|
||||
become_user: "{{ jms_custom_become_user | default('') }}"
|
||||
become_password: "{{ jms_custom_become_password | default('') }}"
|
||||
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
recv_timeout: "{{ params.recv_timeout | default(30) }}"
|
||||
register: ping_info
|
||||
delegate_to: localhost
|
||||
|
||||
- name: Change asset password (paramiko)
|
||||
custom_command:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
login_private_key_path: "{{ jms_account.private_key_path }}"
|
||||
become: "{{ jms_custom_become | default(False) }}"
|
||||
become_method: "{{ jms_custom_become_method | default('su') }}"
|
||||
become_user: "{{ jms_custom_become_user | default('') }}"
|
||||
become_password: "{{ jms_custom_become_password | default('') }}"
|
||||
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
commands: "{{ params.commands }}"
|
||||
answers: "{{ params.answers }}"
|
||||
recv_timeout: "{{ params.recv_timeout | default(30) }}"
|
||||
delay_time: "{{ params.delay_time | default(2) }}"
|
||||
prompt: "{{ params.prompt | default('.*') }}"
|
||||
ignore_errors: true
|
||||
when: ping_info is succeeded and check_conn_after_change
|
||||
register: change_info
|
||||
delegate_to: localhost
|
||||
|
||||
- name: Verify password (paramiko)
|
||||
ssh_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
become: "{{ account.become.ansible_become | default(False) }}"
|
||||
become_method: su
|
||||
become_user: "{{ account.become.ansible_user | default('') }}"
|
||||
become_password: "{{ account.become.ansible_password | default('') }}"
|
||||
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
recv_timeout: "{{ params.recv_timeout | default(30) }}"
|
||||
delegate_to: localhost
|
||||
when: check_conn_after_change
|
||||
@@ -1,132 +0,0 @@
|
||||
id: change_secret_by_ssh
|
||||
name: "{{ 'SSH account change secret' | trans }}"
|
||||
category:
|
||||
- device
|
||||
- host
|
||||
type:
|
||||
- all
|
||||
method: change_secret
|
||||
protocol: ssh
|
||||
priority: 50
|
||||
params:
|
||||
- name: commands
|
||||
type: text
|
||||
label: "{{ 'Params commands label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params commands help text' | trans }}"
|
||||
- name: recv_timeout
|
||||
type: int
|
||||
label: "{{ 'Params recv_timeout label' | trans }}"
|
||||
default: 30
|
||||
help_text: "{{ 'Params recv_timeout help text' | trans }}"
|
||||
- name: delay_time
|
||||
type: int
|
||||
label: "{{ 'Params delay_time label' | trans }}"
|
||||
default: 2
|
||||
help_text: "{{ 'Params delay_time help text' | trans }}"
|
||||
- name: prompt
|
||||
type: str
|
||||
label: "{{ 'Params prompt label' | trans }}"
|
||||
default: '.*'
|
||||
help_text: "{{ 'Params prompt help text' | trans }}"
|
||||
- name: answers
|
||||
type: text
|
||||
label: "{{ 'Params answer label' | trans }}"
|
||||
default: '.*'
|
||||
help_text: "{{ 'Params answer help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
SSH account change secret:
|
||||
zh: '使用 SSH 命令行自定义改密'
|
||||
ja: 'SSH コマンドライン方式でカスタムパスワード変更'
|
||||
en: 'Custom password change by SSH command line'
|
||||
|
||||
Params commands help text:
|
||||
zh: |
|
||||
请将命令中的指定位置改成特殊符号 <br />
|
||||
1. 改密账号 -> {username} <br />
|
||||
2. 改密密码 -> {password} <br />
|
||||
3. 登录用户密码 -> {login_password} <br />
|
||||
<strong>多条命令使用换行分割,</strong>执行任务时系统会根据特殊符号替换真实数据。<br />
|
||||
比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />
|
||||
enable <br />
|
||||
{login_password} <br />
|
||||
configure terminal <br />
|
||||
username {username} privilege 0 password {password} <br />
|
||||
end <br />
|
||||
ja: |
|
||||
コマンド内の指定された位置を特殊記号に変更してください。<br />
|
||||
新しいパスワード(アカウント変更) -> {username} <br />
|
||||
新しいパスワード(パスワード変更) -> {password} <br />
|
||||
ログインユーザーパスワード -> {login_password} <br />
|
||||
<strong>複数のコマンドは改行で区切り、</strong>タスクを実行するときにシステムは特殊記号を使用して実際のデータを置き換えます。<br />
|
||||
例えば、Cisco機器のパスワードを変更する場合、一般的には5つのコマンドを設定する必要があります:<br />
|
||||
enable <br />
|
||||
{login_password} <br />
|
||||
configure terminal <br />
|
||||
username {username} privilege 0 password {password} <br />
|
||||
end <br />
|
||||
en: |
|
||||
Please change the specified positions in the command to special symbols. <br />
|
||||
Change password account -> {username} <br />
|
||||
Change password -> {password} <br />
|
||||
Login user password -> {login_password} <br />
|
||||
<strong>Multiple commands are separated by new lines,</strong> and when executing tasks, <br />
|
||||
the system will replace the special symbols with real data. <br />
|
||||
For example, to change the password for a Cisco device, you generally need to configure five commands: <br />
|
||||
enable <br />
|
||||
{login_password} <br />
|
||||
configure terminal <br />
|
||||
username {username} privilege 0 password {password} <br />
|
||||
end <br />
|
||||
|
||||
Params commands label:
|
||||
zh: '自定义命令'
|
||||
ja: 'カスタムコマンド'
|
||||
en: 'Custom command'
|
||||
|
||||
Params recv_timeout label:
|
||||
zh: '超时时间'
|
||||
ja: 'タイムアウト'
|
||||
en: 'Timeout'
|
||||
|
||||
Params recv_timeout help text:
|
||||
zh: '等待命令结果返回的超时时间(秒)'
|
||||
ja: 'コマンドの結果を待つタイムアウト時間(秒)'
|
||||
en: 'The timeout for waiting for the command result to return (Seconds)'
|
||||
|
||||
Params delay_time label:
|
||||
zh: '延迟发送时间'
|
||||
ja: '遅延送信時間'
|
||||
en: 'Delayed send time'
|
||||
|
||||
Params delay_time help text:
|
||||
zh: '每条命令延迟发送的时间间隔(秒)'
|
||||
ja: '各コマンド送信の遅延間隔(秒)'
|
||||
en: 'Time interval for each command delay in sending (Seconds)'
|
||||
|
||||
Params prompt label:
|
||||
zh: '提示符'
|
||||
ja: 'ヒント'
|
||||
en: 'Prompt'
|
||||
|
||||
Params prompt help text:
|
||||
zh: '终端连接后显示的提示符信息(正则表达式)'
|
||||
ja: 'ターミナル接続後に表示されるプロンプト情報(正規表現)'
|
||||
en: 'Prompt information displayed after terminal connection (Regular expression)'
|
||||
|
||||
Params answer label:
|
||||
zh: '命令结果'
|
||||
ja: 'コマンド結果'
|
||||
en: 'Command result'
|
||||
|
||||
Params answer help text:
|
||||
zh: |
|
||||
根据结果匹配度决定是否执行下一条命令,输入框的内容和上方 “自定义命令” 内容按行一一对应(正则表达式)
|
||||
ja: |
|
||||
結果の一致度に基づいて次のコマンドを実行するかどうかを決定します。
|
||||
入力欄の内容は、上の「カスタムコマンド」の内容と行ごとに対応しています(せいきひょうげん)
|
||||
en: |
|
||||
Decide whether to execute the next command based on the result match.
|
||||
The input content corresponds line by line with the content
|
||||
of the `Custom command` above. (Regular expression)
|
||||
@@ -1,58 +0,0 @@
|
||||
- hosts: mongodb
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
|
||||
tasks:
|
||||
- name: Test MongoDB connection
|
||||
mongodb_ping:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl | default('') }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
register: db_info
|
||||
|
||||
- name: Display MongoDB version
|
||||
debug:
|
||||
var: db_info.server_version
|
||||
when: db_info is succeeded
|
||||
|
||||
- name: Change MongoDB password
|
||||
mongodb_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
db: "{{ jms_asset.spec_info.db_name }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
ignore_errors: true
|
||||
when: db_info is succeeded
|
||||
|
||||
- name: Verify password
|
||||
mongodb_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
when: check_conn_after_change
|
||||
register: result
|
||||
failed_when: not result.is_available
|
||||
@@ -1,12 +0,0 @@
id: change_secret_mongodb
name: "{{ 'MongoDB account change secret' | trans }}"
category: database
type:
  - mongodb
method: change_secret

i18n:
  MongoDB account change secret:
    zh: 使用 Ansible 模块 mongodb 执行 MongoDB 账号改密
    ja: Ansible mongodb モジュールを使用して MongoDB アカウントのパスワード変更
    en: Using Ansible module mongodb to change MongoDB account secret
@@ -1,58 +0,0 @@
|
||||
- hosts: mysql
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
db_name: "{{ jms_asset.spec_info.db_name }}"
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
tasks:
|
||||
- name: Test MySQL connection
|
||||
community.mysql.mysql_info:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
filter: version
|
||||
register: db_info
|
||||
|
||||
- name: MySQL version
|
||||
debug:
|
||||
var: db_info.version.full
|
||||
|
||||
- name: Change MySQL password
|
||||
community.mysql.mysql_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
host: "%"
|
||||
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
|
||||
append_privs: "{{ db_name != '' | bool }}"
|
||||
ignore_errors: true
|
||||
when: db_info is succeeded
|
||||
|
||||
- name: Verify password
|
||||
community.mysql.mysql_info:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
filter: version
|
||||
when: check_conn_after_change
|
||||
@@ -1,13 +0,0 @@
id: change_secret_mysql
name: "{{ 'MySQL account change secret' | trans }}"
category: database
type:
  - mysql
  - mariadb
method: change_secret

i18n:
  MySQL account change secret:
    zh: 使用 Ansible 模块 mysql 执行 MySQL 账号改密
    ja: Ansible mysql モジュールを使用して MySQL アカウントのパスワード変更
    en: Using Ansible module mysql to change MySQL account secret
@@ -1,43 +0,0 @@
|
||||
- hosts: oracle
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
|
||||
tasks:
|
||||
- name: Test Oracle connection
|
||||
oracle_ping:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ jms_account.mode }}"
|
||||
register: db_info
|
||||
|
||||
- name: Display Oracle version
|
||||
debug:
|
||||
var: db_info.server_version
|
||||
when: db_info is succeeded
|
||||
|
||||
- name: Change Oracle password
|
||||
oracle_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ jms_account.mode }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
ignore_errors: true
|
||||
when: db_info is succeeded
|
||||
|
||||
- name: Verify password
|
||||
oracle_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ account.mode }}"
|
||||
when: check_conn_after_change
|
||||
@@ -1,11 +0,0 @@
id: change_secret_oracle
name: "{{ 'Oracle account change secret' | trans }}"
category: database
type:
  - oracle
method: change_secret

i18n:
  Oracle account change secret:
    zh: Oracle 账号改密
    ja: Oracle アカウントのパスワード変更
@@ -1,60 +0,0 @@
|
||||
- hosts: postgre
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
tasks:
|
||||
- name: Test PostgreSQL connection
|
||||
community.postgresql.postgresql_ping:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_db: "{{ jms_asset.spec_info.db_name }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||
register: result
|
||||
failed_when: not result.is_available
|
||||
|
||||
- name: Display PostgreSQL version
|
||||
debug:
|
||||
var: result.server_version.full
|
||||
when: result is succeeded
|
||||
|
||||
- name: Change PostgreSQL password
|
||||
community.postgresql.postgresql_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
db: "{{ jms_asset.spec_info.db_name }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||
role_attr_flags: LOGIN
|
||||
ignore_errors: true
|
||||
when: result is succeeded
|
||||
|
||||
- name: Verify password
|
||||
community.postgresql.postgresql_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
db: "{{ jms_asset.spec_info.db_name }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||
when: check_conn_after_change
|
||||
register: result
|
||||
failed_when: not result.is_available
|
||||
@@ -1,11 +0,0 @@
id: change_secret_postgresql
name: "{{ 'PostgreSQL account change secret' | trans }}"
category: database
type:
  - postgresql
method: change_secret

i18n:
  PostgreSQL account change secret:
    zh: PostgreSQL 账号改密
    ja: PostgreSQL アカウントのパスワード変更
@@ -1,67 +0,0 @@
|
||||
- hosts: sqlserver
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: |
|
||||
SELECT @@version
|
||||
register: db_info
|
||||
|
||||
- name: SQLServer version
|
||||
set_fact:
|
||||
info:
|
||||
version: "{{ db_info.query_results[0][0][0][0].splitlines()[0] }}"
|
||||
- debug:
|
||||
var: info
|
||||
|
||||
- name: Check whether SQLServer User exist
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
|
||||
when: db_info is succeeded
|
||||
register: user_exist
|
||||
|
||||
- name: Change SQLServer password
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length != 0
|
||||
|
||||
- name: Add SQLServer user
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: "CREATE LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; CREATE USER {{ account.username }} FOR LOGIN {{ account.username }}; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length == 0
|
||||
|
||||
- name: Verify password
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: |
|
||||
SELECT @@version
|
||||
when: check_conn_after_change
|
||||
@@ -1,11 +0,0 @@
id: change_secret_sqlserver
name: "{{ 'SQLServer account change secret' | trans }}"
category: database
type:
  - sqlserver
method: change_secret

i18n:
  SQLServer account change secret:
    zh: SQLServer 账号改密
    ja: SQLServer アカウントのパスワード変更
@@ -1,2 +0,0 @@
# all base inventory in base/base_inventory.txt
asset_name(ip)_account_username account={"username": "", "password": "xxx"} ...base_inventory_vars
@@ -1,122 +0,0 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: "Test privileged {{ jms_account.username }} account"
|
||||
ansible.builtin.ping:
|
||||
|
||||
- name: "Check if {{ account.username }} user exists"
|
||||
getent:
|
||||
database: passwd
|
||||
key: "{{ account.username }}"
|
||||
register: user_info
|
||||
failed_when: false
|
||||
changed_when: false
|
||||
|
||||
- name: "Add {{ account.username }} user"
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
state: present
|
||||
when: user_info.msg is defined
|
||||
|
||||
- name: "Set {{ account.username }} sudo setting"
|
||||
ansible.builtin.lineinfile:
|
||||
dest: /etc/sudoers
|
||||
state: present
|
||||
regexp: "^{{ account.username }} ALL="
|
||||
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
|
||||
validate: visudo -cf %s
|
||||
when:
|
||||
- user_info.msg is defined or params.modify_sudo
|
||||
- params.sudo
|
||||
|
||||
- name: "Change {{ account.username }} password"
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret | password_hash('des') }}"
|
||||
update_password: always
|
||||
ignore_errors: true
|
||||
register: change_secret_result
|
||||
when: account.secret_type == "password"
|
||||
|
||||
- name: "Get home directory for {{ account.username }}"
|
||||
ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
|
||||
register: home_dir
|
||||
when: account.secret_type == "ssh_key"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Check if home directory exists for {{ account.username }}"
|
||||
ansible.builtin.stat:
|
||||
path: "{{ home_dir.stdout.strip() }}"
|
||||
register: home_dir_stat
|
||||
when: account.secret_type == "ssh_key"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Ensure {{ account.username }} home directory exists"
|
||||
ansible.builtin.file:
|
||||
path: "{{ home_dir.stdout.strip() }}"
|
||||
state: directory
|
||||
owner: "{{ account.username }}"
|
||||
group: "{{ account.username }}"
|
||||
mode: '0750'
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- home_dir_stat.stat.exists == false
|
||||
ignore_errors: yes
|
||||
|
||||
- name: Remove jumpserver ssh key
|
||||
ansible.builtin.lineinfile:
|
||||
dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
|
||||
regexp: "{{ ssh_params.regexp }}"
|
||||
state: absent
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- ssh_params.strategy == "set_jms"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Change {{ account.username }} SSH key"
|
||||
ansible.builtin.authorized_key:
|
||||
user: "{{ account.username }}"
|
||||
key: "{{ account.secret }}"
|
||||
exclusive: "{{ ssh_params.exclusive }}"
|
||||
register: change_secret_result
|
||||
when: account.secret_type == "ssh_key"
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
- name: "Verify {{ account.username }} password (paramiko)"
|
||||
ssh_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
become: "{{ account.become.ansible_become | default(False) }}"
|
||||
become_method: su
|
||||
become_user: "{{ account.become.ansible_user | default('') }}"
|
||||
become_password: "{{ account.become.ansible_password | default('') }}"
|
||||
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
when:
|
||||
- account.secret_type == "password"
|
||||
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||
delegate_to: localhost
|
||||
|
||||
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_private_key_path: "{{ account.private_key_path }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||
delegate_to: localhost
|
||||
@@ -1,92 +0,0 @@
|
||||
id: change_secret_aix
|
||||
name: "{{ 'AIX account change secret' | trans }}"
|
||||
category: host
|
||||
type:
|
||||
- AIX
|
||||
method: change_secret
|
||||
params:
|
||||
- name: modify_sudo
|
||||
type: bool
|
||||
label: "{{ 'Modify sudo label' | trans }}"
|
||||
default: False
|
||||
help_text: "{{ 'Modify params sudo help text' | trans }}"
|
||||
|
||||
- name: sudo
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
label: 'Shell'
|
||||
default: '/bin/bash'
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
- name: uid
|
||||
type: str
|
||||
label: "{{ 'Params uid label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params uid help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
AIX account change secret:
|
||||
zh: '使用 Ansible 模块 user 执行账号改密 (DES)'
|
||||
ja: 'Ansible user モジュールを使用してアカウントのパスワード変更 (DES)'
|
||||
en: 'Using Ansible module user to change account secret (DES)'
|
||||
|
||||
Modify params sudo help text:
|
||||
zh: '如果用户存在,可以修改sudo权限'
|
||||
ja: 'ユーザーが存在する場合、sudo権限を変更できます'
|
||||
en: 'If the user exists, sudo permissions can be modified'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params uid help text:
|
||||
zh: '请输入用户ID'
|
||||
ja: 'ユーザーIDを入力してください'
|
||||
en: 'Please enter the user ID'
|
||||
|
||||
Modify sudo label:
|
||||
zh: '修改 sudo 权限'
|
||||
ja: 'sudo 権限を変更'
|
||||
en: 'Modify sudo'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
ja: 'ユーザーID'
|
||||
en: 'User ID'
|
||||
@@ -1,122 +0,0 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: "Test privileged {{ jms_account.username }} account"
|
||||
ansible.builtin.ping:
|
||||
|
||||
- name: "Check if {{ account.username }} user exists"
|
||||
getent:
|
||||
database: passwd
|
||||
key: "{{ account.username }}"
|
||||
register: user_info
|
||||
failed_when: false
|
||||
changed_when: false
|
||||
|
||||
- name: "Add {{ account.username }} user"
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
state: present
|
||||
when: user_info.msg is defined
|
||||
|
||||
- name: "Set {{ account.username }} sudo setting"
|
||||
ansible.builtin.lineinfile:
|
||||
dest: /etc/sudoers
|
||||
state: present
|
||||
regexp: "^{{ account.username }} ALL="
|
||||
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
|
||||
validate: visudo -cf %s
|
||||
when:
|
||||
- user_info.msg is defined or params.modify_sudo
|
||||
- params.sudo
|
||||
|
||||
- name: "Change {{ account.username }} password"
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret | password_hash('sha512') }}"
|
||||
update_password: always
|
||||
ignore_errors: true
|
||||
register: change_secret_result
|
||||
when: account.secret_type == "password"
|
||||
|
||||
- name: "Get home directory for {{ account.username }}"
|
||||
ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
|
||||
register: home_dir
|
||||
when: account.secret_type == "ssh_key"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Check if home directory exists for {{ account.username }}"
|
||||
ansible.builtin.stat:
|
||||
path: "{{ home_dir.stdout.strip() }}"
|
||||
register: home_dir_stat
|
||||
when: account.secret_type == "ssh_key"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Ensure {{ account.username }} home directory exists"
|
||||
ansible.builtin.file:
|
||||
path: "{{ home_dir.stdout.strip() }}"
|
||||
state: directory
|
||||
owner: "{{ account.username }}"
|
||||
group: "{{ account.username }}"
|
||||
mode: '0750'
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- home_dir_stat.stat.exists == false
|
||||
ignore_errors: yes
|
||||
|
||||
- name: Remove jumpserver ssh key
|
||||
ansible.builtin.lineinfile:
|
||||
dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
|
||||
regexp: "{{ ssh_params.regexp }}"
|
||||
state: absent
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- ssh_params.strategy == "set_jms"
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Change {{ account.username }} SSH key"
|
||||
ansible.builtin.authorized_key:
|
||||
user: "{{ account.username }}"
|
||||
key: "{{ account.secret }}"
|
||||
exclusive: "{{ ssh_params.exclusive }}"
|
||||
register: change_secret_result
|
||||
when: account.secret_type == "ssh_key"
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
- name: "Verify {{ account.username }} password (paramiko)"
|
||||
ssh_ping:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
become: "{{ account.become.ansible_become | default(False) }}"
|
||||
become_method: su
|
||||
become_user: "{{ account.become.ansible_user | default('') }}"
|
||||
become_password: "{{ account.become.ansible_password | default('') }}"
|
||||
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
when:
|
||||
- account.secret_type == "password"
|
||||
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||
delegate_to: localhost
|
||||
|
||||
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_private_key_path: "{{ account.private_key_path }}"
|
||||
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
|
||||
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
|
||||
when:
|
||||
- account.secret_type == "ssh_key"
|
||||
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||
delegate_to: localhost
|
||||
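The password_hash('sha512') filter used in the playbook above produces a crypt(3)-style SHA-512 hash suitable for /etc/shadow. A minimal sketch of generating the same kind of hash in plain Python, assuming the standard-library crypt module (Unix-only, removed in Python 3.13) and a placeholder secret:

# Sketch only: the same kind of SHA-512 crypt hash that password_hash('sha512')
# writes for the user. The secret value here is a placeholder.
import crypt

hashed = crypt.crypt("placeholder-secret", crypt.mksalt(crypt.METHOD_SHA512))
print(hashed)  # e.g. $6$<salt>$<hash>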
@@ -1,94 +0,0 @@
|
||||
id: change_secret_posix
|
||||
name: "{{ 'Posix account change secret' | trans }}"
|
||||
category: host
|
||||
type:
|
||||
- unix
|
||||
- linux
|
||||
method: change_secret
|
||||
params:
|
||||
- name: modify_sudo
|
||||
type: bool
|
||||
label: "{{ 'Modify sudo label' | trans }}"
|
||||
default: False
|
||||
help_text: "{{ 'Modify params sudo help text' | trans }}"
|
||||
|
||||
- name: sudo
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
label: 'Shell'
|
||||
default: '/bin/bash'
|
||||
help_text: ''
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
- name: uid
|
||||
type: str
|
||||
label: "{{ 'Params uid label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params uid help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Posix account change secret:
|
||||
zh: '使用 Ansible 模块 user 执行账号改密 (SHA512)'
|
||||
ja: 'Ansible user モジュールを使用して アカウントのパスワード変更 (SHA512)'
|
||||
en: 'Using Ansible module user to change account secret (SHA512)'
|
||||
|
||||
Modify params sudo help text:
|
||||
zh: '如果用户存在,可以修改sudo权限'
|
||||
ja: 'ユーザーが存在する場合、sudo権限を変更できます'
|
||||
en: 'If the user exists, sudo permissions can be modified'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params uid help text:
|
||||
zh: '请输入用户ID'
|
||||
ja: 'ユーザーIDを入力してください'
|
||||
en: 'Please enter the user ID'
|
||||
|
||||
Modify sudo label:
|
||||
zh: '修改 sudo 权限'
|
||||
ja: 'sudo 権限を変更'
|
||||
en: 'Modify sudo'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
ja: 'ユーザーID'
|
||||
en: 'User ID'
|
||||
@@ -1,27 +0,0 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: Test privileged account
|
||||
ansible.windows.win_ping:
|
||||
|
||||
- name: Change password
|
||||
ansible.windows.win_user:
|
||||
fullname: "{{ account.username }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
password_never_expires: yes
|
||||
groups: "{{ params.groups }}"
|
||||
groups_action: add
|
||||
update_password: always
|
||||
ignore_errors: true
|
||||
when: account.secret_type == "password"
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
- name: Verify password
|
||||
ansible.windows.win_ping:
|
||||
vars:
|
||||
ansible_user: "{{ account.username }}"
|
||||
ansible_password: "{{ account.secret }}"
|
||||
when: account.secret_type == "password" and check_conn_after_change
|
||||
@@ -1,30 +0,0 @@
|
||||
id: change_secret_local_windows
|
||||
name: "{{ 'Windows account change secret' | trans }}"
|
||||
version: 1
|
||||
method: change_secret
|
||||
category: host
|
||||
type:
|
||||
- windows
|
||||
params:
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
|
||||
i18n:
|
||||
Windows account change secret:
|
||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密'
|
||||
ja: 'Ansible win_user モジュールを使用して Windows アカウントのパスワード変更'
|
||||
en: 'Using Ansible module win_user to change Windows account secret'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
@@ -1,27 +0,0 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: Test privileged account
|
||||
ansible.windows.win_ping:
|
||||
|
||||
- name: Change password
|
||||
community.windows.win_domain_user:
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
update_password: always
|
||||
password_never_expires: yes
|
||||
state: present
|
||||
groups: "{{ params.groups }}"
|
||||
groups_action: add
|
||||
ignore_errors: true
|
||||
when: account.secret_type == "password"
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
- name: Verify password
|
||||
ansible.windows.win_ping:
|
||||
vars:
|
||||
ansible_user: "{{ account.full_username }}"
|
||||
ansible_password: "{{ account.secret }}"
|
||||
when: account.secret_type == "password" and check_conn_after_change
|
||||
@@ -1,32 +0,0 @@
|
||||
id: change_secret_ad_windows
|
||||
name: "{{ 'Windows account change secret' | trans }}"
|
||||
version: 1
|
||||
method: change_secret
|
||||
category:
|
||||
- ds
|
||||
type:
|
||||
- windows_ad
|
||||
params:
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
|
||||
i18n:
|
||||
Windows account change secret:
|
||||
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
|
||||
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
|
||||
en: 'Using Ansible module win_domain_user to change Windows account secret'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: Test privileged account
|
||||
ansible.windows.win_ping:
|
||||
|
||||
- name: Change password
|
||||
ansible.windows.win_user:
|
||||
fullname: "{{ account.username }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
password_never_expires: yes
|
||||
groups: "{{ params.groups }}"
|
||||
groups_action: add
|
||||
update_password: always
|
||||
ignore_errors: true
|
||||
when: account.secret_type == "password"
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
- name: Verify password (pyfreerdp)
|
||||
rdp_ping:
|
||||
login_host: "{{ jms_asset.origin_address }}"
|
||||
login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'rdp') | map(attribute='port') | first }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_secret_type: "{{ account.secret_type }}"
|
||||
gateway_args: "{{ jms_gateway | default({}) }}"
|
||||
when: account.secret_type == "password" and check_conn_after_change
|
||||
delegate_to: localhost
|
||||
@@ -1,32 +0,0 @@
|
||||
id: change_secret_windows_rdp_verify
|
||||
name: "{{ 'Windows account change secret rdp verify' | trans }}"
|
||||
version: 1
|
||||
method: change_secret
|
||||
category: host
|
||||
type:
|
||||
- windows
|
||||
priority: 49
|
||||
params:
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
|
||||
i18n:
|
||||
Windows account change secret rdp verify:
|
||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密(最后使用 Python 模块 pyfreerdp 验证账号的可连接性)'
|
||||
ja: 'Ansible モジュール win_user を使用して Windows アカウントのパスワードを変更します (最後に Python モジュール pyfreerdp を使用してアカウントの接続を確認します)'
|
||||
en: 'Use the Ansible module win_user to change the Windows account password (finally use the Python module pyfreerdp to verify the account connectivity)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
@@ -1,162 +0,0 @@
|
||||
import os
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from xlsxwriter import Workbook
|
||||
|
||||
from assets.automations.methods import platform_automation_methods as asset_methods
|
||||
from assets.const import AutomationTypes as AssetAutomationTypes
|
||||
from accounts.automations.methods import platform_automation_methods as account_methods
|
||||
from accounts.const import (
|
||||
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
|
||||
)
|
||||
from accounts.models import ChangeSecretRecord
|
||||
from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretReportMsg
|
||||
from accounts.serializers import ChangeSecretRecordBackUpSerializer
|
||||
from common.utils import get_logger
|
||||
from common.utils.file import encrypt_and_compress_zip_file
|
||||
from common.utils.timezone import local_now_filename
|
||||
from ..base.manager import BaseChangeSecretPushManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ChangeSecretManager(BaseChangeSecretPushManager):
|
||||
ansible_account_prefer = ''
|
||||
|
||||
def get_method_id_meta_mapper(self):
|
||||
return {
|
||||
method["id"]: method for method in self.platform_automation_methods
|
||||
}
|
||||
|
||||
@property
|
||||
def platform_automation_methods(self):
|
||||
return asset_methods + account_methods
|
||||
|
||||
def add_extra_params(self, host, **kwargs):
|
||||
host = super().add_extra_params(host, **kwargs)
|
||||
automation = kwargs.get('automation')
|
||||
for extra_type in [AssetAutomationTypes.ping, AutomationTypes.verify_account]:
|
||||
host[f"{extra_type}_params"] = self.get_params(automation, extra_type)
|
||||
return host
|
||||
|
||||
@classmethod
|
||||
def method_type(cls):
|
||||
return AutomationTypes.change_secret
|
||||
|
||||
def gen_account_inventory(self, account, asset, h, path_dir):
|
||||
record = self.get_or_create_record(asset, account, h['name'])
|
||||
new_secret, private_key_path = self.handle_ssh_secret(account.secret_type, record.new_secret, path_dir)
|
||||
h = self.gen_inventory(h, account, new_secret, private_key_path, asset)
|
||||
return h, record
|
||||
|
||||
def get_or_create_record(self, asset, account, name):
|
||||
asset_account_id = f'{asset.id}-{account.id}'
|
||||
|
||||
if asset_account_id in self.record_map:
|
||||
record_id = self.record_map[asset_account_id]
|
||||
record = ChangeSecretRecord.objects.filter(id=record_id).first()
|
||||
else:
|
||||
new_secret = self.get_secret(account)
|
||||
record = self.create_record(asset, account, new_secret)
|
||||
|
||||
self.name_record_mapper[name] = record
|
||||
return record
|
||||
|
||||
def create_record(self, asset, account, new_secret):
|
||||
record = ChangeSecretRecord(
|
||||
asset=asset, account=account, execution=self.execution,
|
||||
old_secret=account.secret, new_secret=new_secret,
|
||||
comment=f'{account.username}@{asset.address}'
|
||||
)
|
||||
return record
|
||||
|
||||
def check_secret(self):
|
||||
if self.secret_strategy == SecretStrategy.custom \
|
||||
and not self.execution.snapshot['secret']:
|
||||
print('Custom secret is empty')
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def get_summary(records):
|
||||
total, succeed, failed = 0, 0, 0
|
||||
for record in records:
|
||||
if record.status == ChangeSecretRecordStatusChoice.success.value:
|
||||
succeed += 1
|
||||
else:
|
||||
failed += 1
|
||||
total += 1
|
||||
summary = _('Success: %s, Failed: %s, Total: %s') % (succeed, failed, total)
|
||||
return summary
|
||||
|
||||
def print_summary(self):
|
||||
records = list(self.name_record_mapper.values())
|
||||
summary = self.get_summary(records)
|
||||
print('\n\n' + '-' * 80)
|
||||
plan_execution_end = _('Plan execution end')
|
||||
print('{} {}\n'.format(plan_execution_end, local_now_filename()))
|
||||
time_cost = _('Duration')
|
||||
print('{}: {}s'.format(time_cost, self.duration))
|
||||
print(summary)
|
||||
|
||||
def send_report_if_need(self, *args, **kwargs):
|
||||
if self.secret_type and not self.check_secret():
|
||||
return
|
||||
|
||||
records = list(self.name_record_mapper.values())
|
||||
if self.record_map:
|
||||
return
|
||||
|
||||
recipients = self.execution.recipients
|
||||
if not recipients:
|
||||
return
|
||||
|
||||
context = self.get_report_context()
|
||||
for user in recipients:
|
||||
ChangeSecretReportMsg(user, context).publish()
|
||||
|
||||
if not records:
|
||||
return
|
||||
|
||||
summary = self.get_summary(records)
|
||||
self.send_record_mail(recipients, records, summary)
|
||||
|
||||
def send_record_mail(self, recipients, records, summary):
|
||||
name = self.execution.snapshot['name']
|
||||
path = os.path.join(os.path.dirname(settings.BASE_DIR), 'tmp')
|
||||
filename = os.path.join(path, f'{name}-{local_now_filename()}-{time.time()}.xlsx')
|
||||
if not self.create_file(records, filename):
|
||||
return
|
||||
|
||||
for user in recipients:
|
||||
attachments = []
|
||||
if user.secret_key:
|
||||
attachment = os.path.join(path, f'{name}-{local_now_filename()}-{time.time()}.zip')
|
||||
encrypt_and_compress_zip_file(attachment, user.secret_key, [filename])
|
||||
attachments = [attachment]
|
||||
ChangeSecretExecutionTaskMsg(name, user, summary).publish(attachments)
|
||||
os.remove(filename)
|
||||
|
||||
@staticmethod
|
||||
def create_file(records, filename):
|
||||
serializer_cls = ChangeSecretRecordBackUpSerializer
|
||||
serializer = serializer_cls(records, many=True)
|
||||
|
||||
header = [str(v.label) for v in serializer.child.fields.values()]
|
||||
rows = [[str(i) for i in row.values()] for row in serializer.data]
|
||||
if not rows:
|
||||
return False
|
||||
|
||||
rows.insert(0, header)
|
||||
wb = Workbook(filename)
|
||||
ws = wb.add_worksheet('Sheet1')
|
||||
for row_index, row_data in enumerate(rows):
|
||||
for col_index, col_data in enumerate(row_data):
|
||||
ws.write_string(row_index, col_index, col_data)
|
||||
wb.close()
|
||||
return True
|
||||
|
||||
def get_report_template(self):
|
||||
return "accounts/change_secret_report.html"
|
||||
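The create_file helper above writes the change records to an .xlsx attachment with xlsxwriter before the report mail goes out. A standalone sketch of the same write loop, with made-up header and row data rather than real serializer output:

# Minimal sketch of the xlsxwriter usage in create_file above; sample data only.
from xlsxwriter import Workbook

rows = [
    ["Asset", "Account", "Status"],   # header row
    ["web-01", "root", "success"],
    ["db-01", "postgres", "failed"],
]
wb = Workbook("change_secret_records.xlsx")
ws = wb.add_worksheet("Sheet1")
for row_index, row_data in enumerate(rows):
    for col_index, col_data in enumerate(row_data):
        ws.write_string(row_index, col_index, col_data)
wb.close()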
@@ -1,36 +0,0 @@
|
||||
- hosts: website
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
|
||||
tasks:
|
||||
- name: Test privileged account
|
||||
website_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
steps: "{{ ping_params.steps }}"
|
||||
load_state: "{{ ping_params.load_state }}"
|
||||
|
||||
- name: "Change {{ account.username }} password"
|
||||
website_user:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
steps: "{{ params.steps }}"
|
||||
load_state: "{{ params.load_state }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
ignore_errors: true
|
||||
register: change_secret_result
|
||||
|
||||
- name: "Verify {{ account.username }} password"
|
||||
website_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
steps: "{{ verify_account_params.steps }}"
|
||||
load_state: "{{ verify_account_params.load_state }}"
|
||||
when:
|
||||
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||
delegate_to: localhost
|
||||
@@ -1,51 +0,0 @@
|
||||
id: change_account_website
|
||||
name: "{{ 'Website account change secret' | trans }}"
|
||||
category: web
|
||||
type:
|
||||
- website
|
||||
method: change_secret
|
||||
priority: 50
|
||||
params:
|
||||
- name: load_state
|
||||
type: choice
|
||||
label: "{{ 'Load state' | trans }}"
|
||||
choices:
|
||||
- [ networkidle, "{{ 'Network idle' | trans }}" ]
|
||||
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
|
||||
- [ load, "{{ 'Load completed' | trans }}" ]
|
||||
default: 'load'
|
||||
- name: steps
|
||||
type: list
|
||||
default: [ ]
|
||||
label: "{{ 'Steps' | trans }}"
|
||||
help_text: "{{ 'Params step help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Website account change secret:
|
||||
zh: 使用 Playwright 模拟浏览器变更账号密码
|
||||
ja: Playwright を使用してブラウザをシミュレートし、アカウントのパスワードを変更します
|
||||
en: Use Playwright to simulate a browser for account password change.
|
||||
Load state:
|
||||
zh: 加载状态检测
|
||||
en: Load state detection
|
||||
ja: ロード状態の検出
|
||||
Steps:
|
||||
zh: 步骤
|
||||
en: Steps
|
||||
ja: 手順
|
||||
Network idle:
|
||||
zh: 网络空闲
|
||||
en: Network idle
|
||||
ja: ネットワークが空いた状態
|
||||
Dom content loaded:
|
||||
zh: 文档内容加载完成
|
||||
en: Dom content loaded
|
||||
ja: ドキュメントの内容がロードされた状態
|
||||
Load completed:
|
||||
zh: 全部加载完成
|
||||
en: All load completed
|
||||
ja: すべてのロードが完了した状態
|
||||
Params step help text:
|
||||
zh: 根据配置决定任务执行步骤
|
||||
ja: 設定に基づいてタスクの実行ステップを決定する
|
||||
en: Determine task execution steps based on configuration
|
||||
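As the manifest above says, the website change-secret method drives a browser through Playwright, and the load_state parameter corresponds to the page-load condition to wait for. A rough sketch of what one scripted step could look like; the URL, selectors and credentials are hypothetical and not taken from JumpServer:

# Rough sketch only: a hypothetical "change password" step driven by Playwright.
from playwright.sync_api import sync_playwright

with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    # load_state in the manifest maps to wait_until here:
    # "networkidle", "domcontentloaded" or "load"
    page.goto("https://example.com/profile/password", wait_until="load")
    page.fill("#old-password", "old-secret")
    page.fill("#new-password", "new-secret")
    page.click("button[type=submit]")
    browser.close()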
@@ -1,78 +0,0 @@
#!/usr/bin/env python
#
import re
import sqlite3
import sys


def is_weak_password(password):
    if len(password) < 8:
        return True

    # Only one character class (all digits or all letters)
    if password.isdigit() or password.isalpha():
        return True

    # Single case: all lowercase or all uppercase
    if password.islower() or password.isupper():
        return True

    # Common weak passwords
    common_passwords = ["123456", "password", "12345678", "qwerty", "abc123"]
    if password.lower() in common_passwords:
        return True

    # Require character diversity: letters, digits and special characters
    if (
        not re.search(r"[A-Za-z]", password)
        or not re.search(r"[0-9]", password)
        or not re.search(r"[\W_]", password)
    ):
        return True
    return False


def parse_it(fname):
    count = 0
    lines = []
    with open(fname, 'rb') as f:
        for line in f:
            try:
                line = line.decode().strip()
            except UnicodeDecodeError:
                continue

            if len(line) > 32:
                continue

            if is_weak_password(line):
                continue

            lines.append(line)
            count += 1
            print(line)
    return lines


def insert_to_db(lines):
    conn = sqlite3.connect('./leak_passwords.db')
    cursor = conn.cursor()
    create_table_sql = '''
        CREATE TABLE IF NOT EXISTS passwords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            password CHAR(32)
        )
    '''
    create_index_sql = 'CREATE INDEX IF NOT EXISTS idx_password ON passwords(password)'
    cursor.execute(create_table_sql)
    cursor.execute(create_index_sql)

    for line in lines:
        cursor.execute('INSERT INTO passwords (password) VALUES (?)', [line])
    conn.commit()
    conn.close()


if __name__ == '__main__':
    filename = sys.argv[1]
    lines = parse_it(filename)
    insert_to_db(lines)
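Once the script has populated leak_passwords.db, a lookup is a single indexed query against the passwords table. A small usage sketch:

# Sketch: query the leak_passwords.db produced by the script above.
import sqlite3

def is_leaked(password, db_path="./leak_passwords.db"):
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT 1 FROM passwords WHERE password = ? LIMIT 1", (password,))
        return cursor.fetchone() is not None
    finally:
        conn.close()

print(is_leaked("example-password"))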
@@ -1,292 +0,0 @@
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sqlite3
|
||||
import uuid
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.models import Account, AccountRisk, RiskChoice
|
||||
from assets.automations.base.manager import BaseManager
|
||||
from common.const import ConfirmOrIgnore
|
||||
from common.decorators import bulk_create_decorator, bulk_update_decorator
|
||||
from settings.models import LeakPasswords
|
||||
|
||||
|
||||
# finish() is called manually in commit_risks() below
|
||||
@bulk_create_decorator(AccountRisk)
|
||||
def create_risk(data):
|
||||
return AccountRisk(**data)
|
||||
|
||||
|
||||
# finish() is called manually in commit_risks() below
|
||||
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
|
||||
def update_risk(risk):
|
||||
return risk
|
||||
|
||||
|
||||
class BaseCheckHandler:
|
||||
risk = ''
|
||||
|
||||
def __init__(self, assets):
|
||||
self.assets = assets
|
||||
|
||||
def check(self, account):
|
||||
pass
|
||||
|
||||
def clean(self):
|
||||
pass
|
||||
|
||||
|
||||
class CheckSecretHandler(BaseCheckHandler):
|
||||
risk = RiskChoice.weak_password
|
||||
|
||||
@staticmethod
|
||||
def is_weak_password(password):
|
||||
# Check password length
|
||||
if len(password) < 8:
|
||||
return True
|
||||
|
||||
# Only one character class (all digits or all letters)
|
||||
if password.isdigit() or password.isalpha():
|
||||
return True
|
||||
|
||||
# Single case: all lowercase or all uppercase
|
||||
if password.islower() or password.isupper():
|
||||
return True
|
||||
|
||||
# Common weak passwords
|
||||
common_passwords = ["123456", "password", "12345678", "qwerty", "abc123"]
|
||||
if password.lower() in common_passwords:
|
||||
return True
|
||||
|
||||
# Require character diversity: letters, digits and special characters
|
||||
if (
|
||||
not re.search(r"[A-Za-z]", password)
|
||||
or not re.search(r"[0-9]", password)
|
||||
or not re.search(r"[\W_]", password)
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def check(self, account):
|
||||
if not account.secret:
|
||||
return False
|
||||
return self.is_weak_password(account.secret)
|
||||
|
||||
|
||||
class CheckRepeatHandler(BaseCheckHandler):
|
||||
risk = RiskChoice.repeated_password
|
||||
|
||||
def __init__(self, assets):
|
||||
super().__init__(assets)
|
||||
self.path, self.conn, self.cursor = self.init_repeat_check_db()
|
||||
self.add_password_for_check_repeat()
|
||||
|
||||
@staticmethod
|
||||
def init_repeat_check_db():
|
||||
path = os.path.join('/tmp', 'accounts_' + str(uuid.uuid4()) + '.db')
|
||||
sql = """
|
||||
CREATE TABLE IF NOT EXISTS accounts (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
digest CHAR(32)
|
||||
)
|
||||
"""
|
||||
index = "CREATE INDEX IF NOT EXISTS idx_digest ON accounts(digest)"
|
||||
conn = sqlite3.connect(path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(sql)
|
||||
cursor.execute(index)
|
||||
return path, conn, cursor
|
||||
|
||||
def check(self, account):
|
||||
if not account.secret:
|
||||
return False
|
||||
|
||||
digest = self.digest(account.secret)
|
||||
sql = 'SELECT COUNT(*) FROM accounts WHERE digest = ?'
|
||||
self.cursor.execute(sql, [digest])
|
||||
result = self.cursor.fetchone()
|
||||
if not result:
|
||||
return False
|
||||
return result[0] > 1
|
||||
|
||||
@staticmethod
|
||||
def digest(secret):
|
||||
return hashlib.md5(secret.encode()).hexdigest()
|
||||
|
||||
def add_password_for_check_repeat(self):
|
||||
accounts = Account.objects.all().only('id', '_secret', 'secret_type')
|
||||
sql = "INSERT INTO accounts (digest) VALUES (?)"
|
||||
|
||||
for account in accounts:
|
||||
secret = account.secret
|
||||
if not secret:
|
||||
continue
|
||||
digest = self.digest(secret)
|
||||
self.cursor.execute(sql, [digest])
|
||||
self.conn.commit()
|
||||
|
||||
def clean(self):
|
||||
self.cursor.close()
|
||||
self.conn.close()
|
||||
os.remove(self.path)
|
||||
|
||||
|
||||
class CheckLeakHandler(BaseCheckHandler):
|
||||
risk = RiskChoice.leaked_password
|
||||
|
||||
def __init__(self, *args):
|
||||
super().__init__(*args)
|
||||
self.conn, self.cursor = self.init_leak_password_db()
|
||||
|
||||
@staticmethod
|
||||
def init_leak_password_db():
|
||||
db_path = os.path.join(
|
||||
settings.APPS_DIR, 'accounts', 'automations',
|
||||
'check_account', 'leak_passwords.db'
|
||||
)
|
||||
|
||||
if settings.LEAK_PASSWORD_DB_PATH and os.path.isfile(settings.LEAK_PASSWORD_DB_PATH):
|
||||
db_path = settings.LEAK_PASSWORD_DB_PATH
|
||||
|
||||
db_conn = sqlite3.connect(db_path)
|
||||
db_cursor = db_conn.cursor()
|
||||
return db_conn, db_cursor
|
||||
|
||||
def check(self, account):
|
||||
if not account.secret:
|
||||
return False
|
||||
|
||||
is_exist = LeakPasswords.objects.using('sqlite').filter(password=account.secret).exists()
|
||||
return is_exist
|
||||
|
||||
def clean(self):
|
||||
self.cursor.close()
|
||||
self.conn.close()
|
||||
|
||||
|
||||
class CheckAccountManager(BaseManager):
|
||||
batch_size = 100
|
||||
tmpl = 'Checked the status of account %s: %s'
|
||||
|
||||
def __init__(self, execution):
|
||||
super().__init__(execution)
|
||||
self.assets = []
|
||||
self.batch_risks = []
|
||||
self.handlers = []
|
||||
|
||||
def add_risk(self, risk, account):
|
||||
self.summary[risk] += 1
|
||||
self.result[risk].append({
|
||||
'asset': str(account.asset), 'username': account.username,
|
||||
})
|
||||
risk_obj = {'account': account, 'risk': risk}
|
||||
self.batch_risks.append(risk_obj)
|
||||
|
||||
def commit_risks(self, assets):
|
||||
account_risks = AccountRisk.objects.filter(asset__in=assets)
|
||||
ori_risk_map = {}
|
||||
|
||||
for risk in account_risks:
|
||||
key = f'{risk.account_id}_{risk.risk}'
|
||||
ori_risk_map[key] = risk
|
||||
|
||||
now = timezone.now().isoformat()
|
||||
for d in self.batch_risks:
|
||||
account = d["account"]
|
||||
key = f'{account.id}_{d["risk"]}'
|
||||
origin_risk = ori_risk_map.get(key)
|
||||
|
||||
if origin_risk and origin_risk.status != ConfirmOrIgnore.pending:
|
||||
details = origin_risk.details or []
|
||||
details.append({"datetime": now, 'type': 'refind'})
|
||||
|
||||
if len(details) > 10:
|
||||
details = [*details[:5], *details[-5:]]
|
||||
|
||||
origin_risk.details = details
|
||||
origin_risk.status = ConfirmOrIgnore.pending
|
||||
update_risk(origin_risk)
|
||||
else:
|
||||
create_risk({
|
||||
"account": account,
|
||||
"asset": account.asset,
|
||||
"username": account.username,
|
||||
"risk": d["risk"],
|
||||
"details": [{"datetime": now, 'type': 'init'}],
|
||||
})
|
||||
|
||||
create_risk.finish()
|
||||
update_risk.finish()
|
||||
|
||||
def pre_run(self):
|
||||
super().pre_run()
|
||||
self.assets = self.execution.get_all_assets()
|
||||
|
||||
def batch_check(self, handler):
|
||||
print("Engine: {}".format(handler.__class__.__name__))
|
||||
for i in range(0, len(self.assets), self.batch_size):
|
||||
_assets = self.assets[i: i + self.batch_size]
|
||||
accounts = Account.objects.filter(asset__in=_assets)
|
||||
|
||||
print("Start to check accounts: {}".format(len(accounts)))
|
||||
|
||||
for account in accounts:
|
||||
error = handler.check(account)
|
||||
msg = handler.risk if error else 'ok'
|
||||
|
||||
print("Check: {} => {}".format(account, msg))
|
||||
if not error:
|
||||
AccountRisk.objects.filter(
|
||||
asset=account.asset,
|
||||
username=account.username,
|
||||
risk=handler.risk
|
||||
).delete()
|
||||
continue
|
||||
self.add_risk(handler.risk, account)
|
||||
self.commit_risks(_assets)
|
||||
|
||||
def do_run(self, *args, **kwargs):
|
||||
engines = self.execution.snapshot.get("engines", [])
|
||||
if engines == '__all__':
|
||||
engines = ['check_account_secret', 'check_account_repeat', 'check_account_leak']
|
||||
|
||||
for engine in engines:
|
||||
if engine == "check_account_secret":
|
||||
handler = CheckSecretHandler(self.assets)
|
||||
elif engine == "check_account_repeat":
|
||||
handler = CheckRepeatHandler(self.assets)
|
||||
elif engine == "check_account_leak":
|
||||
handler = CheckLeakHandler(self.assets)
|
||||
else:
|
||||
print("Unknown engine: {}".format(engine))
|
||||
continue
|
||||
|
||||
self.handlers.append(handler)
|
||||
self.batch_check(handler)
|
||||
|
||||
def post_run(self):
|
||||
super().post_run()
|
||||
for handler in self.handlers:
|
||||
handler.clean()
|
||||
|
||||
def get_report_subject(self):
|
||||
return _("Check account report of {}").format(self.execution.id)
|
||||
|
||||
def get_report_template(self):
|
||||
return "accounts/check_account_report.html"
|
||||
|
||||
def print_summary(self):
|
||||
tmpl = _("---\nSummary: \nok: {}, weak password: {}, leaked password: {}, "
|
||||
"repeated password: {}, no secret: {}, using time: {}s").format(
|
||||
self.summary["ok"],
|
||||
self.summary[RiskChoice.weak_password],
|
||||
self.summary[RiskChoice.leaked_password],
|
||||
self.summary[RiskChoice.repeated_password],
|
||||
self.summary["no_secret"],
|
||||
self.duration
|
||||
)
|
||||
print(tmpl)
|
||||
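To make the rules in CheckSecretHandler.is_weak_password above concrete, here are a few sample inputs and the verdicts they would get; the values are illustrative only and not taken from the project:

# Illustrative inputs for the is_weak_password rules above (sample values only).
samples = {
    "1234567": True,        # shorter than 8 characters
    "12345678": True,       # digits only (also a common weak password)
    "abcdefgh": True,       # letters only, single case
    "Password": True,       # letters only
    "P@ssw0rd2024": False,  # long enough, mixes letters, digits and a symbol
}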
@@ -1,32 +0,0 @@
from .backup_account.manager import AccountBackupManager
from .change_secret.manager import ChangeSecretManager
from .check_account.manager import CheckAccountManager
from .gather_account.manager import GatherAccountsManager
from .push_account.manager import PushAccountManager
from .remove_account.manager import RemoveAccountManager
from .verify_account.manager import VerifyAccountManager
from .verify_gateway_account.manager import VerifyGatewayAccountManager
from ..const import AutomationTypes


class ExecutionManager:
    manager_type_mapper = {
        AutomationTypes.push_account: PushAccountManager,
        AutomationTypes.change_secret: ChangeSecretManager,
        AutomationTypes.verify_account: VerifyAccountManager,
        AutomationTypes.remove_account: RemoveAccountManager,
        AutomationTypes.gather_accounts: GatherAccountsManager,
        AutomationTypes.verify_gateway_account: VerifyGatewayAccountManager,
        AutomationTypes.check_account: CheckAccountManager,
        AutomationTypes.backup_account: AccountBackupManager,
    }

    def __init__(self, execution):
        self.execution = execution
        self._runner = self.manager_type_mapper[execution.manager_type](execution)

    def run(self, *args, **kwargs):
        return self._runner.run(*args, **kwargs)

    def __getattr__(self, item):
        return getattr(self._runner, item)
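ExecutionManager is a thin dispatcher: it looks up the concrete manager class for execution.manager_type and proxies everything else to that runner. A hedged usage sketch, assuming `execution` is an automation execution object whose manager_type matches one of the mapper keys:

# Sketch: dispatching an execution to its concrete manager.
manager = ExecutionManager(execution)   # execution.manager_type picks the runner
manager.run()                           # delegates to e.g. ChangeSecretManager.run()
print(manager.duration)                 # other attributes are proxied via __getattr__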
@@ -1,27 +0,0 @@
- hosts: mongodb
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"

  tasks:
    - name: Get info
      community.mongodb.mongodb_info:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_database: "{{ jms_asset.spec_info.db_name }}"
        ssl: "{{ jms_asset.spec_info.use_ssl }}"
        ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
        ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
        connection_options:
          - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
        filter: users
      register: db_info

    - name: Define info by set_fact
      set_fact:
        info: "{{ db_info.users }}"

    - debug:
        var: info
@@ -1,11 +0,0 @@
id: gather_accounts_mongodb
name: "{{ 'MongoDB account gather' | trans }}"
category: database
type:
  - mongodb
method: gather_accounts

i18n:
  MongoDB account gather:
    zh: MongoDB 账号收集
    ja: MongoDB アカウントの収集
@@ -1,29 +0,0 @@
- hosts: mysql
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Get info
      community.mysql.mysql_info:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: users
      register: db_info

    - name: Define info by set_fact
      set_fact:
        info: "{{ db_info.users }}"

    - debug:
        var: info
@@ -1,12 +0,0 @@
id: gather_accounts_mysql
name: "{{ 'MySQL account gather' | trans }}"
category: database
type:
  - mysql
  - mariadb
method: gather_accounts

i18n:
  MySQL account gather:
    zh: MySQL 账号收集
    ja: MySQL アカウントの収集
@@ -1,23 +0,0 @@
- hosts: oracle
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"

  tasks:
    - name: Get info
      oracle_info:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_database: "{{ jms_asset.spec_info.db_name }}"
        mode: "{{ jms_account.mode }}"
        filter: users
      register: db_info

    - name: Define info by set_fact
      set_fact:
        info: "{{ db_info.users }}"

    - debug:
        var: info
@@ -1,11 +0,0 @@
id: gather_accounts_oracle
name: "{{ 'Oracle account gather' | trans }}"
category: database
type:
  - oracle
method: gather_accounts

i18n:
  Oracle account gather:
    zh: Oracle 账号收集
    ja: Oracle アカウントの収集
@@ -1,30 +0,0 @@
- hosts: postgresql
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Get info
      community.postgresql.postgresql_info:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
        filter: "roles"
      register: db_info

    - name: Define info by set_fact
      set_fact:
        info: "{{ db_info.roles }}"

    - debug:
        var: info