Mirror of https://github.com/jumpserver/jumpserver.git (synced 2025-12-15 08:32:48 +00:00)

Compare commits: ccrc_v4...v4.10.13-l (282 Commits)
Commit SHA1s in this range:

1d0db2ba8b c97124c279 32a766ed34 58fd15d743 f50250dedb 9e150b7fbe
16c79f59a7 be0f04862a 1a3fb2f0db 4cd70efe66 28700c01c8 4524822245
9d04fda018 01c277cd1e c4b3531d72 8870d1ef9e 6c5086a083 e9f762a982
d4d4cadbcd 5e56590405 ad8c0f6664 47dd6babfc 691d1c4dba ac485804d5
51e5fdb301 69c4d613f7 1ad825bf0d a286cb9343 1eb489bb2d 4334ae9e5e
f2e346a0c3 dc20b06431 387a9248fc 705fd6385f 0ccf36621f a9ae12fc2c
7b1a25adde a1b5eb1cd8 24ac642c5e e4f5e21219 a2aae9db47 206c43cf75
019a657ec3 fad60ee40f 1728412793 3e93034fbc f4b3a7d73a 3781c40179
fab6219cea dd0cacb4bc b8639601a1 ab9882c9c1 77a7b74b15 4bc05865f1
bec9e4f3a7 359adf3dbb ac54bb672c 9e3ba00bc4 2ec9a43317 06be56ef06
b2a618b206 1039c2e320 8d7267400d d67e473884 70068c9253 d68babb2e1
afb6f466d5 453ad331ee e617245b26 d309d11a8f 4771693a56 cefc820ac1
d007afdb43 e8921a43be a9b44103d4 4abf2bded6 54693089a0 0b859dd502
3fb27f969a 45627a1d92 245e2dab66 8f0a41b1a8 1a9e56c520 67c2f471b4
b04f96f5f2 30f03b7d89 28a97d0b5a 3410686690 6860e2327f 20253e760c
a63cfde8d2 92e250e03b 098f0950cb 39b0830a6b 2e847bc2bc f82f31876a
cde182c015 b990cdf561 c9a062823d 643ba4fc15 d16a55bbe2 ae31554729
53b47980a2 d31b5ee570 65aea1ea36 5abb5c5d5a 93e41a5004 95f51bbe48
0184d292ec 23a6d320c7 b16304c48a 7cd1e4d3a0 64a9987c3f 18bfe312fa
9280884c1c c593f91d77 46da05652a 9249aba1a9 eca637c120 ddacd5fce1
3ca5c04099 6603a073ec d745f7495a 76f1667c89 1ab1954299 c8335999a4
5b4a67362d e025073da2 2155bc6862 953b515817 7f7a354b2d 2b2f7ea3f0
529123e1b5 e156ab6ad8 3c1fd134ae b15f663c87 93906dff0a 307befdacd
dbfc4d3981 849edd33c1 37cceec8fe d2494c25cc 023952582e 863fe95100
4b0bdb18c9 10da053a95 c40bc46520 a732cc614e bb29d519c6 b56c3a76a7
ab908d24a7 79cabe1b3c 231b7287c1 be7a4c0d6e 009da19050 dfda6b1e08
59b40578d8 e5db28c014 6d1f26b0f8 2333dbbe33 16461b0fa9 528b0ea1ba
60f06adaa9 7a6187b95f aacaf3a174 3c9d2534fa 4f79abe678 ae9956ff91
429677e0ce 034ee65157 fdd7d9b6b1 db0e21f5d9 468b84eb3d 28d5475d0f
b9c60d856f bd1d73c6dd bf92c756d4 62ebe0d636 0b1fea8492 65b5f573f8
bb639e1fe7 395b868dcf 1350b774b3 af7a00c1b1 965ec7007c 1372fd7535
3b0ef4cca7 6832abdaad c6bf290dbb 23ab66c11a 1debaa5547 47413966c9
703f39607c f31994fdcd b65ff0d84c 30d781dd12 9551cd4da9 87b456c941
d4d5224c17 dabb30d90a 82192d38e1 571d2b4575 ea64313c4e 8764cdb733
980394efed 2c94f10d64 e1c9f5180d 3f1d7fa230 44bcd6e399 5f87d98c31
540becdcbe 6929c4968e 63b213d3a8 64fe7a55ec 27829e09ef 1bfc7daef6
9422aebc5e 8c0cd20b48 0c612648a0 36e01a316c e1b96e01eb 144f4b4466
8e007004c2 c14f740209 13a85f062c 7f9d027bd3 c037ce1c29 ee7c6b4708
d0e625e322 c65794a99d 1e4bca6e24 c1c5025fbb 96020fa6b4 5ad6f87a9e
9b0c73c9f9 c029714ffd c1e8a1b561 21126de2c1 7d06819bbe 92b20fe2ef
4326d35065 4810eae725 24f7946b7b 4b9c4a550e d3ec23ba85 e3c33bca32
0fb7e84678 ab30bfb2d2 d9d034488f 24bd7b7e1a 7fb5fd3956 9c621f5ff5
ac8998b9ee b258537890 b38d83c578 257f290d18 d185be2180 4e33b5b478
1406437d4e e46aa95980 c619a35a04 29f10bf10e a822905ae7 dc5a743f4f
1de8781704 f3d9f4c446 6b5d5c15ae 1074a0df19 04dca794dd 14e0396508
835eb2e3d0 be24f28d9b 26cea550c4 36ae076cb0 51c5294fb4 da083fffa3
1df04d2a94 299e52cd11 38b268b104 6095e9c9bd c4a348aac6 75575af56f
26  .github/.github/issue-spam-config.json (vendored, new file)

@@ -0,0 +1,26 @@
{
  "dry_run": false,
  "min_account_age_days": 3,
  "max_urls_for_spam": 1,
  "min_body_len_for_links": 40,
  "spam_words": [
    "call now",
    "zadzwoń",
    "zadzwoń teraz",
    "kontakt",
    "telefon",
    "telefone",
    "contato",
    "suporte",
    "infolinii",
    "click here",
    "buy now",
    "subscribe",
    "visit"
  ],
  "bracket_max": 6,
  "special_char_density_threshold": 0.12,
  "phone_regex": "\\+?\\d[\\d\\-\\s\\(\\)\\.]{6,}\\d",
  "labels_for_spam": ["spam"],
  "labels_for_review": ["needs-triage"]
}
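The workflow that consumes this config is not part of this compare view. As a minimal, hypothetical sketch of how a triage script could apply these thresholds (the field names come from the JSON above; the function and its call signature are assumptions):

```python
# Hypothetical consumer of issue-spam-config.json (not part of this diff).
import json
import re

def classify_issue(body: str, url_count: int,
                   config_path: str = ".github/.github/issue-spam-config.json"):
    with open(config_path) as f:
        cfg = json.load(f)

    reasons = []
    lowered = body.lower()
    if any(word in lowered for word in cfg["spam_words"]):
        reasons.append("spam_words")
    if re.search(cfg["phone_regex"], body):
        reasons.append("phone_number")
    if url_count > cfg["max_urls_for_spam"] and len(body) < cfg["min_body_len_for_links"]:
        reasons.append("links_in_short_body")
    specials = sum(1 for ch in body if not ch.isalnum() and not ch.isspace())
    if body and specials / len(body) > cfg["special_char_density_threshold"]:
        reasons.append("special_char_density")

    # Spam hits get the spam labels, otherwise the issue is routed for review.
    labels = cfg["labels_for_spam"] if reasons else cfg["labels_for_review"]
    return labels, reasons
```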
120  .github/workflows/build-base-image.yml (vendored)

@@ -1,74 +1,72 @@
 name: Build and Push Base Image

 on:
   pull_request:
     branches:
       - 'dev'
       - 'v*'
     paths:
       - poetry.lock
       - pyproject.toml
       - Dockerfile-base
       - package.json
       - go.mod
       - yarn.lock
       - pom.xml
       - install_deps.sh
       - utils/clean_site_packages.sh
     types:
       - opened
       - synchronize
       - reopened

 jobs:
   build-and-push:
     runs-on: ubuntu-22.04
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.ref }}

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
-        with:
-          image: tonistiigi/binfmt:qemu-v7.0.0-28

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

       - name: Login to DockerHub
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Extract date
         id: vars
         run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV

       - name: Extract repository name
         id: repo
         run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV

       - name: Build and push multi-arch image
         uses: docker/build-push-action@v6
         with:
           platforms: linux/amd64,linux/arm64
           push: true
           file: Dockerfile-base
           tags: jumpserver/core-base:${{ env.IMAGE_TAG }}

       - name: Update Dockerfile
         run: |
           sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile

       - name: Commit changes
         run: |
           git config --global user.name 'github-actions[bot]'
           git config --global user.email 'github-actions[bot]@users.noreply.github.com'
           git add Dockerfile
           git commit -m "perf: Update Dockerfile with new base image tag"
           git push origin ${{ github.event.pull_request.head.ref }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
46  .github/workflows/build-python-image.yml (vendored, new file)

@@ -0,0 +1,46 @@
name: Build and Push Python Base Image

on:
  workflow_dispatch:
    inputs:
      tag:
        description: 'Tag to build'
        required: true
        default: '3.11-slim-bullseye-v1'
        type: string

jobs:
  build-and-push:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          image: tonistiigi/binfmt:qemu-v7.0.0-28

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract repository name
        id: repo
        run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV

      - name: Build and push multi-arch image
        uses: docker/build-push-action@v6
        with:
          platforms: linux/amd64,linux/arm64
          push: true
          file: Dockerfile-python
          tags: jumpserver/core-base:python-${{ inputs.tag }}
123  .github/workflows/cleanup-branches.yml (vendored, new file)

@@ -0,0 +1,123 @@
name: Cleanup PR Branches

on:
  schedule:
    # Run daily at 2:00 AM
    - cron: '0 2 * * *'
  workflow_dispatch:
    # Allow manual triggering
    inputs:
      dry_run:
        description: 'Dry run mode (default: true)'
        required: false
        default: 'true'
        type: boolean

jobs:
  cleanup-branches:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch all branches and the full commit history

      - name: Setup Git
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "actions@github.com"

      - name: Get dry run setting
        id: dry-run
        run: |
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            echo "dry_run=${{ github.event.inputs.dry_run }}" >> $GITHUB_OUTPUT
          else
            echo "dry_run=false" >> $GITHUB_OUTPUT
          fi

      - name: Cleanup branches
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DRY_RUN: ${{ steps.dry-run.outputs.dry_run }}
        run: |
          echo "Starting branch cleanup..."
          echo "Dry run mode: $DRY_RUN"

          # Fetch all branches
          git fetch --all --prune

          # Get branches whose names start with pr or repr
          branches=$(git branch -r | grep -E 'origin/(pr|repr)' | sed 's/origin\///' | grep -v 'HEAD')

          echo "Found branches matching pattern:"
          echo "$branches"

          deleted_count=0
          skipped_count=0

          for branch in $branches; do
            echo ""
            echo "Processing branch: $branch"

            # Check whether the branch has any open PRs
            pr_info=$(gh pr list --head "$branch" --state open --json number,title,state 2>/dev/null)

            if [ $? -eq 0 ] && [ "$pr_info" != "[]" ]; then
              echo " ⚠️ Branch has open PR(s), skipping deletion"
              echo " PR info: $pr_info"
              skipped_count=$((skipped_count + 1))
              continue
            fi

            # Check whether the branch has merged PRs (optional: a branch with a merged PR can also be deleted)
            merged_pr_info=$(gh pr list --head "$branch" --state merged --json number,title,state 2>/dev/null)

            if [ $? -eq 0 ] && [ "$merged_pr_info" != "[]" ]; then
              echo " ✅ Branch has merged PR(s), safe to delete"
              echo " Merged PR info: $merged_pr_info"
            else
              echo " ℹ️ No PRs found for this branch"
            fi

            # Perform the deletion
            if [ "$DRY_RUN" = "true" ]; then
              echo " 🔍 [DRY RUN] Would delete branch: $branch"
              deleted_count=$((deleted_count + 1))
            else
              echo " 🗑️ Deleting branch: $branch"

              # Delete the remote branch
              if git push origin --delete "$branch" 2>/dev/null; then
                echo " ✅ Successfully deleted remote branch: $branch"
                deleted_count=$((deleted_count + 1))
              else
                echo " ❌ Failed to delete remote branch: $branch"
              fi
            fi
          done

          echo ""
          echo "=== Cleanup Summary ==="
          echo "Branches processed: $(echo "$branches" | wc -l)"
          echo "Branches deleted: $deleted_count"
          echo "Branches skipped: $skipped_count"

          if [ "$DRY_RUN" = "true" ]; then
            echo ""
            echo "🔍 This was a DRY RUN - no branches were actually deleted"
            echo "To perform actual deletion, run this workflow manually with dry_run=false"
          fi

      - name: Create summary
        if: always()
        run: |
          echo "## Branch Cleanup Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Workflow:** ${{ github.workflow }}" >> $GITHUB_STEP_SUMMARY
          echo "**Run ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
          echo "**Dry Run:** ${{ steps.dry-run.outputs.dry_run }}" >> $GITHUB_STEP_SUMMARY
          echo "**Triggered by:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Check the logs above for detailed information about processed branches." >> $GITHUB_STEP_SUMMARY
9  .github/workflows/sync-gitee.yml (vendored)

@@ -1,11 +1,9 @@
 name: 🔀 Sync mirror to Gitee

 on:
-  push:
-    branches:
-      - master
-      - dev
-  create:
+  schedule:
+    # Run daily at 3:00 AM
+    - cron: '0 3 * * *'

 jobs:
   mirror:

@@ -14,7 +12,6 @@ jobs:
     steps:
       - name: mirror
         continue-on-error: true
-        if: github.event_name == 'push' || (github.event_name == 'create' && github.event.ref_type == 'tag')
         uses: wearerequired/git-mirror-action@v1
         env:
           SSH_PRIVATE_KEY: ${{ secrets.GITEE_SSH_PRIVATE_KEY }}
@@ -1,4 +1,4 @@
-FROM jumpserver/core-base:20250509_094529 AS stage-build
+FROM jumpserver/core-base:20251113_092612 AS stage-build

ARG VERSION

@@ -19,7 +19,7 @@ RUN set -ex \
    && python manage.py compilemessages


-FROM python:3.11-slim-bullseye
+FROM python:3.11-slim-trixie
ENV LANG=en_US.UTF-8 \
    PATH=/opt/py3/bin:$PATH

@@ -33,12 +33,13 @@ ARG TOOLS=" \
        default-libmysqlclient-dev \
        openssh-client \
        sshpass \
        nmap \
        bubblewrap"

ARG APT_MIRROR=http://deb.debian.org

RUN set -ex \
-   && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
+   && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
    && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && apt-get update > /dev/null \
    && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
@@ -1,6 +1,5 @@
-FROM python:3.11-slim-bullseye
+FROM python:3.11.14-slim-trixie
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
        ca-certificates \

@@ -22,13 +21,13 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
    set -ex \
    && rm -f /etc/apt/apt.conf.d/docker-clean \
    && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
-   && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
+   && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
    && apt-get update > /dev/null \
    && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
    && echo "no" | dpkg-reconfigure dash

# Install bin tools
-ARG CHECK_VERSION=v1.0.4
+ARG CHECK_VERSION=v1.0.5
RUN set -ex \
    && wget https://github.com/jumpserver-dev/healthcheck/releases/download/${CHECK_VERSION}/check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
    && tar -xf check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \

@@ -41,12 +40,10 @@ RUN set -ex \
WORKDIR /opt/jumpserver

ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
    PATH=/opt/py3/bin:$PATH

ENV UV_LINK_MODE=copy
ENV SETUPTOOLS_SCM_PRETEND_VERSION=3.4.5

RUN --mount=type=cache,target=/root/.cache \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \

@@ -54,6 +51,7 @@ RUN --mount=type=cache,target=/root/.cache \
    --mount=type=bind,source=requirements/collections.yml,target=collections.yml \
    --mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
    set -ex \
    && pip install uv -i${PIP_MIRROR} \
    && uv venv \
    && uv pip install -i${PIP_MIRROR} -r pyproject.toml \
    && ln -sf $(pwd)/.venv /opt/py3 \
@@ -13,8 +13,9 @@ ARG TOOLS=" \
        nmap \
        telnet \
        vim \
-       postgresql-client-13 \
-       wget"
+       postgresql-client \
+       wget \
+       poppler-utils"

RUN set -ex \
    && apt-get update \

@@ -27,5 +28,5 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple

RUN set -ex \
-   && uv pip install -i${PIP_MIRROR} --group xpack
+   && uv pip install -i${PIP_MIRROR} --group xpack \
+   && playwright install chromium --with-deps --only-shell
@@ -2,7 +2,7 @@
<a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>

-## An open-source PAM tool (Bastion Host)
+## An open-source PAM platform (Bastion Host)

[![][license-shield]][license-link]
[![][docs-shield]][docs-link]

@@ -19,7 +19,7 @@
## What is JumpServer?

-JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
+JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.

<picture>

@@ -77,7 +77,8 @@ JumpServer consists of multiple key components, which collectively form the func
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal |
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
-| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
+| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB
| [Client](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer Client |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
@@ -1,16 +1,18 @@
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers as drf_serializers
from rest_framework.decorators import action
from rest_framework.generics import ListAPIView, CreateAPIView
from rest_framework.response import Response
-from rest_framework.status import HTTP_200_OK
+from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST

from accounts import serializers
from accounts.const import ChangeSecretRecordStatusChoice
from accounts.filters import AccountFilterSet, NodeFilterBackend
from accounts.mixins import AccountRecordViewLogMixin
from accounts.models import Account, ChangeSecretRecord
from assets.const.gpt import create_or_update_chatx_resources
from assets.models import Asset, Node
from authentication.permissions import UserConfirmation, ConfirmType
from common.api.mixin import ExtraFilterFieldsMixin

@@ -18,6 +20,7 @@ from common.drf.filters import AttrRulesFilterBackend
from common.permissions import IsValidUser
from common.utils import lazyproperty, get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from orgs.utils import tmp_to_root_org
from rbac.permissions import RBACPermission

logger = get_logger(__file__)

@@ -41,8 +44,9 @@ class AccountViewSet(OrgBulkModelViewSet):
        'partial_update': ['accounts.change_account'],
        'su_from_accounts': 'accounts.view_account',
        'clear_secret': 'accounts.change_account',
-       'move_to_assets': 'accounts.create_account',
-       'copy_to_assets': 'accounts.create_account',
+       'move_to_assets': 'accounts.delete_account',
+       'copy_to_assets': 'accounts.add_account',
+       'chat': 'accounts.view_account',
    }
    export_as_zip = True

@@ -152,6 +156,13 @@ class AccountViewSet(OrgBulkModelViewSet):
    def copy_to_assets(self, request, *args, **kwargs):
        return self._copy_or_move_to_assets(request, move=False)

+   @action(methods=['get'], detail=False, url_path='chat')
+   def chat(self, request, *args, **kwargs):
+       with tmp_to_root_org():
+           __, account = create_or_update_chatx_resources()
+       serializer = self.get_serializer(account)
+       return Response(serializer.data)


class AccountSecretsViewSet(AccountRecordViewLogMixin, AccountViewSet):
    """

@@ -174,12 +185,66 @@ class AssetAccountBulkCreateApi(CreateAPIView):
        'POST': 'accounts.add_account',
    }

+   @staticmethod
+   def get_all_assets(base_payload: dict):
+       nodes = base_payload.pop('nodes', [])
+       asset_ids = base_payload.pop('assets', [])
+       nodes = Node.objects.filter(id__in=nodes).only('id', 'key')
+
+       node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
+       asset_ids = set(asset_ids + list(node_asset_ids))
+       return Asset.objects.filter(id__in=asset_ids)
+
    def create(self, request, *args, **kwargs):
-       serializer = self.get_serializer(data=request.data)
-       serializer.is_valid(raise_exception=True)
-       data = serializer.create(serializer.validated_data)
-       serializer = serializers.AssetAccountBulkSerializerResultSerializer(data, many=True)
-       return Response(data=serializer.data, status=HTTP_200_OK)
+       if hasattr(request.data, "copy"):
+           base_payload = request.data.copy()
+       else:
+           base_payload = dict(request.data)
+
+       templates = base_payload.pop("template", None)
+       assets = self.get_all_assets(base_payload)
+       if not assets.exists():
+           error = _("No valid assets found for account creation.")
+           return Response(
+               data={
+                   "detail": error,
+                   "code": "no_valid_assets"
+               },
+               status=HTTP_400_BAD_REQUEST
+           )
+
+       result = []
+       errors = []
+
+       def handle_one(_payload):
+           try:
+               ser = self.get_serializer(data=_payload)
+               ser.is_valid(raise_exception=True)
+               data = ser.bulk_create(ser.validated_data, assets)
+               if isinstance(data, (list, tuple)):
+                   result.extend(data)
+               else:
+                   result.append(data)
+           except drf_serializers.ValidationError as e:
+               errors.extend(list(e.detail))
+           except Exception as e:
+               errors.extend([str(e)])
+
+       if not templates:
+           handle_one(base_payload)
+       else:
+           if not isinstance(templates, (list, tuple)):
+               templates = [templates]
+           for tpl in templates:
+               payload = dict(base_payload)
+               payload["template"] = tpl
+               handle_one(payload)
+
+       if errors:
+           raise drf_serializers.ValidationError(errors)
+
+       out_ser = serializers.AssetAccountBulkSerializerResultSerializer(result, many=True)
+       return Response(data=out_ser.data, status=HTTP_200_OK)


class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixin, ListAPIView):

@@ -190,6 +255,7 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
    rbac_perms = {
        'GET': 'accounts.view_accountsecret',
    }
+   queryset = Account.history.model.objects.none()

    @lazyproperty
    def account(self) -> Account:
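For context, a request body that the rewritten create() above would fan out over could look like the sketch below. The IDs and the extra per-account fields are placeholders, not taken from this compare view; only the assets/nodes/template semantics follow the new code:

```python
# Illustrative payload only; field semantics follow the new create() above.
payload = {
    "assets": ["<asset-id-1>", "<asset-id-2>"],           # direct asset IDs
    "nodes": ["<node-id>"],                                # expanded via Node.get_nodes_all_assets
    "template": ["<template-id-a>", "<template-id-b>"],    # one bulk_create pass per template
    # ...plus whatever per-account fields the bulk serializer validates
}
```

Validation errors from every per-template pass are pooled and raised together, so one failing template does not silently hide failures from the others.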
@@ -25,7 +25,8 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
    }
    rbac_perms = {
        'get_once_secret': 'accounts.change_integrationapplication',
-       'get_account_secret': 'accounts.view_integrationapplication'
+       'get_account_secret': 'accounts.view_integrationapplication',
+       'get_sdks_info': 'accounts.view_integrationapplication'
    }

    def read_file(self, path):

@@ -36,7 +37,6 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
    @action(
        ['GET'], detail=False, url_path='sdks',
-       permission_classes=[IsValidUser]
    )
    def get_sdks_info(self, request, *args, **kwargs):
        code_suffix_mapper = {

@@ -20,7 +20,7 @@ __all__ = ['PamDashboardApi']
class PamDashboardApi(APIView):
    http_method_names = ['get']
    rbac_perms = {
-       'GET': 'accounts.view_account',
+       'GET': 'rbac.view_pam',
    }

    @staticmethod

@@ -12,6 +12,8 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
    filterset_fields = ('alias',)

    def get_queryset(self):
+       if getattr(self, "swagger_fake_view", False):
+           return VirtualAccount.objects.none()
        return VirtualAccount.get_or_init_queryset()

    def get_object(self, ):

@@ -41,6 +41,7 @@ class AutomationAssetsListApi(generics.ListAPIView):

class AutomationRemoveAssetApi(generics.UpdateAPIView):
    model = BaseAutomation
+   queryset = BaseAutomation.objects.all()
    serializer_class = serializers.UpdateAssetSerializer
    http_method_names = ['patch']

@@ -59,6 +60,7 @@

class AutomationAddAssetApi(generics.UpdateAPIView):
    model = BaseAutomation
+   queryset = BaseAutomation.objects.all()
    serializer_class = serializers.UpdateAssetSerializer
    http_method_names = ['patch']


@@ -154,12 +154,10 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
    model = ChangeSecretAutomation
    serializer_class = serializers.ChangeSecretUpdateAssetSerializer


class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
    model = ChangeSecretAutomation
    serializer_class = serializers.ChangeSecretUpdateNodeSerializer


class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
    perm_model = ChangeSecretAutomation
    filterset_class = ChangeSecretStatusFilterSet

@@ -62,7 +62,8 @@ class ChangeSecretDashboardApi(APIView):
        status_counts = defaultdict(lambda: defaultdict(int))

        for date_finished, status in results:
-           date_str = str(date_finished.date())
+           dt_local = timezone.localtime(date_finished)
+           date_str = str(dt_local.date())
            if status == ChangeSecretRecordStatusChoice.failed:
                status_counts[date_str]['failed'] += 1
            elif status == ChangeSecretRecordStatusChoice.success:
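The date-bucketing change just above matters because the finished timestamps are timezone-aware and are normally stored in UTC; taking .date() on the raw value can place a record in the wrong day for users. A minimal standalone illustration (Asia/Shanghai is only an example zone standing in for the configured TIME_ZONE):

```python
import datetime
import zoneinfo

tz = zoneinfo.ZoneInfo("Asia/Shanghai")  # example zone, stands in for TIME_ZONE
finished = datetime.datetime(2025, 1, 1, 18, 30, tzinfo=datetime.timezone.utc)

print(finished.date())                 # 2025-01-01 -- old bucketing (UTC date)
print(finished.astimezone(tz).date())  # 2025-01-02 -- local date, what timezone.localtime() yields here
```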
@@ -90,10 +91,10 @@
    def get_change_secret_asset_queryset(self):
        qs = self.change_secrets_queryset
-       node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
-       nodes = Node.objects.filter(id__in=node_ids)
+       node_ids = qs.values_list('nodes', flat=True).distinct()
+       nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
        node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
-       direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
+       direct_asset_ids = qs.values_list('assets', flat=True).distinct()
        asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
        return Asset.objects.filter(id__in=asset_ids)
@@ -45,10 +45,10 @@ class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
    rbac_perms = (
        ("list", "accounts.view_checkaccountexecution"),
-       ("retrieve", "accounts.view_checkaccountsexecution"),
+       ("retrieve", "accounts.view_checkaccountexecution"),
        ("create", "accounts.add_checkaccountexecution"),
        ("adhoc", "accounts.add_checkaccountexecution"),
-       ("report", "accounts.view_checkaccountsexecution"),
+       ("report", "accounts.view_checkaccountexecution"),
    )
    ordering = ("-date_created",)
    tp = AutomationTypes.check_account

@@ -150,6 +150,9 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
    http_method_names = ['get', 'options']

    def get_queryset(self):
+       if getattr(self, "swagger_fake_view", False):
+           return CheckAccountEngine.objects.none()
+
        return CheckAccountEngine.get_default_engines()

    def filter_queryset(self, queryset: list):

@@ -63,12 +63,10 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateAssetSerializer


class PushAccountAddAssetApi(AutomationAddAssetApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateAssetSerializer


class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
    model = PushAccountAutomation
    serializer_class = serializers.PushAccountUpdateNodeSerializer

@@ -235,8 +235,8 @@ class AccountBackupHandler:
        except Exception as e:
            error = str(e)
            print(f'\033[31m>>> {error}\033[0m')
-           self.execution.status = Status.error
-           self.execution.summary['error'] = error
+           self.manager.status = Status.error
+           self.manager.summary['error'] = error

    def backup_by_obj_storage(self):
        object_id = self.execution.snapshot.get('id')
@@ -113,6 +113,16 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
        if host.get('error'):
            return host

+       inventory_hosts = []
+       if asset.type == HostTypes.WINDOWS:
+           if self.secret_type == SecretType.SSH_KEY:
+               host['error'] = _("Windows does not support SSH key authentication")
+               return host
+           new_secret = self.get_secret(account)
+           if '>' in new_secret or '^' in new_secret:
+               host['error'] = _("Windows password cannot contain special characters like > ^")
+               return host
+
        host['ssh_params'] = {}

        accounts = self.get_accounts(account)

@@ -130,11 +140,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
        if asset.type == HostTypes.WINDOWS:
            accounts = accounts.filter(secret_type=SecretType.PASSWORD)

-       inventory_hosts = []
-       if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
-           print(f'Windows {asset} does not support ssh key push')
-           return inventory_hosts
-
        for account in accounts:
            h = deepcopy(host)
            h['name'] += '(' + account.username + ')'  # To distinguish different accounts
@@ -53,4 +53,6 @@
        ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
        connection_options:
          - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
      when: check_conn_after_change
+     register: result
+     failed_when: not result.is_available

@@ -39,7 +39,8 @@
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        host: "%"
-       priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
+       priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
+       append_privs: "{{ db_name != '' | bool }}"
      ignore_errors: true
      when: db_info is succeeded

@@ -56,3 +56,5 @@
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
      when: check_conn_after_change
+     register: result
+     failed_when: not result.is_available
@@ -5,12 +5,14 @@

  tasks:
    - name: Test SQLServer connection
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: |
          SELECT @@version
      register: db_info

@@ -23,45 +25,53 @@
        var: info

    - name: Check whether SQLServer User exist
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
      when: db_info is succeeded
      register: user_exist

    - name: Change SQLServer password
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
      ignore_errors: true
      when: user_exist.query_results[0] | length != 0

    - name: Add SQLServer user
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "CREATE LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; CREATE USER {{ account.username }} FOR LOGIN {{ account.username }}; select @@version"
      ignore_errors: true
      when: user_exist.query_results[0] | length == 0

    - name: Verify password
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ account.username }}"
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: |
          SELECT @@version
      when: check_conn_after_change
@@ -18,6 +18,7 @@
    uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
    shell: "{{ params.shell if params.shell | length > 0 else omit }}"
    home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
+   group: "{{ params.group if params.group | length > 0 else omit }}"
    groups: "{{ params.groups if params.groups | length > 0 else omit }}"
    append: "{{ true if params.groups | length > 0 else false }}"
    expires: -1

@@ -28,6 +28,12 @@ params:
    default: ''
    help_text: "{{ 'Params home help text' | trans }}"

+ - name: group
+   type: str
+   label: "{{ 'Params group label' | trans }}"
+   default: ''
+   help_text: "{{ 'Params group help text' | trans }}"
+
  - name: groups
    type: str
    label: "{{ 'Params groups label' | trans }}"

@@ -61,6 +67,11 @@ i18n:
    ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
    en: 'Default home directory /home/{account username}'

+ Params group help text:
+   zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
+   ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
+   en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
+
  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'

@@ -86,6 +97,11 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

+ Params group label:
+   zh: '主组'
+   ja: '主组'
+   en: 'Main group'
+
  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'

@@ -18,6 +18,7 @@
    uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
    shell: "{{ params.shell if params.shell | length > 0 else omit }}"
    home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
+   group: "{{ params.group if params.group | length > 0 else omit }}"
    groups: "{{ params.groups if params.groups | length > 0 else omit }}"
    append: "{{ true if params.groups | length > 0 else false }}"
    expires: -1

@@ -30,6 +30,12 @@ params:
    default: ''
    help_text: "{{ 'Params home help text' | trans }}"

+ - name: group
+   type: str
+   label: "{{ 'Params group label' | trans }}"
+   default: ''
+   help_text: "{{ 'Params group help text' | trans }}"
+
  - name: groups
    type: str
    label: "{{ 'Params groups label' | trans }}"

@@ -63,6 +69,11 @@ i18n:
    ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
    en: 'Default home directory /home/{account username}'

+ Params group help text:
+   zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
+   ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
+   en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
+
  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'

@@ -88,6 +99,11 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

+ Params group label:
+   zh: '主组'
+   ja: '主组'
+   en: 'Main group'
+
  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'
@@ -8,7 +8,7 @@ type:
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -24,3 +24,7 @@ i18n:
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'

@@ -9,7 +9,7 @@ type:
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -25,3 +25,8 @@ i18n:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'

@@ -9,7 +9,7 @@ priority: 49
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -25,3 +25,8 @@ i18n:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'
@@ -9,7 +9,7 @@ from accounts.const import (
    AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
)
from accounts.models import ChangeSecretRecord
-from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretReportMsg
+from accounts.notifications import ChangeSecretExecutionTaskMsg
from accounts.serializers import ChangeSecretRecordBackUpSerializer
from common.utils import get_logger
from common.utils.file import encrypt_and_compress_zip_file

@@ -94,10 +94,6 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
        if not recipients:
            return

-       context = self.get_report_context()
-       for user in recipients:
-           ChangeSecretReportMsg(user, context).publish()
-
        if not records:
            return
@@ -240,6 +240,11 @@ class CheckAccountManager(BaseManager):

            print("Check: {} => {}".format(account, msg))
            if not error:
+               AccountRisk.objects.filter(
+                   asset=account.asset,
+                   username=account.username,
+                   risk=handler.risk
+               ).delete()
                continue
            self.add_risk(handler.risk, account)
        self.commit_risks(_assets)
@@ -5,12 +5,14 @@

  tasks:
    - name: Test SQLServer connection
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: |
          SELECT
            l.name,
@@ -54,3 +54,5 @@
        connection_options:
          - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
      when: check_conn_after_change
+     register: result
+     failed_when: not result.is_available
@@ -39,7 +39,8 @@
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        host: "%"
-       priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
+       priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
+       append_privs: "{{ db_name != '' | bool }}"
      ignore_errors: true
      when: db_info is succeeded
@@ -5,12 +5,14 @@

  tasks:
    - name: Test SQLServer connection
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: |
          SELECT @@version
      register: db_info

@@ -23,47 +25,55 @@
        var: info

    - name: Check whether SQLServer User exist
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
      when: db_info is succeeded
      register: user_exist

    - name: Change SQLServer password
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
      ignore_errors: true
      when: user_exist.query_results[0] | length != 0
      register: change_info

    - name: Add SQLServer user
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: "CREATE LOGIN [{{ account.username }}] WITH PASSWORD = '{{ account.secret }}'; CREATE USER [{{ account.username }}] FOR LOGIN [{{ account.username }}]; select @@version"
      ignore_errors: true
      when: user_exist.query_results[0] | length == 0
      register: change_info

    - name: Verify password
-     community.general.mssql_script:
+     mssql_script:
        login_user: "{{ account.username }}"
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        name: '{{ jms_asset.spec_info.db_name }}'
+       encryption: "{{ jms_asset.encryption | default(None) }}"
+       tds_version: "{{ jms_asset.tds_version | default(None) }}"
        script: |
          SELECT @@version
      when: check_conn_after_change
@@ -18,6 +18,7 @@
    uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
    shell: "{{ params.shell if params.shell | length > 0 else omit }}"
    home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
+   group: "{{ params.group if params.group | length > 0 else omit }}"
    groups: "{{ params.groups if params.groups | length > 0 else omit }}"
    append: "{{ true if params.groups | length > 0 else false }}"
    expires: -1

@@ -28,6 +28,12 @@ params:
    default: ''
    help_text: "{{ 'Params home help text' | trans }}"

+ - name: group
+   type: str
+   label: "{{ 'Params group label' | trans }}"
+   default: ''
+   help_text: "{{ 'Params group help text' | trans }}"
+
  - name: groups
    type: str
    label: "{{ 'Params groups label' | trans }}"

@@ -61,6 +67,11 @@ i18n:
    ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
    en: 'Default home directory /home/{account username}'

+ Params group help text:
+   zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
+   ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
+   en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
+
  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'

@@ -86,6 +97,11 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

+ Params group label:
+   zh: '主组'
+   ja: '主组'
+   en: 'Main group'
+
  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'

@@ -18,6 +18,7 @@
    uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
    shell: "{{ params.shell if params.shell | length > 0 else omit }}"
    home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
+   group: "{{ params.group if params.group | length > 0 else omit }}"
    groups: "{{ params.groups if params.groups | length > 0 else omit }}"
    append: "{{ true if params.groups | length > 0 else false }}"
    expires: -1

@@ -30,6 +30,12 @@ params:
    default: ''
    help_text: "{{ 'Params home help text' | trans }}"

+ - name: group
+   type: str
+   label: "{{ 'Params group label' | trans }}"
+   default: ''
+   help_text: "{{ 'Params group help text' | trans }}"
+
  - name: groups
    type: str
    label: "{{ 'Params groups label' | trans }}"

@@ -63,6 +69,11 @@ i18n:
    ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
    en: 'Default home directory /home/{account username}'

+ Params group help text:
+   zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
+   ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
+   en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
+
  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'

@@ -84,9 +95,14 @@ i18n:
    en: 'Home'

  Params groups label:
-   zh: '用户组'
-   ja: 'グループ'
-   en: 'Groups'
+   zh: '附加组'
+   ja: '追加グループ'
+   en: 'Additional Group'

+ Params group label:
+   zh: '主组'
+   ja: '主组'
+   en: 'Main group'
+
  Params uid label:
    zh: '用户ID'
@@ -8,7 +8,7 @@ type:
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -22,3 +22,8 @@ i18n:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'

@@ -9,7 +9,7 @@ type:
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -23,3 +23,8 @@ i18n:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'

@@ -9,7 +9,7 @@ priority: 49
params:
  - name: groups
    type: str
-   label: '用户组'
+   label: "{{ 'Params groups label' | trans }}"
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

@@ -23,3 +23,8 @@ i18n:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+ Params groups label:
+   zh: '用户组'
+   ja: 'グループ'
+   en: 'Groups'
@@ -5,11 +5,13 @@

tasks:
- name: "Remove account"
community.general.mssql_script:
mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}"
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "DROP LOGIN {{ account.username }}; select @@version"

@@ -16,3 +16,5 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
register: result
failed_when: not result.is_available

@@ -5,11 +5,13 @@

tasks:
- name: Verify account
community.general.mssql_script:
mssql_script:
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version

@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
#
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient

from common.utils import get_logger

@@ -14,6 +11,9 @@ __all__ = ['AZUREVaultClient']
class AZUREVaultClient(object):

def __init__(self, vault_url, tenant_id, client_id, client_secret):
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient

authentication_endpoint = 'https://login.microsoftonline.com/' \
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'

@@ -23,6 +23,8 @@ class AZUREVaultClient(object):
self.client = SecretClient(vault_url=vault_url, credential=credentials)

def is_active(self):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError

try:
self.client.set_secret('jumpserver', '666')
except (ResourceNotFoundError, ClientAuthenticationError) as e:
@@ -32,6 +34,8 @@ class AZUREVaultClient(object):
return True, ''

def get(self, name, version=None):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError

try:
secret = self.client.get_secret(name, version)
return secret.value

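The hunk above moves the Azure SDK imports from module scope into the methods that use them, so the optional dependency is only loaded when the vault backend is actually exercised. A minimal sketch of that lazy-import pattern, assuming the same constructor arguments as AZUREVaultClient (illustrative only, not part of the diff):

class LazyVaultClient:
    def __init__(self, vault_url, tenant_id, client_id, client_secret):
        # Deferred imports: the process can start even if the azure packages
        # are absent, because they are only imported when a client is built.
        from azure.identity import ClientSecretCredential
        from azure.keyvault.secrets import SecretClient

        credential = ClientSecretCredential(
            tenant_id=tenant_id, client_id=client_id, client_secret=client_secret
        )
        self.client = SecretClient(vault_url=vault_url, credential=credential)
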
@@ -46,11 +46,16 @@ class Migration(migrations.Migration):
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Account',
|
||||
'permissions': [('view_accountsecret', 'Can view asset account secret'),
|
||||
('view_historyaccount', 'Can view asset history account'),
|
||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||
('verify_account', 'Can verify account'), ('push_account', 'Can push account'),
|
||||
('remove_account', 'Can remove account')],
|
||||
'permissions': [
|
||||
('view_accountsecret', 'Can view asset account secret'),
|
||||
('view_historyaccount', 'Can view asset history account'),
|
||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||
('verify_account', 'Can verify account'),
|
||||
('push_account', 'Can push account'),
|
||||
('remove_account', 'Can remove account'),
|
||||
('view_accountsession', 'Can view session'),
|
||||
('view_accountactivity', 'Can view activity')
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
||||
@@ -116,6 +116,8 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
('verify_account', _('Can verify account')),
|
||||
('push_account', _('Can push account')),
|
||||
('remove_account', _('Can remove account')),
|
||||
('view_accountsession', _('Can view session')),
|
||||
('view_accountactivity', _('Can view activity')),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
@@ -130,7 +132,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.platform
|
||||
|
||||
@lazyproperty
|
||||
def alias(self):
|
||||
def alias(self) -> str:
|
||||
"""
|
||||
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
|
||||
"""
|
||||
@@ -138,13 +140,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.username
|
||||
return str(self.id)
|
||||
|
||||
def is_virtual(self):
|
||||
def is_virtual(self) -> bool:
|
||||
"""
|
||||
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
|
||||
"""
|
||||
return self.alias.startswith('@')
|
||||
|
||||
def is_ds_account(self):
|
||||
def is_ds_account(self) -> bool:
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
if not self.asset.is_directory_service:
|
||||
@@ -158,7 +160,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.ds
|
||||
|
||||
@lazyproperty
|
||||
def ds_domain(self):
|
||||
def ds_domain(self) -> str:
|
||||
"""这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
@@ -170,17 +172,17 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return '@' in self.username or '\\' in self.username
|
||||
|
||||
@property
|
||||
def full_username(self):
|
||||
def full_username(self) -> str:
|
||||
if not self.username_has_domain() and self.ds_domain:
|
||||
return '{}@{}'.format(self.username, self.ds_domain)
|
||||
return self.username
|
||||
|
||||
@lazyproperty
|
||||
def has_secret(self):
|
||||
def has_secret(self) -> bool:
|
||||
return bool(self.secret)
|
||||
|
||||
@lazyproperty
|
||||
def versions(self):
|
||||
def versions(self) -> int:
|
||||
return self.history.count()
|
||||
|
||||
def get_su_from_accounts(self):
|
||||
|
||||
@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
|
||||
return qs.filter(*query)
|
||||
|
||||
@property
|
||||
def accounts_amount(self):
|
||||
def accounts_amount(self) -> int:
|
||||
return self.get_accounts().count()
|
||||
|
||||
@property
|
||||
|
||||
@@ -68,8 +68,10 @@ class AccountRisk(JMSOrgBaseModel):
|
||||
related_name='risks', null=True
|
||||
)
|
||||
risk = models.CharField(max_length=128, verbose_name=_('Risk'), choices=RiskChoice.choices)
|
||||
status = models.CharField(max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
||||
blank=True, verbose_name=_('Status'))
|
||||
status = models.CharField(
|
||||
max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
||||
blank=True, verbose_name=_('Status')
|
||||
)
|
||||
details = models.JSONField(default=list, verbose_name=_('Detail'))
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return bool(self.secret)
|
||||
|
||||
@property
|
||||
def has_username(self):
|
||||
def has_username(self) -> bool:
|
||||
return bool(self.username)
|
||||
|
||||
@property
|
||||
def spec_info(self):
|
||||
def spec_info(self) -> dict:
|
||||
data = {}
|
||||
if self.secret_type != SecretType.SSH_KEY:
|
||||
return data
|
||||
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return data
|
||||
|
||||
@property
|
||||
def password(self):
|
||||
def password(self) -> str:
|
||||
if self.secret_type == SecretType.PASSWORD:
|
||||
return self.secret
|
||||
return None
|
||||
|
||||
@property
|
||||
def private_key(self):
|
||||
def private_key(self) -> str:
|
||||
if self.secret_type == SecretType.SSH_KEY:
|
||||
return self.secret
|
||||
return None
|
||||
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return None
|
||||
|
||||
@property
|
||||
def ssh_key_fingerprint(self):
|
||||
def ssh_key_fingerprint(self) -> str:
|
||||
if self.public_key:
|
||||
public_key = self.public_key
|
||||
elif self.private_key:
|
||||
|
||||
@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
|
||||
__secret = None
|
||||
|
||||
@property
|
||||
def secret(self):
|
||||
def secret(self) -> str:
|
||||
if self.__secret:
|
||||
return self.__secret
|
||||
from accounts.backends import vault_client
|
||||
|
||||
@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
verbose_name = _('Virtual account')
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
return self.get_alias_display()
|
||||
|
||||
@property
|
||||
def username(self):
|
||||
def username(self) -> str:
|
||||
usernames_map = {
|
||||
AliasAccount.INPUT: _("Manual input"),
|
||||
AliasAccount.USER: _("Same with user"),
|
||||
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
return usernames_map.get(self.alias, '')
|
||||
|
||||
@property
|
||||
def comment(self):
|
||||
def comment(self) -> str:
|
||||
comments_map = {
|
||||
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
|
||||
AliasAccount.USER: _('The account username name same with user on connect'),
|
||||
|
||||
@@ -14,7 +14,7 @@ from accounts.models import Account, AccountTemplate, GatheredAccount
|
||||
from accounts.tasks import push_accounts_to_assets_task
|
||||
from assets.const import Category, AllTypes
|
||||
from assets.models import Asset
|
||||
from common.serializers import SecretReadableMixin
|
||||
from common.serializers import SecretReadableMixin, CommonBulkModelSerializer
|
||||
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
|
||||
from common.utils import get_logger
|
||||
from .base import BaseAccountSerializer, AuthValidateMixin
|
||||
@@ -253,6 +253,8 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
|
||||
'source_id': {'required': False, 'allow_null': True},
|
||||
}
|
||||
fields_unimport_template = ['params']
|
||||
# 手动判断唯一性校验
|
||||
validators = []
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
@@ -263,6 +265,21 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
)
return queryset

def validate(self, attrs):
instance = getattr(self, "instance", None)
if instance:
return super().validate(attrs)

field_errors = {}
for _fields in Account._meta.unique_together:
lookup = {field: attrs.get(field) for field in _fields}
if Account.objects.filter(**lookup).exists():
verbose_names = ', '.join([str(Account._meta.get_field(f).verbose_name) for f in _fields])
msg_template = _('Account already exists. Field(s): {fields} must be unique.')
field_errors[_fields[0]] = msg_template.format(fields=verbose_names)
raise serializers.ValidationError(field_errors)
return attrs

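The validate() added above only runs on create (updates fall through to the parent) and pre-checks every unique_together tuple declared on Account, so the API answers with a field-level error instead of a database IntegrityError. A hedged illustration with made-up field names and values; the actual tuple, error key, and message come from Account._meta and the translated template:

payload = {'asset': asset_pk, 'username': 'root', 'name': 'root'}  # asset_pk is assumed to exist
serializer = AccountSerializer(data=payload)
if not serializer.is_valid():
    # If an account with the same unique_together key already exists, the
    # error lands on the first field of that tuple, for example:
    # {'asset': ['Account already exists. Field(s): Asset, Username must be unique.']}
    print(serializer.errors)
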
class AccountDetailSerializer(AccountSerializer):
|
||||
has_secret = serializers.BooleanField(label=_("Has secret"), read_only=True)
|
||||
@@ -275,26 +292,26 @@ class AccountDetailSerializer(AccountSerializer):
|
||||
|
||||
class AssetAccountBulkSerializerResultSerializer(serializers.Serializer):
|
||||
asset = serializers.CharField(read_only=True, label=_('Asset'))
|
||||
account = serializers.CharField(read_only=True, label=_('Account'))
|
||||
state = serializers.CharField(read_only=True, label=_('State'))
|
||||
error = serializers.CharField(read_only=True, label=_('Error'))
|
||||
changed = serializers.BooleanField(read_only=True, label=_('Changed'))
|
||||
|
||||
|
||||
class AssetAccountBulkSerializer(
|
||||
AccountCreateUpdateSerializerMixin, AuthValidateMixin, serializers.ModelSerializer
|
||||
AccountCreateUpdateSerializerMixin, AuthValidateMixin, CommonBulkModelSerializer
|
||||
):
|
||||
su_from_username = serializers.CharField(
|
||||
max_length=128, required=False, write_only=True, allow_null=True, label=_("Su from"),
|
||||
allow_blank=True,
|
||||
)
|
||||
assets = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, many=True, label=_('Assets'))
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = [
|
||||
'name', 'username', 'secret', 'secret_type', 'passphrase',
|
||||
'privileged', 'is_active', 'comment', 'template',
|
||||
'on_invalid', 'push_now', 'params', 'assets',
|
||||
'name', 'username', 'secret', 'secret_type', 'secret_reset',
|
||||
'passphrase', 'privileged', 'is_active', 'comment', 'template',
|
||||
'on_invalid', 'push_now', 'params',
|
||||
'su_from_username', 'source', 'source_id',
|
||||
]
|
||||
extra_kwargs = {
|
||||
@@ -376,8 +393,7 @@ class AssetAccountBulkSerializer(
|
||||
handler = self._handle_err_create
|
||||
return handler
|
||||
|
||||
def perform_bulk_create(self, vd):
|
||||
assets = vd.pop('assets')
|
||||
def perform_bulk_create(self, vd, assets):
|
||||
on_invalid = vd.pop('on_invalid', 'skip')
|
||||
secret_type = vd.get('secret_type', 'password')
|
||||
|
||||
@@ -385,8 +401,7 @@ class AssetAccountBulkSerializer(
|
||||
vd['name'] = vd.get('username')
|
||||
|
||||
create_handler = self.get_create_handler(on_invalid)
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
secret_type_supports = Asset.get_secret_type_assets(asset_ids, secret_type)
|
||||
secret_type_supports = Asset.get_secret_type_assets(assets, secret_type)
|
||||
|
||||
_results = {}
|
||||
for asset in assets:
|
||||
@@ -394,6 +409,7 @@ class AssetAccountBulkSerializer(
|
||||
_results[asset] = {
|
||||
'error': _('Asset does not support this secret type: %s') % secret_type,
|
||||
'state': 'error',
|
||||
'account': vd['name'],
|
||||
}
|
||||
continue
|
||||
|
||||
@@ -403,13 +419,13 @@ class AssetAccountBulkSerializer(
|
||||
self.clean_auth_fields(vd)
|
||||
instance, changed, state = self.perform_create(vd, create_handler)
|
||||
_results[asset] = {
|
||||
'changed': changed, 'instance': instance.id, 'state': state
|
||||
'changed': changed, 'instance': instance.id, 'state': state, 'account': vd['name']
|
||||
}
|
||||
except serializers.ValidationError as e:
|
||||
_results[asset] = {'error': e.detail[0], 'state': 'error'}
|
||||
_results[asset] = {'error': e.detail[0], 'state': 'error', 'account': vd['name']}
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
_results[asset] = {'error': str(e), 'state': 'error'}
|
||||
_results[asset] = {'error': str(e), 'state': 'error', 'account': vd['name']}
|
||||
|
||||
results = [{'asset': asset, **result} for asset, result in _results.items()]
|
||||
state_score = {'created': 3, 'updated': 2, 'skipped': 1, 'error': 0}
|
||||
@@ -426,7 +442,8 @@ class AssetAccountBulkSerializer(
|
||||
errors.append({
|
||||
'error': _('Account has exist'),
|
||||
'state': 'error',
|
||||
'asset': str(result['asset'])
|
||||
'asset': str(result['asset']),
|
||||
'account': result.get('account'),
|
||||
})
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
@@ -445,10 +462,16 @@ class AssetAccountBulkSerializer(
|
||||
account_ids = [str(_id) for _id in accounts.values_list('id', flat=True)]
|
||||
push_accounts_to_assets_task.delay(account_ids, params)
|
||||
|
||||
def create(self, validated_data):
|
||||
def bulk_create(self, validated_data, assets):
|
||||
if not assets:
|
||||
raise serializers.ValidationError(
|
||||
{'assets': _('At least one asset or node must be specified')},
|
||||
{'nodes': _('At least one asset or node must be specified')}
|
||||
)
|
||||
|
||||
params = validated_data.pop('params', None)
|
||||
push_now = validated_data.pop('push_now', False)
|
||||
results = self.perform_bulk_create(validated_data)
|
||||
results = self.perform_bulk_create(validated_data, assets)
|
||||
self.push_accounts_if_need(results, push_now, params)
|
||||
for res in results:
|
||||
res['asset'] = str(res['asset'])
|
||||
@@ -456,6 +479,8 @@ class AssetAccountBulkSerializer(
|
||||
|
||||
|
||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields = AccountSerializer.Meta.fields + ['spec_info']
|
||||
extra_kwargs = {
|
||||
@@ -470,6 +495,7 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
|
||||
class AccountHistorySerializer(serializers.ModelSerializer):
|
||||
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
|
||||
secret = serializers.CharField(label=_('Secret'), read_only=True)
|
||||
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -70,6 +70,8 @@ class AuthValidateMixin(serializers.Serializer):
|
||||
class BaseAccountSerializer(
|
||||
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
|
||||
):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = BaseAccount
|
||||
fields_mini = ["id", "name", "username"]
|
||||
|
||||
@@ -130,7 +130,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj):
|
||||
def get_is_success(obj) -> bool:
|
||||
return obj.status == ChangeSecretRecordStatusChoice.success
|
||||
|
||||
|
||||
@@ -157,7 +157,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_asset(instance):
|
||||
def get_asset(instance) -> str:
|
||||
return str(instance.asset)
|
||||
|
||||
@staticmethod
|
||||
@@ -165,7 +165,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
return str(instance.account)
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj):
|
||||
def get_is_success(obj) -> str:
|
||||
if obj.status == ChangeSecretRecordStatusChoice.success.value:
|
||||
return _("Success")
|
||||
return _("Failed")
|
||||
@@ -196,9 +196,9 @@ class ChangeSecretAccountSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_meta(obj):
|
||||
def get_meta(obj) -> dict:
|
||||
return account_secret_task_status.get(str(obj.id))
|
||||
|
||||
@staticmethod
|
||||
def get_ttl(obj):
|
||||
def get_ttl(obj) -> int:
|
||||
return account_secret_task_status.get_ttl(str(obj.id))
|
||||
|
||||
@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
|
||||
risk_summary = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_risk_summary(obj):
|
||||
def get_risk_summary(obj) -> dict:
|
||||
summary = {}
|
||||
for risk in RiskChoice.choices:
|
||||
summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Task name' %}: {{ name }}</h3>
|
||||
<h3>{% trans 'Task execution id' %}: {{ execution_id }}</h3>
|
||||
<p>{% trans 'Respectful' %} {{ recipient }}</p>
|
||||
<p>{% trans 'Hello! The following is the failure of changing the password of your assets or pushing the account. Please check and handle it in time.' %}</p>
|
||||
<table style="width: 100%; border-collapse: collapse; max-width: 100%; text-align: left; margin-top: 20px;">
|
||||
<caption></caption>
|
||||
<thead>
|
||||
<tr style="background-color: #f2f2f2;">
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Asset' %}</th>
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Account' %}</th>
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Error' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for asset_name, account_username, error in asset_account_errors %}
|
||||
<tr>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">{{ asset_name }}</td>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">{{ account_username }}</td>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">
|
||||
<div style="
|
||||
max-width: 90%;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: block;"
|
||||
title="{{ error }}"
|
||||
>
|
||||
{{ error }}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -3,3 +3,4 @@ from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
from .data_masking import *
|
||||
20
apps/acls/api/data_masking.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
|
||||
from .common import ACLUserFilterMixin
|
||||
from ..models import DataMaskingRule
|
||||
from .. import serializers
|
||||
|
||||
__all__ = ['DataMaskingRuleViewSet']
|
||||
|
||||
|
||||
class DataMaskingRuleFilter(ACLUserFilterMixin):
|
||||
class Meta:
|
||||
model = DataMaskingRule
|
||||
fields = ('name',)
|
||||
|
||||
|
||||
class DataMaskingRuleViewSet(OrgBulkModelViewSet):
|
||||
model = DataMaskingRule
|
||||
filterset_class = DataMaskingRuleFilter
|
||||
search_fields = ('name',)
|
||||
serializer_class = serializers.DataMaskingRuleSerializer
|
||||
@@ -8,7 +8,7 @@ __all__ = ['LoginAssetACLViewSet']
|
||||
class LoginAssetACLFilter(ACLUserAssetFilterMixin):
|
||||
class Meta:
|
||||
model = models.LoginAssetACL
|
||||
fields = ['name', ]
|
||||
fields = ['name', 'action']
|
||||
|
||||
|
||||
class LoginAssetACLViewSet(OrgBulkModelViewSet):
|
||||
|
||||
45
apps/acls/migrations/0003_datamaskingrule.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# Generated by Django 4.1.13 on 2025-10-07 16:16
|
||||
|
||||
import common.db.fields
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('acls', '0002_auto_20210926_1047'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='DataMaskingRule',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
|
||||
('action', models.CharField(default='reject', max_length=64, verbose_name='Action')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('users', common.db.fields.JSONManyToManyField(default=dict, to='users.User', verbose_name='Users')),
|
||||
('assets', common.db.fields.JSONManyToManyField(default=dict, to='assets.Asset', verbose_name='Assets')),
|
||||
('accounts', models.JSONField(default=list, verbose_name='Accounts')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('fields_pattern', models.CharField(default='password', max_length=128, verbose_name='Fields pattern')),
|
||||
('masking_method', models.CharField(choices=[('fixed_char', 'Fixed Character Replacement'), ('hide_middle', 'Hide Middle Characters'), ('keep_prefix', 'Keep Prefix Only'), ('keep_suffix', 'Keep Suffix Only')], default='fixed_char', max_length=32, verbose_name='Masking Method')),
|
||||
('mask_pattern', models.CharField(blank=True, default='######', max_length=128, null=True, verbose_name='Mask Pattern')),
|
||||
('reviewers', models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL, verbose_name='Reviewers')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Data Masking Rule',
|
||||
'unique_together': {('org_id', 'name')},
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -2,3 +2,4 @@ from .command_acl import *
|
||||
from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .data_masking import *
|
||||
@@ -5,7 +5,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from common.db.fields import JSONManyToManyField
|
||||
from common.db.models import JMSBaseModel
|
||||
from common.utils import contains_ip
|
||||
from common.utils.time_period import contains_time_period
|
||||
from common.utils.timezone import contains_time_period
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from ..const import ActionChoices
|
||||
|
||||
|
||||
@@ -34,16 +34,16 @@ class CommandGroup(JMSOrgBaseModel):

@lazyproperty
def pattern(self):
content = self.content.replace('\r\n', '\n')
if self.type == 'command':
s = self.construct_command_regex(self.content)
s = self.construct_command_regex(content)
else:
s = r'{0}'.format(self.content)
s = r'{0}'.format(r'{}'.format('|'.join(content.split('\n'))))
return s

@classmethod
def construct_command_regex(cls, content):
regex = []
content = content.replace('\r\n', '\n')
for _cmd in content.split('\n'):
cmd = re.sub(r'\s+', ' ', _cmd)
cmd = re.escape(cmd)

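The pattern change above normalizes CRLF line endings and, for regex-type groups, joins the individual lines with '|' instead of using the raw multi-line content, so each line becomes one alternative in a single pattern. A small worked example with made-up content:

content = "rm -rf .*\r\nshutdown .*"          # hypothetical two-line regex group
normalized = content.replace('\r\n', '\n')
pattern = r'{0}'.format('|'.join(normalized.split('\n')))
# pattern == 'rm -rf .*|shutdown .*' and now matches either forbidden command
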
42
apps/acls/models/data_masking.py
Normal file
@@ -0,0 +1,42 @@
from django.db import models

from acls.models import UserAssetAccountBaseACL
from common.utils import get_logger
from django.utils.translation import gettext_lazy as _

logger = get_logger(__file__)

__all__ = ['MaskingMethod', 'DataMaskingRule']


class MaskingMethod(models.TextChoices):
fixed_char = "fixed_char", _("Fixed Character Replacement") # 固定字符替换
hide_middle = "hide_middle", _("Hide Middle Characters") # 隐藏中间几位
keep_prefix = "keep_prefix", _("Keep Prefix Only") # 只保留前缀
keep_suffix = "keep_suffix", _("Keep Suffix Only") # 只保留后缀


class DataMaskingRule(UserAssetAccountBaseACL):
name = models.CharField(max_length=128, verbose_name=_("Name"))
fields_pattern = models.CharField(max_length=128, default='password', verbose_name=_("Fields pattern"))

masking_method = models.CharField(
max_length=32,
choices=MaskingMethod.choices,
default=MaskingMethod.fixed_char,
verbose_name=_("Masking Method"),
)
mask_pattern = models.CharField(
max_length=128,
verbose_name=_("Mask Pattern"),
default="######",
blank=True,
null=True,
)

def __str__(self):
return self.name

class Meta:
unique_together = [('org_id', 'name')]
verbose_name = _("Data Masking Rule")

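DataMaskingRule above only stores the configuration (which fields to match, which method, which mask string); how a matched value is actually rewritten is not shown in this diff. One plausible reading of the four MaskingMethod choices, as a purely hypothetical helper that is not part of the codebase:

def apply_mask(value, method, mask_pattern='######', keep=3):
    # Hypothetical helper illustrating the intent of the MaskingMethod choices
    # defined above; the real masking logic is not part of this diff.
    if method == 'fixed_char':   # replace the whole value with the mask
        return mask_pattern
    if method == 'hide_middle':  # keep both ends, hide the middle
        return value[:keep] + mask_pattern + value[-keep:]
    if method == 'keep_prefix':  # only the leading characters survive
        return value[:keep] + mask_pattern
    if method == 'keep_suffix':  # only the trailing characters survive
        return mask_pattern + value[-keep:]
    return value

# apply_mask('SuperSecret42', 'hide_middle') -> 'Sup######t42'
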
@@ -1,30 +1,52 @@
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.models import Account
|
||||
from acls.models import LoginACL, LoginAssetACL
|
||||
from assets.models import Asset
|
||||
from audits.models import UserLoginLog
|
||||
from common.views.template import custom_render_to_string
|
||||
from notifications.notifications import UserMessage
|
||||
from users.models import User
|
||||
|
||||
|
||||
class UserLoginReminderMsg(UserMessage):
|
||||
subject = _('User login reminder')
|
||||
template_name = 'acls/user_login_reminder.html'
|
||||
contexts = [
|
||||
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
|
||||
{"name": "username", "label": _('User'), "default": "john"},
|
||||
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
|
||||
{"name": "recipient_name", "label": _("Recipient name"), "default": "John"},
|
||||
{"name": "recipient_username", "label": _("Recipient username"), "default": "john"},
|
||||
{"name": "user_agent", "label": _('User agent'), "default": "Mozilla/5.0"},
|
||||
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
|
||||
{"name": "login_from", "label": _('Login from'), "default": "web"},
|
||||
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"},
|
||||
]
|
||||
|
||||
def __init__(self, user, user_log: UserLoginLog):
|
||||
def __init__(self, user, user_log: UserLoginLog, acl: LoginACL):
|
||||
self.user_log = user_log
|
||||
self.acl_name = str(acl)
|
||||
self.login_from = user_log.get_type_display()
|
||||
now = timezone.localtime(user_log.datetime)
|
||||
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
|
||||
super().__init__(user)
|
||||
|
||||
def get_html_msg(self) -> dict:
|
||||
user_log = self.user_log
|
||||
context = {
|
||||
'ip': user_log.ip,
|
||||
'time': self.time,
|
||||
'city': user_log.city,
|
||||
'acl_name': self.acl_name,
|
||||
'login_from': self.login_from,
|
||||
'username': user_log.username,
|
||||
'recipient': self.user,
|
||||
'recipient_name': self.user.name,
|
||||
'recipient_username': self.user.username,
|
||||
'user_agent': user_log.user_agent,
|
||||
}
|
||||
message = render_to_string('acls/user_login_reminder.html', context)
|
||||
message = custom_render_to_string(self.template_name, context)
|
||||
|
||||
return {
|
||||
'subject': str(self.subject),
|
||||
@@ -40,24 +62,55 @@ class UserLoginReminderMsg(UserMessage):
|
||||
|
||||
class AssetLoginReminderMsg(UserMessage):
|
||||
subject = _('User login alert for asset')
|
||||
template_name = 'acls/asset_login_reminder.html'
|
||||
contexts = [
|
||||
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
|
||||
{"name": "username", "label": _('User'), "default": "john"},
|
||||
{"name": "name", "label": _('Name'), "default": "John"},
|
||||
{"name": "asset", "label": _('Asset'), "default": "dev server"},
|
||||
{"name": "recipient_name", "label": _('Recipient name'), "default": "John"},
|
||||
{"name": "recipient_username", "label": _('Recipient username'), "default": "john"},
|
||||
{"name": "account", "label": _('Account Input username'), "default": "root"},
|
||||
{"name": "account_name", "label": _('Account name'), "default": "root"},
|
||||
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
|
||||
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
|
||||
{"name": "login_from", "label": _('Login from'), "default": "web"},
|
||||
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"}
|
||||
]
|
||||
|
||||
def __init__(self, user, asset: Asset, login_user: User, account: Account, input_username):
|
||||
def __init__(
|
||||
self, user, asset: Asset, login_user: User,
|
||||
account: Account, acl: LoginAssetACL,
|
||||
ip, input_username, login_from
|
||||
):
|
||||
self.ip = ip
|
||||
self.asset = asset
|
||||
self.login_user = login_user
|
||||
self.account = account
|
||||
self.acl_name = str(acl)
|
||||
self.login_from = login_from
|
||||
self.login_user = login_user
|
||||
self.input_username = input_username
|
||||
|
||||
now = timezone.localtime(timezone.now())
|
||||
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
|
||||
super().__init__(user)
|
||||
|
||||
def get_html_msg(self) -> dict:
|
||||
context = {
|
||||
'recipient': self.user,
|
||||
'ip': self.ip,
|
||||
'time': self.time,
|
||||
'login_from': self.login_from,
|
||||
'recipient_name': self.user.name,
|
||||
'recipient_username': self.user.username,
|
||||
'username': self.login_user.username,
|
||||
'name': self.login_user.name,
|
||||
'asset': str(self.asset),
|
||||
'account': self.input_username,
|
||||
'account_name': self.account.name,
|
||||
'acl_name': self.acl_name,
|
||||
}
|
||||
message = render_to_string('acls/asset_login_reminder.html', context)
|
||||
message = custom_render_to_string(self.template_name, context)
|
||||
|
||||
return {
|
||||
'subject': str(self.subject),
|
||||
|
||||
@@ -3,3 +3,4 @@ from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
from .data_masking import *
|
||||
@@ -90,7 +90,7 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
fields_small = fields_mini + [
|
||||
"is_active", "priority", "action",
|
||||
"date_created", "date_updated",
|
||||
"comment", "created_by", "org_id",
|
||||
"comment", "created_by"
|
||||
]
|
||||
fields_m2m = ["reviewers", ]
|
||||
fields = fields_small + fields_m2m
|
||||
@@ -100,6 +100,20 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
'reviewers': {'label': _('Recipients')},
|
||||
}
|
||||
|
||||
class BaseUserACLSerializer(BaseACLSerializer):
|
||||
users = JSONManyToManyField(label=_('User'))
|
||||
|
||||
class Meta(BaseACLSerializer.Meta):
|
||||
fields = BaseACLSerializer.Meta.fields + ['users']
|
||||
|
||||
|
||||
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
|
||||
assets = JSONManyToManyField(label=_('Asset'))
|
||||
accounts = serializers.ListField(label=_('Account'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts', 'org_id']
|
||||
|
||||
def validate_reviewers(self, reviewers):
|
||||
action = self.initial_data.get('action')
|
||||
if not action and self.instance:
|
||||
@@ -118,19 +132,4 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
"None of the reviewers belong to Organization `{}`".format(org.name)
|
||||
)
|
||||
raise serializers.ValidationError(error)
|
||||
return valid_reviewers
|
||||
|
||||
|
||||
class BaseUserACLSerializer(BaseACLSerializer):
|
||||
users = JSONManyToManyField(label=_('User'))
|
||||
|
||||
class Meta(BaseACLSerializer.Meta):
|
||||
fields = BaseACLSerializer.Meta.fields + ['users']
|
||||
|
||||
|
||||
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
|
||||
assets = JSONManyToManyField(label=_('Asset'))
|
||||
accounts = serializers.ListField(label=_('Account'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts']
|
||||
return valid_reviewers
|
||||
@@ -1,4 +1,4 @@
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.serializers.mixin import CommonBulkModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
from ..const import ActionChoices
|
||||
from ..models import ConnectMethodACL
|
||||
@@ -6,16 +6,15 @@ from ..models import ConnectMethodACL
|
||||
__all__ = ["ConnectMethodACLSerializer"]
|
||||
|
||||
|
||||
class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
|
||||
class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = ConnectMethodACL
|
||||
fields = [
|
||||
i for i in BaseSerializer.Meta.fields + ['connect_methods']
|
||||
if i not in ['assets', 'accounts']
|
||||
if i not in ['assets', 'accounts', 'org_id']
|
||||
]
|
||||
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
|
||||
ActionChoices.review,
|
||||
ActionChoices.accept,
|
||||
ActionChoices.notice,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.face_online,
|
||||
|
||||
19
apps/acls/serializers/data_masking.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from acls.models import MaskingMethod, DataMaskingRule
|
||||
from common.serializers.fields import LabeledChoiceField
|
||||
from common.serializers.mixin import CommonBulkModelSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
|
||||
__all__ = ['DataMaskingRuleSerializer']
|
||||
|
||||
|
||||
class DataMaskingRuleSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
|
||||
masking_method = LabeledChoiceField(
|
||||
choices=MaskingMethod.choices, default=MaskingMethod.fixed_char, label=_('Masking Method')
|
||||
)
|
||||
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = DataMaskingRule
|
||||
fields = BaseSerializer.Meta.fields + ['fields_pattern', 'masking_method', 'mask_pattern']
|
||||
@@ -1,7 +1,7 @@
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from common.serializers import CommonBulkModelSerializer
|
||||
from common.serializers import MethodSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserACLSerializer
|
||||
from .rules import RuleSerializer
|
||||
from ..const import ActionChoices
|
||||
@@ -12,12 +12,12 @@ __all__ = ["LoginACLSerializer"]
|
||||
common_help_text = _("With * indicating a match all. ")
|
||||
|
||||
|
||||
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||
class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
|
||||
rules = MethodSerializer(label=_('Rule'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
model = LoginACL
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
|
||||
fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}))
|
||||
action_choices_exclude = [
|
||||
ActionChoices.warning,
|
||||
ActionChoices.notify_and_warn,
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
|
||||
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
|
||||
<hr>
|
||||
<p>{% trans 'We would like to inform you that a user has recently logged into the following asset:' %}<p>
|
||||
<p><strong>{% trans 'Asset details' %}:</strong></p>
|
||||
<ul>
|
||||
<li><strong>{% trans 'User' %}:</strong> [{{ name }}({{ username }})]</li>
|
||||
<li><strong>IP:</strong> [{{ ip }}]</li>
|
||||
<li><strong>{% trans 'Assets' %}:</strong> [{{ asset }}]</li>
|
||||
<li><strong>{% trans 'Account' %}:</strong> [{{ account_name }}({{ account }})]</li>
|
||||
<li><strong>{% trans 'Login asset acl' %}:</strong> [{{ acl_name }}]</li>
|
||||
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
|
||||
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
|
||||
</ul>
|
||||
<hr>
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
|
||||
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
|
||||
<hr>
|
||||
<p>{% trans 'We would like to inform you that a user has recently logged:' %}<p>
|
||||
<p><strong>{% trans 'User details' %}:</strong></p>
|
||||
@@ -8,7 +8,10 @@
|
||||
<li><strong>{% trans 'User' %}:</strong> [{{ username }}]</li>
|
||||
<li><strong>IP:</strong> [{{ ip }}]</li>
|
||||
<li><strong>{% trans 'Login city' %}:</strong> [{{ city }}]</li>
|
||||
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
|
||||
<li><strong>{% trans 'User agent' %}:</strong> [{{ user_agent }}]</li>
|
||||
<li><strong>{% trans 'Login acl' %}:</strong> [{{ acl_name }}]</li>
|
||||
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
|
||||
</ul>
|
||||
<hr>
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ router.register(r'login-asset-acls', api.LoginAssetACLViewSet, 'login-asset-acl'
|
||||
router.register(r'command-filter-acls', api.CommandFilterACLViewSet, 'command-filter-acl')
|
||||
router.register(r'command-groups', api.CommandGroupViewSet, 'command-group')
|
||||
router.register(r'connect-method-acls', api.ConnectMethodACLViewSet, 'connect-method-acl')
|
||||
router.register(r'data-masking-rules', api.DataMaskingRuleViewSet, 'data-masking-rule')
|
||||
|
||||
urlpatterns = [
|
||||
path('login-asset/check/', api.LoginAssetCheckAPI.as_view(), name='login-asset-check'),
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from collections import defaultdict
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext as _
|
||||
from django_filters import rest_framework as drf_filters
|
||||
@@ -113,7 +112,7 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
|
||||
("accounts", AccountSerializer),
|
||||
)
|
||||
rbac_perms = (
|
||||
("match", "assets.match_asset"),
|
||||
("match", "assets.view_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
("accounts", "assets.view_account"),
|
||||
@@ -181,33 +180,18 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
|
||||
def sync_platform_protocols(self, request, *args, **kwargs):
|
||||
platform_id = request.data.get('platform_id')
|
||||
platform = get_object_or_404(Platform, pk=platform_id)
|
||||
assets = platform.assets.all()
|
||||
asset_ids = list(platform.assets.values_list('id', flat=True))
|
||||
platform_protocols = list(platform.protocols.values('name', 'port'))
|
||||
|
||||
platform_protocols = {
|
||||
p['name']: p['port']
|
||||
for p in platform.protocols.values('name', 'port')
|
||||
}
|
||||
asset_protocols_map = defaultdict(set)
|
||||
protocols = assets.prefetch_related('protocols').values_list(
|
||||
'id', 'protocols__name'
|
||||
)
|
||||
for asset_id, protocol in protocols:
|
||||
asset_id = str(asset_id)
|
||||
asset_protocols_map[asset_id].add(protocol)
|
||||
objs = []
|
||||
for asset_id, protocols in asset_protocols_map.items():
|
||||
protocol_names = set(platform_protocols) - protocols
|
||||
if not protocol_names:
|
||||
continue
|
||||
for name in protocol_names:
|
||||
objs.append(
|
||||
Protocol(
|
||||
name=name,
|
||||
port=platform_protocols[name],
|
||||
asset_id=asset_id,
|
||||
)
|
||||
)
|
||||
Protocol.objects.bulk_create(objs)
|
||||
with transaction.atomic():
|
||||
if asset_ids:
|
||||
Protocol.objects.filter(asset_id__in=asset_ids).delete()
|
||||
if asset_ids and platform_protocols:
|
||||
objs = []
|
||||
for aid in asset_ids:
|
||||
for p in platform_protocols:
|
||||
objs.append(Protocol(name=p['name'], port=p['port'], asset_id=aid))
|
||||
Protocol.objects.bulk_create(objs)
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
def filter_bulk_update_data(self):
|
||||
|
||||
@@ -14,6 +14,7 @@ class FavoriteAssetViewSet(BulkModelViewSet):
|
||||
serializer_class = FavoriteAssetSerializer
|
||||
permission_classes = (IsValidUser,)
|
||||
filterset_fields = ['asset']
|
||||
page_no_limit = True
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
|
||||
@@ -43,7 +43,7 @@ class NodeViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
search_fields = ('full_value',)
|
||||
serializer_class = serializers.NodeSerializer
|
||||
rbac_perms = {
|
||||
'match': 'assets.match_node',
|
||||
'match': 'assets.view_node',
|
||||
'check_assets_amount_task': 'assets.change_node'
|
||||
}
|
||||
|
||||
|
||||
@@ -7,15 +7,18 @@ from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from assets.const import AllTypes
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
|
||||
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
|
||||
from common.api import JMSModelViewSet
|
||||
from common.permissions import IsValidUser
|
||||
from common.serializers import GroupedChoiceSerializer
|
||||
from rbac.models import RoleBinding
|
||||
|
||||
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']
|
||||
|
||||
|
||||
|
||||
|
||||
class PlatformFilter(filters.FilterSet):
|
||||
name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')
|
||||
|
||||
@@ -40,6 +43,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
'ops_methods': 'assets.view_platform',
|
||||
'filter_nodes_assets': 'assets.view_platform',
|
||||
}
|
||||
page_no_limit = True
|
||||
|
||||
def get_queryset(self):
|
||||
# 因为没有走分页逻辑,所以需要这里 prefetch
|
||||
@@ -63,6 +67,13 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
return super().get_object()
|
||||
return self.get_queryset().get(name=pk)
|
||||
|
||||
|
||||
def check_permissions(self, request):
|
||||
if self.action == 'list' and RoleBinding.is_org_admin(request.user):
|
||||
return True
|
||||
else:
|
||||
return super().check_permissions(request)
|
||||
|
||||
def check_object_permissions(self, request, obj):
|
||||
if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal:
|
||||
self.permission_denied(
|
||||
@@ -101,7 +112,10 @@ class PlatformProtocolViewSet(JMSModelViewSet):
|
||||
|
||||
|
||||
class PlatformAutomationMethodsApi(generics.ListAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
queryset = PlatformAutomation.objects.none()
|
||||
rbac_perms = {
|
||||
'list': 'assets.view_platform'
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def automation_methods():
|
||||
|
||||
@@ -13,3 +13,13 @@ class ProtocolListApi(ListAPIView):
|
||||
|
||||
def get_queryset(self):
|
||||
return list(Protocol.protocols())
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
search = self.request.query_params.get("search", "").lower().strip()
|
||||
if not search:
|
||||
return queryset
|
||||
queryset = [
|
||||
p for p in queryset
|
||||
if search in p['label'].lower() or search in p['value'].lower()
|
||||
]
|
||||
return queryset
|
||||
|
||||
@@ -161,6 +161,7 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
|
||||
'GET': 'assets.view_asset',
|
||||
'list': 'assets.view_asset',
|
||||
}
|
||||
queryset = Node.objects.none()
|
||||
|
||||
def get_assets(self):
|
||||
key = self.request.query_params.get('key')
|
||||
|
||||
@@ -11,15 +11,20 @@ class FormatAssetInfo:
@staticmethod
def get_cpu_model_count(cpus):
try:
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
if len(cpus) % 3 == 0:
step = 3
models = [cpus[i + 2] for i in range(0, len(cpus), step)]
elif len(cpus) % 2 == 0:
step = 2
models = [cpus[i + 1] for i in range(0, len(cpus), step)]
else:
raise ValueError("CPU list format not recognized")

model_counts = Counter(models)

result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''

return result

@staticmethod

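The rewrite above lets get_cpu_model_count accept two processor-fact layouts instead of assuming triples: lists whose length is a multiple of 3 (index, vendor, model per CPU, model at offset 2) and lists whose length is a multiple of 2 (index, model per CPU, model at offset 1); anything else raises and yields an empty result. A worked example with made-up processor facts:

cpus_triples = ['0', 'GenuineIntel', 'Xeon Gold 6130', '1', 'GenuineIntel', 'Xeon Gold 6130']
# len 6 is divisible by 3 -> every third element at offset 2 -> 'Xeon Gold 6130 x2'
cpus_pairs = ['0', 'Cortex-A72', '1', 'Cortex-A72', '2', 'Cortex-A72', '3', 'Cortex-A72']
# len 8 is not divisible by 3 but is by 2 -> every second element at offset 1 -> 'Cortex-A72 x4'
# Note the check is a heuristic: a pair-layout list whose length happens to be
# divisible by 3 (for example 6 CPUs, 12 elements) would take the triple branch.
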
@@ -4,6 +4,7 @@
|
||||
ansible_shell_type: sh
|
||||
ansible_connection: local
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
ansible_timeout: 30
|
||||
|
||||
tasks:
|
||||
- name: Test asset connection (pyfreerdp)
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
ansible_connection: local
|
||||
ansible_shell_type: sh
|
||||
ansible_become: false
|
||||
|
||||
ansible_timeout: 30
|
||||
tasks:
|
||||
- name: Test asset connection (paramiko)
|
||||
ssh_ping:
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
vars:
|
||||
ansible_connection: local
|
||||
ansible_shell_type: sh
|
||||
|
||||
ansible_timeout: 30
|
||||
tasks:
|
||||
- name: Test asset connection (telnet)
|
||||
telnet_ping:
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
ansible_timeout: 30
|
||||
|
||||
tasks:
|
||||
- name: Test MongoDB connection
|
||||
@@ -16,3 +17,5 @@
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
register: result
|
||||
failed_when: not result.is_available
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
ansible_timeout: 30
|
||||
|
||||
tasks:
|
||||
- name: Test MySQL connection
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
ansible_timeout: 30
|
||||
|
||||
tasks:
|
||||
- name: Test Oracle connection
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
ansible_timeout: 30
|
||||
tasks:
|
||||
- name: Test PostgreSQL connection
|
||||
community.postgresql.postgresql_ping:
|
||||
|
||||
@@ -2,14 +2,17 @@
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
ansible_timeout: 30
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
|
||||
@@ -14,6 +14,10 @@ class Connectivity(TextChoices):
|
||||
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
|
||||
CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')
|
||||
|
||||
@classmethod
|
||||
def as_dict(cls):
|
||||
return {choice.value: choice.label for choice in cls}
|
||||
|
||||
|
||||
class AutomationTypes(TextChoices):
|
||||
ping = 'ping', _('Ping')
|
||||
|
||||
@@ -20,3 +20,7 @@ class Category(ChoicesMixin, models.TextChoices):
|
||||
_category = getattr(cls, category.upper(), None)
|
||||
choices = [(_category.value, _category.label)] if _category else cls.choices
|
||||
return choices
|
||||
|
||||
@classmethod
|
||||
def as_dict(cls):
|
||||
return {choice.value: choice.label for choice in cls}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.models import Organization
|
||||
from .base import BaseType
|
||||
|
||||
|
||||
@@ -52,3 +53,41 @@ class GPTTypes(BaseType):
|
||||
return [
|
||||
cls.CHATGPT,
|
||||
]
|
||||
|
||||
|
||||
CHATX_NAME = 'ChatX'
|
||||
|
||||
|
||||
def create_or_update_chatx_resources(chatx_name=CHATX_NAME, org_id=Organization.SYSTEM_ID):
|
||||
from django.apps import apps
|
||||
|
||||
platform_model = apps.get_model('assets', 'Platform')
|
||||
asset_model = apps.get_model('assets', 'Asset')
|
||||
account_model = apps.get_model('accounts', 'Account')
|
||||
|
||||
platform, __ = platform_model.objects.get_or_create(
|
||||
name=chatx_name,
|
||||
defaults={
|
||||
'internal': True,
|
||||
'type': chatx_name,
|
||||
'category': 'ai',
|
||||
}
|
||||
)
|
||||
asset, __ = asset_model.objects.get_or_create(
|
||||
address=chatx_name,
|
||||
defaults={
|
||||
'name': chatx_name,
|
||||
'platform': platform,
|
||||
'org_id': org_id
|
||||
}
|
||||
)
|
||||
|
||||
account, __ = account_model.objects.get_or_create(
|
||||
username=chatx_name,
|
||||
defaults={
|
||||
'name': chatx_name,
|
||||
'asset': asset,
|
||||
'org_id': org_id
|
||||
}
|
||||
)
|
||||
return asset, account
|
||||
|
||||
@@ -250,6 +250,12 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
||||
'default': False,
|
||||
'label': _('Auth username')
|
||||
},
|
||||
'enable_cluster_mode': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': _('Enable cluster mode'),
|
||||
'help_text': _('Enable if this Redis instance is part of a cluster')
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -349,7 +355,7 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
||||
for protocol, config in cls.settings().items():
|
||||
if not xpack_enabled and config.get('xpack', False):
|
||||
continue
|
||||
protocols.append(protocol)
|
||||
protocols.append({'label': protocol.label, 'value': protocol.value})
|
||||
|
||||
from assets.models.platform import PlatformProtocol
|
||||
custom_protocols = (
|
||||
|
||||
@@ -112,7 +112,7 @@ class Protocol(models.Model):
|
||||
return protocols[0] if len(protocols) > 0 else {}
|
||||
|
||||
@property
|
||||
def setting(self):
|
||||
def setting(self) -> dict:
|
||||
if self._setting is not None:
|
||||
return self._setting
|
||||
return self.asset_platform_protocol.get('setting', {})
|
||||
@@ -122,7 +122,7 @@ class Protocol(models.Model):
|
||||
self._setting = value
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
def public(self) -> bool:
|
||||
return self.asset_platform_protocol.get('public', True)
|
||||
|
||||
|
||||
@@ -210,7 +210,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return self.category == const.Category.DS and hasattr(self, 'ds')
|
||||
|
||||
@lazyproperty
|
||||
def spec_info(self):
|
||||
def spec_info(self) -> dict:
|
||||
instance = getattr(self, self.category, None)
|
||||
if not instance:
|
||||
return {}
|
||||
@@ -240,7 +240,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return info
|
||||
|
||||
@lazyproperty
|
||||
def auto_config(self):
|
||||
def auto_config(self) -> dict:
|
||||
platform = self.platform
|
||||
auto_config = {
|
||||
'su_enabled': platform.su_enabled,
|
||||
@@ -343,11 +343,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return names
|
||||
|
||||
@lazyproperty
|
||||
def type(self):
|
||||
def type(self) -> str:
|
||||
return self.platform.type
|
||||
|
||||
@lazyproperty
|
||||
def category(self):
|
||||
def category(self) -> str:
|
||||
return self.platform.category
|
||||
|
||||
def is_category(self, category):
|
||||
@@ -408,8 +408,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return tree_node
|
||||
|
||||
@staticmethod
|
||||
def get_secret_type_assets(asset_ids, secret_type):
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
def get_secret_type_assets(assets, secret_type):
|
||||
asset_protocol = assets.prefetch_related('protocols').values_list('id', 'protocols__name')
|
||||
protocol_secret_types_map = const.Protocol.protocol_secret_types()
|
||||
asset_secret_types_mapp = defaultdict(set)
|
||||
|
||||
@@ -53,7 +53,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
|
||||
return name
|
||||
|
||||
def get_all_assets(self):
|
||||
nodes = self.nodes.all()
|
||||
nodes = self.nodes.only("id", "key")
|
||||
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
|
||||
direct_asset_ids = self.assets.all().values_list("id", flat=True)
|
||||
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
|
||||
|
||||
@@ -28,7 +28,8 @@ class MyAsset(JMSBaseModel):
|
||||
|
||||
@staticmethod
|
||||
def set_asset_custom_value(assets, user):
|
||||
my_assets = MyAsset.objects.filter(asset__in=assets, user=user).all()
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
my_assets = MyAsset.objects.filter(asset_id__in=asset_ids, user=user).all()
|
||||
customs = {my_asset.asset.id: my_asset.custom_to_dict() for my_asset in my_assets}
|
||||
for asset in assets:
|
||||
custom = customs.get(asset.id)
|
||||
|
||||
@@ -573,7 +573,7 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
return not self.__gt__(other)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
return self.value
|
||||
|
||||
def computed_full_value(self):
Some files were not shown because too many files have changed in this diff.