mirror of
https://github.com/jumpserver/jumpserver.git
synced 2025-12-15 08:32:48 +00:00
Compare commits
264 Commits
v4.10.7
...
refactor_p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4b29928d9b | ||
|
|
21cf94493c | ||
|
|
39caee6a2b | ||
|
|
9c1a36c573 | ||
|
|
5ca5234274 | ||
|
|
83b7ccf225 | ||
|
|
8ee3f9935a | ||
|
|
6ec0bee77d | ||
|
|
afd1cd4542 | ||
|
|
7c39f9f43e | ||
|
|
ab9e10791b | ||
|
|
878974ffbd | ||
|
|
3052c5055f | ||
|
|
4be301c2dc | ||
|
|
7f90027754 | ||
|
|
db3cd0bcc7 | ||
|
|
6995754fd9 | ||
|
|
8bd116e955 | ||
|
|
41884d054d | ||
|
|
0ef78fb545 | ||
|
|
98218e814b | ||
|
|
167267067f | ||
|
|
8126d52a8b | ||
|
|
8b53a21659 | ||
|
|
3496a31e1f | ||
|
|
2ec71feafc | ||
|
|
02e8905330 | ||
|
|
8d68f5589b | ||
|
|
4df13fc384 | ||
|
|
78c1162028 | ||
|
|
14c2512b45 | ||
|
|
d6d7072da5 | ||
|
|
993bc36c5e | ||
|
|
ecff2ea07e | ||
|
|
ba70edf221 | ||
|
|
50050dff57 | ||
|
|
944226866c | ||
|
|
fe13221d88 | ||
|
|
ba17863892 | ||
|
|
065bfeda52 | ||
|
|
04af26500a | ||
|
|
e0388364c3 | ||
|
|
3c96480b0c | ||
|
|
95331a0c4b | ||
|
|
b8ecb703cf | ||
|
|
1a3f5e3f9a | ||
|
|
854396e8d5 | ||
|
|
ab08603e66 | ||
|
|
427fd3f72c | ||
|
|
0aba9ba120 | ||
|
|
045ca8807a | ||
|
|
19a68d8930 | ||
|
|
75ed02a2d2 | ||
|
|
f420dac49c | ||
|
|
1ee68134f2 | ||
|
|
937265db5d | ||
|
|
c611d5e88b | ||
|
|
883b6b6383 | ||
|
|
ac4c72064f | ||
|
|
dbf8360e27 | ||
|
|
150d7a09bc | ||
|
|
a7ed20e059 | ||
|
|
1b7b8e6f2e | ||
|
|
cd22fbce19 | ||
|
|
c191d86f43 | ||
|
|
7911137ffb | ||
|
|
1053933cae | ||
|
|
96fdc025cd | ||
|
|
fde19764e0 | ||
|
|
978fbc70e6 | ||
|
|
636ffd786d | ||
|
|
3b756aa26f | ||
|
|
817c0099d1 | ||
|
|
a0d7871130 | ||
|
|
c97124c279 | ||
|
|
32a766ed34 | ||
|
|
58fd15d743 | ||
|
|
f50250dedb | ||
|
|
9e150b7fbe | ||
|
|
16c79f59a7 | ||
|
|
be0f04862a | ||
|
|
1a3fb2f0db | ||
|
|
4cd70efe66 | ||
|
|
28700c01c8 | ||
|
|
4524822245 | ||
|
|
9d04fda018 | ||
|
|
01c277cd1e | ||
|
|
c4b3531d72 | ||
|
|
8870d1ef9e | ||
|
|
6c5086a083 | ||
|
|
e9f762a982 | ||
|
|
d4d4cadbcd | ||
|
|
5e56590405 | ||
|
|
ad8c0f6664 | ||
|
|
47dd6babfc | ||
|
|
691d1c4dba | ||
|
|
ac485804d5 | ||
|
|
51e5fdb301 | ||
|
|
69c4d613f7 | ||
|
|
1ad825bf0d | ||
|
|
a286cb9343 | ||
|
|
1eb489bb2d | ||
|
|
4334ae9e5e | ||
|
|
f2e346a0c3 | ||
|
|
dc20b06431 | ||
|
|
387a9248fc | ||
|
|
705fd6385f | ||
|
|
0ccf36621f | ||
|
|
a9ae12fc2c | ||
|
|
7b1a25adde | ||
|
|
a1b5eb1cd8 | ||
|
|
24ac642c5e | ||
|
|
e4f5e21219 | ||
|
|
a2aae9db47 | ||
|
|
206c43cf75 | ||
|
|
019a657ec3 | ||
|
|
fad60ee40f | ||
|
|
1728412793 | ||
|
|
3e93034fbc | ||
|
|
f4b3a7d73a | ||
|
|
3781c40179 | ||
|
|
fab6219cea | ||
|
|
dd0cacb4bc | ||
|
|
b8639601a1 | ||
|
|
ab9882c9c1 | ||
|
|
77a7b74b15 | ||
|
|
4bc05865f1 | ||
|
|
bec9e4f3a7 | ||
|
|
359adf3dbb | ||
|
|
ac54bb672c | ||
|
|
9e3ba00bc4 | ||
|
|
2ec9a43317 | ||
|
|
06be56ef06 | ||
|
|
b2a618b206 | ||
|
|
1039c2e320 | ||
|
|
8d7267400d | ||
|
|
d67e473884 | ||
|
|
70068c9253 | ||
|
|
d68babb2e1 | ||
|
|
afb6f466d5 | ||
|
|
453ad331ee | ||
|
|
d309d11a8f | ||
|
|
4771693a56 | ||
|
|
cefc820ac1 | ||
|
|
d007afdb43 | ||
|
|
e8921a43be | ||
|
|
a9b44103d4 | ||
|
|
4abf2bded6 | ||
|
|
54693089a0 | ||
|
|
0b859dd502 | ||
|
|
3fb27f969a | ||
|
|
45627a1d92 | ||
|
|
245e2dab66 | ||
|
|
8f0a41b1a8 | ||
|
|
1a9e56c520 | ||
|
|
67c2f471b4 | ||
|
|
b04f96f5f2 | ||
|
|
30f03b7d89 | ||
|
|
28a97d0b5a | ||
|
|
3410686690 | ||
|
|
6860e2327f | ||
|
|
20253e760c | ||
|
|
a63cfde8d2 | ||
|
|
92e250e03b | ||
|
|
098f0950cb | ||
|
|
39b0830a6b | ||
|
|
2e847bc2bc | ||
|
|
f82f31876a | ||
|
|
cde182c015 | ||
|
|
b990cdf561 | ||
|
|
c9a062823d | ||
|
|
643ba4fc15 | ||
|
|
d16a55bbe2 | ||
|
|
ae31554729 | ||
|
|
53b47980a2 | ||
|
|
d31b5ee570 | ||
|
|
65aea1ea36 | ||
|
|
5abb5c5d5a | ||
|
|
93e41a5004 | ||
|
|
95f51bbe48 | ||
|
|
0184d292ec | ||
|
|
23a6d320c7 | ||
|
|
b16304c48a | ||
|
|
7cd1e4d3a0 | ||
|
|
64a9987c3f | ||
|
|
18bfe312fa | ||
|
|
c593f91d77 | ||
|
|
46da05652a | ||
|
|
9249aba1a9 | ||
|
|
eca637c120 | ||
|
|
ddacd5fce1 | ||
|
|
3ca5c04099 | ||
|
|
6603a073ec | ||
|
|
d745f7495a | ||
|
|
76f1667c89 | ||
|
|
1ab1954299 | ||
|
|
c8335999a4 | ||
|
|
5b4a67362d | ||
|
|
e025073da2 | ||
|
|
2155bc6862 | ||
|
|
953b515817 | ||
|
|
7f7a354b2d | ||
|
|
2b2f7ea3f0 | ||
|
|
529123e1b5 | ||
|
|
e156ab6ad8 | ||
|
|
3c1fd134ae | ||
|
|
b15f663c87 | ||
|
|
93906dff0a | ||
|
|
307befdacd | ||
|
|
dbfc4d3981 | ||
|
|
849edd33c1 | ||
|
|
37cceec8fe | ||
|
|
d2494c25cc | ||
|
|
023952582e | ||
|
|
863fe95100 | ||
|
|
4b0bdb18c9 | ||
|
|
10da053a95 | ||
|
|
c40bc46520 | ||
|
|
a732cc614e | ||
|
|
bb29d519c6 | ||
|
|
b56c3a76a7 | ||
|
|
ab908d24a7 | ||
|
|
79cabe1b3c | ||
|
|
231b7287c1 | ||
|
|
be7a4c0d6e | ||
|
|
009da19050 | ||
|
|
dfda6b1e08 | ||
|
|
59b40578d8 | ||
|
|
e5db28c014 | ||
|
|
6d1f26b0f8 | ||
|
|
2333dbbe33 | ||
|
|
16461b0fa9 | ||
|
|
528b0ea1ba | ||
|
|
60f06adaa9 | ||
|
|
7a6187b95f | ||
|
|
aacaf3a174 | ||
|
|
3c9d2534fa | ||
|
|
4f79abe678 | ||
|
|
ae9956ff91 | ||
|
|
429677e0ce | ||
|
|
034ee65157 | ||
|
|
fdd7d9b6b1 | ||
|
|
db0e21f5d9 | ||
|
|
468b84eb3d | ||
|
|
28d5475d0f | ||
|
|
b9c60d856f | ||
|
|
bd1d73c6dd | ||
|
|
bf92c756d4 | ||
|
|
62ebe0d636 | ||
|
|
0b1fea8492 | ||
|
|
65b5f573f8 | ||
|
|
bb639e1fe7 | ||
|
|
395b868dcf | ||
|
|
1350b774b3 | ||
|
|
af7a00c1b1 | ||
|
|
965ec7007c | ||
|
|
1372fd7535 | ||
|
|
3b0ef4cca7 | ||
|
|
6832abdaad | ||
|
|
c6bf290dbb | ||
|
|
23ab66c11a | ||
|
|
1debaa5547 | ||
|
|
47413966c9 | ||
|
|
703f39607c |
26
.github/.github/issue-spam-config.json
vendored
Normal file
26
.github/.github/issue-spam-config.json
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"dry_run": false,
|
||||
"min_account_age_days": 3,
|
||||
"max_urls_for_spam": 1,
|
||||
"min_body_len_for_links": 40,
|
||||
"spam_words": [
|
||||
"call now",
|
||||
"zadzwoń",
|
||||
"zadzwoń teraz",
|
||||
"kontakt",
|
||||
"telefon",
|
||||
"telefone",
|
||||
"contato",
|
||||
"suporte",
|
||||
"infolinii",
|
||||
"click here",
|
||||
"buy now",
|
||||
"subscribe",
|
||||
"visit"
|
||||
],
|
||||
"bracket_max": 6,
|
||||
"special_char_density_threshold": 0.12,
|
||||
"phone_regex": "\\+?\\d[\\d\\-\\s\\(\\)\\.]{6,}\\d",
|
||||
"labels_for_spam": ["spam"],
|
||||
"labels_for_review": ["needs-triage"]
|
||||
}
|
||||
120
.github/workflows/build-base-image.yml
vendored
120
.github/workflows/build-base-image.yml
vendored
@@ -1,74 +1,72 @@
|
||||
name: Build and Push Base Image
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'dev'
|
||||
- 'v*'
|
||||
paths:
|
||||
- poetry.lock
|
||||
- pyproject.toml
|
||||
- Dockerfile-base
|
||||
- package.json
|
||||
- go.mod
|
||||
- yarn.lock
|
||||
- pom.xml
|
||||
- install_deps.sh
|
||||
- utils/clean_site_packages.sh
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
pull_request:
|
||||
branches:
|
||||
- 'dev'
|
||||
- 'v*'
|
||||
paths:
|
||||
- poetry.lock
|
||||
- pyproject.toml
|
||||
- Dockerfile-base
|
||||
- package.json
|
||||
- go.mod
|
||||
- yarn.lock
|
||||
- pom.xml
|
||||
- install_deps.sh
|
||||
- utils/clean_site_packages.sh
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
build-and-push:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
image: tonistiigi/binfmt:qemu-v7.0.0-28
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Extract date
|
||||
id: vars
|
||||
run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV
|
||||
- name: Extract date
|
||||
id: vars
|
||||
run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV
|
||||
|
||||
- name: Extract repository name
|
||||
id: repo
|
||||
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
|
||||
- name: Extract repository name
|
||||
id: repo
|
||||
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push multi-arch image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
file: Dockerfile-base
|
||||
tags: jumpserver/core-base:${{ env.IMAGE_TAG }}
|
||||
- name: Build and push multi-arch image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
file: Dockerfile-base
|
||||
tags: jumpserver/core-base:${{ env.IMAGE_TAG }}
|
||||
|
||||
- name: Update Dockerfile
|
||||
run: |
|
||||
sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile
|
||||
- name: Update Dockerfile
|
||||
run: |
|
||||
sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile
|
||||
|
||||
- name: Commit changes
|
||||
run: |
|
||||
git config --global user.name 'github-actions[bot]'
|
||||
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
||||
git add Dockerfile
|
||||
git commit -m "perf: Update Dockerfile with new base image tag"
|
||||
git push origin ${{ github.event.pull_request.head.ref }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Commit changes
|
||||
run: |
|
||||
git config --global user.name 'github-actions[bot]'
|
||||
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
||||
git add Dockerfile
|
||||
git commit -m "perf: Update Dockerfile with new base image tag"
|
||||
git push origin ${{ github.event.pull_request.head.ref }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
46
.github/workflows/build-python-image.yml
vendored
Normal file
46
.github/workflows/build-python-image.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
name: Build and Push Python Base Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: 'Tag to build'
|
||||
required: true
|
||||
default: '3.11-slim-bullseye-v1'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
image: tonistiigi/binfmt:qemu-v7.0.0-28
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Extract repository name
|
||||
id: repo
|
||||
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push multi-arch image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
file: Dockerfile-python
|
||||
tags: jumpserver/core-base:python-${{ inputs.tag }}
|
||||
|
||||
123
.github/workflows/cleanup-branches.yml
vendored
Normal file
123
.github/workflows/cleanup-branches.yml
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
name: Cleanup PR Branches
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# 每天凌晨2点运行
|
||||
- cron: '0 2 * * *'
|
||||
workflow_dispatch:
|
||||
# 允许手动触发
|
||||
inputs:
|
||||
dry_run:
|
||||
description: 'Dry run mode (default: true)'
|
||||
required: false
|
||||
default: 'true'
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
cleanup-branches:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # 获取所有分支和提交历史
|
||||
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global user.name "GitHub Actions"
|
||||
git config --global user.email "actions@github.com"
|
||||
|
||||
- name: Get dry run setting
|
||||
id: dry-run
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
echo "dry_run=${{ github.event.inputs.dry_run }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "dry_run=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Cleanup branches
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DRY_RUN: ${{ steps.dry-run.outputs.dry_run }}
|
||||
run: |
|
||||
echo "Starting branch cleanup..."
|
||||
echo "Dry run mode: $DRY_RUN"
|
||||
|
||||
# 获取所有本地分支
|
||||
git fetch --all --prune
|
||||
|
||||
# 获取以 pr 或 repr 开头的分支
|
||||
branches=$(git branch -r | grep -E 'origin/(pr|repr)' | sed 's/origin\///' | grep -v 'HEAD')
|
||||
|
||||
echo "Found branches matching pattern:"
|
||||
echo "$branches"
|
||||
|
||||
deleted_count=0
|
||||
skipped_count=0
|
||||
|
||||
for branch in $branches; do
|
||||
echo ""
|
||||
echo "Processing branch: $branch"
|
||||
|
||||
# 检查分支是否有未合并的PR
|
||||
pr_info=$(gh pr list --head "$branch" --state open --json number,title,state 2>/dev/null)
|
||||
|
||||
if [ $? -eq 0 ] && [ "$pr_info" != "[]" ]; then
|
||||
echo " ⚠️ Branch has open PR(s), skipping deletion"
|
||||
echo " PR info: $pr_info"
|
||||
skipped_count=$((skipped_count + 1))
|
||||
continue
|
||||
fi
|
||||
|
||||
# 检查分支是否有已合并的PR(可选:如果PR已合并也可以删除)
|
||||
merged_pr_info=$(gh pr list --head "$branch" --state merged --json number,title,state 2>/dev/null)
|
||||
|
||||
if [ $? -eq 0 ] && [ "$merged_pr_info" != "[]" ]; then
|
||||
echo " ✅ Branch has merged PR(s), safe to delete"
|
||||
echo " Merged PR info: $merged_pr_info"
|
||||
else
|
||||
echo " ℹ️ No PRs found for this branch"
|
||||
fi
|
||||
|
||||
# 执行删除操作
|
||||
if [ "$DRY_RUN" = "true" ]; then
|
||||
echo " 🔍 [DRY RUN] Would delete branch: $branch"
|
||||
deleted_count=$((deleted_count + 1))
|
||||
else
|
||||
echo " 🗑️ Deleting branch: $branch"
|
||||
|
||||
# 删除远程分支
|
||||
if git push origin --delete "$branch" 2>/dev/null; then
|
||||
echo " ✅ Successfully deleted remote branch: $branch"
|
||||
deleted_count=$((deleted_count + 1))
|
||||
else
|
||||
echo " ❌ Failed to delete remote branch: $branch"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "=== Cleanup Summary ==="
|
||||
echo "Branches processed: $(echo "$branches" | wc -l)"
|
||||
echo "Branches deleted: $deleted_count"
|
||||
echo "Branches skipped: $skipped_count"
|
||||
|
||||
if [ "$DRY_RUN" = "true" ]; then
|
||||
echo ""
|
||||
echo "🔍 This was a DRY RUN - no branches were actually deleted"
|
||||
echo "To perform actual deletion, run this workflow manually with dry_run=false"
|
||||
fi
|
||||
|
||||
- name: Create summary
|
||||
if: always()
|
||||
run: |
|
||||
echo "## Branch Cleanup Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Workflow:** ${{ github.workflow }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Run ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Dry Run:** ${{ steps.dry-run.outputs.dry_run }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Triggered by:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Check the logs above for detailed information about processed branches." >> $GITHUB_STEP_SUMMARY
|
||||
29
.github/workflows/jms-generic-action-handler.yml
vendored
29
.github/workflows/jms-generic-action-handler.yml
vendored
@@ -1,10 +1,33 @@
|
||||
on: [push, pull_request, release]
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
types: [opened, synchronize, closed]
|
||||
release:
|
||||
types: [created]
|
||||
|
||||
name: JumpServer repos generic handler
|
||||
|
||||
jobs:
|
||||
generic_handler:
|
||||
name: Run generic handler
|
||||
handle_pull_request:
|
||||
if: github.event_name == 'pull_request'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: jumpserver/action-generic-handler@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
|
||||
I18N_TOKEN: ${{ secrets.I18N_TOKEN }}
|
||||
|
||||
handle_push:
|
||||
if: github.event_name == 'push'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: jumpserver/action-generic-handler@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
|
||||
I18N_TOKEN: ${{ secrets.I18N_TOKEN }}
|
||||
|
||||
handle_release:
|
||||
if: github.event_name == 'release'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: jumpserver/action-generic-handler@master
|
||||
|
||||
9
.github/workflows/sync-gitee.yml
vendored
9
.github/workflows/sync-gitee.yml
vendored
@@ -1,11 +1,9 @@
|
||||
name: 🔀 Sync mirror to Gitee
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- dev
|
||||
create:
|
||||
schedule:
|
||||
# 每天凌晨3点运行
|
||||
- cron: '0 3 * * *'
|
||||
|
||||
jobs:
|
||||
mirror:
|
||||
@@ -14,7 +12,6 @@ jobs:
|
||||
steps:
|
||||
- name: mirror
|
||||
continue-on-error: true
|
||||
if: github.event_name == 'push' || (github.event_name == 'create' && github.event.ref_type == 'tag')
|
||||
uses: wearerequired/git-mirror-action@v1
|
||||
env:
|
||||
SSH_PRIVATE_KEY: ${{ secrets.GITEE_SSH_PRIVATE_KEY }}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM jumpserver/core-base:20250819_064003 AS stage-build
|
||||
FROM jumpserver/core-base:20251128_025056 AS stage-build
|
||||
|
||||
ARG VERSION
|
||||
|
||||
@@ -19,7 +19,7 @@ RUN set -ex \
|
||||
&& python manage.py compilemessages
|
||||
|
||||
|
||||
FROM python:3.11-slim-bullseye
|
||||
FROM python:3.11-slim-trixie
|
||||
ENV LANG=en_US.UTF-8 \
|
||||
PATH=/opt/py3/bin:$PATH
|
||||
|
||||
@@ -33,12 +33,13 @@ ARG TOOLS=" \
|
||||
default-libmysqlclient-dev \
|
||||
openssh-client \
|
||||
sshpass \
|
||||
nmap \
|
||||
bubblewrap"
|
||||
|
||||
ARG APT_MIRROR=http://deb.debian.org
|
||||
|
||||
RUN set -ex \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update > /dev/null \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
FROM python:3.11-slim-bullseye
|
||||
FROM python:3.11.14-slim-trixie
|
||||
ARG TARGETARCH
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
|
||||
# Install APT dependencies
|
||||
ARG DEPENDENCIES=" \
|
||||
ca-certificates \
|
||||
@@ -22,13 +21,13 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
set -ex \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
|
||||
&& apt-get update > /dev/null \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& echo "no" | dpkg-reconfigure dash
|
||||
|
||||
# Install bin tools
|
||||
ARG CHECK_VERSION=v1.0.4
|
||||
ARG CHECK_VERSION=v1.0.5
|
||||
RUN set -ex \
|
||||
&& wget https://github.com/jumpserver-dev/healthcheck/releases/download/${CHECK_VERSION}/check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
|
||||
&& tar -xf check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
|
||||
@@ -41,12 +40,10 @@ RUN set -ex \
|
||||
WORKDIR /opt/jumpserver
|
||||
|
||||
ARG PIP_MIRROR=https://pypi.org/simple
|
||||
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
|
||||
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
|
||||
ENV LANG=en_US.UTF-8 \
|
||||
PATH=/opt/py3/bin:$PATH
|
||||
|
||||
ENV UV_LINK_MODE=copy
|
||||
ENV SETUPTOOLS_SCM_PRETEND_VERSION=3.4.5
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
@@ -54,6 +51,7 @@ RUN --mount=type=cache,target=/root/.cache \
|
||||
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
|
||||
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
|
||||
set -ex \
|
||||
&& pip install uv -i${PIP_MIRROR} \
|
||||
&& uv venv \
|
||||
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
|
||||
&& ln -sf $(pwd)/.venv /opt/py3 \
|
||||
|
||||
@@ -13,7 +13,7 @@ ARG TOOLS=" \
|
||||
nmap \
|
||||
telnet \
|
||||
vim \
|
||||
postgresql-client-13 \
|
||||
postgresql-client \
|
||||
wget \
|
||||
poppler-utils"
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
<a name="readme-top"></a>
|
||||
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
|
||||
|
||||
## An open-source PAM tool (Bastion Host)
|
||||
## An open-source PAM platform (Bastion Host)
|
||||
|
||||
[![][license-shield]][license-link]
|
||||
[![][docs-shield]][docs-link]
|
||||
@@ -19,7 +19,7 @@
|
||||
|
||||
## What is JumpServer?
|
||||
|
||||
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
|
||||
JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
|
||||
|
||||
|
||||
<picture>
|
||||
@@ -77,7 +77,8 @@ JumpServer consists of multiple key components, which collectively form the func
|
||||
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal |
|
||||
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
|
||||
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
|
||||
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
|
||||
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB
|
||||
| [Client](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer Client |
|
||||
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
|
||||
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
|
||||
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
from django.db import transaction
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers as drf_serializers
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.generics import ListAPIView, CreateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.status import HTTP_200_OK
|
||||
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
|
||||
|
||||
from accounts import serializers
|
||||
from accounts.const import ChangeSecretRecordStatusChoice
|
||||
from accounts.filters import AccountFilterSet, NodeFilterBackend
|
||||
from accounts.mixins import AccountRecordViewLogMixin
|
||||
from accounts.models import Account, ChangeSecretRecord
|
||||
from assets.const.gpt import create_or_update_chatx_resources
|
||||
from assets.models import Asset, Node
|
||||
from authentication.permissions import UserConfirmation, ConfirmType
|
||||
from common.api.mixin import ExtraFilterFieldsMixin
|
||||
@@ -18,6 +20,7 @@ from common.drf.filters import AttrRulesFilterBackend
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils import lazyproperty, get_logger
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from orgs.utils import tmp_to_root_org
|
||||
from rbac.permissions import RBACPermission
|
||||
|
||||
logger = get_logger(__file__)
|
||||
@@ -43,6 +46,7 @@ class AccountViewSet(OrgBulkModelViewSet):
|
||||
'clear_secret': 'accounts.change_account',
|
||||
'move_to_assets': 'accounts.delete_account',
|
||||
'copy_to_assets': 'accounts.add_account',
|
||||
'chat': 'accounts.view_account',
|
||||
}
|
||||
export_as_zip = True
|
||||
|
||||
@@ -152,6 +156,13 @@ class AccountViewSet(OrgBulkModelViewSet):
|
||||
def copy_to_assets(self, request, *args, **kwargs):
|
||||
return self._copy_or_move_to_assets(request, move=False)
|
||||
|
||||
@action(methods=['get'], detail=False, url_path='chat')
|
||||
def chat(self, request, *args, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
__, account = create_or_update_chatx_resources()
|
||||
serializer = self.get_serializer(account)
|
||||
return Response(serializer.data)
|
||||
|
||||
|
||||
class AccountSecretsViewSet(AccountRecordViewLogMixin, AccountViewSet):
|
||||
"""
|
||||
@@ -174,12 +185,66 @@ class AssetAccountBulkCreateApi(CreateAPIView):
|
||||
'POST': 'accounts.add_account',
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_all_assets(base_payload: dict):
|
||||
nodes = base_payload.pop('nodes', [])
|
||||
asset_ids = base_payload.pop('assets', [])
|
||||
nodes = Node.objects.filter(id__in=nodes).only('id', 'key')
|
||||
|
||||
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
|
||||
asset_ids = set(asset_ids + list(node_asset_ids))
|
||||
return Asset.objects.filter(id__in=asset_ids)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
data = serializer.create(serializer.validated_data)
|
||||
serializer = serializers.AssetAccountBulkSerializerResultSerializer(data, many=True)
|
||||
return Response(data=serializer.data, status=HTTP_200_OK)
|
||||
if hasattr(request.data, "copy"):
|
||||
base_payload = request.data.copy()
|
||||
else:
|
||||
base_payload = dict(request.data)
|
||||
|
||||
templates = base_payload.pop("template", None)
|
||||
assets = self.get_all_assets(base_payload)
|
||||
if not assets.exists():
|
||||
error = _("No valid assets found for account creation.")
|
||||
return Response(
|
||||
data={
|
||||
"detail": error,
|
||||
"code": "no_valid_assets"
|
||||
},
|
||||
status=HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
result = []
|
||||
errors = []
|
||||
|
||||
def handle_one(_payload):
|
||||
try:
|
||||
ser = self.get_serializer(data=_payload)
|
||||
ser.is_valid(raise_exception=True)
|
||||
data = ser.bulk_create(ser.validated_data, assets)
|
||||
if isinstance(data, (list, tuple)):
|
||||
result.extend(data)
|
||||
else:
|
||||
result.append(data)
|
||||
except drf_serializers.ValidationError as e:
|
||||
errors.extend(list(e.detail))
|
||||
except Exception as e:
|
||||
errors.extend([str(e)])
|
||||
|
||||
if not templates:
|
||||
handle_one(base_payload)
|
||||
else:
|
||||
if not isinstance(templates, (list, tuple)):
|
||||
templates = [templates]
|
||||
for tpl in templates:
|
||||
payload = dict(base_payload)
|
||||
payload["template"] = tpl
|
||||
handle_one(payload)
|
||||
|
||||
if errors:
|
||||
raise drf_serializers.ValidationError(errors)
|
||||
|
||||
out_ser = serializers.AssetAccountBulkSerializerResultSerializer(result, many=True)
|
||||
return Response(data=out_ser.data, status=HTTP_200_OK)
|
||||
|
||||
|
||||
class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixin, ListAPIView):
|
||||
@@ -190,6 +255,7 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
|
||||
rbac_perms = {
|
||||
'GET': 'accounts.view_accountsecret',
|
||||
}
|
||||
queryset = Account.history.model.objects.none()
|
||||
|
||||
@lazyproperty
|
||||
def account(self) -> Account:
|
||||
|
||||
@@ -25,7 +25,8 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
|
||||
}
|
||||
rbac_perms = {
|
||||
'get_once_secret': 'accounts.change_integrationapplication',
|
||||
'get_account_secret': 'accounts.view_integrationapplication'
|
||||
'get_account_secret': 'accounts.view_integrationapplication',
|
||||
'get_sdks_info': 'accounts.view_integrationapplication'
|
||||
}
|
||||
|
||||
def read_file(self, path):
|
||||
@@ -36,7 +37,6 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
|
||||
|
||||
@action(
|
||||
['GET'], detail=False, url_path='sdks',
|
||||
permission_classes=[IsValidUser]
|
||||
)
|
||||
def get_sdks_info(self, request, *args, **kwargs):
|
||||
code_suffix_mapper = {
|
||||
|
||||
@@ -12,6 +12,8 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
|
||||
filterset_fields = ('alias',)
|
||||
|
||||
def get_queryset(self):
|
||||
if getattr(self, "swagger_fake_view", False):
|
||||
return VirtualAccount.objects.none()
|
||||
return VirtualAccount.get_or_init_queryset()
|
||||
|
||||
def get_object(self, ):
|
||||
|
||||
@@ -41,6 +41,7 @@ class AutomationAssetsListApi(generics.ListAPIView):
|
||||
|
||||
class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
||||
model = BaseAutomation
|
||||
queryset = BaseAutomation.objects.all()
|
||||
serializer_class = serializers.UpdateAssetSerializer
|
||||
http_method_names = ['patch']
|
||||
|
||||
@@ -59,6 +60,7 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
||||
|
||||
class AutomationAddAssetApi(generics.UpdateAPIView):
|
||||
model = BaseAutomation
|
||||
queryset = BaseAutomation.objects.all()
|
||||
serializer_class = serializers.UpdateAssetSerializer
|
||||
http_method_names = ['patch']
|
||||
|
||||
|
||||
@@ -154,12 +154,10 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
|
||||
model = ChangeSecretAutomation
|
||||
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
|
||||
|
||||
|
||||
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
||||
model = ChangeSecretAutomation
|
||||
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
|
||||
|
||||
|
||||
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
|
||||
perm_model = ChangeSecretAutomation
|
||||
filterset_class = ChangeSecretStatusFilterSet
|
||||
|
||||
@@ -62,7 +62,8 @@ class ChangeSecretDashboardApi(APIView):
|
||||
status_counts = defaultdict(lambda: defaultdict(int))
|
||||
|
||||
for date_finished, status in results:
|
||||
date_str = str(date_finished.date())
|
||||
dt_local = timezone.localtime(date_finished)
|
||||
date_str = str(dt_local.date())
|
||||
if status == ChangeSecretRecordStatusChoice.failed:
|
||||
status_counts[date_str]['failed'] += 1
|
||||
elif status == ChangeSecretRecordStatusChoice.success:
|
||||
|
||||
@@ -150,6 +150,9 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
|
||||
http_method_names = ['get', 'options']
|
||||
|
||||
def get_queryset(self):
|
||||
if getattr(self, "swagger_fake_view", False):
|
||||
return CheckAccountEngine.objects.none()
|
||||
|
||||
return CheckAccountEngine.get_default_engines()
|
||||
|
||||
def filter_queryset(self, queryset: list):
|
||||
|
||||
@@ -63,12 +63,10 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
|
||||
model = PushAccountAutomation
|
||||
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
||||
|
||||
|
||||
class PushAccountAddAssetApi(AutomationAddAssetApi):
|
||||
model = PushAccountAutomation
|
||||
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
||||
|
||||
|
||||
class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
||||
model = PushAccountAutomation
|
||||
serializer_class = serializers.PushAccountUpdateNodeSerializer
|
||||
serializer_class = serializers.PushAccountUpdateNodeSerializer
|
||||
@@ -235,8 +235,8 @@ class AccountBackupHandler:
|
||||
except Exception as e:
|
||||
error = str(e)
|
||||
print(f'\033[31m>>> {error}\033[0m')
|
||||
self.execution.status = Status.error
|
||||
self.execution.summary['error'] = error
|
||||
self.manager.status = Status.error
|
||||
self.manager.summary['error'] = error
|
||||
|
||||
def backup_by_obj_storage(self):
|
||||
object_id = self.execution.snapshot.get('id')
|
||||
|
||||
@@ -113,6 +113,16 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
|
||||
if host.get('error'):
|
||||
return host
|
||||
|
||||
inventory_hosts = []
|
||||
if asset.type == HostTypes.WINDOWS:
|
||||
if self.secret_type == SecretType.SSH_KEY:
|
||||
host['error'] = _("Windows does not support SSH key authentication")
|
||||
return host
|
||||
new_secret = self.get_secret(account)
|
||||
if '>' in new_secret or '^' in new_secret:
|
||||
host['error'] = _("Windows password cannot contain special characters like > ^")
|
||||
return host
|
||||
|
||||
host['ssh_params'] = {}
|
||||
|
||||
accounts = self.get_accounts(account)
|
||||
@@ -130,11 +140,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
|
||||
if asset.type == HostTypes.WINDOWS:
|
||||
accounts = accounts.filter(secret_type=SecretType.PASSWORD)
|
||||
|
||||
inventory_hosts = []
|
||||
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
|
||||
print(f'Windows {asset} does not support ssh key push')
|
||||
return inventory_hosts
|
||||
|
||||
for account in accounts:
|
||||
h = deepcopy(host)
|
||||
h['name'] += '(' + account.username + ')' # To distinguish different accounts
|
||||
|
||||
@@ -5,12 +5,14 @@
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
register: db_info
|
||||
@@ -23,45 +25,53 @@
|
||||
var: info
|
||||
|
||||
- name: Check whether SQLServer User exist
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
|
||||
when: db_info is succeeded
|
||||
register: user_exist
|
||||
|
||||
- name: Change SQLServer password
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length != 0
|
||||
|
||||
- name: Add SQLServer user
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "CREATE LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; CREATE USER {{ account.username }} FOR LOGIN {{ account.username }}; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length == 0
|
||||
|
||||
- name: Verify password
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
when: check_conn_after_change
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
group: "{{ params.group if params.group | length > 0 else omit }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
|
||||
@@ -28,6 +28,12 @@ params:
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: group
|
||||
type: str
|
||||
label: "{{ 'Params group label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params group help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
@@ -61,6 +67,11 @@ i18n:
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params group help text:
|
||||
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
|
||||
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
|
||||
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
@@ -86,6 +97,11 @@ i18n:
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
Params group label:
|
||||
zh: '主组'
|
||||
ja: '主组'
|
||||
en: 'Main group'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
ja: 'ユーザーID'
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
group: "{{ params.group if params.group | length > 0 else omit }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
|
||||
@@ -30,6 +30,12 @@ params:
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: group
|
||||
type: str
|
||||
label: "{{ 'Params group label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params group help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
@@ -63,6 +69,11 @@ i18n:
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params group help text:
|
||||
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
|
||||
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
|
||||
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
@@ -88,6 +99,11 @@ i18n:
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
Params group label:
|
||||
zh: '主组'
|
||||
ja: '主组'
|
||||
en: 'Main group'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
ja: 'ユーザーID'
|
||||
|
||||
@@ -9,7 +9,7 @@ from accounts.const import (
|
||||
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
|
||||
)
|
||||
from accounts.models import ChangeSecretRecord
|
||||
from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretReportMsg
|
||||
from accounts.notifications import ChangeSecretExecutionTaskMsg
|
||||
from accounts.serializers import ChangeSecretRecordBackUpSerializer
|
||||
from common.utils import get_logger
|
||||
from common.utils.file import encrypt_and_compress_zip_file
|
||||
@@ -94,10 +94,6 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
|
||||
if not recipients:
|
||||
return
|
||||
|
||||
context = self.get_report_context()
|
||||
for user in recipients:
|
||||
ChangeSecretReportMsg(user, context).publish()
|
||||
|
||||
if not records:
|
||||
return
|
||||
|
||||
|
||||
@@ -5,12 +5,14 @@
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT
|
||||
l.name,
|
||||
|
||||
@@ -5,12 +5,14 @@
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
register: db_info
|
||||
@@ -23,47 +25,55 @@
|
||||
var: info
|
||||
|
||||
- name: Check whether SQLServer User exist
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
|
||||
when: db_info is succeeded
|
||||
register: user_exist
|
||||
|
||||
- name: Change SQLServer password
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length != 0
|
||||
register: change_info
|
||||
|
||||
- name: Add SQLServer user
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "CREATE LOGIN [{{ account.username }}] WITH PASSWORD = '{{ account.secret }}'; CREATE USER [{{ account.username }}] FOR LOGIN [{{ account.username }}]; select @@version"
|
||||
ignore_errors: true
|
||||
when: user_exist.query_results[0] | length == 0
|
||||
register: change_info
|
||||
|
||||
- name: Verify password
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
when: check_conn_after_change
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
group: "{{ params.group if params.group | length > 0 else omit }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
|
||||
@@ -28,6 +28,12 @@ params:
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: group
|
||||
type: str
|
||||
label: "{{ 'Params group label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params group help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
@@ -61,6 +67,11 @@ i18n:
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params group help text:
|
||||
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
|
||||
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
|
||||
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
@@ -86,6 +97,11 @@ i18n:
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
Params group label:
|
||||
zh: '主组'
|
||||
ja: '主组'
|
||||
en: 'Main group'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
ja: 'ユーザーID'
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
|
||||
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
|
||||
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
|
||||
group: "{{ params.group if params.group | length > 0 else omit }}"
|
||||
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
|
||||
append: "{{ true if params.groups | length > 0 else false }}"
|
||||
expires: -1
|
||||
|
||||
@@ -30,6 +30,12 @@ params:
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: group
|
||||
type: str
|
||||
label: "{{ 'Params group label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params group help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
@@ -63,6 +69,11 @@ i18n:
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params group help text:
|
||||
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
|
||||
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
|
||||
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
@@ -84,9 +95,14 @@ i18n:
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
zh: '附加组'
|
||||
ja: '追加グループ'
|
||||
en: 'Additional Group'
|
||||
|
||||
Params group label:
|
||||
zh: '主组'
|
||||
ja: '主组'
|
||||
en: 'Main group'
|
||||
|
||||
Params uid label:
|
||||
zh: '用户ID'
|
||||
|
||||
@@ -5,11 +5,13 @@
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: "{{ jms_asset.spec_info.db_name }}"
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: "DROP LOGIN {{ account.username }}; select @@version"
|
||||
|
||||
|
||||
@@ -5,11 +5,13 @@
|
||||
|
||||
tasks:
|
||||
- name: Verify account
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
from azure.identity import ClientSecretCredential
|
||||
from azure.keyvault.secrets import SecretClient
|
||||
|
||||
from common.utils import get_logger
|
||||
|
||||
@@ -14,6 +11,9 @@ __all__ = ['AZUREVaultClient']
|
||||
class AZUREVaultClient(object):
|
||||
|
||||
def __init__(self, vault_url, tenant_id, client_id, client_secret):
|
||||
from azure.identity import ClientSecretCredential
|
||||
from azure.keyvault.secrets import SecretClient
|
||||
|
||||
authentication_endpoint = 'https://login.microsoftonline.com/' \
|
||||
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
|
||||
|
||||
@@ -23,6 +23,8 @@ class AZUREVaultClient(object):
|
||||
self.client = SecretClient(vault_url=vault_url, credential=credentials)
|
||||
|
||||
def is_active(self):
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
|
||||
try:
|
||||
self.client.set_secret('jumpserver', '666')
|
||||
except (ResourceNotFoundError, ClientAuthenticationError) as e:
|
||||
@@ -32,6 +34,8 @@ class AZUREVaultClient(object):
|
||||
return True, ''
|
||||
|
||||
def get(self, name, version=None):
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
|
||||
try:
|
||||
secret = self.client.get_secret(name, version)
|
||||
return secret.value
|
||||
|
||||
@@ -132,7 +132,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.platform
|
||||
|
||||
@lazyproperty
|
||||
def alias(self):
|
||||
def alias(self) -> str:
|
||||
"""
|
||||
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
|
||||
"""
|
||||
@@ -140,13 +140,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.username
|
||||
return str(self.id)
|
||||
|
||||
def is_virtual(self):
|
||||
def is_virtual(self) -> bool:
|
||||
"""
|
||||
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
|
||||
"""
|
||||
return self.alias.startswith('@')
|
||||
|
||||
def is_ds_account(self):
|
||||
def is_ds_account(self) -> bool:
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
if not self.asset.is_directory_service:
|
||||
@@ -160,7 +160,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.ds
|
||||
|
||||
@lazyproperty
|
||||
def ds_domain(self):
|
||||
def ds_domain(self) -> str:
|
||||
"""这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
@@ -172,17 +172,17 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return '@' in self.username or '\\' in self.username
|
||||
|
||||
@property
|
||||
def full_username(self):
|
||||
def full_username(self) -> str:
|
||||
if not self.username_has_domain() and self.ds_domain:
|
||||
return '{}@{}'.format(self.username, self.ds_domain)
|
||||
return self.username
|
||||
|
||||
@lazyproperty
|
||||
def has_secret(self):
|
||||
def has_secret(self) -> bool:
|
||||
return bool(self.secret)
|
||||
|
||||
@lazyproperty
|
||||
def versions(self):
|
||||
def versions(self) -> int:
|
||||
return self.history.count()
|
||||
|
||||
def get_su_from_accounts(self):
|
||||
|
||||
@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
|
||||
return qs.filter(*query)
|
||||
|
||||
@property
|
||||
def accounts_amount(self):
|
||||
def accounts_amount(self) -> int:
|
||||
return self.get_accounts().count()
|
||||
|
||||
@property
|
||||
|
||||
@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return bool(self.secret)
|
||||
|
||||
@property
|
||||
def has_username(self):
|
||||
def has_username(self) -> bool:
|
||||
return bool(self.username)
|
||||
|
||||
@property
|
||||
def spec_info(self):
|
||||
def spec_info(self) -> dict:
|
||||
data = {}
|
||||
if self.secret_type != SecretType.SSH_KEY:
|
||||
return data
|
||||
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return data
|
||||
|
||||
@property
|
||||
def password(self):
|
||||
def password(self) -> str:
|
||||
if self.secret_type == SecretType.PASSWORD:
|
||||
return self.secret
|
||||
return None
|
||||
|
||||
@property
|
||||
def private_key(self):
|
||||
def private_key(self) -> str:
|
||||
if self.secret_type == SecretType.SSH_KEY:
|
||||
return self.secret
|
||||
return None
|
||||
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return None
|
||||
|
||||
@property
|
||||
def ssh_key_fingerprint(self):
|
||||
def ssh_key_fingerprint(self) -> str:
|
||||
if self.public_key:
|
||||
public_key = self.public_key
|
||||
elif self.private_key:
|
||||
|
||||
@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
|
||||
__secret = None
|
||||
|
||||
@property
|
||||
def secret(self):
|
||||
def secret(self) -> str:
|
||||
if self.__secret:
|
||||
return self.__secret
|
||||
from accounts.backends import vault_client
|
||||
|
||||
@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
verbose_name = _('Virtual account')
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
return self.get_alias_display()
|
||||
|
||||
@property
|
||||
def username(self):
|
||||
def username(self) -> str:
|
||||
usernames_map = {
|
||||
AliasAccount.INPUT: _("Manual input"),
|
||||
AliasAccount.USER: _("Same with user"),
|
||||
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
return usernames_map.get(self.alias, '')
|
||||
|
||||
@property
|
||||
def comment(self):
|
||||
def comment(self) -> str:
|
||||
comments_map = {
|
||||
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
|
||||
AliasAccount.USER: _('The account username name same with user on connect'),
|
||||
|
||||
@@ -14,7 +14,7 @@ from accounts.models import Account, AccountTemplate, GatheredAccount
|
||||
from accounts.tasks import push_accounts_to_assets_task
|
||||
from assets.const import Category, AllTypes
|
||||
from assets.models import Asset
|
||||
from common.serializers import SecretReadableMixin
|
||||
from common.serializers import SecretReadableMixin, CommonBulkModelSerializer
|
||||
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
|
||||
from common.utils import get_logger
|
||||
from .base import BaseAccountSerializer, AuthValidateMixin
|
||||
@@ -253,6 +253,8 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
|
||||
'source_id': {'required': False, 'allow_null': True},
|
||||
}
|
||||
fields_unimport_template = ['params']
|
||||
# 手动判断唯一性校验
|
||||
validators = []
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
@@ -263,6 +265,21 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
|
||||
)
|
||||
return queryset
|
||||
|
||||
def validate(self, attrs):
|
||||
instance = getattr(self, "instance", None)
|
||||
if instance:
|
||||
return super().validate(attrs)
|
||||
|
||||
field_errors = {}
|
||||
for _fields in Account._meta.unique_together:
|
||||
lookup = {field: attrs.get(field) for field in _fields}
|
||||
if Account.objects.filter(**lookup).exists():
|
||||
verbose_names = ', '.join([str(Account._meta.get_field(f).verbose_name) for f in _fields])
|
||||
msg_template = _('Account already exists. Field(s): {fields} must be unique.')
|
||||
field_errors[_fields[0]] = msg_template.format(fields=verbose_names)
|
||||
raise serializers.ValidationError(field_errors)
|
||||
return attrs
|
||||
|
||||
|
||||
class AccountDetailSerializer(AccountSerializer):
|
||||
has_secret = serializers.BooleanField(label=_("Has secret"), read_only=True)
|
||||
@@ -275,26 +292,26 @@ class AccountDetailSerializer(AccountSerializer):
|
||||
|
||||
class AssetAccountBulkSerializerResultSerializer(serializers.Serializer):
|
||||
asset = serializers.CharField(read_only=True, label=_('Asset'))
|
||||
account = serializers.CharField(read_only=True, label=_('Account'))
|
||||
state = serializers.CharField(read_only=True, label=_('State'))
|
||||
error = serializers.CharField(read_only=True, label=_('Error'))
|
||||
changed = serializers.BooleanField(read_only=True, label=_('Changed'))
|
||||
|
||||
|
||||
class AssetAccountBulkSerializer(
|
||||
AccountCreateUpdateSerializerMixin, AuthValidateMixin, serializers.ModelSerializer
|
||||
AccountCreateUpdateSerializerMixin, AuthValidateMixin, CommonBulkModelSerializer
|
||||
):
|
||||
su_from_username = serializers.CharField(
|
||||
max_length=128, required=False, write_only=True, allow_null=True, label=_("Su from"),
|
||||
allow_blank=True,
|
||||
)
|
||||
assets = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, many=True, label=_('Assets'))
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = [
|
||||
'name', 'username', 'secret', 'secret_type', 'passphrase',
|
||||
'privileged', 'is_active', 'comment', 'template',
|
||||
'on_invalid', 'push_now', 'params', 'assets',
|
||||
'name', 'username', 'secret', 'secret_type', 'secret_reset',
|
||||
'passphrase', 'privileged', 'is_active', 'comment', 'template',
|
||||
'on_invalid', 'push_now', 'params',
|
||||
'su_from_username', 'source', 'source_id',
|
||||
]
|
||||
extra_kwargs = {
|
||||
@@ -376,8 +393,7 @@ class AssetAccountBulkSerializer(
|
||||
handler = self._handle_err_create
|
||||
return handler
|
||||
|
||||
def perform_bulk_create(self, vd):
|
||||
assets = vd.pop('assets')
|
||||
def perform_bulk_create(self, vd, assets):
|
||||
on_invalid = vd.pop('on_invalid', 'skip')
|
||||
secret_type = vd.get('secret_type', 'password')
|
||||
|
||||
@@ -385,8 +401,7 @@ class AssetAccountBulkSerializer(
|
||||
vd['name'] = vd.get('username')
|
||||
|
||||
create_handler = self.get_create_handler(on_invalid)
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
secret_type_supports = Asset.get_secret_type_assets(asset_ids, secret_type)
|
||||
secret_type_supports = Asset.get_secret_type_assets(assets, secret_type)
|
||||
|
||||
_results = {}
|
||||
for asset in assets:
|
||||
@@ -394,6 +409,7 @@ class AssetAccountBulkSerializer(
|
||||
_results[asset] = {
|
||||
'error': _('Asset does not support this secret type: %s') % secret_type,
|
||||
'state': 'error',
|
||||
'account': vd['name'],
|
||||
}
|
||||
continue
|
||||
|
||||
@@ -403,13 +419,13 @@ class AssetAccountBulkSerializer(
|
||||
self.clean_auth_fields(vd)
|
||||
instance, changed, state = self.perform_create(vd, create_handler)
|
||||
_results[asset] = {
|
||||
'changed': changed, 'instance': instance.id, 'state': state
|
||||
'changed': changed, 'instance': instance.id, 'state': state, 'account': vd['name']
|
||||
}
|
||||
except serializers.ValidationError as e:
|
||||
_results[asset] = {'error': e.detail[0], 'state': 'error'}
|
||||
_results[asset] = {'error': e.detail[0], 'state': 'error', 'account': vd['name']}
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
_results[asset] = {'error': str(e), 'state': 'error'}
|
||||
_results[asset] = {'error': str(e), 'state': 'error', 'account': vd['name']}
|
||||
|
||||
results = [{'asset': asset, **result} for asset, result in _results.items()]
|
||||
state_score = {'created': 3, 'updated': 2, 'skipped': 1, 'error': 0}
|
||||
@@ -426,7 +442,8 @@ class AssetAccountBulkSerializer(
|
||||
errors.append({
|
||||
'error': _('Account has exist'),
|
||||
'state': 'error',
|
||||
'asset': str(result['asset'])
|
||||
'asset': str(result['asset']),
|
||||
'account': result.get('account'),
|
||||
})
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
@@ -445,10 +462,16 @@ class AssetAccountBulkSerializer(
|
||||
account_ids = [str(_id) for _id in accounts.values_list('id', flat=True)]
|
||||
push_accounts_to_assets_task.delay(account_ids, params)
|
||||
|
||||
def create(self, validated_data):
|
||||
def bulk_create(self, validated_data, assets):
|
||||
if not assets:
|
||||
raise serializers.ValidationError(
|
||||
{'assets': _('At least one asset or node must be specified')},
|
||||
{'nodes': _('At least one asset or node must be specified')}
|
||||
)
|
||||
|
||||
params = validated_data.pop('params', None)
|
||||
push_now = validated_data.pop('push_now', False)
|
||||
results = self.perform_bulk_create(validated_data)
|
||||
results = self.perform_bulk_create(validated_data, assets)
|
||||
self.push_accounts_if_need(results, push_now, params)
|
||||
for res in results:
|
||||
res['asset'] = str(res['asset'])
|
||||
@@ -456,6 +479,8 @@ class AssetAccountBulkSerializer(
|
||||
|
||||
|
||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields = AccountSerializer.Meta.fields + ['spec_info']
|
||||
extra_kwargs = {
|
||||
@@ -470,6 +495,7 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
|
||||
class AccountHistorySerializer(serializers.ModelSerializer):
|
||||
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
|
||||
secret = serializers.CharField(label=_('Secret'), read_only=True)
|
||||
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -70,6 +70,8 @@ class AuthValidateMixin(serializers.Serializer):
|
||||
class BaseAccountSerializer(
|
||||
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
|
||||
):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = BaseAccount
|
||||
fields_mini = ["id", "name", "username"]
|
||||
|
||||
@@ -130,7 +130,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj):
|
||||
def get_is_success(obj) -> bool:
|
||||
return obj.status == ChangeSecretRecordStatusChoice.success
|
||||
|
||||
|
||||
@@ -157,7 +157,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_asset(instance):
|
||||
def get_asset(instance) -> str:
|
||||
return str(instance.asset)
|
||||
|
||||
@staticmethod
|
||||
@@ -165,7 +165,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
return str(instance.account)
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj):
|
||||
def get_is_success(obj) -> str:
|
||||
if obj.status == ChangeSecretRecordStatusChoice.success.value:
|
||||
return _("Success")
|
||||
return _("Failed")
|
||||
@@ -196,9 +196,9 @@ class ChangeSecretAccountSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_meta(obj):
|
||||
def get_meta(obj) -> dict:
|
||||
return account_secret_task_status.get(str(obj.id))
|
||||
|
||||
@staticmethod
|
||||
def get_ttl(obj):
|
||||
def get_ttl(obj) -> int:
|
||||
return account_secret_task_status.get_ttl(str(obj.id))
|
||||
|
||||
@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
|
||||
risk_summary = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_risk_summary(obj):
|
||||
def get_risk_summary(obj) -> dict:
|
||||
summary = {}
|
||||
for risk in RiskChoice.choices:
|
||||
summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Task name' %}: {{ name }}</h3>
|
||||
<h3>{% trans 'Task execution id' %}: {{ execution_id }}</h3>
|
||||
<p>{% trans 'Respectful' %} {{ recipient }}</p>
|
||||
<p>{% trans 'Hello! The following is the failure of changing the password of your assets or pushing the account. Please check and handle it in time.' %}</p>
|
||||
<table style="width: 100%; border-collapse: collapse; max-width: 100%; text-align: left; margin-top: 20px;">
|
||||
<caption></caption>
|
||||
<thead>
|
||||
<tr style="background-color: #f2f2f2;">
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Asset' %}</th>
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Account' %}</th>
|
||||
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Error' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for asset_name, account_username, error in asset_account_errors %}
|
||||
<tr>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">{{ asset_name }}</td>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">{{ account_username }}</td>
|
||||
<td style="border: 1px solid #ddd; padding: 10px;">
|
||||
<div style="
|
||||
max-width: 90%;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: block;"
|
||||
title="{{ error }}"
|
||||
>
|
||||
{{ error }}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -3,3 +3,4 @@ from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
from .data_masking import *
|
||||
20
apps/acls/api/data_masking.py
Normal file
20
apps/acls/api/data_masking.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
|
||||
from .common import ACLUserFilterMixin
|
||||
from ..models import DataMaskingRule
|
||||
from .. import serializers
|
||||
|
||||
__all__ = ['DataMaskingRuleViewSet']
|
||||
|
||||
|
||||
class DataMaskingRuleFilter(ACLUserFilterMixin):
|
||||
class Meta:
|
||||
model = DataMaskingRule
|
||||
fields = ('name',)
|
||||
|
||||
|
||||
class DataMaskingRuleViewSet(OrgBulkModelViewSet):
|
||||
model = DataMaskingRule
|
||||
filterset_class = DataMaskingRuleFilter
|
||||
search_fields = ('name',)
|
||||
serializer_class = serializers.DataMaskingRuleSerializer
|
||||
@@ -8,7 +8,7 @@ __all__ = ['LoginAssetACLViewSet']
|
||||
class LoginAssetACLFilter(ACLUserAssetFilterMixin):
|
||||
class Meta:
|
||||
model = models.LoginAssetACL
|
||||
fields = ['name', ]
|
||||
fields = ['name', 'action']
|
||||
|
||||
|
||||
class LoginAssetACLViewSet(OrgBulkModelViewSet):
|
||||
|
||||
45
apps/acls/migrations/0003_datamaskingrule.py
Normal file
45
apps/acls/migrations/0003_datamaskingrule.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# Generated by Django 4.1.13 on 2025-10-07 16:16
|
||||
|
||||
import common.db.fields
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('acls', '0002_auto_20210926_1047'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='DataMaskingRule',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
|
||||
('action', models.CharField(default='reject', max_length=64, verbose_name='Action')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('users', common.db.fields.JSONManyToManyField(default=dict, to='users.User', verbose_name='Users')),
|
||||
('assets', common.db.fields.JSONManyToManyField(default=dict, to='assets.Asset', verbose_name='Assets')),
|
||||
('accounts', models.JSONField(default=list, verbose_name='Accounts')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('fields_pattern', models.CharField(default='password', max_length=128, verbose_name='Fields pattern')),
|
||||
('masking_method', models.CharField(choices=[('fixed_char', 'Fixed Character Replacement'), ('hide_middle', 'Hide Middle Characters'), ('keep_prefix', 'Keep Prefix Only'), ('keep_suffix', 'Keep Suffix Only')], default='fixed_char', max_length=32, verbose_name='Masking Method')),
|
||||
('mask_pattern', models.CharField(blank=True, default='######', max_length=128, null=True, verbose_name='Mask Pattern')),
|
||||
('reviewers', models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL, verbose_name='Reviewers')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Data Masking Rule',
|
||||
'unique_together': {('org_id', 'name')},
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -2,3 +2,4 @@ from .command_acl import *
|
||||
from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .data_masking import *
|
||||
42
apps/acls/models/data_masking.py
Normal file
42
apps/acls/models/data_masking.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from django.db import models
|
||||
|
||||
from acls.models import UserAssetAccountBaseACL
|
||||
from common.utils import get_logger
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
__all__ = ['MaskingMethod', 'DataMaskingRule']
|
||||
|
||||
|
||||
class MaskingMethod(models.TextChoices):
|
||||
fixed_char = "fixed_char", _("Fixed Character Replacement") # 固定字符替换
|
||||
hide_middle = "hide_middle", _("Hide Middle Characters") # 隐藏中间几位
|
||||
keep_prefix = "keep_prefix", _("Keep Prefix Only") # 只保留前缀
|
||||
keep_suffix = "keep_suffix", _("Keep Suffix Only") # 只保留后缀
|
||||
|
||||
|
||||
class DataMaskingRule(UserAssetAccountBaseACL):
|
||||
name = models.CharField(max_length=128, verbose_name=_("Name"))
|
||||
fields_pattern = models.CharField(max_length=128, default='password', verbose_name=_("Fields pattern"))
|
||||
|
||||
masking_method = models.CharField(
|
||||
max_length=32,
|
||||
choices=MaskingMethod.choices,
|
||||
default=MaskingMethod.fixed_char,
|
||||
verbose_name=_("Masking Method"),
|
||||
)
|
||||
mask_pattern = models.CharField(
|
||||
max_length=128,
|
||||
verbose_name=_("Mask Pattern"),
|
||||
default="######",
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
unique_together = [('org_id', 'name')]
|
||||
verbose_name = _("Data Masking Rule")
|
||||
@@ -1,30 +1,52 @@
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.models import Account
|
||||
from acls.models import LoginACL, LoginAssetACL
|
||||
from assets.models import Asset
|
||||
from audits.models import UserLoginLog
|
||||
from common.views.template import custom_render_to_string
|
||||
from notifications.notifications import UserMessage
|
||||
from users.models import User
|
||||
|
||||
|
||||
class UserLoginReminderMsg(UserMessage):
|
||||
subject = _('User login reminder')
|
||||
template_name = 'acls/user_login_reminder.html'
|
||||
contexts = [
|
||||
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
|
||||
{"name": "username", "label": _('User'), "default": "john"},
|
||||
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
|
||||
{"name": "recipient_name", "label": _("Recipient name"), "default": "John"},
|
||||
{"name": "recipient_username", "label": _("Recipient username"), "default": "john"},
|
||||
{"name": "user_agent", "label": _('User agent'), "default": "Mozilla/5.0"},
|
||||
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
|
||||
{"name": "login_from", "label": _('Login from'), "default": "web"},
|
||||
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"},
|
||||
]
|
||||
|
||||
def __init__(self, user, user_log: UserLoginLog):
|
||||
def __init__(self, user, user_log: UserLoginLog, acl: LoginACL):
|
||||
self.user_log = user_log
|
||||
self.acl_name = str(acl)
|
||||
self.login_from = user_log.get_type_display()
|
||||
now = timezone.localtime(user_log.datetime)
|
||||
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
|
||||
super().__init__(user)
|
||||
|
||||
def get_html_msg(self) -> dict:
|
||||
user_log = self.user_log
|
||||
context = {
|
||||
'ip': user_log.ip,
|
||||
'time': self.time,
|
||||
'city': user_log.city,
|
||||
'acl_name': self.acl_name,
|
||||
'login_from': self.login_from,
|
||||
'username': user_log.username,
|
||||
'recipient': self.user,
|
||||
'recipient_name': self.user.name,
|
||||
'recipient_username': self.user.username,
|
||||
'user_agent': user_log.user_agent,
|
||||
}
|
||||
message = render_to_string('acls/user_login_reminder.html', context)
|
||||
message = custom_render_to_string(self.template_name, context)
|
||||
|
||||
return {
|
||||
'subject': str(self.subject),
|
||||
@@ -40,24 +62,55 @@ class UserLoginReminderMsg(UserMessage):
|
||||
|
||||
class AssetLoginReminderMsg(UserMessage):
|
||||
subject = _('User login alert for asset')
|
||||
template_name = 'acls/asset_login_reminder.html'
|
||||
contexts = [
|
||||
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
|
||||
{"name": "username", "label": _('User'), "default": "john"},
|
||||
{"name": "name", "label": _('Name'), "default": "John"},
|
||||
{"name": "asset", "label": _('Asset'), "default": "dev server"},
|
||||
{"name": "recipient_name", "label": _('Recipient name'), "default": "John"},
|
||||
{"name": "recipient_username", "label": _('Recipient username'), "default": "john"},
|
||||
{"name": "account", "label": _('Account Input username'), "default": "root"},
|
||||
{"name": "account_name", "label": _('Account name'), "default": "root"},
|
||||
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
|
||||
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
|
||||
{"name": "login_from", "label": _('Login from'), "default": "web"},
|
||||
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"}
|
||||
]
|
||||
|
||||
def __init__(self, user, asset: Asset, login_user: User, account: Account, input_username):
|
||||
def __init__(
|
||||
self, user, asset: Asset, login_user: User,
|
||||
account: Account, acl: LoginAssetACL,
|
||||
ip, input_username, login_from
|
||||
):
|
||||
self.ip = ip
|
||||
self.asset = asset
|
||||
self.login_user = login_user
|
||||
self.account = account
|
||||
self.acl_name = str(acl)
|
||||
self.login_from = login_from
|
||||
self.login_user = login_user
|
||||
self.input_username = input_username
|
||||
|
||||
now = timezone.localtime(timezone.now())
|
||||
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
|
||||
super().__init__(user)
|
||||
|
||||
def get_html_msg(self) -> dict:
|
||||
context = {
|
||||
'recipient': self.user,
|
||||
'ip': self.ip,
|
||||
'time': self.time,
|
||||
'login_from': self.login_from,
|
||||
'recipient_name': self.user.name,
|
||||
'recipient_username': self.user.username,
|
||||
'username': self.login_user.username,
|
||||
'name': self.login_user.name,
|
||||
'asset': str(self.asset),
|
||||
'account': self.input_username,
|
||||
'account_name': self.account.name,
|
||||
'acl_name': self.acl_name,
|
||||
}
|
||||
message = render_to_string('acls/asset_login_reminder.html', context)
|
||||
message = custom_render_to_string(self.template_name, context)
|
||||
|
||||
return {
|
||||
'subject': str(self.subject),
|
||||
|
||||
@@ -3,3 +3,4 @@ from .connect_method import *
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
from .data_masking import *
|
||||
@@ -90,7 +90,7 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
fields_small = fields_mini + [
|
||||
"is_active", "priority", "action",
|
||||
"date_created", "date_updated",
|
||||
"comment", "created_by", "org_id",
|
||||
"comment", "created_by"
|
||||
]
|
||||
fields_m2m = ["reviewers", ]
|
||||
fields = fields_small + fields_m2m
|
||||
@@ -100,6 +100,20 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
'reviewers': {'label': _('Recipients')},
|
||||
}
|
||||
|
||||
class BaseUserACLSerializer(BaseACLSerializer):
|
||||
users = JSONManyToManyField(label=_('User'))
|
||||
|
||||
class Meta(BaseACLSerializer.Meta):
|
||||
fields = BaseACLSerializer.Meta.fields + ['users']
|
||||
|
||||
|
||||
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
|
||||
assets = JSONManyToManyField(label=_('Asset'))
|
||||
accounts = serializers.ListField(label=_('Account'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts', 'org_id']
|
||||
|
||||
def validate_reviewers(self, reviewers):
|
||||
action = self.initial_data.get('action')
|
||||
if not action and self.instance:
|
||||
@@ -118,19 +132,4 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
"None of the reviewers belong to Organization `{}`".format(org.name)
|
||||
)
|
||||
raise serializers.ValidationError(error)
|
||||
return valid_reviewers
|
||||
|
||||
|
||||
class BaseUserACLSerializer(BaseACLSerializer):
|
||||
users = JSONManyToManyField(label=_('User'))
|
||||
|
||||
class Meta(BaseACLSerializer.Meta):
|
||||
fields = BaseACLSerializer.Meta.fields + ['users']
|
||||
|
||||
|
||||
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
|
||||
assets = JSONManyToManyField(label=_('Asset'))
|
||||
accounts = serializers.ListField(label=_('Account'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts']
|
||||
return valid_reviewers
|
||||
@@ -1,4 +1,4 @@
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.serializers.mixin import CommonBulkModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
from ..const import ActionChoices
|
||||
from ..models import ConnectMethodACL
|
||||
@@ -6,16 +6,15 @@ from ..models import ConnectMethodACL
|
||||
__all__ = ["ConnectMethodACLSerializer"]
|
||||
|
||||
|
||||
class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
|
||||
class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = ConnectMethodACL
|
||||
fields = [
|
||||
i for i in BaseSerializer.Meta.fields + ['connect_methods']
|
||||
if i not in ['assets', 'accounts']
|
||||
if i not in ['assets', 'accounts', 'org_id']
|
||||
]
|
||||
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
|
||||
ActionChoices.review,
|
||||
ActionChoices.accept,
|
||||
ActionChoices.notice,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.face_online,
|
||||
|
||||
19
apps/acls/serializers/data_masking.py
Normal file
19
apps/acls/serializers/data_masking.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from acls.models import MaskingMethod, DataMaskingRule
|
||||
from common.serializers.fields import LabeledChoiceField
|
||||
from common.serializers.mixin import CommonBulkModelSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
|
||||
__all__ = ['DataMaskingRuleSerializer']
|
||||
|
||||
|
||||
class DataMaskingRuleSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
|
||||
masking_method = LabeledChoiceField(
|
||||
choices=MaskingMethod.choices, default=MaskingMethod.fixed_char, label=_('Masking Method')
|
||||
)
|
||||
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = DataMaskingRule
|
||||
fields = BaseSerializer.Meta.fields + ['fields_pattern', 'masking_method', 'mask_pattern']
|
||||
@@ -1,7 +1,7 @@
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from common.serializers import CommonBulkModelSerializer
|
||||
from common.serializers import MethodSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserACLSerializer
|
||||
from .rules import RuleSerializer
|
||||
from ..const import ActionChoices
|
||||
@@ -12,12 +12,12 @@ __all__ = ["LoginACLSerializer"]
|
||||
common_help_text = _("With * indicating a match all. ")
|
||||
|
||||
|
||||
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||
class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
|
||||
rules = MethodSerializer(label=_('Rule'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
model = LoginACL
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
|
||||
fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}))
|
||||
action_choices_exclude = [
|
||||
ActionChoices.warning,
|
||||
ActionChoices.notify_and_warn,
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
|
||||
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
|
||||
<hr>
|
||||
<p>{% trans 'We would like to inform you that a user has recently logged into the following asset:' %}<p>
|
||||
<p><strong>{% trans 'Asset details' %}:</strong></p>
|
||||
<ul>
|
||||
<li><strong>{% trans 'User' %}:</strong> [{{ name }}({{ username }})]</li>
|
||||
<li><strong>IP:</strong> [{{ ip }}]</li>
|
||||
<li><strong>{% trans 'Assets' %}:</strong> [{{ asset }}]</li>
|
||||
<li><strong>{% trans 'Account' %}:</strong> [{{ account_name }}({{ account }})]</li>
|
||||
<li><strong>{% trans 'Login asset acl' %}:</strong> [{{ acl_name }}]</li>
|
||||
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
|
||||
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
|
||||
</ul>
|
||||
<hr>
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{% load i18n %}
|
||||
|
||||
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
|
||||
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
|
||||
<hr>
|
||||
<p>{% trans 'We would like to inform you that a user has recently logged:' %}<p>
|
||||
<p><strong>{% trans 'User details' %}:</strong></p>
|
||||
@@ -8,7 +8,10 @@
|
||||
<li><strong>{% trans 'User' %}:</strong> [{{ username }}]</li>
|
||||
<li><strong>IP:</strong> [{{ ip }}]</li>
|
||||
<li><strong>{% trans 'Login city' %}:</strong> [{{ city }}]</li>
|
||||
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
|
||||
<li><strong>{% trans 'User agent' %}:</strong> [{{ user_agent }}]</li>
|
||||
<li><strong>{% trans 'Login acl' %}:</strong> [{{ acl_name }}]</li>
|
||||
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
|
||||
</ul>
|
||||
<hr>
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ router.register(r'login-asset-acls', api.LoginAssetACLViewSet, 'login-asset-acl'
|
||||
router.register(r'command-filter-acls', api.CommandFilterACLViewSet, 'command-filter-acl')
|
||||
router.register(r'command-groups', api.CommandGroupViewSet, 'command-group')
|
||||
router.register(r'connect-method-acls', api.ConnectMethodACLViewSet, 'connect-method-acl')
|
||||
router.register(r'data-masking-rules', api.DataMaskingRuleViewSet, 'data-masking-rule')
|
||||
|
||||
urlpatterns = [
|
||||
path('login-asset/check/', api.LoginAssetCheckAPI.as_view(), name='login-asset-check'),
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from collections import defaultdict
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext as _
|
||||
from django_filters import rest_framework as drf_filters
|
||||
@@ -113,7 +112,7 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
|
||||
("accounts", AccountSerializer),
|
||||
)
|
||||
rbac_perms = (
|
||||
("match", "assets.match_asset"),
|
||||
("match", "assets.view_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
("accounts", "assets.view_account"),
|
||||
@@ -181,33 +180,18 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
|
||||
def sync_platform_protocols(self, request, *args, **kwargs):
|
||||
platform_id = request.data.get('platform_id')
|
||||
platform = get_object_or_404(Platform, pk=platform_id)
|
||||
assets = platform.assets.all()
|
||||
asset_ids = list(platform.assets.values_list('id', flat=True))
|
||||
platform_protocols = list(platform.protocols.values('name', 'port'))
|
||||
|
||||
platform_protocols = {
|
||||
p['name']: p['port']
|
||||
for p in platform.protocols.values('name', 'port')
|
||||
}
|
||||
asset_protocols_map = defaultdict(set)
|
||||
protocols = assets.prefetch_related('protocols').values_list(
|
||||
'id', 'protocols__name'
|
||||
)
|
||||
for asset_id, protocol in protocols:
|
||||
asset_id = str(asset_id)
|
||||
asset_protocols_map[asset_id].add(protocol)
|
||||
objs = []
|
||||
for asset_id, protocols in asset_protocols_map.items():
|
||||
protocol_names = set(platform_protocols) - protocols
|
||||
if not protocol_names:
|
||||
continue
|
||||
for name in protocol_names:
|
||||
objs.append(
|
||||
Protocol(
|
||||
name=name,
|
||||
port=platform_protocols[name],
|
||||
asset_id=asset_id,
|
||||
)
|
||||
)
|
||||
Protocol.objects.bulk_create(objs)
|
||||
with transaction.atomic():
|
||||
if asset_ids:
|
||||
Protocol.objects.filter(asset_id__in=asset_ids).delete()
|
||||
if asset_ids and platform_protocols:
|
||||
objs = []
|
||||
for aid in asset_ids:
|
||||
for p in platform_protocols:
|
||||
objs.append(Protocol(name=p['name'], port=p['port'], asset_id=aid))
|
||||
Protocol.objects.bulk_create(objs)
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
def filter_bulk_update_data(self):
|
||||
|
||||
@@ -14,6 +14,7 @@ class FavoriteAssetViewSet(BulkModelViewSet):
|
||||
serializer_class = FavoriteAssetSerializer
|
||||
permission_classes = (IsValidUser,)
|
||||
filterset_fields = ['asset']
|
||||
page_no_limit = True
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
|
||||
@@ -43,7 +43,7 @@ class NodeViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
search_fields = ('full_value',)
|
||||
serializer_class = serializers.NodeSerializer
|
||||
rbac_perms = {
|
||||
'match': 'assets.match_node',
|
||||
'match': 'assets.view_node',
|
||||
'check_assets_amount_task': 'assets.change_node'
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from assets.const import AllTypes
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
|
||||
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
|
||||
from common.api import JMSModelViewSet
|
||||
from common.permissions import IsValidUser
|
||||
@@ -43,6 +43,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
'ops_methods': 'assets.view_platform',
|
||||
'filter_nodes_assets': 'assets.view_platform',
|
||||
}
|
||||
page_no_limit = True
|
||||
|
||||
def get_queryset(self):
|
||||
# 因为没有走分页逻辑,所以需要这里 prefetch
|
||||
@@ -111,7 +112,10 @@ class PlatformProtocolViewSet(JMSModelViewSet):
|
||||
|
||||
|
||||
class PlatformAutomationMethodsApi(generics.ListAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
queryset = PlatformAutomation.objects.none()
|
||||
rbac_perms = {
|
||||
'list': 'assets.view_platform'
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def automation_methods():
|
||||
|
||||
@@ -161,6 +161,7 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
|
||||
'GET': 'assets.view_asset',
|
||||
'list': 'assets.view_asset',
|
||||
}
|
||||
queryset = Node.objects.none()
|
||||
|
||||
def get_assets(self):
|
||||
key = self.request.query_params.get('key')
|
||||
|
||||
@@ -6,11 +6,13 @@
|
||||
|
||||
tasks:
|
||||
- name: Test SQLServer connection
|
||||
community.general.mssql_script:
|
||||
mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
encryption: "{{ jms_asset.encryption | default(None) }}"
|
||||
tds_version: "{{ jms_asset.tds_version | default(None) }}"
|
||||
script: |
|
||||
SELECT @@version
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.models import Organization
|
||||
from .base import BaseType
|
||||
|
||||
|
||||
@@ -52,3 +53,41 @@ class GPTTypes(BaseType):
|
||||
return [
|
||||
cls.CHATGPT,
|
||||
]
|
||||
|
||||
|
||||
CHATX_NAME = 'ChatX'
|
||||
|
||||
|
||||
def create_or_update_chatx_resources(chatx_name=CHATX_NAME, org_id=Organization.SYSTEM_ID):
|
||||
from django.apps import apps
|
||||
|
||||
platform_model = apps.get_model('assets', 'Platform')
|
||||
asset_model = apps.get_model('assets', 'Asset')
|
||||
account_model = apps.get_model('accounts', 'Account')
|
||||
|
||||
platform, __ = platform_model.objects.get_or_create(
|
||||
name=chatx_name,
|
||||
defaults={
|
||||
'internal': True,
|
||||
'type': chatx_name,
|
||||
'category': 'ai',
|
||||
}
|
||||
)
|
||||
asset, __ = asset_model.objects.get_or_create(
|
||||
address=chatx_name,
|
||||
defaults={
|
||||
'name': chatx_name,
|
||||
'platform': platform,
|
||||
'org_id': org_id
|
||||
}
|
||||
)
|
||||
|
||||
account, __ = account_model.objects.get_or_create(
|
||||
username=chatx_name,
|
||||
defaults={
|
||||
'name': chatx_name,
|
||||
'asset': asset,
|
||||
'org_id': org_id
|
||||
}
|
||||
)
|
||||
return asset, account
|
||||
|
||||
@@ -250,6 +250,12 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
||||
'default': False,
|
||||
'label': _('Auth username')
|
||||
},
|
||||
'enable_cluster_mode': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': _('Enable cluster mode'),
|
||||
'help_text': _('Enable if this Redis instance is part of a cluster')
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -262,6 +268,14 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
||||
'port_from_addr': True,
|
||||
'required': True,
|
||||
'secret_types': ['token'],
|
||||
'setting': {
|
||||
'namespace': {
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'default': '',
|
||||
'label': _('Namespace')
|
||||
}
|
||||
}
|
||||
},
|
||||
cls.http: {
|
||||
'port': 80,
|
||||
|
||||
25
apps/assets/migrations/0020_assetnodebackup.py
Normal file
25
apps/assets/migrations/0020_assetnodebackup.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 4.1.13 on 2025-12-13 02:18
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0019_alter_asset_connectivity'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AssetNodeBackup',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('asset_id', models.CharField(max_length=1024, verbose_name='Asset ID')),
|
||||
('node_id', models.CharField(max_length=1024, verbose_name='Node ID')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Asset Node Backup',
|
||||
'db_table': 'assets_asset_nodes_backup',
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,343 @@
|
||||
# Generated by Django 4.1.13 on 2025-12-12 03:55
|
||||
|
||||
"""
|
||||
【数据迁移流程】
|
||||
|
||||
本迁移将 Asset.nodes 从自动生成的 M2M through 表迁移到自定义的 AssetNode 模型,并添加 node_key 字段。
|
||||
|
||||
五阶段迁移流程:
|
||||
|
||||
【阶段1】读取 through 表并备份到 AssetNodeBackup 中
|
||||
- 创建 AssetNodeBackup 表用于备份
|
||||
- 读取原 Asset.nodes.through 表中所有 (asset_id, node_id) 数据
|
||||
- 将数据保存到全局变量 through_old_data 中(内存缓存)
|
||||
- 同时将数据备份到 AssetNodeBackup 表中(持久化,支持重试)
|
||||
- 好处:即使迁移中断,再次执行时也可以从 backup 恢复
|
||||
|
||||
【阶段2】数据库表结构修改
|
||||
- 删除 Asset.nodes 的 M2M 关系字段
|
||||
- 创建新的 AssetNode 自定义 through 模型
|
||||
- 重新添加 Asset.nodes M2M 字段,指向新的 AssetNode
|
||||
- 创建优化后的索引和修改一个联合唯一索引:
|
||||
* idx_node_key_asset_id: 支持按 node_key 范围查询资产
|
||||
* idx_node_id_asset_id: 支持按 node_id 查询资产
|
||||
* idx_asset_id_node_id_key: 支持按 asset_id 反向查询节点
|
||||
* unique_together (asset, node): 保证每个资产和节点组合唯一
|
||||
|
||||
【阶段3】恢复数据并填充 node_key
|
||||
- 优先使用内存缓存中的数据(through_old_data)
|
||||
- 如果内存为空,说明之前可能迁移失败过,则从 AssetNodeBackup 表加载数据
|
||||
- 预加载 Node.key 映射,为每条数据填充 node_key 字段
|
||||
- 预加载已存在的 (asset_id, node_id) 对,避免重复插入
|
||||
- 批量插入到 AssetNode 表中(50k/batch)
|
||||
- 如果批插入失败,降级为单条插入
|
||||
- 统计插入和跳过的记录数
|
||||
|
||||
【阶段4】清理 through 表中的重复数据
|
||||
- 找出原 through 表中 node_key 为空的数据(这些是重复或无效的)
|
||||
- 显示前 100 条要删除的数据的 (asset_id, node_id)
|
||||
- 分批删除(50k/batch)
|
||||
- 输出删除的总数
|
||||
|
||||
【阶段5】删除备份表 (或用户手动删除也可以)
|
||||
- 验证 AssetNodeBackup 表中的数据(显示记录数)
|
||||
- 清空备份表中的所有数据
|
||||
- 删除 AssetNodeBackup 表
|
||||
- 意义:迁移完成后,备份表已无用,清理数据库空间
|
||||
|
||||
【数据一致性保证】
|
||||
- 备份表:AssetNodeBackup 在阶段1中持久化所有原始数据,支持恢复
|
||||
- 去重:阶段3 中使用 set 预检测避免重复
|
||||
- 容错:阶段3 批插入失败时自动降级到单条插入
|
||||
- 清理:阶段4 只删除 node_key 为空的无效数据
|
||||
- 清理:阶段5 删除已完成使命的备份表
|
||||
|
||||
【字段映射】
|
||||
AssetNode.node_key 来自 Node.key
|
||||
"""
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
from django.db import migrations, models, transaction
|
||||
from django.db.models import Count, Q
|
||||
import django.db.models.deletion
|
||||
import assets.models.asset.common
|
||||
|
||||
|
||||
# ============== Module-level state shared between migration phases ==============
# In-memory cache of (asset_id, node_id) pairs read from the old through table
# in phase 1 and consumed in phase 3. If empty when phase 3 runs (e.g. a
# previous migration attempt failed after phase 1), the data is reloaded from
# the AssetNodeBackup table instead.
through_old_data = []
# Progress counters reported in the migration logs.
migration_stats = {'backed_up': 0, 'restored': 0}
|
||||
|
||||
|
||||
def log(msg=''):
    """Print *msg* prefixed with the current wall-clock time (HH:MM:SS).

    ``msg`` defaults to an empty string: callers in this module invoke
    ``log()`` with no argument to emit a blank, timestamped line, which
    raised TypeError with the previous mandatory parameter.
    """
    print(f"[{datetime.now().strftime('%H:%M:%S')}] {msg}")
|
||||
|
||||
|
||||
def load_data_from_backup(AssetNodeBackup):
    """Load all backed-up (asset_id, node_id) pairs into the module-level
    ``through_old_data`` cache, reading in fixed-size batches.

    Args:
        AssetNodeBackup: historical model obtained via ``apps.get_model``.

    Returns:
        bool: True if rows were loaded, False if the backup table is empty.
    """
    global through_old_data
    total = AssetNodeBackup.objects.count()

    if total == 0:
        log("⚠ backup 表为空,无数据可恢复")
        return False

    log(f"从 backup 表加载 {total:,} 条数据...")
    batch_size = 50000
    start = time.time()

    # Order by primary key: OFFSET/LIMIT pagination over an unordered
    # queryset is not deterministic in SQL and could skip or duplicate rows
    # between pages.
    qs = AssetNodeBackup.objects.order_by('pk')
    for offset in range(0, total, batch_size):
        batch = list(qs.values_list('asset_id', 'node_id')[offset:offset + batch_size])
        through_old_data.extend(batch)
        log(f"  已加载 {len(through_old_data):,}/{total:,}")

    log(f"✓ 从 backup 加载完成! 耗时 {time.time()-start:.1f}s")
    return True
|
||||
|
||||
|
||||
def phase1_save_and_backup(apps, schema_editor):
    """Phase 1: read every (asset_id, node_id) pair from the old
    Asset.nodes through table into the in-memory cache ``through_old_data``
    and persist the same pairs into AssetNodeBackup.

    The database backup makes the migration restartable: if a later phase
    fails, a re-run can reload the pairs from AssetNodeBackup instead of the
    (by then already dropped) through table.
    """
    global through_old_data, migration_stats
    Asset = apps.get_model('assets', 'Asset')
    AssetNodeBackup = apps.get_model('assets', 'AssetNodeBackup')
    asset_node_through = Asset.nodes.through
    total = asset_node_through.objects.count()

    log(f"\n{'='*50}")
    log("【阶段1】读取 through 数据并备份")
    log(f"{'='*50}")
    log(f"从 through 表读取 {total:,} 条数据...")

    batch_size = 50000
    start = time.time()
    backup_batch = []

    # Phase 1-1: read everything into memory.
    # Order by primary key: OFFSET/LIMIT pagination over an unordered
    # queryset can skip or duplicate rows between pages.
    qs = asset_node_through.objects.order_by('pk')
    for offset in range(0, total, batch_size):
        batch = list(qs.values_list('asset_id', 'node_id')[offset:offset + batch_size])
        through_old_data.extend(batch)

        # Stage the corresponding backup rows.
        backup_objs = [AssetNodeBackup(asset_id=aid, node_id=nid) for aid, nid in batch]
        backup_batch.extend(backup_objs)

        log(f"  已读取 {len(through_old_data):,}/{total:,} ({len(through_old_data)/total*100:.1f}%)")

    # Phase 1-2: write the backup rows, committing each chunk immediately so
    # completed chunks survive a failure in a later chunk.
    log(f"\n写入 {len(backup_batch):,} 条备份数据到数据库...")
    backup_start = time.time()

    backup_batch_size = 50000
    for i in range(0, len(backup_batch), backup_batch_size):
        batch = backup_batch[i:i + backup_batch_size]
        with transaction.atomic():
            # NOTE(review): with ignore_conflicts=True, bulk_create may return
            # objects that were skipped by the database, so 'backed_up' is an
            # upper bound — confirm if an exact count is required.
            created = AssetNodeBackup.objects.bulk_create(batch, batch_size=backup_batch_size, ignore_conflicts=True)
            migration_stats['backed_up'] += len(created)
        log(f"  已备份 {min(i+backup_batch_size, len(backup_batch)):,}/{len(backup_batch):,}")

    log(f"✓ 阶段1完成! 读取耗时 {time.time()-start:.1f}s, 备份耗时 {time.time()-backup_start:.1f}s")
    log(f"  内存缓存: {len(through_old_data):,} 条")
    log(f"  数据库备份: {migration_stats['backed_up']:,} 条\n")
|
||||
|
||||
|
||||
def phase3_restore_data_and_set_node_key(apps, schema_editor):
    """Phase 3: re-insert the cached (asset_id, node_id) pairs into the new
    AssetNode through model, filling ``node_key`` from the matching Node.

    Prefers the in-memory cache populated by phase 1; if that is empty
    (e.g. a previous run failed after phase 1), falls back to the
    AssetNodeBackup table. Duplicate pairs are skipped; failed bulk inserts
    degrade to row-by-row saves so one bad row cannot abort the batch.
    """
    global through_old_data, migration_stats
    Node = apps.get_model('assets', 'Node')
    AssetNode = apps.get_model('assets', 'AssetNode')
    AssetNodeBackup = apps.get_model('assets', 'AssetNodeBackup')

    log(f"\n{'='*50}")
    log("【阶段3】恢复数据并设置 node_key")
    log(f"{'='*50}")

    # Use the in-memory cache when present; otherwise reload from backup.
    if not through_old_data:
        log("内存缓存为空,从 backup 表加载数据...")
        if not load_data_from_backup(AssetNodeBackup):
            log("✗ 无法恢复数据:backup 表也为空")
            return
        # Bug fix: was ``log()`` — a no-argument call that raised TypeError
        # against ``def log(msg)``. Emit an explicit blank line instead.
        log("")
    else:
        log(f"使用内存缓存的 {len(through_old_data):,} 条数据\n")

    total = len(through_old_data)
    log(f"开始恢复 {total:,} 条数据到 AssetNode 表...")

    # Preload the node_id -> node_key mapping once.
    id_key_map = {str(item['id']): item['key'] for item in Node.objects.values('id', 'key')}

    # Preload existing (asset_id, node_id) pairs to avoid duplicate inserts.
    existing = set(AssetNode.objects.values_list('asset_id', 'node_id'))
    log(f"数据库中已存在 {len(existing):,} 条记录\n")

    batch_size = 50000
    start = time.time()
    skipped = 0

    for i in range(0, total, batch_size):
        batch = through_old_data[i:i + batch_size]

        # De-duplicate: keep only pairs not already present.
        objs = []
        for aid, nid in batch:
            if (aid, nid) not in existing:
                objs.append(AssetNode(asset_id=aid, node_id=nid, node_key=id_key_map.get(str(nid), '')))
                existing.add((aid, nid))
            else:
                skipped += 1

        # Bulk insert; on failure fall back to per-row saves.
        if objs:
            try:
                AssetNode.objects.bulk_create(objs, batch_size=batch_size, ignore_conflicts=True)
                migration_stats['restored'] += len(objs)
            except Exception as e:
                log(f"  ✗ 批插入失败: {str(e)}")
                for obj in objs:
                    try:
                        obj.save()
                        migration_stats['restored'] += 1
                    except Exception as save_err:
                        log(f"  ✗ 跳过 asset_id={obj.asset_id}, node_id={obj.node_id}: {str(save_err)}")
                        skipped += 1

        progress = min(i + batch_size, total)
        log(f"  已恢复 {progress:,}/{total:,} (插入{migration_stats['restored']:,} 跳过{skipped:,})")

    log(f"✓ 阶段3完成! 耗时 {time.time()-start:.1f}s")
    log(f"  插入: {migration_stats['restored']:,} 条")
    log(f"  跳过: {skipped:,} 条\n")
|
||||
|
||||
|
||||
def phase4_cleanup_duplicates(apps, schema_editor):
    """Phase 4: delete rows in the new through table whose ``node_key`` is
    empty (NULL or '') — these are leftover/invalid relations that phase 3
    could not resolve to a Node.

    Logs up to the first 100 (asset_id, node_id) pairs about to be deleted,
    then deletes by primary key in 50k batches.
    """
    Asset = apps.get_model('assets', 'Asset')
    asset_node_through = Asset.nodes.through

    log(f"\n{'='*50}")
    log("【阶段4】清理 through 表中 node_key 为空的数据")
    log(f"{'='*50}")

    # Rows whose node_key was never filled in.
    empty_node_key = asset_node_through.objects.filter(Q(node_key='') | Q(node_key__isnull=True))
    total = empty_node_key.count()

    if total == 0:
        log("✓ 没有 node_key 为空的数据,无需清理\n")
        return

    log(f"发现 {total:,} 条 node_key 为空的数据")

    start = time.time()
    batch_size = 50000
    deleted = 0

    # Materialize the doomed rows once so the delete loop works off a stable
    # snapshot of primary keys.
    to_delete_records = list(
        empty_node_key.values_list('asset_id', 'node_id', 'id')
    )

    log("删除详情:")
    for aid, nid, _ in to_delete_records[:100]:  # show the first 100 only
        log(f"  asset_id={aid}, node_id={nid}")

    if len(to_delete_records) > 100:
        log(f"  ... 还有 {len(to_delete_records)-100:,} 条")

    # Delete in batches by primary key.
    for offset in range(0, len(to_delete_records), batch_size):
        batch_ids = [record_id for _, _, record_id in to_delete_records[offset:offset + batch_size]]

        if batch_ids:
            delete_count, _ = asset_node_through.objects.filter(id__in=batch_ids).delete()
            deleted += delete_count
            log(f"  已删除 {deleted:,}/{total:,}")

    log(f"✓ 阶段4完成! 耗时 {time.time()-start:.1f}s")
    log(f"  删除: {deleted:,} 条 node_key 为空的数据\n")
|
||||
|
||||
|
||||
def phase5_cleanup_backup_table(apps, schema_editor):
    """Phase 5: wipe the AssetNodeBackup staging data once the migration
    has completed and the backup is no longer needed."""
    log(f"\n{'='*50}")
    log("【阶段5】删除 AssetNodeBackup 备份表")
    log(f"{'='*50}")

    AssetNodeBackup = apps.get_model('assets', 'AssetNodeBackup')
    row_count = AssetNodeBackup.objects.count()
    log(f"备份表中有 {row_count:,} 条数据")

    started_at = time.time()

    # Drop every backup row in one queryset delete.
    removed, _ = AssetNodeBackup.objects.all().delete()

    log(f"✓ 删除 {removed:,} 条备份数据")
    log(f"✓ 阶段5完成! 耗时 {time.time()-started_at:.1f}s\n")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Move Asset.nodes from the auto-generated M2M through table to the
    custom AssetNode model, adding a denormalized ``node_key`` column.

    Operation order matters: data is backed up (phase 1) before the schema
    swap, restored with node_key filled (phase 3) after it, then invalid
    rows are purged (phase 4). Phase 5 (dropping the backup table) is left
    commented out so operators can verify the result before cleanup.
    """

    dependencies = [
        ('assets', '0020_assetnodebackup'),
    ]

    operations = [
        # Phase 1: cache the old through-table pairs and persist the backup.
        migrations.RunPython(phase1_save_and_backup),

        # Phase 2: schema changes — swap the M2M to the custom through model.
        migrations.RemoveField(
            model_name='asset',
            name='nodes',
        ),
        migrations.CreateModel(
            name='AssetNode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('node_key', models.CharField(db_index=True, default='', max_length=64, verbose_name='Node key')),
                ('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='assets.asset', verbose_name='Asset')),
                ('node', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='assets.node', verbose_name='Node')),
            ],
            options={
                'verbose_name': 'Asset Node',
                # Same table name as the old auto-generated through table.
                'db_table': 'assets_asset_nodes',
            },
        ),
        migrations.AddField(
            model_name='asset',
            name='nodes',
            field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', through='assets.AssetNode', to='assets.node', verbose_name='Nodes'),
        ),
        migrations.AlterUniqueTogether(
            name='assetnode',
            unique_together={('asset', 'node_key'), ('asset', 'node')},
        ),
        # Supports querying assets by node_key range (descendant lookups).
        migrations.AddIndex(
            model_name='assetnode',
            index=models.Index(fields=['node_key', 'asset_id'], name='idx_node_key_asset_id'),
        ),
        # Supports querying assets directly attached to a node.
        migrations.AddIndex(
            model_name='assetnode',
            index=models.Index(fields=['node_id', 'asset_id'], name='idx_node_id_asset_id'),
        ),
        # Supports reverse lookups of nodes/keys by asset_id.
        migrations.AddIndex(
            model_name='assetnode',
            index=models.Index(fields=['asset_id', 'node_id', 'node_key'], name='idx_asset_id_node_id_key'),
        ),

        # Phase 3: restore the cached pairs with node_key filled in.
        migrations.RunPython(phase3_restore_data_and_set_node_key),

        # Phase 4: purge rows whose node_key could not be resolved.
        migrations.RunPython(phase4_cleanup_duplicates),

        # Phase 5: drop the backup table (optional; may also be done manually).
        # migrations.RunPython(phase5_cleanup_backup_table),
        # migrations.DeleteModel(
        #     name='AssetNodeBackup',
        # ),
    ]
|
||||
@@ -9,3 +9,4 @@ from .node import *
|
||||
from .favorite_asset import *
|
||||
from .automations import *
|
||||
from .my_asset import *
|
||||
from .asset_node import *
|
||||
@@ -6,4 +6,4 @@ from .device import *
|
||||
from .ds import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .web import *
|
||||
from .web import *
|
||||
@@ -112,7 +112,7 @@ class Protocol(models.Model):
|
||||
return protocols[0] if len(protocols) > 0 else {}
|
||||
|
||||
@property
|
||||
def setting(self):
|
||||
def setting(self) -> dict:
|
||||
if self._setting is not None:
|
||||
return self._setting
|
||||
return self.asset_platform_protocol.get('setting', {})
|
||||
@@ -122,7 +122,7 @@ class Protocol(models.Model):
|
||||
self._setting = value
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
def public(self) -> bool:
|
||||
return self.asset_platform_protocol.get('public', True)
|
||||
|
||||
|
||||
@@ -173,7 +173,12 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
verbose_name=_("Zone"), on_delete=models.SET_NULL
|
||||
)
|
||||
nodes = models.ManyToManyField(
|
||||
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
|
||||
'assets.Node',
|
||||
default=default_node,
|
||||
related_name='assets',
|
||||
verbose_name=_("Nodes"),
|
||||
through='assets.AssetNode', # 使用自定义 through 表
|
||||
through_fields=('asset', 'node')
|
||||
)
|
||||
directory_services = models.ManyToManyField(
|
||||
'assets.DirectoryService', related_name='assets',
|
||||
@@ -210,7 +215,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return self.category == const.Category.DS and hasattr(self, 'ds')
|
||||
|
||||
@lazyproperty
|
||||
def spec_info(self):
|
||||
def spec_info(self) -> dict:
|
||||
instance = getattr(self, self.category, None)
|
||||
if not instance:
|
||||
return {}
|
||||
@@ -240,7 +245,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return info
|
||||
|
||||
@lazyproperty
|
||||
def auto_config(self):
|
||||
def auto_config(self) -> dict:
|
||||
platform = self.platform
|
||||
auto_config = {
|
||||
'su_enabled': platform.su_enabled,
|
||||
@@ -343,11 +348,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return names
|
||||
|
||||
@lazyproperty
|
||||
def type(self):
|
||||
def type(self) -> str:
|
||||
return self.platform.type
|
||||
|
||||
@lazyproperty
|
||||
def category(self):
|
||||
def category(self) -> str:
|
||||
return self.platform.category
|
||||
|
||||
def is_category(self, category):
|
||||
@@ -408,8 +413,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
return tree_node
|
||||
|
||||
@staticmethod
|
||||
def get_secret_type_assets(asset_ids, secret_type):
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
def get_secret_type_assets(assets, secret_type):
|
||||
asset_protocol = assets.prefetch_related('protocols').values_list('id', 'protocols__name')
|
||||
protocol_secret_types_map = const.Protocol.protocol_secret_types()
|
||||
asset_secret_types_mapp = defaultdict(set)
|
||||
|
||||
67
apps/assets/models/asset_node.py
Normal file
67
apps/assets/models/asset_node.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from django.db import models
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
__all__ = ['AssetNode', 'AssetNodeBackup']
|
||||
|
||||
|
||||
class AssetNode(models.Model):
    """Custom through model for Asset.nodes.

    Besides the (asset, node) pair, it denormalizes the node's ``key`` so
    descendant-asset queries can filter on node_key directly without
    joining the Node table.
    """
    asset = models.ForeignKey(
        'assets.Asset',
        on_delete=models.CASCADE,
        verbose_name=_('Asset'),
        db_index=True
    )
    node = models.ForeignKey(
        'assets.Node',
        on_delete=models.CASCADE,
        verbose_name=_('Node'),
        db_index=True
    )
    # Copy of Node.key at link time; kept in sync by the m2m_changed signal
    # handler (see assets/signal_handlers/node_asset.py).
    node_key = models.CharField(
        max_length=64,
        verbose_name=_('Node key'),
        db_index=True,
        default=''
    )

    class Meta:
        db_table = 'assets_asset_nodes'
        verbose_name = _('Asset Node')

        # unique_together: which column combinations must be unique.
        unique_together = [
            ('asset', 'node'),  # each asset/node pair appears at most once
            ('asset', 'node_key'),  # also unique per denormalized node_key
        ]

        # indexes: query-optimization indexes.
        indexes = [
            # Index 1: all assets under a node subtree (descendants).
            # Query: WHERE node_key LIKE '1.12:%'
            models.Index(fields=['node_key', 'asset_id'], name='idx_node_key_asset_id'),

            # Index 2: all assets directly attached to a node.
            # Query: WHERE node_id = ?
            models.Index(fields=['node_id', 'asset_id'], name='idx_node_id_asset_id'),

            # Index 3: node_key / node_id lookups by asset.
            # Query: WHERE asset_id = ? fetching node_key or node_id
            models.Index(fields=['asset_id', 'node_id', 'node_key'], name='idx_asset_id_node_id_key'),
        ]
|
||||
|
||||
|
||||
class AssetNodeBackup(models.Model):
    """Staging table that persists (asset_id, node_id) pairs from the old
    Asset.nodes through table during migration 0021, so an interrupted
    migration can be re-run without losing the relations.
    """
    # Stored as text, not a ForeignKey: backup rows must outlive schema changes.
    asset_id = models.CharField(
        max_length=1024,
        verbose_name=_('Asset ID'),
    )
    node_id = models.CharField(
        max_length=1024,
        verbose_name=_('Node ID'),
    )

    class Meta:
        db_table = 'assets_asset_nodes_backup'
        verbose_name = _('Asset Node Backup')
|
||||
@@ -28,7 +28,8 @@ class MyAsset(JMSBaseModel):
|
||||
|
||||
@staticmethod
|
||||
def set_asset_custom_value(assets, user):
|
||||
my_assets = MyAsset.objects.filter(asset__in=assets, user=user).all()
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
my_assets = MyAsset.objects.filter(asset_id__in=asset_ids, user=user).all()
|
||||
customs = {my_asset.asset.id: my_asset.custom_to_dict() for my_asset in my_assets}
|
||||
for asset in assets:
|
||||
custom = customs.get(asset.id)
|
||||
|
||||
@@ -65,7 +65,7 @@ class FamilyMixin:
|
||||
def get_nodes_children_key_pattern(cls, nodes, with_self=True):
|
||||
keys = [i.key for i in nodes]
|
||||
keys = cls.clean_children_keys(keys)
|
||||
patterns = [cls.get_node_all_children_key_pattern(key) for key in keys]
|
||||
patterns = [cls.get_node_all_children_key_pattern(key, with_self=with_self) for key in keys]
|
||||
patterns = '|'.join(patterns)
|
||||
return patterns
|
||||
|
||||
@@ -573,7 +573,7 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
return not self.__gt__(other)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
return self.value
|
||||
|
||||
def computed_full_value(self):
|
||||
|
||||
@@ -25,7 +25,7 @@ class PlatformProtocol(models.Model):
|
||||
return '{}/{}'.format(self.name, self.port)
|
||||
|
||||
@property
|
||||
def secret_types(self):
|
||||
def secret_types(self) -> list:
|
||||
return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
|
||||
|
||||
@lazyproperty
|
||||
|
||||
@@ -147,6 +147,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
|
||||
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
|
||||
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
|
||||
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
|
||||
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
|
||||
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
|
||||
attrs=('id', 'name', 'type'))
|
||||
accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
|
||||
@@ -425,6 +426,18 @@ class DetailMixin(serializers.Serializer):
|
||||
gathered_info = MethodSerializer(label=_('Gathered info'), read_only=True)
|
||||
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
|
||||
|
||||
@staticmethod
|
||||
def get_auto_config(obj) -> dict:
|
||||
return obj.auto_config
|
||||
|
||||
@staticmethod
|
||||
def get_gathered_info(obj) -> dict:
|
||||
return obj.gathered_info
|
||||
|
||||
@staticmethod
|
||||
def get_spec_info(obj) -> dict:
|
||||
return obj.spec_info
|
||||
|
||||
def get_instance(self):
|
||||
request = self.context.get('request')
|
||||
if not self.instance and UUID_PATTERN.findall(request.path):
|
||||
|
||||
@@ -59,7 +59,10 @@ class DatabaseSerializer(AssetSerializer):
|
||||
if not platform:
|
||||
return
|
||||
|
||||
if platform.type in ['mysql', 'mariadb']:
|
||||
if platform.type in [
|
||||
'mysql', 'mariadb', 'oracle', 'sqlserver',
|
||||
'db2', 'dameng', 'clickhouse', 'redis'
|
||||
]:
|
||||
db_field.required = False
|
||||
db_field.allow_blank = True
|
||||
db_field.allow_null = True
|
||||
|
||||
@@ -26,4 +26,13 @@ class WebSerializer(AssetSerializer):
|
||||
'submit_selector': {
|
||||
'default': 'id=login_button',
|
||||
},
|
||||
'script': {
|
||||
'default': [],
|
||||
}
|
||||
}
|
||||
|
||||
def to_internal_value(self, data):
|
||||
data = data.copy()
|
||||
if data.get('script') in ("", None):
|
||||
data.pop('script', None)
|
||||
return super().to_internal_value(data)
|
||||
|
||||
@@ -19,11 +19,13 @@ __all__ = [
|
||||
class BaseAutomationSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
assets = ObjectRelatedField(many=True, required=False, queryset=Asset.objects, label=_('Assets'))
|
||||
nodes = ObjectRelatedField(many=True, required=False, queryset=Node.objects, label=_('Nodes'))
|
||||
executed_amount = serializers.IntegerField(read_only=True, label=_('Executed amount'))
|
||||
|
||||
class Meta:
|
||||
read_only_fields = [
|
||||
'date_created', 'date_updated', 'created_by',
|
||||
'periodic_display', 'executed_amount', 'type', 'last_execution_date'
|
||||
'periodic_display', 'executed_amount', 'type',
|
||||
'last_execution_date',
|
||||
]
|
||||
mini_fields = [
|
||||
'id', 'name', 'type', 'is_periodic', 'interval',
|
||||
|
||||
@@ -84,6 +84,7 @@ class PlatformAutomationSerializer(serializers.ModelSerializer):
|
||||
class PlatformProtocolSerializer(serializers.ModelSerializer):
|
||||
setting = MethodSerializer(required=False, label=_("Setting"))
|
||||
port_from_addr = serializers.BooleanField(label=_("Port from addr"), read_only=True)
|
||||
port = serializers.IntegerField(label=_("Port"), required=False, min_value=0, max_value=65535)
|
||||
|
||||
class Meta:
|
||||
model = PlatformProtocol
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from .asset import *
|
||||
from .node_assets_amount import *
|
||||
from .node_assets_mapping import *
|
||||
from .node_asset import *
|
||||
24
apps/assets/signal_handlers/node_asset.py
Normal file
24
apps/assets/signal_handlers/node_asset.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from django.db.models.signals import pre_save, m2m_changed, post_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
from assets.models import AssetNode, Asset, Node
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Asset.nodes.through)
def fill_node_key_on_m2m_change(sender, instance, action, pk_set, **kwargs):
    """After nodes are added to an asset (or assets to a node), copy each
    Node.key onto the matching AssetNode.node_key rows in bulk."""
    if action != 'post_add':
        return

    # Resolve which side of the relation triggered the change.
    if isinstance(instance, Asset):
        asset_ids = [str(instance.id)]
        node_ids = [str(pk) for pk in pk_set]
    elif isinstance(instance, Node):
        asset_ids = [str(pk) for pk in pk_set]
        node_ids = [str(instance.id)]
    else:
        return

    # Map node id -> key once, then stamp it onto the affected rows.
    key_by_node = {
        str(node_id): key
        for node_id, key in Node.objects.filter(id__in=node_ids).values_list('id', 'key')
    }
    relations = AssetNode.objects.filter(asset_id__in=asset_ids, node_id__in=node_ids)
    for relation in relations:
        relation.node_key = key_by_node.get(str(relation.node_id), '')
    AssetNode.objects.bulk_update(relations, ['node_key'], batch_size=5000)
|
||||
@@ -43,7 +43,7 @@ from .serializers import (
|
||||
OperateLogSerializer, OperateLogActionDetailSerializer,
|
||||
PasswordChangeLogSerializer, ActivityUnionLogSerializer,
|
||||
FileSerializer, UserSessionSerializer, JobsAuditSerializer,
|
||||
ServiceAccessLogSerializer
|
||||
ServiceAccessLogSerializer, OperateLogFullSerializer
|
||||
)
|
||||
from .utils import construct_userlogin_usernames, record_operate_log_and_activity_log
|
||||
|
||||
@@ -256,7 +256,9 @@ class OperateLogViewSet(OrgReadonlyModelViewSet):
|
||||
def get_serializer_class(self):
|
||||
if self.is_action_detail:
|
||||
return OperateLogActionDetailSerializer
|
||||
return super().get_serializer_class()
|
||||
elif self.request.query_params.get('format'):
|
||||
return OperateLogFullSerializer
|
||||
return OperateLogSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
current_org_id = str(current_org.id)
|
||||
|
||||
@@ -23,6 +23,8 @@ logger = get_logger(__name__)
|
||||
|
||||
class OperatorLogHandler(metaclass=Singleton):
|
||||
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
|
||||
SYSTEM_OBJECTS = frozenset({"Role"})
|
||||
PREFER_CURRENT_ELSE_USER = frozenset({"SSOToken"})
|
||||
|
||||
def __init__(self):
|
||||
self.log_client = self.get_storage_client()
|
||||
@@ -142,13 +144,21 @@ class OperatorLogHandler(metaclass=Singleton):
|
||||
after = self.__data_processing(after)
|
||||
return before, after
|
||||
|
||||
@staticmethod
|
||||
def get_org_id(object_name):
|
||||
system_obj = ('Role',)
|
||||
org_id = get_current_org_id()
|
||||
if object_name in system_obj:
|
||||
org_id = Organization.SYSTEM_ID
|
||||
return org_id
|
||||
def get_org_id(self, user, object_name):
|
||||
if object_name in self.SYSTEM_OBJECTS:
|
||||
return Organization.SYSTEM_ID
|
||||
|
||||
current = get_current_org_id()
|
||||
current_id = str(current) if current else None
|
||||
|
||||
if object_name in self.PREFER_CURRENT_ELSE_USER:
|
||||
if current_id and current_id != Organization.DEFAULT_ID:
|
||||
return current_id
|
||||
|
||||
org = user.orgs.distinct().first()
|
||||
return str(org.id) if org else Organization.DEFAULT_ID
|
||||
|
||||
return current_id or Organization.DEFAULT_ID
|
||||
|
||||
def create_or_update_operate_log(
|
||||
self, action, resource_type, resource=None, resource_display=None,
|
||||
@@ -168,7 +178,7 @@ class OperatorLogHandler(metaclass=Singleton):
|
||||
# 前后都没变化,没必要生成日志,除非手动强制保存
|
||||
return
|
||||
|
||||
org_id = self.get_org_id(object_name)
|
||||
org_id = self.get_org_id(user, object_name)
|
||||
data = {
|
||||
'id': log_id, "user": str(user), 'action': action,
|
||||
'resource_type': str(resource_type), 'org_id': org_id,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import os
|
||||
import uuid
|
||||
from datetime import timedelta
|
||||
from datetime import timedelta, datetime
|
||||
from importlib import import_module
|
||||
|
||||
from django.conf import settings
|
||||
@@ -40,7 +40,7 @@ __all__ = [
|
||||
|
||||
class JobLog(JobExecution):
|
||||
@property
|
||||
def creator_name(self):
|
||||
def creator_name(self) -> str:
|
||||
return self.creator.name
|
||||
|
||||
class Meta:
|
||||
@@ -232,7 +232,7 @@ class UserLoginLog(models.Model):
|
||||
return '%s(%s)' % (self.username, self.city)
|
||||
|
||||
@property
|
||||
def backend_display(self):
|
||||
def backend_display(self) -> str:
|
||||
return gettext(self.backend)
|
||||
|
||||
@classmethod
|
||||
@@ -258,7 +258,7 @@ class UserLoginLog(models.Model):
|
||||
return login_logs
|
||||
|
||||
@property
|
||||
def reason_display(self):
|
||||
def reason_display(self) -> str:
|
||||
from authentication.errors import reason_choices, old_reason_choices
|
||||
|
||||
reason = reason_choices.get(self.reason)
|
||||
@@ -300,15 +300,15 @@ class UserSession(models.Model):
|
||||
return '%s(%s)' % (self.user, self.ip)
|
||||
|
||||
@property
|
||||
def backend_display(self):
|
||||
def backend_display(self) -> str:
|
||||
return gettext(self.backend)
|
||||
|
||||
@property
|
||||
def is_active(self):
|
||||
def is_active(self) -> bool:
|
||||
return user_session_manager.check_active(self.key)
|
||||
|
||||
@property
|
||||
def date_expired(self):
|
||||
def date_expired(self) -> datetime:
|
||||
session_store_cls = import_module(settings.SESSION_ENGINE).SessionStore
|
||||
session_store = session_store_cls(session_key=self.key)
|
||||
cache_key = session_store.cache_key
|
||||
|
||||
@@ -119,14 +119,29 @@ class OperateLogSerializer(BulkOrgResourceModelSerializer):
|
||||
fields = fields_small
|
||||
|
||||
@staticmethod
|
||||
def get_resource_type(instance):
|
||||
def get_resource_type(instance) -> str:
|
||||
return _(instance.resource_type)
|
||||
|
||||
@staticmethod
|
||||
def get_resource(instance):
|
||||
def get_resource(instance) -> str:
|
||||
return i18n_trans(instance.resource)
|
||||
|
||||
|
||||
class DiffFieldSerializer(serializers.JSONField):
|
||||
def to_file_representation(self, value):
|
||||
row = getattr(self, '_row') or {}
|
||||
attrs = {'diff': value, 'resource_type': row.get('resource_type')}
|
||||
instance = type('OperateLog', (), attrs)
|
||||
return OperateLogStore.convert_diff_friendly(instance)
|
||||
|
||||
|
||||
class OperateLogFullSerializer(OperateLogSerializer):
|
||||
diff = DiffFieldSerializer(label=_("Diff"))
|
||||
|
||||
class Meta(OperateLogSerializer.Meta):
|
||||
fields = OperateLogSerializer.Meta.fields + ['diff']
|
||||
|
||||
|
||||
class PasswordChangeLogSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.PasswordChangeLog
|
||||
@@ -147,11 +162,11 @@ class ActivityUnionLogSerializer(serializers.Serializer):
|
||||
r_type = serializers.CharField(read_only=True)
|
||||
|
||||
@staticmethod
|
||||
def get_timestamp(obj):
|
||||
def get_timestamp(obj) -> str:
|
||||
return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
@staticmethod
|
||||
def get_content(obj):
|
||||
def get_content(obj) -> str:
|
||||
if not obj['r_detail']:
|
||||
action = obj['r_action'].replace('_', ' ').capitalize()
|
||||
ctn = _('%s %s this resource') % (obj['r_user'], _(action).lower())
|
||||
@@ -160,7 +175,7 @@ class ActivityUnionLogSerializer(serializers.Serializer):
|
||||
return ctn
|
||||
|
||||
@staticmethod
|
||||
def get_detail_url(obj):
|
||||
def get_detail_url(obj) -> str:
|
||||
detail_url = ''
|
||||
detail_id, obj_type = obj['r_detail_id'], obj['r_type']
|
||||
if not detail_id:
|
||||
@@ -210,7 +225,7 @@ class UserSessionSerializer(serializers.ModelSerializer):
|
||||
"backend_display": {"label": _("Auth backend display")},
|
||||
}
|
||||
|
||||
def get_is_current_user_session(self, obj):
|
||||
def get_is_current_user_session(self, obj) -> bool:
|
||||
request = self.context.get('request')
|
||||
if not request:
|
||||
return False
|
||||
|
||||
@@ -116,7 +116,7 @@ def send_login_info_to_reviewers(instance: UserLoginLog | str, auth_acl_id):
|
||||
|
||||
reviewers = acl.reviewers.all()
|
||||
for reviewer in reviewers:
|
||||
UserLoginReminderMsg(reviewer, instance).publish_async()
|
||||
UserLoginReminderMsg(reviewer, instance, acl).publish_async()
|
||||
|
||||
|
||||
@receiver(post_auth_success)
|
||||
|
||||
@@ -47,20 +47,21 @@ def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
objs_display = [str(o) for o in objs]
|
||||
action = M2M_ACTION[action]
|
||||
changed_field = current_instance.get(field_name, [])
|
||||
changed_field = current_instance.get(field_name, {})
|
||||
changed_value = changed_field.get('value', [])
|
||||
|
||||
after, before, before_value = None, None, None
|
||||
if action == ActionChoices.create:
|
||||
before_value = list(set(changed_field) - set(objs_display))
|
||||
before_value = list(set(changed_value) - set(objs_display))
|
||||
elif action == ActionChoices.delete:
|
||||
before_value = list(
|
||||
set(changed_field).symmetric_difference(set(objs_display))
|
||||
)
|
||||
before_value = list(set(changed_value).symmetric_difference(set(objs_display)))
|
||||
|
||||
if changed_field:
|
||||
after = {field_name: changed_field}
|
||||
if before_value:
|
||||
before = {field_name: before_value}
|
||||
before_change_field = changed_field.copy()
|
||||
before_change_field['value'] = before_value
|
||||
before = {field_name: before_change_field}
|
||||
|
||||
if sorted(str(before)) == sorted(str(after)):
|
||||
return
|
||||
|
||||
@@ -2,15 +2,14 @@
|
||||
#
|
||||
import datetime
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from celery import shared_task
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import default_storage
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils._os import safe_join
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.const.crontab import CRONTAB_AT_AM_TWO
|
||||
from common.storage.ftp_file import FTPFileStorageHandler
|
||||
@@ -79,7 +78,7 @@ def clean_celery_tasks_period():
|
||||
command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;"
|
||||
safe_run_cmd(command, (settings.CELERY_LOG_DIR, expire_days))
|
||||
celery_log_path = safe_join(settings.LOG_DIR, 'celery.log')
|
||||
command = "echo > {}".format(celery_log_path)
|
||||
command = "echo > %s"
|
||||
safe_run_cmd(command, (celery_log_path,))
|
||||
|
||||
|
||||
|
||||
@@ -16,3 +16,4 @@ from .sso import *
|
||||
from .temp_token import *
|
||||
from .token import *
|
||||
from .face import *
|
||||
from .access_token import *
|
||||
|
||||
47
apps/authentication/api/access_token.py
Normal file
47
apps/authentication/api/access_token.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND
|
||||
|
||||
from oauth2_provider.models import get_access_token_model
|
||||
|
||||
from common.api import JMSModelViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
from ..serializers import AccessTokenSerializer
|
||||
|
||||
|
||||
AccessToken = get_access_token_model()
|
||||
|
||||
|
||||
class AccessTokenViewSet(JMSModelViewSet):
|
||||
"""
|
||||
OAuth2 Access Token 管理视图集
|
||||
用户只能查看和撤销自己的 access token
|
||||
"""
|
||||
serializer_class = AccessTokenSerializer
|
||||
permission_classes = [RBACPermission]
|
||||
http_method_names = ['get', 'options', 'delete']
|
||||
rbac_perms = {
|
||||
'revoke': 'oauth2_provider.delete_accesstoken',
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
"""只返回当前用户的 access token,按创建时间倒序"""
|
||||
return AccessToken.objects.filter(user=self.request.user).order_by('-created')
|
||||
|
||||
@action(methods=['DELETE'], detail=True, url_path='revoke')
|
||||
def revoke(self, request, *args, **kwargs):
|
||||
"""
|
||||
撤销 access token 及其关联的 refresh token
|
||||
如果 token 不存在或不属于当前用户,返回 404
|
||||
"""
|
||||
token = get_object_or_404(
|
||||
AccessToken.objects.filter(user=request.user),
|
||||
id=kwargs['pk']
|
||||
)
|
||||
# 优先撤销 refresh token,会自动撤销关联的 access token
|
||||
token_to_revoke = token.refresh_token if token.refresh_token else token
|
||||
token_to_revoke.revoke()
|
||||
return Response(status=HTTP_204_NO_CONTENT)
|
||||
@@ -69,6 +69,8 @@ class RDPFileClientProtocolURLMixin:
|
||||
'autoreconnection enabled:i': '1',
|
||||
'bookmarktype:i': '3',
|
||||
'use redirection server name:i': '0',
|
||||
'bitmapcachepersistenable:i': '0',
|
||||
'bitmapcachesize:i': '1500',
|
||||
}
|
||||
|
||||
# copy from
|
||||
@@ -76,7 +78,6 @@ class RDPFileClientProtocolURLMixin:
|
||||
rdp_low_speed_broadband_option = {
|
||||
"connection type:i": 2,
|
||||
"disable wallpaper:i": 1,
|
||||
"bitmapcachepersistenable:i": 1,
|
||||
"disable full window drag:i": 1,
|
||||
"disable menu anims:i": 1,
|
||||
"allow font smoothing:i": 0,
|
||||
@@ -87,7 +88,6 @@ class RDPFileClientProtocolURLMixin:
|
||||
rdp_high_speed_broadband_option = {
|
||||
"connection type:i": 4,
|
||||
"disable wallpaper:i": 0,
|
||||
"bitmapcachepersistenable:i": 1,
|
||||
"disable full window drag:i": 1,
|
||||
"disable menu anims:i": 0,
|
||||
"allow font smoothing:i": 0,
|
||||
@@ -362,6 +362,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
|
||||
self.validate_serializer(serializer)
|
||||
return super().perform_create(serializer)
|
||||
|
||||
|
||||
def _insert_connect_options(self, data, user):
|
||||
connect_options = data.pop('connect_options', {})
|
||||
default_name_opts = {
|
||||
@@ -375,7 +376,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
|
||||
for name in default_name_opts.keys():
|
||||
value = preferences.get(name, default_name_opts[name])
|
||||
connect_options[name] = value
|
||||
connect_options['lang'] = getattr(user, 'lang', settings.LANGUAGE_CODE)
|
||||
connect_options['lang'] = getattr(user, 'lang') or settings.LANGUAGE_CODE
|
||||
data['connect_options'] = connect_options
|
||||
|
||||
@staticmethod
|
||||
@@ -431,7 +432,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
|
||||
if account.username != AliasAccount.INPUT:
|
||||
data['input_username'] = ''
|
||||
|
||||
ticket = self._validate_acl(user, asset, account, connect_method)
|
||||
ticket = self._validate_acl(user, asset, account, connect_method, protocol)
|
||||
if ticket:
|
||||
data['from_ticket'] = ticket
|
||||
|
||||
@@ -470,7 +471,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
|
||||
after=after, object_name=object_name
|
||||
)
|
||||
|
||||
def _validate_acl(self, user, asset, account, connect_method):
|
||||
def _validate_acl(self, user, asset, account, connect_method, protocol):
|
||||
from acls.models import LoginAssetACL
|
||||
kwargs = {'user': user, 'asset': asset, 'account': account}
|
||||
if account.username == AliasAccount.INPUT:
|
||||
@@ -523,9 +524,15 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
|
||||
return
|
||||
|
||||
self._record_operate_log(acl, asset)
|
||||
os = get_request_os(self.request) if self.request else 'windows'
|
||||
method = ConnectMethodUtil.get_connect_method(
|
||||
connect_method, protocol=protocol, os=os
|
||||
)
|
||||
login_from = method['label'] if method else connect_method
|
||||
for reviewer in reviewers:
|
||||
AssetLoginReminderMsg(
|
||||
reviewer, asset, user, account, self.input_username
|
||||
reviewer, asset, user, account, acl,
|
||||
ip, self.input_username, login_from
|
||||
).publish_async()
|
||||
|
||||
def create_face_verify(self, response):
|
||||
@@ -558,7 +565,9 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||
rbac_perms = {
|
||||
'create': 'authentication.add_superconnectiontoken',
|
||||
'renewal': 'authentication.add_superconnectiontoken',
|
||||
'list': 'authentication.view_superconnectiontoken',
|
||||
'check': 'authentication.view_superconnectiontoken',
|
||||
'retrieve': 'authentication.view_superconnectiontoken',
|
||||
'get_secret_detail': 'authentication.view_superconnectiontokensecret',
|
||||
'get_applet_info': 'authentication.view_superconnectiontoken',
|
||||
'release_applet_account': 'authentication.view_superconnectiontoken',
|
||||
@@ -566,7 +575,12 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
return ConnectionToken.objects.all()
|
||||
return ConnectionToken.objects.none()
|
||||
|
||||
def get_object(self):
|
||||
pk = self.kwargs.get(self.lookup_field)
|
||||
token = get_object_or_404(ConnectionToken, pk=pk)
|
||||
return token
|
||||
|
||||
def get_user(self, serializer):
|
||||
return serializer.validated_data.get('user')
|
||||
@@ -618,6 +632,8 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||
|
||||
token_id = request.data.get('id') or ''
|
||||
token = ConnectionToken.get_typed_connection_token(token_id)
|
||||
if not token:
|
||||
raise PermissionDenied('Token {} is not valid'.format(token))
|
||||
token.is_valid()
|
||||
serializer = self.get_serializer(instance=token)
|
||||
|
||||
|
||||
@@ -67,8 +67,9 @@ class UserResetPasswordSendCodeApi(CreateAPIView):
|
||||
|
||||
code = random_string(settings.SMS_CODE_LENGTH, lower=False, upper=False)
|
||||
subject = '%s: %s' % (get_login_title(), _('Forgot password'))
|
||||
tip = _('The validity period of the verification code is {} minute').format(settings.VERIFY_CODE_TTL // 60)
|
||||
context = {
|
||||
'user': user, 'title': subject, 'code': code,
|
||||
'user': user, 'title': subject, 'code': code, 'tip': tip,
|
||||
}
|
||||
message = render_to_string('authentication/_msg_reset_password_code.html', context)
|
||||
content = {'subject': subject, 'message': message}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.backends import ModelBackend
|
||||
from django.views import View
|
||||
|
||||
from common.utils import get_logger
|
||||
from users.models import User
|
||||
@@ -25,7 +24,10 @@ class JMSBaseAuthBackend:
|
||||
"""
|
||||
# 三方用户认证完成后,在后续的 get_user 获取逻辑中,也应该需要检查用户是否有效
|
||||
is_valid = getattr(user, 'is_valid', None)
|
||||
return is_valid or is_valid is None
|
||||
if not is_valid:
|
||||
logger.info("User %s is not valid", getattr(user, "username", "<unknown>"))
|
||||
return False
|
||||
return True
|
||||
|
||||
# allow user to authenticate
|
||||
def username_allow_authenticate(self, username):
|
||||
@@ -63,11 +65,3 @@ class JMSBaseAuthBackend:
|
||||
class JMSModelBackend(JMSBaseAuthBackend, ModelBackend):
|
||||
def user_can_authenticate(self, user):
|
||||
return True
|
||||
|
||||
|
||||
class BaseAuthCallbackClientView(View):
|
||||
http_method_names = ['get']
|
||||
|
||||
def get(self, request):
|
||||
from authentication.views.utils import redirect_to_guard_view
|
||||
return redirect_to_guard_view(query_string='next=client')
|
||||
|
||||
@@ -1,51 +1,22 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
import threading
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django_cas_ng.backends import CASBackend as _CASBackend
|
||||
|
||||
from common.utils import get_logger
|
||||
from ..base import JMSBaseAuthBackend
|
||||
|
||||
__all__ = ['CASBackend', 'CASUserDoesNotExist']
|
||||
__all__ = ['CASBackend']
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class CASUserDoesNotExist(Exception):
|
||||
"""Exception raised when a CAS user does not exist."""
|
||||
pass
|
||||
|
||||
|
||||
class CASBackend(JMSBaseAuthBackend, _CASBackend):
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return settings.AUTH_CAS
|
||||
|
||||
def authenticate(self, request, ticket, service):
|
||||
UserModel = get_user_model()
|
||||
manager = UserModel._default_manager
|
||||
original_get_by_natural_key = manager.get_by_natural_key
|
||||
thread_local = threading.local()
|
||||
thread_local.thread_id = threading.get_ident()
|
||||
logger.debug(f"CASBackend.authenticate: thread_id={thread_local.thread_id}")
|
||||
|
||||
def get_by_natural_key(self, username):
|
||||
logger.debug(f"CASBackend.get_by_natural_key: thread_id={threading.get_ident()}, username={username}")
|
||||
if threading.get_ident() != thread_local.thread_id:
|
||||
return original_get_by_natural_key(username)
|
||||
|
||||
try:
|
||||
user = original_get_by_natural_key(username)
|
||||
except UserModel.DoesNotExist:
|
||||
raise CASUserDoesNotExist(username)
|
||||
return user
|
||||
|
||||
try:
|
||||
manager.get_by_natural_key = get_by_natural_key.__get__(manager, type(manager))
|
||||
user = super().authenticate(request, ticket=ticket, service=service)
|
||||
finally:
|
||||
manager.get_by_natural_key = original_get_by_natural_key
|
||||
return user
|
||||
# 这里做个hack ,让父类始终走CAS_CREATE_USER=True的逻辑,然后调用 authentication/mixins.py 中的 custom_get_or_create 方法
|
||||
settings.CAS_CREATE_USER = True
|
||||
return super().authenticate(request, ticket, service)
|
||||
|
||||
@@ -3,11 +3,10 @@
|
||||
import django_cas_ng.views
|
||||
from django.urls import path
|
||||
|
||||
from .views import CASLoginView, CASCallbackClientView
|
||||
from .views import CASLoginView
|
||||
|
||||
urlpatterns = [
|
||||
path('login/', CASLoginView.as_view(), name='cas-login'),
|
||||
path('logout/', django_cas_ng.views.LogoutView.as_view(), name='cas-logout'),
|
||||
path('callback/', django_cas_ng.views.CallbackView.as_view(), name='cas-proxy-callback'),
|
||||
path('login/client', CASCallbackClientView.as_view(), name='cas-proxy-callback-client'),
|
||||
path('callback/', django_cas_ng.views.CallbackView.as_view(), name='cas-proxy-callback')
|
||||
]
|
||||
|
||||
@@ -3,31 +3,20 @@ from django.http import HttpResponseRedirect
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_cas_ng.views import LoginView
|
||||
|
||||
from authentication.backends.base import BaseAuthCallbackClientView
|
||||
from common.utils import FlashMessageUtil
|
||||
from .backends import CASUserDoesNotExist
|
||||
from authentication.views.mixins import FlashMessageMixin
|
||||
|
||||
__all__ = ['LoginView']
|
||||
|
||||
|
||||
class CASLoginView(LoginView):
|
||||
class CASLoginView(LoginView, FlashMessageMixin):
|
||||
def get(self, request):
|
||||
try:
|
||||
resp = super().get(request)
|
||||
return resp
|
||||
except PermissionDenied:
|
||||
return HttpResponseRedirect('/')
|
||||
except CASUserDoesNotExist as e:
|
||||
message_data = {
|
||||
'title': _('User does not exist: {}').format(e),
|
||||
'error': _(
|
||||
'CAS login was successful, but no corresponding local user was found in the system, and automatic '
|
||||
'user creation is disabled in the CAS authentication configuration. Login failed.'),
|
||||
'interval': 10,
|
||||
'redirect_url': '/',
|
||||
}
|
||||
return FlashMessageUtil.gen_and_redirect_to(message_data)
|
||||
|
||||
|
||||
class CASCallbackClientView(BaseAuthCallbackClientView):
|
||||
pass
|
||||
resp = HttpResponseRedirect('/')
|
||||
error_message = getattr(request, 'error_message', '')
|
||||
if error_message:
|
||||
response = self.get_failed_response('/', title=_('CAS Error'), msg=error_message)
|
||||
return response
|
||||
else:
|
||||
return resp
|
||||
|
||||
@@ -69,6 +69,8 @@ class AccessTokenAuthentication(authentication.BaseAuthentication):
|
||||
msg = _('Invalid token header. Sign string should not contain invalid characters.')
|
||||
raise exceptions.AuthenticationFailed(msg)
|
||||
user, header = self.authenticate_credentials(token)
|
||||
if not user:
|
||||
return None
|
||||
after_authenticate_update_date(user)
|
||||
return user, header
|
||||
|
||||
@@ -77,10 +79,6 @@ class AccessTokenAuthentication(authentication.BaseAuthentication):
|
||||
model = get_user_model()
|
||||
user_id = cache.get(token)
|
||||
user = get_object_or_none(model, id=user_id)
|
||||
|
||||
if not user:
|
||||
msg = _('Invalid token or cache refreshed.')
|
||||
raise exceptions.AuthenticationFailed(msg)
|
||||
return user, None
|
||||
|
||||
def authenticate_header(self, request):
|
||||
@@ -136,7 +134,7 @@ class SignatureAuthentication(signature.SignatureAuthentication):
|
||||
# example implementation:
|
||||
try:
|
||||
key = AccessKey.objects.get(id=key_id)
|
||||
if not key.is_active:
|
||||
if not key.is_valid:
|
||||
return None, None
|
||||
user, secret = key.user, str(key.secret)
|
||||
after_authenticate_update_date(user, key)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user