Compare commits

..

38 Commits

Author SHA1 Message Date
老广
15259fc10c Update base.py 2025-08-21 22:05:51 +08:00
Bryan
f31994fdcd Merge pull request #15899 from jumpserver/dev 2025-08-21 19:03:18 +08:00
Bryan
71766418bb Merge pull request #15742 from jumpserver/dev
merge: v4.10.4-lts
2025-07-17 15:12:58 +08:00
Bryan
a9399dd709 Merge pull request #15608 from jumpserver/dev
v4.10.2
2025-06-19 20:14:21 +08:00
Bryan
d0cb9e5432 Merge pull request #15412 from jumpserver/dev
v4.10.0
2025-05-15 17:11:43 +08:00
老广
558188da90 merge: dev to master
Ready to release
2025-04-17 20:24:45 +08:00
Bryan
ad5460dab8 Merge pull request #15086 from jumpserver/dev
v4.8.0
2025-03-20 18:44:44 +08:00
Bryan
4d37dca0de Merge pull request #14901 from jumpserver/dev
v4.7.0
2025-02-20 10:21:16 +08:00
Bryan
2ca4002624 Merge pull request #14813 from jumpserver/dev
v4.6.0
2025-01-15 14:38:17 +08:00
Bryan
053d640e4c Merge pull request #14699 from jumpserver/dev
v4.5.0
2024-12-19 16:04:45 +08:00
Bryan
f3acc28ded Merge pull request #14697 from jumpserver/dev
v4.5.0
2024-12-19 15:57:11 +08:00
Bryan
25987545db Merge pull request #14511 from jumpserver/dev
v4.4.0
2024-11-21 19:00:35 +08:00
Bryan
6720ecc6e0 Merge pull request #14319 from jumpserver/dev
v4.3.0
2024-10-17 14:55:38 +08:00
老广
0b3a7bb020 Merge pull request #14203 from jumpserver/dev
merge: from dev to master
2024-09-19 19:37:19 +08:00
Bryan
56373e362b Merge pull request #13988 from jumpserver/dev
v4.1.0
2024-08-16 18:40:35 +08:00
Bryan
02fc045370 Merge pull request #13600 from jumpserver/dev
v4.0.0
2024-07-03 19:04:35 +08:00
Bryan
e4ac73896f Merge pull request #13452 from jumpserver/dev
v3.10.11-lts
2024-06-19 16:01:26 +08:00
Bryan
1518f792d6 Merge pull request #13236 from jumpserver/dev
v3.10.10-lts
2024-05-16 16:04:07 +08:00
Bai
67277dd622 fix: Fix dashboard session ranking counts all showing as 1 2024-04-22 19:42:33 +08:00
Bryan
82e7f020ea Merge pull request #13094 from jumpserver/dev
v3.10.9 (dev to master)
2024-04-22 19:39:53 +08:00
Bryan
f20b9e01ab Merge pull request #13062 from jumpserver/dev
v3.10.8 dev to master
2024-04-18 18:01:20 +08:00
Bryan
8cf8a3701b Merge pull request #13059 from jumpserver/dev
v3.10.8
2024-04-18 17:16:37 +08:00
Bryan
7ba24293d1 Merge pull request #12736 from jumpserver/pr@dev@master_fix
fix: Resolve conflicts
2024-02-29 16:38:43 +08:00
Bai
f10114c9ed fix: Resolve conflicts 2024-02-29 16:37:10 +08:00
Bryan
cf31cbfb07 Merge pull request #12729 from jumpserver/dev
v3.10.4
2024-02-29 16:19:59 +08:00
wangruidong
0edad24d5d fix: Asset expiration notification fails to send 2024-02-04 11:41:48 +08:00
ibuler
1f1c1a9157 fix: Fix the scheduled user-activity check task failing to run 2024-01-23 09:28:38 +00:00
feng
6c9d271ae1 fix: celery beat fails to start when the Redis password contains special characters 2024-01-22 06:18:34 +00:00
Bai
6ff852e225 perf: Fix missing deduplication in Count 2024-01-22 06:16:25 +00:00
Bryan
baa75dc735 Merge pull request #12566 from jumpserver/master
v3.10.2
2024-01-17 07:34:28 -04:00
Bryan
8a9f0436b8 Merge pull request #12565 from jumpserver/dev
v3.10.2
2024-01-17 07:23:30 -04:00
Bryan
a9620a3cbe Merge pull request #12461 from jumpserver/master
v3.10.1
2023-12-29 11:33:05 +05:00
Bryan
769e7dc8a0 Merge pull request #12460 from jumpserver/dev
v3.10.1
2023-12-29 11:20:36 +05:00
Bryan
2a70449411 Merge pull request #12458 from jumpserver/dev
v3.10.1
2023-12-29 11:01:13 +05:00
Bryan
8df720f19e Merge pull request #12401 from jumpserver/dev
v3.10
2023-12-21 15:14:19 +05:00
老广
dabbb45f6e Merge pull request #12144 from jumpserver/dev
v3.9.0
2023-11-16 18:23:05 +08:00
Bryan
ce24c1c3fd Merge pull request #11914 from jumpserver/dev
v3.8.0
2023-10-19 03:37:39 -05:00
Bryan
3c54c82ce9 Merge pull request #11636 from jumpserver/dev
v3.7.0
2023-09-21 17:02:48 +08:00
298 changed files with 5759 additions and 31561 deletions

View File

@@ -1,26 +0,0 @@
{
"dry_run": false,
"min_account_age_days": 3,
"max_urls_for_spam": 1,
"min_body_len_for_links": 40,
"spam_words": [
"call now",
"zadzwoń",
"zadzwoń teraz",
"kontakt",
"telefon",
"telefone",
"contato",
"suporte",
"infolinii",
"click here",
"buy now",
"subscribe",
"visit"
],
"bracket_max": 6,
"special_char_density_threshold": 0.12,
"phone_regex": "\\+?\\d[\\d\\-\\s\\(\\)\\.]{6,}\\d",
"labels_for_spam": ["spam"],
"labels_for_review": ["needs-triage"]
}
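For context, a minimal sketch of how a spam-filter config like the one above could be applied when triaging an issue; the helper name, config path, and heuristics are illustrative assumptions, not code from this repository:

import json
import re

def classify_issue(body: str, author_age_days: int, config: dict) -> str:
    # Illustrative heuristic: check links, spam phrases and phone numbers against the config
    urls = re.findall(r'https?://\S+', body)
    lowered = body.lower()
    spam_hits = [w for w in config['spam_words'] if w in lowered]
    has_phone = re.search(config['phone_regex'], body) is not None
    if author_age_days < config['min_account_age_days'] and (
            len(urls) > config['max_urls_for_spam'] or spam_hits or has_phone):
        return 'spam'    # would receive the labels in labels_for_spam
    if urls and len(body) < config['min_body_len_for_links']:
        return 'review'  # would receive the labels in labels_for_review
    return 'ok'

with open('spam_config.json') as f:  # hypothetical file name
    cfg = json.load(f)
print(classify_issue('Call now! http://example.com', author_age_days=1, config=cfg))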

View File

@@ -1,72 +1,74 @@
name: Build and Push Base Image
on:
pull_request:
branches:
- 'dev'
- 'v*'
paths:
- poetry.lock
- pyproject.toml
- Dockerfile-base
- package.json
- go.mod
- yarn.lock
- pom.xml
- install_deps.sh
- utils/clean_site_packages.sh
types:
- opened
- synchronize
- reopened
pull_request:
branches:
- 'dev'
- 'v*'
paths:
- poetry.lock
- pyproject.toml
- Dockerfile-base
- package.json
- go.mod
- yarn.lock
- pom.xml
- install_deps.sh
- utils/clean_site_packages.sh
types:
- opened
- synchronize
- reopened
jobs:
build-and-push:
runs-on: ubuntu-22.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
build-and-push:
runs-on: ubuntu-22.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v7.0.0-28
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Extract date
id: vars
run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV
- name: Extract date
id: vars
run: echo "IMAGE_TAG=$(date +'%Y%m%d_%H%M%S')" >> $GITHUB_ENV
- name: Extract repository name
id: repo
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
- name: Extract repository name
id: repo
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
- name: Build and push multi-arch image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: true
file: Dockerfile-base
tags: jumpserver/core-base:${{ env.IMAGE_TAG }}
- name: Build and push multi-arch image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: true
file: Dockerfile-base
tags: jumpserver/core-base:${{ env.IMAGE_TAG }}
- name: Update Dockerfile
run: |
sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile
- name: Update Dockerfile
run: |
sed -i 's|-base:.* AS stage-build|-base:${{ env.IMAGE_TAG }} AS stage-build|' Dockerfile
- name: Commit changes
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
git add Dockerfile
git commit -m "perf: Update Dockerfile with new base image tag"
git push origin ${{ github.event.pull_request.head.ref }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Commit changes
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
git add Dockerfile
git commit -m "perf: Update Dockerfile with new base image tag"
git push origin ${{ github.event.pull_request.head.ref }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,46 +0,0 @@
name: Build and Push Python Base Image
on:
workflow_dispatch:
inputs:
tag:
description: 'Tag to build'
required: true
default: '3.11-slim-bullseye-v1'
type: string
jobs:
build-and-push:
runs-on: ubuntu-22.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v7.0.0-28
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Extract repository name
id: repo
run: echo "REPO=$(basename ${{ github.repository }})" >> $GITHUB_ENV
- name: Build and push multi-arch image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64
push: true
file: Dockerfile-python
tags: jumpserver/core-base:python-${{ inputs.tag }}

View File

@@ -1,123 +0,0 @@
name: Cleanup PR Branches
on:
schedule:
# Run daily at 2:00 AM
- cron: '0 2 * * *'
workflow_dispatch:
# Allow manual triggering
inputs:
dry_run:
description: 'Dry run mode (default: true)'
required: false
default: 'true'
type: boolean
jobs:
cleanup-branches:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all branches and commit history
- name: Setup Git
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: Get dry run setting
id: dry-run
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
echo "dry_run=${{ github.event.inputs.dry_run }}" >> $GITHUB_OUTPUT
else
echo "dry_run=false" >> $GITHUB_OUTPUT
fi
- name: Cleanup branches
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DRY_RUN: ${{ steps.dry-run.outputs.dry_run }}
run: |
echo "Starting branch cleanup..."
echo "Dry run mode: $DRY_RUN"
# Fetch all branches
git fetch --all --prune
# Get branches whose names start with pr or repr
branches=$(git branch -r | grep -E 'origin/(pr|repr)' | sed 's/origin\///' | grep -v 'HEAD')
echo "Found branches matching pattern:"
echo "$branches"
deleted_count=0
skipped_count=0
for branch in $branches; do
echo ""
echo "Processing branch: $branch"
# Check whether the branch has any open (unmerged) PRs
pr_info=$(gh pr list --head "$branch" --state open --json number,title,state 2>/dev/null)
if [ $? -eq 0 ] && [ "$pr_info" != "[]" ]; then
echo " ⚠️ Branch has open PR(s), skipping deletion"
echo " PR info: $pr_info"
skipped_count=$((skipped_count + 1))
continue
fi
# Check whether the branch has merged PRs (optional: branches whose PRs are merged may also be deleted)
merged_pr_info=$(gh pr list --head "$branch" --state merged --json number,title,state 2>/dev/null)
if [ $? -eq 0 ] && [ "$merged_pr_info" != "[]" ]; then
echo " ✅ Branch has merged PR(s), safe to delete"
echo " Merged PR info: $merged_pr_info"
else
echo " No PRs found for this branch"
fi
# Perform the deletion
if [ "$DRY_RUN" = "true" ]; then
echo " 🔍 [DRY RUN] Would delete branch: $branch"
deleted_count=$((deleted_count + 1))
else
echo " 🗑️ Deleting branch: $branch"
# Delete the remote branch
if git push origin --delete "$branch" 2>/dev/null; then
echo " ✅ Successfully deleted remote branch: $branch"
deleted_count=$((deleted_count + 1))
else
echo " ❌ Failed to delete remote branch: $branch"
fi
fi
done
echo ""
echo "=== Cleanup Summary ==="
echo "Branches processed: $(echo "$branches" | wc -l)"
echo "Branches deleted: $deleted_count"
echo "Branches skipped: $skipped_count"
if [ "$DRY_RUN" = "true" ]; then
echo ""
echo "🔍 This was a DRY RUN - no branches were actually deleted"
echo "To perform actual deletion, run this workflow manually with dry_run=false"
fi
- name: Create summary
if: always()
run: |
echo "## Branch Cleanup Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Workflow:** ${{ github.workflow }}" >> $GITHUB_STEP_SUMMARY
echo "**Run ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Dry Run:** ${{ steps.dry-run.outputs.dry_run }}" >> $GITHUB_STEP_SUMMARY
echo "**Triggered by:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Check the logs above for detailed information about processed branches." >> $GITHUB_STEP_SUMMARY

View File

@@ -1,33 +1,10 @@
on:
push:
pull_request:
types: [opened, synchronize, closed]
release:
types: [created]
on: [push, pull_request, release]
name: JumpServer repos generic handler
jobs:
handle_pull_request:
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
steps:
- uses: jumpserver/action-generic-handler@master
env:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
I18N_TOKEN: ${{ secrets.I18N_TOKEN }}
handle_push:
if: github.event_name == 'push'
runs-on: ubuntu-latest
steps:
- uses: jumpserver/action-generic-handler@master
env:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
I18N_TOKEN: ${{ secrets.I18N_TOKEN }}
handle_release:
if: github.event_name == 'release'
generic_handler:
name: Run generic handler
runs-on: ubuntu-latest
steps:
- uses: jumpserver/action-generic-handler@master

View File

@@ -1,9 +1,11 @@
name: 🔀 Sync mirror to Gitee
on:
schedule:
# Run daily at 3:00 AM
- cron: '0 3 * * *'
push:
branches:
- master
- dev
create:
jobs:
mirror:
@@ -12,6 +14,7 @@ jobs:
steps:
- name: mirror
continue-on-error: true
if: github.event_name == 'push' || (github.event_name == 'create' && github.event.ref_type == 'tag')
uses: wearerequired/git-mirror-action@v1
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEE_SSH_PRIVATE_KEY }}

View File

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20251113_092612 AS stage-build
FROM jumpserver/core-base:20250819_064003 AS stage-build
ARG VERSION
@@ -19,7 +19,7 @@ RUN set -ex \
&& python manage.py compilemessages
FROM python:3.11-slim-trixie
FROM python:3.11-slim-bullseye
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
@@ -33,13 +33,12 @@ ARG TOOLS=" \
default-libmysqlclient-dev \
openssh-client \
sshpass \
nmap \
bubblewrap"
ARG APT_MIRROR=http://deb.debian.org
RUN set -ex \
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
&& apt-get update > /dev/null \
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \

View File

@@ -1,5 +1,6 @@
FROM python:3.11.14-slim-trixie
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \
@@ -21,13 +22,13 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
set -ex \
&& rm -f /etc/apt/apt.conf.d/docker-clean \
&& echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list.d/debian.sources \
&& sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
&& apt-get update > /dev/null \
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
&& echo "no" | dpkg-reconfigure dash
# Install bin tools
ARG CHECK_VERSION=v1.0.5
ARG CHECK_VERSION=v1.0.4
RUN set -ex \
&& wget https://github.com/jumpserver-dev/healthcheck/releases/download/${CHECK_VERSION}/check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
&& tar -xf check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
@@ -40,10 +41,12 @@ RUN set -ex \
WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV SETUPTOOLS_SCM_PRETEND_VERSION=3.4.5
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
@@ -51,7 +54,6 @@ RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \
&& pip install uv -i${PIP_MIRROR} \
&& uv venv \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
&& ln -sf $(pwd)/.venv /opt/py3 \

View File

@@ -13,7 +13,7 @@ ARG TOOLS=" \
nmap \
telnet \
vim \
postgresql-client \
postgresql-client-13 \
wget \
poppler-utils"

View File

@@ -2,7 +2,7 @@
<a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM platform (Bastion Host)
## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
@@ -19,7 +19,7 @@
## What is JumpServer?
JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
<picture>
@@ -77,8 +77,7 @@ JumpServer consists of multiple key components, which collectively form the func
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal |
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB
| [Client](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer Client |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |

View File

@@ -1,18 +1,16 @@
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers as drf_serializers
from rest_framework.decorators import action
from rest_framework.generics import ListAPIView, CreateAPIView
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.status import HTTP_200_OK
from accounts import serializers
from accounts.const import ChangeSecretRecordStatusChoice
from accounts.filters import AccountFilterSet, NodeFilterBackend
from accounts.mixins import AccountRecordViewLogMixin
from accounts.models import Account, ChangeSecretRecord
from assets.const.gpt import create_or_update_chatx_resources
from assets.models import Asset, Node
from authentication.permissions import UserConfirmation, ConfirmType
from common.api.mixin import ExtraFilterFieldsMixin
@@ -20,7 +18,6 @@ from common.drf.filters import AttrRulesFilterBackend
from common.permissions import IsValidUser
from common.utils import lazyproperty, get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from orgs.utils import tmp_to_root_org
from rbac.permissions import RBACPermission
logger = get_logger(__file__)
@@ -46,7 +43,6 @@ class AccountViewSet(OrgBulkModelViewSet):
'clear_secret': 'accounts.change_account',
'move_to_assets': 'accounts.delete_account',
'copy_to_assets': 'accounts.add_account',
'chat': 'accounts.view_account',
}
export_as_zip = True
@@ -156,13 +152,6 @@ class AccountViewSet(OrgBulkModelViewSet):
def copy_to_assets(self, request, *args, **kwargs):
return self._copy_or_move_to_assets(request, move=False)
@action(methods=['get'], detail=False, url_path='chat')
def chat(self, request, *args, **kwargs):
with tmp_to_root_org():
__, account = create_or_update_chatx_resources()
serializer = self.get_serializer(account)
return Response(serializer.data)
class AccountSecretsViewSet(AccountRecordViewLogMixin, AccountViewSet):
"""
@@ -185,66 +174,12 @@ class AssetAccountBulkCreateApi(CreateAPIView):
'POST': 'accounts.add_account',
}
@staticmethod
def get_all_assets(base_payload: dict):
nodes = base_payload.pop('nodes', [])
asset_ids = base_payload.pop('assets', [])
nodes = Node.objects.filter(id__in=nodes).only('id', 'key')
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
asset_ids = set(asset_ids + list(node_asset_ids))
return Asset.objects.filter(id__in=asset_ids)
def create(self, request, *args, **kwargs):
if hasattr(request.data, "copy"):
base_payload = request.data.copy()
else:
base_payload = dict(request.data)
templates = base_payload.pop("template", None)
assets = self.get_all_assets(base_payload)
if not assets.exists():
error = _("No valid assets found for account creation.")
return Response(
data={
"detail": error,
"code": "no_valid_assets"
},
status=HTTP_400_BAD_REQUEST
)
result = []
errors = []
def handle_one(_payload):
try:
ser = self.get_serializer(data=_payload)
ser.is_valid(raise_exception=True)
data = ser.bulk_create(ser.validated_data, assets)
if isinstance(data, (list, tuple)):
result.extend(data)
else:
result.append(data)
except drf_serializers.ValidationError as e:
errors.extend(list(e.detail))
except Exception as e:
errors.extend([str(e)])
if not templates:
handle_one(base_payload)
else:
if not isinstance(templates, (list, tuple)):
templates = [templates]
for tpl in templates:
payload = dict(base_payload)
payload["template"] = tpl
handle_one(payload)
if errors:
raise drf_serializers.ValidationError(errors)
out_ser = serializers.AssetAccountBulkSerializerResultSerializer(result, many=True)
return Response(data=out_ser.data, status=HTTP_200_OK)
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.create(serializer.validated_data)
serializer = serializers.AssetAccountBulkSerializerResultSerializer(data, many=True)
return Response(data=serializer.data, status=HTTP_200_OK)
class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixin, ListAPIView):
@@ -255,7 +190,6 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
rbac_perms = {
'GET': 'accounts.view_accountsecret',
}
queryset = Account.history.model.objects.none()
@lazyproperty
def account(self) -> Account:

View File

@@ -25,8 +25,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
}
rbac_perms = {
'get_once_secret': 'accounts.change_integrationapplication',
'get_account_secret': 'accounts.view_integrationapplication',
'get_sdks_info': 'accounts.view_integrationapplication'
'get_account_secret': 'accounts.view_integrationapplication'
}
def read_file(self, path):
@@ -37,6 +36,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
@action(
['GET'], detail=False, url_path='sdks',
permission_classes=[IsValidUser]
)
def get_sdks_info(self, request, *args, **kwargs):
code_suffix_mapper = {

View File

@@ -12,8 +12,6 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
filterset_fields = ('alias',)
def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return VirtualAccount.objects.none()
return VirtualAccount.get_or_init_queryset()
def get_object(self, ):

View File

@@ -41,7 +41,6 @@ class AutomationAssetsListApi(generics.ListAPIView):
class AutomationRemoveAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
@@ -60,7 +59,6 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
class AutomationAddAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']

View File

@@ -154,10 +154,12 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
perm_model = ChangeSecretAutomation
filterset_class = ChangeSecretStatusFilterSet

View File

@@ -62,8 +62,7 @@ class ChangeSecretDashboardApi(APIView):
status_counts = defaultdict(lambda: defaultdict(int))
for date_finished, status in results:
dt_local = timezone.localtime(date_finished)
date_str = str(dt_local.date())
date_str = str(date_finished.date())
if status == ChangeSecretRecordStatusChoice.failed:
status_counts[date_str]['failed'] += 1
elif status == ChangeSecretRecordStatusChoice.success:
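The hunk above changes how records are bucketed by day: converting date_finished to local time before taking the date matters because a timestamp late in the UTC evening belongs to the next local day. A minimal standard-library illustration (the timezone name is only an example):

from datetime import datetime, timezone
from zoneinfo import ZoneInfo

finished = datetime(2024, 4, 22, 17, 30, tzinfo=timezone.utc)  # 2024-04-22 17:30 UTC
local = finished.astimezone(ZoneInfo('Asia/Shanghai'))         # 2024-04-23 01:30 +08:00
print(str(finished.date()))  # 2024-04-22 -> counted under the wrong local day
print(str(local.date()))     # 2024-04-23 -> matches the dashboard user's calendar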

View File

@@ -150,9 +150,6 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
http_method_names = ['get', 'options']
def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return CheckAccountEngine.objects.none()
return CheckAccountEngine.get_default_engines()
def filter_queryset(self, queryset: list):

View File

@@ -63,10 +63,12 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer
class PushAccountAddAssetApi(AutomationAddAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer
class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateNodeSerializer
serializer_class = serializers.PushAccountUpdateNodeSerializer

View File

@@ -235,8 +235,8 @@ class AccountBackupHandler:
except Exception as e:
error = str(e)
print(f'\033[31m>>> {error}\033[0m')
self.manager.status = Status.error
self.manager.summary['error'] = error
self.execution.status = Status.error
self.execution.summary['error'] = error
def backup_by_obj_storage(self):
object_id = self.execution.snapshot.get('id')

View File

@@ -113,16 +113,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
if host.get('error'):
return host
inventory_hosts = []
if asset.type == HostTypes.WINDOWS:
if self.secret_type == SecretType.SSH_KEY:
host['error'] = _("Windows does not support SSH key authentication")
return host
new_secret = self.get_secret(account)
if '>' in new_secret or '^' in new_secret:
host['error'] = _("Windows password cannot contain special characters like > ^")
return host
host['ssh_params'] = {}
accounts = self.get_accounts(account)
@@ -140,6 +130,11 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
if asset.type == HostTypes.WINDOWS:
accounts = accounts.filter(secret_type=SecretType.PASSWORD)
inventory_hosts = []
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
print(f'Windows {asset} does not support ssh key push')
return inventory_hosts
for account in accounts:
h = deepcopy(host)
h['name'] += '(' + account.username + ')' # To distinguish different accounts

View File

@@ -5,14 +5,12 @@
tasks:
- name: Test SQLServer connection
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version
register: db_info
@@ -25,53 +23,45 @@
var: info
- name: Check whether SQLServer User exist
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
when: db_info is succeeded
register: user_exist
- name: Change SQLServer password
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
ignore_errors: true
when: user_exist.query_results[0] | length != 0
- name: Add SQLServer user
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "CREATE LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; CREATE USER {{ account.username }} FOR LOGIN {{ account.username }}; select @@version"
ignore_errors: true
when: user_exist.query_results[0] | length == 0
- name: Verify password
mssql_script:
community.general.mssql_script:
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version
when: check_conn_after_change

View File

@@ -18,7 +18,6 @@
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
group: "{{ params.group if params.group | length > 0 else omit }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1

View File

@@ -28,12 +28,6 @@ params:
default: ''
help_text: "{{ 'Params home help text' | trans }}"
- name: group
type: str
label: "{{ 'Params group label' | trans }}"
default: ''
help_text: "{{ 'Params group help text' | trans }}"
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
@@ -67,11 +61,6 @@ i18n:
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
en: 'Default home directory /home/{account username}'
Params group help text:
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
@@ -97,11 +86,6 @@ i18n:
ja: 'グループ'
en: 'Groups'
Params group label:
zh: '主组'
ja: '主组'
en: 'Main group'
Params uid label:
zh: '用户ID'
ja: 'ユーザーID'

View File

@@ -18,7 +18,6 @@
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
group: "{{ params.group if params.group | length > 0 else omit }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1

View File

@@ -30,12 +30,6 @@ params:
default: ''
help_text: "{{ 'Params home help text' | trans }}"
- name: group
type: str
label: "{{ 'Params group label' | trans }}"
default: ''
help_text: "{{ 'Params group help text' | trans }}"
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
@@ -69,11 +63,6 @@ i18n:
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
en: 'Default home directory /home/{account username}'
Params group help text:
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
@@ -99,11 +88,6 @@ i18n:
ja: 'グループ'
en: 'Groups'
Params group label:
zh: '主组'
ja: '主组'
en: 'Main group'
Params uid label:
zh: '用户ID'
ja: 'ユーザーID'

View File

@@ -9,7 +9,7 @@ from accounts.const import (
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
)
from accounts.models import ChangeSecretRecord
from accounts.notifications import ChangeSecretExecutionTaskMsg
from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretReportMsg
from accounts.serializers import ChangeSecretRecordBackUpSerializer
from common.utils import get_logger
from common.utils.file import encrypt_and_compress_zip_file
@@ -94,6 +94,10 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
if not recipients:
return
context = self.get_report_context()
for user in recipients:
ChangeSecretReportMsg(user, context).publish()
if not records:
return

View File

@@ -5,14 +5,12 @@
tasks:
- name: Test SQLServer connection
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT
l.name,

View File

@@ -5,14 +5,12 @@
tasks:
- name: Test SQLServer connection
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version
register: db_info
@@ -25,55 +23,47 @@
var: info
- name: Check whether SQLServer User exist
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
when: db_info is succeeded
register: user_exist
- name: Change SQLServer password
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}', DEFAULT_DATABASE = {{ jms_asset.spec_info.db_name }}; select @@version"
ignore_errors: true
when: user_exist.query_results[0] | length != 0
register: change_info
- name: Add SQLServer user
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "CREATE LOGIN [{{ account.username }}] WITH PASSWORD = '{{ account.secret }}'; CREATE USER [{{ account.username }}] FOR LOGIN [{{ account.username }}]; select @@version"
ignore_errors: true
when: user_exist.query_results[0] | length == 0
register: change_info
- name: Verify password
mssql_script:
community.general.mssql_script:
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version
when: check_conn_after_change

View File

@@ -18,7 +18,6 @@
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
group: "{{ params.group if params.group | length > 0 else omit }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1

View File

@@ -28,12 +28,6 @@ params:
default: ''
help_text: "{{ 'Params home help text' | trans }}"
- name: group
type: str
label: "{{ 'Params group label' | trans }}"
default: ''
help_text: "{{ 'Params group help text' | trans }}"
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
@@ -67,11 +61,6 @@ i18n:
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
en: 'Default home directory /home/{account username}'
Params group help text:
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
@@ -97,11 +86,6 @@ i18n:
ja: 'グループ'
en: 'Groups'
Params group label:
zh: '主组'
ja: '主组'
en: 'Main group'
Params uid label:
zh: '用户ID'
ja: 'ユーザーID'

View File

@@ -18,7 +18,6 @@
uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
group: "{{ params.group if params.group | length > 0 else omit }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1

View File

@@ -30,12 +30,6 @@ params:
default: ''
help_text: "{{ 'Params home help text' | trans }}"
- name: group
type: str
label: "{{ 'Params group label' | trans }}"
default: ''
help_text: "{{ 'Params group help text' | trans }}"
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
@@ -69,11 +63,6 @@ i18n:
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
en: 'Default home directory /home/{account username}'
Params group help text:
zh: '请输入用户组(名字或数字),只能输入一个(需填写已存在的用户组)'
ja: 'ユーザー グループ (名前または番号) を入力してください。入力できるのは 1 つだけです (既存のユーザー グループを入力する必要があります)'
en: 'Please enter a user group (name or number), only one can be entered (must fill in an existing user group)'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
@@ -95,14 +84,9 @@ i18n:
en: 'Home'
Params groups label:
zh: '附加组'
ja: '追加グループ'
en: 'Additional Group'
Params group label:
zh: '主组'
ja: '主组'
en: 'Main group'
zh: '用户组'
ja: 'グループ'
en: 'Groups'
Params uid label:
zh: '用户ID'

View File

@@ -5,13 +5,11 @@
tasks:
- name: "Remove account"
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}"
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: "DROP LOGIN {{ account.username }}; select @@version"

View File

@@ -5,13 +5,11 @@
tasks:
- name: Verify account
mssql_script:
community.general.mssql_script:
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version

View File

@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
#
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient
from common.utils import get_logger
@@ -11,9 +14,6 @@ __all__ = ['AZUREVaultClient']
class AZUREVaultClient(object):
def __init__(self, vault_url, tenant_id, client_id, client_secret):
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient
authentication_endpoint = 'https://login.microsoftonline.com/' \
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
@@ -23,8 +23,6 @@ class AZUREVaultClient(object):
self.client = SecretClient(vault_url=vault_url, credential=credentials)
def is_active(self):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
try:
self.client.set_secret('jumpserver', '666')
except (ResourceNotFoundError, ClientAuthenticationError) as e:
@@ -34,8 +32,6 @@ class AZUREVaultClient(object):
return True, ''
def get(self, name, version=None):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
try:
secret = self.client.get_secret(name, version)
return secret.value

View File

@@ -132,7 +132,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.asset.platform
@lazyproperty
def alias(self) -> str:
def alias(self):
"""
Alias: for virtual accounts this is @INPUT @MANUAL @USER, otherwise the account id
"""
@@ -140,13 +140,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.username
return str(self.id)
def is_virtual(self) -> bool:
def is_virtual(self):
"""
Do not decide by username, because this may be a constructed account object given the username of an existing account.
"""
return self.alias.startswith('@')
def is_ds_account(self) -> bool:
def is_ds_account(self):
if self.is_virtual():
return ''
if not self.asset.is_directory_service:
@@ -160,7 +160,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.asset.ds
@lazyproperty
def ds_domain(self) -> str:
def ds_domain(self):
"""这个不能去掉perm_account 会动态设置这个值,以更改 full_username"""
if self.is_virtual():
return ''
@@ -172,17 +172,17 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return '@' in self.username or '\\' in self.username
@property
def full_username(self) -> str:
def full_username(self):
if not self.username_has_domain() and self.ds_domain:
return '{}@{}'.format(self.username, self.ds_domain)
return self.username
@lazyproperty
def has_secret(self) -> bool:
def has_secret(self):
return bool(self.secret)
@lazyproperty
def versions(self) -> int:
def versions(self):
return self.history.count()
def get_su_from_accounts(self):

View File

@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
return qs.filter(*query)
@property
def accounts_amount(self) -> int:
def accounts_amount(self):
return self.get_accounts().count()
@property

View File

@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return bool(self.secret)
@property
def has_username(self) -> bool:
def has_username(self):
return bool(self.username)
@property
def spec_info(self) -> dict:
def spec_info(self):
data = {}
if self.secret_type != SecretType.SSH_KEY:
return data
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return data
@property
def password(self) -> str:
def password(self):
if self.secret_type == SecretType.PASSWORD:
return self.secret
return None
@property
def private_key(self) -> str:
def private_key(self):
if self.secret_type == SecretType.SSH_KEY:
return self.secret
return None
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return None
@property
def ssh_key_fingerprint(self) -> str:
def ssh_key_fingerprint(self):
if self.public_key:
public_key = self.public_key
elif self.private_key:

View File

@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
__secret = None
@property
def secret(self) -> str:
def secret(self):
if self.__secret:
return self.__secret
from accounts.backends import vault_client

View File

@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
verbose_name = _('Virtual account')
@property
def name(self) -> str:
def name(self):
return self.get_alias_display()
@property
def username(self) -> str:
def username(self):
usernames_map = {
AliasAccount.INPUT: _("Manual input"),
AliasAccount.USER: _("Same with user"),
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
return usernames_map.get(self.alias, '')
@property
def comment(self) -> str:
def comment(self):
comments_map = {
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
AliasAccount.USER: _('The account username name same with user on connect'),

View File

@@ -14,7 +14,7 @@ from accounts.models import Account, AccountTemplate, GatheredAccount
from accounts.tasks import push_accounts_to_assets_task
from assets.const import Category, AllTypes
from assets.models import Asset
from common.serializers import SecretReadableMixin, CommonBulkModelSerializer
from common.serializers import SecretReadableMixin
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.utils import get_logger
from .base import BaseAccountSerializer, AuthValidateMixin
@@ -253,8 +253,6 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
'source_id': {'required': False, 'allow_null': True},
}
fields_unimport_template = ['params']
# Uniqueness is validated manually
validators = []
@classmethod
def setup_eager_loading(cls, queryset):
@@ -265,21 +263,6 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
)
return queryset
def validate(self, attrs):
instance = getattr(self, "instance", None)
if instance:
return super().validate(attrs)
field_errors = {}
for _fields in Account._meta.unique_together:
lookup = {field: attrs.get(field) for field in _fields}
if Account.objects.filter(**lookup).exists():
verbose_names = ', '.join([str(Account._meta.get_field(f).verbose_name) for f in _fields])
msg_template = _('Account already exists. Field(s): {fields} must be unique.')
field_errors[_fields[0]] = msg_template.format(fields=verbose_names)
raise serializers.ValidationError(field_errors)
return attrs
class AccountDetailSerializer(AccountSerializer):
has_secret = serializers.BooleanField(label=_("Has secret"), read_only=True)
@@ -292,26 +275,26 @@ class AccountDetailSerializer(AccountSerializer):
class AssetAccountBulkSerializerResultSerializer(serializers.Serializer):
asset = serializers.CharField(read_only=True, label=_('Asset'))
account = serializers.CharField(read_only=True, label=_('Account'))
state = serializers.CharField(read_only=True, label=_('State'))
error = serializers.CharField(read_only=True, label=_('Error'))
changed = serializers.BooleanField(read_only=True, label=_('Changed'))
class AssetAccountBulkSerializer(
AccountCreateUpdateSerializerMixin, AuthValidateMixin, CommonBulkModelSerializer
AccountCreateUpdateSerializerMixin, AuthValidateMixin, serializers.ModelSerializer
):
su_from_username = serializers.CharField(
max_length=128, required=False, write_only=True, allow_null=True, label=_("Su from"),
allow_blank=True,
)
assets = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, many=True, label=_('Assets'))
class Meta:
model = Account
fields = [
'name', 'username', 'secret', 'secret_type', 'secret_reset',
'passphrase', 'privileged', 'is_active', 'comment', 'template',
'on_invalid', 'push_now', 'params',
'name', 'username', 'secret', 'secret_type', 'passphrase',
'privileged', 'is_active', 'comment', 'template',
'on_invalid', 'push_now', 'params', 'assets',
'su_from_username', 'source', 'source_id',
]
extra_kwargs = {
@@ -393,7 +376,8 @@ class AssetAccountBulkSerializer(
handler = self._handle_err_create
return handler
def perform_bulk_create(self, vd, assets):
def perform_bulk_create(self, vd):
assets = vd.pop('assets')
on_invalid = vd.pop('on_invalid', 'skip')
secret_type = vd.get('secret_type', 'password')
@@ -401,7 +385,8 @@ class AssetAccountBulkSerializer(
vd['name'] = vd.get('username')
create_handler = self.get_create_handler(on_invalid)
secret_type_supports = Asset.get_secret_type_assets(assets, secret_type)
asset_ids = [asset.id for asset in assets]
secret_type_supports = Asset.get_secret_type_assets(asset_ids, secret_type)
_results = {}
for asset in assets:
@@ -409,7 +394,6 @@ class AssetAccountBulkSerializer(
_results[asset] = {
'error': _('Asset does not support this secret type: %s') % secret_type,
'state': 'error',
'account': vd['name'],
}
continue
@@ -419,13 +403,13 @@ class AssetAccountBulkSerializer(
self.clean_auth_fields(vd)
instance, changed, state = self.perform_create(vd, create_handler)
_results[asset] = {
'changed': changed, 'instance': instance.id, 'state': state, 'account': vd['name']
'changed': changed, 'instance': instance.id, 'state': state
}
except serializers.ValidationError as e:
_results[asset] = {'error': e.detail[0], 'state': 'error', 'account': vd['name']}
_results[asset] = {'error': e.detail[0], 'state': 'error'}
except Exception as e:
logger.exception(e)
_results[asset] = {'error': str(e), 'state': 'error', 'account': vd['name']}
_results[asset] = {'error': str(e), 'state': 'error'}
results = [{'asset': asset, **result} for asset, result in _results.items()]
state_score = {'created': 3, 'updated': 2, 'skipped': 1, 'error': 0}
@@ -442,8 +426,7 @@ class AssetAccountBulkSerializer(
errors.append({
'error': _('Account has exist'),
'state': 'error',
'asset': str(result['asset']),
'account': result.get('account'),
'asset': str(result['asset'])
})
if errors:
raise serializers.ValidationError(errors)
@@ -462,16 +445,10 @@ class AssetAccountBulkSerializer(
account_ids = [str(_id) for _id in accounts.values_list('id', flat=True)]
push_accounts_to_assets_task.delay(account_ids, params)
def bulk_create(self, validated_data, assets):
if not assets:
raise serializers.ValidationError(
{'assets': _('At least one asset or node must be specified')},
{'nodes': _('At least one asset or node must be specified')}
)
def create(self, validated_data):
params = validated_data.pop('params', None)
push_now = validated_data.pop('push_now', False)
results = self.perform_bulk_create(validated_data, assets)
results = self.perform_bulk_create(validated_data)
self.push_accounts_if_need(results, push_now, params)
for res in results:
res['asset'] = str(res['asset'])
@@ -479,8 +456,6 @@ class AssetAccountBulkSerializer(
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
class Meta(AccountSerializer.Meta):
fields = AccountSerializer.Meta.fields + ['spec_info']
extra_kwargs = {
@@ -495,7 +470,6 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
class AccountHistorySerializer(serializers.ModelSerializer):
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
secret = serializers.CharField(label=_('Secret'), read_only=True)
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
class Meta:

View File

@@ -70,8 +70,6 @@ class AuthValidateMixin(serializers.Serializer):
class BaseAccountSerializer(
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
):
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
class Meta:
model = BaseAccount
fields_mini = ["id", "name", "username"]

View File

@@ -130,7 +130,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_is_success(obj) -> bool:
def get_is_success(obj):
return obj.status == ChangeSecretRecordStatusChoice.success
@@ -157,7 +157,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_asset(instance) -> str:
def get_asset(instance):
return str(instance.asset)
@staticmethod
@@ -165,7 +165,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
return str(instance.account)
@staticmethod
def get_is_success(obj) -> str:
def get_is_success(obj):
if obj.status == ChangeSecretRecordStatusChoice.success.value:
return _("Success")
return _("Failed")
@@ -196,9 +196,9 @@ class ChangeSecretAccountSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_meta(obj) -> dict:
def get_meta(obj):
return account_secret_task_status.get(str(obj.id))
@staticmethod
def get_ttl(obj) -> int:
def get_ttl(obj):
return account_secret_task_status.get_ttl(str(obj.id))

View File

@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
risk_summary = serializers.SerializerMethodField()
@staticmethod
def get_risk_summary(obj) -> dict:
def get_risk_summary(obj):
summary = {}
for risk in RiskChoice.choices:
summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)

View File

@@ -0,0 +1,36 @@
{% load i18n %}
<h3>{% trans 'Task name' %}: {{ name }}</h3>
<h3>{% trans 'Task execution id' %}: {{ execution_id }}</h3>
<p>{% trans 'Respectful' %} {{ recipient }}</p>
<p>{% trans 'Hello! The following is the failure of changing the password of your assets or pushing the account. Please check and handle it in time.' %}</p>
<table style="width: 100%; border-collapse: collapse; max-width: 100%; text-align: left; margin-top: 20px;">
<caption></caption>
<thead>
<tr style="background-color: #f2f2f2;">
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Asset' %}</th>
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Account' %}</th>
<th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Error' %}</th>
</tr>
</thead>
<tbody>
{% for asset_name, account_username, error in asset_account_errors %}
<tr>
<td style="border: 1px solid #ddd; padding: 10px;">{{ asset_name }}</td>
<td style="border: 1px solid #ddd; padding: 10px;">{{ account_username }}</td>
<td style="border: 1px solid #ddd; padding: 10px;">
<div style="
max-width: 90%;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
display: block;"
title="{{ error }}"
>
{{ error }}
</div>
</td>
</tr>
{% endfor %}
</tbody>
</table>

View File

@@ -3,4 +3,3 @@ from .connect_method import *
from .login_acl import *
from .login_asset_acl import *
from .login_asset_check import *
from .data_masking import *

View File

@@ -1,20 +0,0 @@
from orgs.mixins.api import OrgBulkModelViewSet
from .common import ACLUserFilterMixin
from ..models import DataMaskingRule
from .. import serializers
__all__ = ['DataMaskingRuleViewSet']
class DataMaskingRuleFilter(ACLUserFilterMixin):
class Meta:
model = DataMaskingRule
fields = ('name',)
class DataMaskingRuleViewSet(OrgBulkModelViewSet):
model = DataMaskingRule
filterset_class = DataMaskingRuleFilter
search_fields = ('name',)
serializer_class = serializers.DataMaskingRuleSerializer

View File

@@ -8,7 +8,7 @@ __all__ = ['LoginAssetACLViewSet']
class LoginAssetACLFilter(ACLUserAssetFilterMixin):
class Meta:
model = models.LoginAssetACL
fields = ['name', 'action']
fields = ['name', ]
class LoginAssetACLViewSet(OrgBulkModelViewSet):

View File

@@ -1,45 +0,0 @@
# Generated by Django 4.1.13 on 2025-10-07 16:16
import common.db.fields
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('acls', '0002_auto_20210926_1047'),
]
operations = [
migrations.CreateModel(
name='DataMaskingRule',
fields=[
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
('action', models.CharField(default='reject', max_length=64, verbose_name='Action')),
('is_active', models.BooleanField(default=True, verbose_name='Active')),
('users', common.db.fields.JSONManyToManyField(default=dict, to='users.User', verbose_name='Users')),
('assets', common.db.fields.JSONManyToManyField(default=dict, to='assets.Asset', verbose_name='Assets')),
('accounts', models.JSONField(default=list, verbose_name='Accounts')),
('name', models.CharField(max_length=128, verbose_name='Name')),
('fields_pattern', models.CharField(default='password', max_length=128, verbose_name='Fields pattern')),
('masking_method', models.CharField(choices=[('fixed_char', 'Fixed Character Replacement'), ('hide_middle', 'Hide Middle Characters'), ('keep_prefix', 'Keep Prefix Only'), ('keep_suffix', 'Keep Suffix Only')], default='fixed_char', max_length=32, verbose_name='Masking Method')),
('mask_pattern', models.CharField(blank=True, default='######', max_length=128, null=True, verbose_name='Mask Pattern')),
('reviewers', models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL, verbose_name='Reviewers')),
],
options={
'verbose_name': 'Data Masking Rule',
'unique_together': {('org_id', 'name')},
},
),
]

View File

@@ -2,4 +2,3 @@ from .command_acl import *
from .connect_method import *
from .login_acl import *
from .login_asset_acl import *
from .data_masking import *

View File

@@ -1,42 +0,0 @@
from django.db import models
from acls.models import UserAssetAccountBaseACL
from common.utils import get_logger
from django.utils.translation import gettext_lazy as _
logger = get_logger(__file__)
__all__ = ['MaskingMethod', 'DataMaskingRule']
class MaskingMethod(models.TextChoices):
fixed_char = "fixed_char", _("Fixed Character Replacement")  # fixed character replacement
hide_middle = "hide_middle", _("Hide Middle Characters")  # hide the middle characters
keep_prefix = "keep_prefix", _("Keep Prefix Only")  # keep only the prefix
keep_suffix = "keep_suffix", _("Keep Suffix Only")  # keep only the suffix
class DataMaskingRule(UserAssetAccountBaseACL):
name = models.CharField(max_length=128, verbose_name=_("Name"))
fields_pattern = models.CharField(max_length=128, default='password', verbose_name=_("Fields pattern"))
masking_method = models.CharField(
max_length=32,
choices=MaskingMethod.choices,
default=MaskingMethod.fixed_char,
verbose_name=_("Masking Method"),
)
mask_pattern = models.CharField(
max_length=128,
verbose_name=_("Mask Pattern"),
default="######",
blank=True,
null=True,
)
def __str__(self):
return self.name
class Meta:
unique_together = [('org_id', 'name')]
verbose_name = _("Data Masking Rule")
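
The four MaskingMethod choices above map naturally onto simple string transformations. A minimal sketch under assumed semantics; the mask_value helper below is hypothetical and not part of the project code, only the choice values and the "######" default come from the diff.

# Hypothetical helper, for illustration only.
def mask_value(value: str, method: str, mask_pattern: str = "######") -> str:
    if method == "fixed_char":
        # Replace the whole value with a fixed pattern
        return mask_pattern
    if method == "hide_middle":
        # Keep the first and last character, hide everything in between
        if len(value) <= 2:
            return "*" * len(value)
        return value[0] + "*" * (len(value) - 2) + value[-1]
    if method == "keep_prefix":
        # Keep a short prefix, mask the rest (prefix length assumed)
        return value[:3] + "*" * max(len(value) - 3, 0)
    if method == "keep_suffix":
        # Mask the rest, keep a short suffix (suffix length assumed)
        return "*" * max(len(value) - 3, 0) + value[-3:]
    return value

print(mask_value("s3cretpass", "hide_middle"))  # s********s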

View File

@@ -1,52 +1,30 @@
from django.utils import timezone
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
from accounts.models import Account
from acls.models import LoginACL, LoginAssetACL
from assets.models import Asset
from audits.models import UserLoginLog
from common.views.template import custom_render_to_string
from notifications.notifications import UserMessage
from users.models import User
class UserLoginReminderMsg(UserMessage):
subject = _('User login reminder')
template_name = 'acls/user_login_reminder.html'
contexts = [
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
{"name": "username", "label": _('User'), "default": "john"},
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
{"name": "recipient_name", "label": _("Recipient name"), "default": "John"},
{"name": "recipient_username", "label": _("Recipient username"), "default": "john"},
{"name": "user_agent", "label": _('User agent'), "default": "Mozilla/5.0"},
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
{"name": "login_from", "label": _('Login from'), "default": "web"},
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"},
]
def __init__(self, user, user_log: UserLoginLog, acl: LoginACL):
def __init__(self, user, user_log: UserLoginLog):
self.user_log = user_log
self.acl_name = str(acl)
self.login_from = user_log.get_type_display()
now = timezone.localtime(user_log.datetime)
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
super().__init__(user)
def get_html_msg(self) -> dict:
user_log = self.user_log
context = {
'ip': user_log.ip,
'time': self.time,
'city': user_log.city,
'acl_name': self.acl_name,
'login_from': self.login_from,
'username': user_log.username,
'recipient_name': self.user.name,
'recipient_username': self.user.username,
'recipient': self.user,
'user_agent': user_log.user_agent,
}
message = custom_render_to_string(self.template_name, context)
message = render_to_string('acls/user_login_reminder.html', context)
return {
'subject': str(self.subject),
@@ -62,55 +40,24 @@ class UserLoginReminderMsg(UserMessage):
class AssetLoginReminderMsg(UserMessage):
subject = _('User login alert for asset')
template_name = 'acls/asset_login_reminder.html'
contexts = [
{"name": "city", "label": _('Login city'), "default": "Shanghai"},
{"name": "username", "label": _('User'), "default": "john"},
{"name": "name", "label": _('Name'), "default": "John"},
{"name": "asset", "label": _('Asset'), "default": "dev server"},
{"name": "recipient_name", "label": _('Recipient name'), "default": "John"},
{"name": "recipient_username", "label": _('Recipient username'), "default": "john"},
{"name": "account", "label": _('Account Input username'), "default": "root"},
{"name": "account_name", "label": _('Account name'), "default": "root"},
{"name": "acl_name", "label": _('ACL name'), "default": "login acl"},
{"name": "ip", "label": "IP", "default": "192.168.1.1"},
{"name": "login_from", "label": _('Login from'), "default": "web"},
{"name": "time", "label": _('Login time'), "default": "2025-01-01 12:00:00"}
]
def __init__(
self, user, asset: Asset, login_user: User,
account: Account, acl: LoginAssetACL,
ip, input_username, login_from
):
self.ip = ip
def __init__(self, user, asset: Asset, login_user: User, account: Account, input_username):
self.asset = asset
self.login_user = login_user
self.account = account
self.acl_name = str(acl)
self.login_from = login_from
self.login_user = login_user
self.input_username = input_username
now = timezone.localtime(timezone.now())
self.time = now.strftime('%Y-%m-%d %H:%M:%S')
super().__init__(user)
def get_html_msg(self) -> dict:
context = {
'ip': self.ip,
'time': self.time,
'login_from': self.login_from,
'recipient_name': self.user.name,
'recipient_username': self.user.username,
'recipient': self.user,
'username': self.login_user.username,
'name': self.login_user.name,
'asset': str(self.asset),
'account': self.input_username,
'account_name': self.account.name,
'acl_name': self.acl_name,
}
message = custom_render_to_string(self.template_name, context)
message = render_to_string('acls/asset_login_reminder.html', context)
return {
'subject': str(self.subject),

View File

@@ -3,4 +3,3 @@ from .connect_method import *
from .login_acl import *
from .login_asset_acl import *
from .login_asset_check import *
from .data_masking import *

View File

@@ -90,7 +90,7 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
fields_small = fields_mini + [
"is_active", "priority", "action",
"date_created", "date_updated",
"comment", "created_by"
"comment", "created_by", "org_id",
]
fields_m2m = ["reviewers", ]
fields = fields_small + fields_m2m
@@ -100,20 +100,6 @@ class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
'reviewers': {'label': _('Recipients')},
}
class BaseUserACLSerializer(BaseACLSerializer):
users = JSONManyToManyField(label=_('User'))
class Meta(BaseACLSerializer.Meta):
fields = BaseACLSerializer.Meta.fields + ['users']
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
assets = JSONManyToManyField(label=_('Asset'))
accounts = serializers.ListField(label=_('Account'))
class Meta(BaseUserACLSerializer.Meta):
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts', 'org_id']
def validate_reviewers(self, reviewers):
action = self.initial_data.get('action')
if not action and self.instance:
@@ -132,4 +118,19 @@ class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
"None of the reviewers belong to Organization `{}`".format(org.name)
)
raise serializers.ValidationError(error)
return valid_reviewers
return valid_reviewers
class BaseUserACLSerializer(BaseACLSerializer):
users = JSONManyToManyField(label=_('User'))
class Meta(BaseACLSerializer.Meta):
fields = BaseACLSerializer.Meta.fields + ['users']
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
assets = JSONManyToManyField(label=_('Asset'))
accounts = serializers.ListField(label=_('Account'))
class Meta(BaseUserACLSerializer.Meta):
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts']

View File

@@ -1,4 +1,4 @@
from common.serializers.mixin import CommonBulkModelSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
from ..const import ActionChoices
from ..models import ConnectMethodACL
@@ -6,15 +6,16 @@ from ..models import ConnectMethodACL
__all__ = ["ConnectMethodACLSerializer"]
class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
class Meta(BaseSerializer.Meta):
model = ConnectMethodACL
fields = [
i for i in BaseSerializer.Meta.fields + ['connect_methods']
if i not in ['assets', 'accounts', 'org_id']
if i not in ['assets', 'accounts']
]
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
ActionChoices.review,
ActionChoices.accept,
ActionChoices.notice,
ActionChoices.face_verify,
ActionChoices.face_online,

View File

@@ -1,19 +0,0 @@
from django.utils.translation import gettext_lazy as _
from acls.models import MaskingMethod, DataMaskingRule
from common.serializers.fields import LabeledChoiceField
from common.serializers.mixin import CommonBulkModelSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
__all__ = ['DataMaskingRuleSerializer']
class DataMaskingRuleSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
masking_method = LabeledChoiceField(
choices=MaskingMethod.choices, default=MaskingMethod.fixed_char, label=_('Masking Method')
)
class Meta(BaseSerializer.Meta):
model = DataMaskingRule
fields = BaseSerializer.Meta.fields + ['fields_pattern', 'masking_method', 'mask_pattern']

View File

@@ -1,7 +1,7 @@
from django.utils.translation import gettext as _
from common.serializers import CommonBulkModelSerializer
from common.serializers import MethodSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .base import BaseUserACLSerializer
from .rules import RuleSerializer
from ..const import ActionChoices
@@ -12,12 +12,12 @@ __all__ = ["LoginACLSerializer"]
common_help_text = _("With * indicating a match all. ")
class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
rules = MethodSerializer(label=_('Rule'))
class Meta(BaseUserACLSerializer.Meta):
model = LoginACL
fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}))
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
action_choices_exclude = [
ActionChoices.warning,
ActionChoices.notify_and_warn,

View File

@@ -1,17 +1,13 @@
{% load i18n %}
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
<hr>
<p>{% trans 'We would like to inform you that a user has recently logged into the following asset:' %}<p>
<p><strong>{% trans 'Asset details' %}:</strong></p>
<ul>
<li><strong>{% trans 'User' %}:</strong> [{{ name }}({{ username }})]</li>
<li><strong>IP:</strong> [{{ ip }}]</li>
<li><strong>{% trans 'Assets' %}:</strong> [{{ asset }}]</li>
<li><strong>{% trans 'Account' %}:</strong> [{{ account_name }}({{ account }})]</li>
<li><strong>{% trans 'Login asset acl' %}:</strong> [{{ acl_name }}]</li>
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
</ul>
<hr>

View File

@@ -1,6 +1,6 @@
{% load i18n %}
<h3>{% trans 'Dear' %}: {{ recipient_name }}[{{ recipient_username }}]</h3>
<h3>{% trans 'Dear' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
<hr>
<p>{% trans 'We would like to inform you that a user has recently logged:' %}<p>
<p><strong>{% trans 'User details' %}:</strong></p>
@@ -8,10 +8,7 @@
<li><strong>{% trans 'User' %}:</strong> [{{ username }}]</li>
<li><strong>IP:</strong> [{{ ip }}]</li>
<li><strong>{% trans 'Login city' %}:</strong> [{{ city }}]</li>
<li><strong>{% trans 'Login from' %}:</strong> [{{ login_from }}]</li>
<li><strong>{% trans 'User agent' %}:</strong> [{{ user_agent }}]</li>
<li><strong>{% trans 'Login acl' %}:</strong> [{{ acl_name }}]</li>
<li><strong>{% trans 'Time' %}:</strong> [{{ time }}]</li>
</ul>
<hr>

View File

@@ -11,7 +11,6 @@ router.register(r'login-asset-acls', api.LoginAssetACLViewSet, 'login-asset-acl'
router.register(r'command-filter-acls', api.CommandFilterACLViewSet, 'command-filter-acl')
router.register(r'command-groups', api.CommandGroupViewSet, 'command-group')
router.register(r'connect-method-acls', api.ConnectMethodACLViewSet, 'connect-method-acl')
router.register(r'data-masking-rules', api.DataMaskingRuleViewSet, 'data-masking-rule')
urlpatterns = [
path('login-asset/check/', api.LoginAssetCheckAPI.as_view(), name='login-asset-check'),

View File

@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
#
from collections import defaultdict
from django.conf import settings
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext as _
from django_filters import rest_framework as drf_filters
@@ -112,7 +113,7 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
("accounts", AccountSerializer),
)
rbac_perms = (
("match", "assets.view_asset"),
("match", "assets.match_asset"),
("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"),
("accounts", "assets.view_account"),
@@ -180,18 +181,33 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
def sync_platform_protocols(self, request, *args, **kwargs):
platform_id = request.data.get('platform_id')
platform = get_object_or_404(Platform, pk=platform_id)
asset_ids = list(platform.assets.values_list('id', flat=True))
platform_protocols = list(platform.protocols.values('name', 'port'))
assets = platform.assets.all()
with transaction.atomic():
if asset_ids:
Protocol.objects.filter(asset_id__in=asset_ids).delete()
if asset_ids and platform_protocols:
objs = []
for aid in asset_ids:
for p in platform_protocols:
objs.append(Protocol(name=p['name'], port=p['port'], asset_id=aid))
Protocol.objects.bulk_create(objs)
platform_protocols = {
p['name']: p['port']
for p in platform.protocols.values('name', 'port')
}
asset_protocols_map = defaultdict(set)
protocols = assets.prefetch_related('protocols').values_list(
'id', 'protocols__name'
)
for asset_id, protocol in protocols:
asset_id = str(asset_id)
asset_protocols_map[asset_id].add(protocol)
objs = []
for asset_id, protocols in asset_protocols_map.items():
protocol_names = set(platform_protocols) - protocols
if not protocol_names:
continue
for name in protocol_names:
objs.append(
Protocol(
name=name,
port=platform_protocols[name],
asset_id=asset_id,
)
)
Protocol.objects.bulk_create(objs)
return Response(status=status.HTTP_200_OK)
def filter_bulk_update_data(self):
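
The longer replacement above (per the @@ -180,18 +181,33 @@ header) only creates the protocols each asset is still missing, instead of deleting and re-creating everything. A standalone sketch of that set-difference step under assumed inputs; missing_protocols is an illustrative helper, not the viewset code.

from collections import defaultdict

def missing_protocols(platform_protocols, asset_protocols):
    # platform_protocols: {'ssh': 22, 'http': 80}
    # asset_protocols: (asset_id, protocol_name) pairs, shaped like the
    # values_list('id', 'protocols__name') query in the diff above.
    existing = defaultdict(set)
    for asset_id, name in asset_protocols:
        existing[str(asset_id)].add(name)
    to_create = {}
    for asset_id, names in existing.items():
        todo = set(platform_protocols) - names
        if todo:
            to_create[asset_id] = {n: platform_protocols[n] for n in todo}
    return to_create

print(missing_protocols({'ssh': 22, 'http': 80}, [('a1', 'ssh'), ('a2', 'http')]))
# {'a1': {'http': 80}, 'a2': {'ssh': 22}}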

View File

@@ -14,7 +14,6 @@ class FavoriteAssetViewSet(BulkModelViewSet):
serializer_class = FavoriteAssetSerializer
permission_classes = (IsValidUser,)
filterset_fields = ['asset']
page_no_limit = True
def dispatch(self, request, *args, **kwargs):
with tmp_to_root_org():

View File

@@ -43,7 +43,7 @@ class NodeViewSet(SuggestionMixin, OrgBulkModelViewSet):
search_fields = ('full_value',)
serializer_class = serializers.NodeSerializer
rbac_perms = {
'match': 'assets.view_node',
'match': 'assets.match_node',
'check_assets_amount_task': 'assets.change_node'
}

View File

@@ -7,7 +7,7 @@ from rest_framework.decorators import action
from rest_framework.response import Response
from assets.const import AllTypes
from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
from assets.models import Platform, Node, Asset, PlatformProtocol
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
from common.api import JMSModelViewSet
from common.permissions import IsValidUser
@@ -43,7 +43,6 @@ class AssetPlatformViewSet(JMSModelViewSet):
'ops_methods': 'assets.view_platform',
'filter_nodes_assets': 'assets.view_platform',
}
page_no_limit = True
def get_queryset(self):
# Pagination is not applied here, so prefetch is needed
@@ -112,10 +111,7 @@ class PlatformProtocolViewSet(JMSModelViewSet):
class PlatformAutomationMethodsApi(generics.ListAPIView):
queryset = PlatformAutomation.objects.none()
rbac_perms = {
'list': 'assets.view_platform'
}
permission_classes = (IsValidUser,)
@staticmethod
def automation_methods():

View File

@@ -161,7 +161,6 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
'GET': 'assets.view_asset',
'list': 'assets.view_asset',
}
queryset = Node.objects.none()
def get_assets(self):
key = self.request.query_params.get('key')

View File

@@ -6,13 +6,11 @@
tasks:
- name: Test SQLServer connection
mssql_script:
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
encryption: "{{ jms_asset.encryption | default(None) }}"
tds_version: "{{ jms_asset.tds_version | default(None) }}"
script: |
SELECT @@version

View File

@@ -1,6 +1,5 @@
from django.utils.translation import gettext_lazy as _
from orgs.models import Organization
from .base import BaseType
@@ -53,41 +52,3 @@ class GPTTypes(BaseType):
return [
cls.CHATGPT,
]
CHATX_NAME = 'ChatX'
def create_or_update_chatx_resources(chatx_name=CHATX_NAME, org_id=Organization.SYSTEM_ID):
from django.apps import apps
platform_model = apps.get_model('assets', 'Platform')
asset_model = apps.get_model('assets', 'Asset')
account_model = apps.get_model('accounts', 'Account')
platform, __ = platform_model.objects.get_or_create(
name=chatx_name,
defaults={
'internal': True,
'type': chatx_name,
'category': 'ai',
}
)
asset, __ = asset_model.objects.get_or_create(
address=chatx_name,
defaults={
'name': chatx_name,
'platform': platform,
'org_id': org_id
}
)
account, __ = account_model.objects.get_or_create(
username=chatx_name,
defaults={
'name': chatx_name,
'asset': asset,
'org_id': org_id
}
)
return asset, account

View File

@@ -250,12 +250,6 @@ class Protocol(ChoicesMixin, models.TextChoices):
'default': False,
'label': _('Auth username')
},
'enable_cluster_mode': {
'type': 'bool',
'default': False,
'label': _('Enable cluster mode'),
'help_text': _('Enable if this Redis instance is part of a cluster')
},
}
},
}
@@ -268,14 +262,6 @@ class Protocol(ChoicesMixin, models.TextChoices):
'port_from_addr': True,
'required': True,
'secret_types': ['token'],
'setting': {
'namespace': {
'type': 'str',
'required': False,
'default': '',
'label': _('Namespace')
}
}
},
cls.http: {
'port': 80,

View File

@@ -112,7 +112,7 @@ class Protocol(models.Model):
return protocols[0] if len(protocols) > 0 else {}
@property
def setting(self) -> dict:
def setting(self):
if self._setting is not None:
return self._setting
return self.asset_platform_protocol.get('setting', {})
@@ -122,7 +122,7 @@ class Protocol(models.Model):
self._setting = value
@property
def public(self) -> bool:
def public(self):
return self.asset_platform_protocol.get('public', True)
@@ -210,7 +210,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return self.category == const.Category.DS and hasattr(self, 'ds')
@lazyproperty
def spec_info(self) -> dict:
def spec_info(self):
instance = getattr(self, self.category, None)
if not instance:
return {}
@@ -240,7 +240,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return info
@lazyproperty
def auto_config(self) -> dict:
def auto_config(self):
platform = self.platform
auto_config = {
'su_enabled': platform.su_enabled,
@@ -343,11 +343,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return names
@lazyproperty
def type(self) -> str:
def type(self):
return self.platform.type
@lazyproperty
def category(self) -> str:
def category(self):
return self.platform.category
def is_category(self, category):
@@ -408,7 +408,8 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return tree_node
@staticmethod
def get_secret_type_assets(assets, secret_type):
def get_secret_type_assets(asset_ids, secret_type):
assets = Asset.objects.filter(id__in=asset_ids)
asset_protocol = assets.prefetch_related('protocols').values_list('id', 'protocols__name')
protocol_secret_types_map = const.Protocol.protocol_secret_types()
asset_secret_types_mapp = defaultdict(set)

View File

@@ -28,8 +28,7 @@ class MyAsset(JMSBaseModel):
@staticmethod
def set_asset_custom_value(assets, user):
asset_ids = [asset.id for asset in assets]
my_assets = MyAsset.objects.filter(asset_id__in=asset_ids, user=user).all()
my_assets = MyAsset.objects.filter(asset__in=assets, user=user).all()
customs = {my_asset.asset.id: my_asset.custom_to_dict() for my_asset in my_assets}
for asset in assets:
custom = customs.get(asset.id)

View File

@@ -573,7 +573,7 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
return not self.__gt__(other)
@property
def name(self) -> str:
def name(self):
return self.value
def computed_full_value(self):

View File

@@ -25,7 +25,7 @@ class PlatformProtocol(models.Model):
return '{}/{}'.format(self.name, self.port)
@property
def secret_types(self) -> list:
def secret_types(self):
return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
@lazyproperty

View File

@@ -147,7 +147,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
attrs=('id', 'name', 'type'))
accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
@@ -426,18 +425,6 @@ class DetailMixin(serializers.Serializer):
gathered_info = MethodSerializer(label=_('Gathered info'), read_only=True)
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
@staticmethod
def get_auto_config(obj) -> dict:
return obj.auto_config
@staticmethod
def get_gathered_info(obj) -> dict:
return obj.gathered_info
@staticmethod
def get_spec_info(obj) -> dict:
return obj.spec_info
def get_instance(self):
request = self.context.get('request')
if not self.instance and UUID_PATTERN.findall(request.path):

View File

@@ -59,10 +59,7 @@ class DatabaseSerializer(AssetSerializer):
if not platform:
return
if platform.type in [
'mysql', 'mariadb', 'oracle', 'sqlserver',
'db2', 'dameng', 'clickhouse', 'redis'
]:
if platform.type in ['mysql', 'mariadb']:
db_field.required = False
db_field.allow_blank = True
db_field.allow_null = True

View File

@@ -26,13 +26,4 @@ class WebSerializer(AssetSerializer):
'submit_selector': {
'default': 'id=login_button',
},
'script': {
'default': [],
}
}
def to_internal_value(self, data):
data = data.copy()
if data.get('script') in ("", None):
data.pop('script', None)
return super().to_internal_value(data)

View File

@@ -19,13 +19,11 @@ __all__ = [
class BaseAutomationSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSerializer):
assets = ObjectRelatedField(many=True, required=False, queryset=Asset.objects, label=_('Assets'))
nodes = ObjectRelatedField(many=True, required=False, queryset=Node.objects, label=_('Nodes'))
executed_amount = serializers.IntegerField(read_only=True, label=_('Executed amount'))
class Meta:
read_only_fields = [
'date_created', 'date_updated', 'created_by',
'periodic_display', 'executed_amount', 'type',
'last_execution_date',
'periodic_display', 'executed_amount', 'type', 'last_execution_date'
]
mini_fields = [
'id', 'name', 'type', 'is_periodic', 'interval',

View File

@@ -84,7 +84,6 @@ class PlatformAutomationSerializer(serializers.ModelSerializer):
class PlatformProtocolSerializer(serializers.ModelSerializer):
setting = MethodSerializer(required=False, label=_("Setting"))
port_from_addr = serializers.BooleanField(label=_("Port from addr"), read_only=True)
port = serializers.IntegerField(label=_("Port"), required=False, min_value=0, max_value=65535)
class Meta:
model = PlatformProtocol

View File

@@ -23,8 +23,6 @@ logger = get_logger(__name__)
class OperatorLogHandler(metaclass=Singleton):
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
SYSTEM_OBJECTS = frozenset({"Role"})
PREFER_CURRENT_ELSE_USER = frozenset({"SSOToken"})
def __init__(self):
self.log_client = self.get_storage_client()
@@ -144,21 +142,13 @@ class OperatorLogHandler(metaclass=Singleton):
after = self.__data_processing(after)
return before, after
def get_org_id(self, user, object_name):
if object_name in self.SYSTEM_OBJECTS:
return Organization.SYSTEM_ID
current = get_current_org_id()
current_id = str(current) if current else None
if object_name in self.PREFER_CURRENT_ELSE_USER:
if current_id and current_id != Organization.DEFAULT_ID:
return current_id
org = user.orgs.distinct().first()
return str(org.id) if org else Organization.DEFAULT_ID
return current_id or Organization.DEFAULT_ID
@staticmethod
def get_org_id(object_name):
system_obj = ('Role',)
org_id = get_current_org_id()
if object_name in system_obj:
org_id = Organization.SYSTEM_ID
return org_id
def create_or_update_operate_log(
self, action, resource_type, resource=None, resource_display=None,
@@ -178,7 +168,7 @@ class OperatorLogHandler(metaclass=Singleton):
# Nothing changed between before and after, so no log is generated unless the save is forced manually
return
org_id = self.get_org_id(user, object_name)
org_id = self.get_org_id(object_name)
data = {
'id': log_id, "user": str(user), 'action': action,
'resource_type': str(resource_type), 'org_id': org_id,

View File

@@ -1,6 +1,6 @@
import os
import uuid
from datetime import timedelta, datetime
from datetime import timedelta
from importlib import import_module
from django.conf import settings
@@ -40,7 +40,7 @@ __all__ = [
class JobLog(JobExecution):
@property
def creator_name(self) -> str:
def creator_name(self):
return self.creator.name
class Meta:
@@ -232,7 +232,7 @@ class UserLoginLog(models.Model):
return '%s(%s)' % (self.username, self.city)
@property
def backend_display(self) -> str:
def backend_display(self):
return gettext(self.backend)
@classmethod
@@ -258,7 +258,7 @@ class UserLoginLog(models.Model):
return login_logs
@property
def reason_display(self) -> str:
def reason_display(self):
from authentication.errors import reason_choices, old_reason_choices
reason = reason_choices.get(self.reason)
@@ -300,15 +300,15 @@ class UserSession(models.Model):
return '%s(%s)' % (self.user, self.ip)
@property
def backend_display(self) -> str:
def backend_display(self):
return gettext(self.backend)
@property
def is_active(self) -> bool:
def is_active(self):
return user_session_manager.check_active(self.key)
@property
def date_expired(self) -> datetime:
def date_expired(self):
session_store_cls = import_module(settings.SESSION_ENGINE).SessionStore
session_store = session_store_cls(session_key=self.key)
cache_key = session_store.cache_key

View File

@@ -119,11 +119,11 @@ class OperateLogSerializer(BulkOrgResourceModelSerializer):
fields = fields_small
@staticmethod
def get_resource_type(instance) -> str:
def get_resource_type(instance):
return _(instance.resource_type)
@staticmethod
def get_resource(instance) -> str:
def get_resource(instance):
return i18n_trans(instance.resource)
@@ -147,11 +147,11 @@ class ActivityUnionLogSerializer(serializers.Serializer):
r_type = serializers.CharField(read_only=True)
@staticmethod
def get_timestamp(obj) -> str:
def get_timestamp(obj):
return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')
@staticmethod
def get_content(obj) -> str:
def get_content(obj):
if not obj['r_detail']:
action = obj['r_action'].replace('_', ' ').capitalize()
ctn = _('%s %s this resource') % (obj['r_user'], _(action).lower())
@@ -160,7 +160,7 @@ class ActivityUnionLogSerializer(serializers.Serializer):
return ctn
@staticmethod
def get_detail_url(obj) -> str:
def get_detail_url(obj):
detail_url = ''
detail_id, obj_type = obj['r_detail_id'], obj['r_type']
if not detail_id:
@@ -210,7 +210,7 @@ class UserSessionSerializer(serializers.ModelSerializer):
"backend_display": {"label": _("Auth backend display")},
}
def get_is_current_user_session(self, obj) -> bool:
def get_is_current_user_session(self, obj):
request = self.context.get('request')
if not request:
return False

View File

@@ -116,7 +116,7 @@ def send_login_info_to_reviewers(instance: UserLoginLog | str, auth_acl_id):
reviewers = acl.reviewers.all()
for reviewer in reviewers:
UserLoginReminderMsg(reviewer, instance, acl).publish_async()
UserLoginReminderMsg(reviewer, instance).publish_async()
@receiver(post_auth_success)

View File

@@ -47,21 +47,20 @@ def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
objs = model.objects.filter(pk__in=pk_set)
objs_display = [str(o) for o in objs]
action = M2M_ACTION[action]
changed_field = current_instance.get(field_name, {})
changed_value = changed_field.get('value', [])
changed_field = current_instance.get(field_name, [])
after, before, before_value = None, None, None
if action == ActionChoices.create:
before_value = list(set(changed_value) - set(objs_display))
before_value = list(set(changed_field) - set(objs_display))
elif action == ActionChoices.delete:
before_value = list(set(changed_value).symmetric_difference(set(objs_display)))
before_value = list(
set(changed_field).symmetric_difference(set(objs_display))
)
if changed_field:
after = {field_name: changed_field}
if before_value:
before_change_field = changed_field.copy()
before_change_field['value'] = before_value
before = {field_name: before_change_field}
before = {field_name: before_value}
if sorted(str(before)) == sorted(str(after)):
return
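
Both versions of the handler above derive the "before" value from plain set arithmetic on the rendered object names. A small worked example with invented values, showing what the two operations return for the create and delete actions:

changed = ['alice', 'bob', 'carol']   # current value of the m2m field
objs_display = ['carol', 'dave']      # objects carried by this signal

# create: members that were already present before the new ones were added
print(sorted(set(changed) - set(objs_display)))                 # ['alice', 'bob']

# delete: members that appear in exactly one of the two collections
print(sorted(set(changed).symmetric_difference(objs_display)))  # ['alice', 'bob', 'dave']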

View File

@@ -2,14 +2,15 @@
#
import datetime
import os
import subprocess
from celery import shared_task
from django.conf import settings
from django.core.files.storage import default_storage
from django.db import transaction
from django.utils import timezone
from django.utils._os import safe_join
from django.utils.translation import gettext_lazy as _
from django.utils._os import safe_join
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.storage.ftp_file import FTPFileStorageHandler
@@ -78,7 +79,7 @@ def clean_celery_tasks_period():
command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;"
safe_run_cmd(command, (settings.CELERY_LOG_DIR, expire_days))
celery_log_path = safe_join(settings.LOG_DIR, 'celery.log')
command = "echo > %s"
command = "echo > {}".format(celery_log_path)
safe_run_cmd(command, (celery_log_path,))

View File

@@ -69,8 +69,6 @@ class RDPFileClientProtocolURLMixin:
'autoreconnection enabled:i': '1',
'bookmarktype:i': '3',
'use redirection server name:i': '0',
'bitmapcachepersistenable:i': '0',
'bitmapcachesize:i': '1500',
}
# copy from
@@ -78,6 +76,7 @@ class RDPFileClientProtocolURLMixin:
rdp_low_speed_broadband_option = {
"connection type:i": 2,
"disable wallpaper:i": 1,
"bitmapcachepersistenable:i": 1,
"disable full window drag:i": 1,
"disable menu anims:i": 1,
"allow font smoothing:i": 0,
@@ -88,6 +87,7 @@ class RDPFileClientProtocolURLMixin:
rdp_high_speed_broadband_option = {
"connection type:i": 4,
"disable wallpaper:i": 0,
"bitmapcachepersistenable:i": 1,
"disable full window drag:i": 1,
"disable menu anims:i": 0,
"allow font smoothing:i": 0,
@@ -362,7 +362,6 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
self.validate_serializer(serializer)
return super().perform_create(serializer)
def _insert_connect_options(self, data, user):
connect_options = data.pop('connect_options', {})
default_name_opts = {
@@ -376,7 +375,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
for name in default_name_opts.keys():
value = preferences.get(name, default_name_opts[name])
connect_options[name] = value
connect_options['lang'] = getattr(user, 'lang') or settings.LANGUAGE_CODE
connect_options['lang'] = getattr(user, 'lang', settings.LANGUAGE_CODE)
data['connect_options'] = connect_options
@staticmethod
@@ -432,7 +431,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
if account.username != AliasAccount.INPUT:
data['input_username'] = ''
ticket = self._validate_acl(user, asset, account, connect_method, protocol)
ticket = self._validate_acl(user, asset, account, connect_method)
if ticket:
data['from_ticket'] = ticket
@@ -471,7 +470,7 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
after=after, object_name=object_name
)
def _validate_acl(self, user, asset, account, connect_method, protocol):
def _validate_acl(self, user, asset, account, connect_method):
from acls.models import LoginAssetACL
kwargs = {'user': user, 'asset': asset, 'account': account}
if account.username == AliasAccount.INPUT:
@@ -524,15 +523,9 @@ class ConnectionTokenViewSet(AuthFaceMixin, ExtraActionApiMixin, RootOrgViewMixi
return
self._record_operate_log(acl, asset)
os = get_request_os(self.request) if self.request else 'windows'
method = ConnectMethodUtil.get_connect_method(
connect_method, protocol=protocol, os=os
)
login_from = method['label'] if method else connect_method
for reviewer in reviewers:
AssetLoginReminderMsg(
reviewer, asset, user, account, acl,
ip, self.input_username, login_from
reviewer, asset, user, account, self.input_username
).publish_async()
def create_face_verify(self, response):
@@ -565,9 +558,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
rbac_perms = {
'create': 'authentication.add_superconnectiontoken',
'renewal': 'authentication.add_superconnectiontoken',
'list': 'authentication.view_superconnectiontoken',
'check': 'authentication.view_superconnectiontoken',
'retrieve': 'authentication.view_superconnectiontoken',
'get_secret_detail': 'authentication.view_superconnectiontokensecret',
'get_applet_info': 'authentication.view_superconnectiontoken',
'release_applet_account': 'authentication.view_superconnectiontoken',
@@ -575,12 +566,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
}
def get_queryset(self):
return ConnectionToken.objects.none()
def get_object(self):
pk = self.kwargs.get(self.lookup_field)
token = get_object_or_404(ConnectionToken, pk=pk)
return token
return ConnectionToken.objects.all()
def get_user(self, serializer):
return serializer.validated_data.get('user')
@@ -632,8 +618,6 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
token_id = request.data.get('id') or ''
token = ConnectionToken.get_typed_connection_token(token_id)
if not token:
raise PermissionDenied('Token {} is not valid'.format(token))
token.is_valid()
serializer = self.get_serializer(instance=token)
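
One hunk above swaps between getattr(user, 'lang') or settings.LANGUAGE_CODE and getattr(user, 'lang', settings.LANGUAGE_CODE). The two are not equivalent: the first falls back whenever the attribute is falsy, the second only when the attribute is missing entirely. A quick illustration with a dummy object (not the project's User model):

class DummyUser:
    lang = None  # attribute exists but is empty

LANGUAGE_CODE = 'en'
user = DummyUser()

print(getattr(user, 'lang') or LANGUAGE_CODE)   # en   (falsy value triggers the fallback)
print(getattr(user, 'lang', LANGUAGE_CODE))     # None (attribute exists, default unused)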

View File

@@ -67,9 +67,8 @@ class UserResetPasswordSendCodeApi(CreateAPIView):
code = random_string(settings.SMS_CODE_LENGTH, lower=False, upper=False)
subject = '%s: %s' % (get_login_title(), _('Forgot password'))
tip = _('The validity period of the verification code is {} minute').format(settings.VERIFY_CODE_TTL // 60)
context = {
'user': user, 'title': subject, 'code': code, 'tip': tip,
'user': user, 'title': subject, 'code': code,
}
message = render_to_string('authentication/_msg_reset_password_code.html', context)
content = {'subject': subject, 'message': message}

View File

@@ -25,10 +25,7 @@ class JMSBaseAuthBackend:
"""
# After third-party user authentication completes, the subsequent get_user logic should also check whether the user is valid
is_valid = getattr(user, 'is_valid', None)
if not is_valid:
logger.info("User %s is not valid", getattr(user, "username", "<unknown>"))
return False
return True
return is_valid or is_valid is None
# allow user to authenticate
def username_allow_authenticate(self, username):

View File

@@ -136,7 +136,7 @@ class SignatureAuthentication(signature.SignatureAuthentication):
# example implementation:
try:
key = AccessKey.objects.get(id=key_id)
if not key.is_valid:
if not key.is_active:
return None, None
user, secret = key.user, str(key.secret)
after_authenticate_update_date(user, key)

View File

@@ -3,7 +3,6 @@ from django.contrib import auth
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.translation import gettext_lazy as _
from django.views import View
from authentication.backends.base import BaseAuthCallbackClientView
@@ -62,10 +61,6 @@ class OAuth2AuthCallbackView(View, FlashMessageMixin):
return HttpResponseRedirect(
settings.AUTH_OAUTH2_AUTHENTICATION_REDIRECT_URI
)
else:
if getattr(request, 'error_message', ''):
response = self.get_failed_response('/', title=_('OAuth2 Error'), msg=request.error_message)
return response
logger.debug(log_prompt.format('Redirect'))
redirect_url = settings.AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT or '/'

View File

@@ -134,7 +134,6 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
log_prompt = "Process GET requests [OIDCAuthCallbackView]: {}"
logger.debug(log_prompt.format('Start'))
callback_params = request.GET
error_title = _("OpenID Error")
# Retrieve the state value that was previously generated. No state means that we cannot
# authenticate the user (so a failure should be returned).
@@ -173,9 +172,10 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
try:
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
except IntegrityError as e:
title = _("OpenID Error")
msg = _('Please check if a user with the same username or email already exists')
logger.error(e, exc_info=True)
response = self.get_failed_response('/', error_title, msg)
response = self.get_failed_response('/', title, msg)
return response
if user:
logger.debug(log_prompt.format('Login: {}'.format(user)))
@@ -194,6 +194,7 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
return HttpResponseRedirect(
next_url or settings.AUTH_OPENID_AUTHENTICATION_REDIRECT_URI
)
if 'error' in callback_params:
logger.debug(
log_prompt.format('Error in callback params: {}'.format(callback_params['error']))
@@ -204,12 +205,9 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
# OpenID Connect Provider authenticate endpoint.
logger.debug(log_prompt.format('Logout'))
auth.logout(request)
redirect_url = settings.AUTH_OPENID_AUTHENTICATION_FAILURE_REDIRECT_URI
if not user and getattr(request, 'error_message', ''):
response = self.get_failed_response(redirect_url, title=error_title, msg=request.error_message)
return response
logger.debug(log_prompt.format('Redirect'))
return HttpResponseRedirect(redirect_url)
return HttpResponseRedirect(settings.AUTH_OPENID_AUTHENTICATION_FAILURE_REDIRECT_URI)
class OIDCAuthCallbackClientView(BaseAuthCallbackClientView):

View File

@@ -252,7 +252,6 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin, FlashMessageMixin):
def post(self, request):
log_prompt = "Process SAML2 POST requests: {}"
post_data = request.POST
error_title = _("SAML2 Error")
try:
saml_instance = self.init_saml_auth(request)
@@ -280,18 +279,15 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin, FlashMessageMixin):
try:
user = auth.authenticate(request=request, saml_user_data=saml_user_data)
except IntegrityError as e:
title = _("SAML2 Error")
msg = _('Please check if a user with the same username or email already exists')
logger.error(e, exc_info=True)
response = self.get_failed_response('/', error_title, msg)
response = self.get_failed_response('/', title, msg)
return response
if user and user.is_valid:
logger.debug(log_prompt.format('Login: {}'.format(user)))
auth.login(self.request, user)
if not user and getattr(request, 'error_message', ''):
response = self.get_failed_response('/', title=error_title, msg=request.error_message)
return response
logger.debug(log_prompt.format('Redirect'))
redir = post_data.get('RelayState')
if not redir or len(redir) == 0:

View File

@@ -14,9 +14,7 @@ class TempTokenAuthBackend(JMSBaseAuthBackend):
return settings.AUTH_TEMP_TOKEN
def authenticate(self, request, username='', password=''):
tokens = self.model.objects.filter(username=username).order_by('-date_created')[:500]
token = next((t for t in tokens if t.secret == password), None)
token = self.model.objects.filter(username=username, secret=password).first()
if not token:
return None
if not token.is_valid:

View File

@@ -114,12 +114,12 @@ class BlockMFAError(AuthFailedNeedLogMixin, AuthFailedError):
super().__init__(username=username, request=request, ip=ip)
class BlockLoginError(AuthFailedNeedLogMixin, AuthFailedNeedBlockMixin, AuthFailedError):
class BlockLoginError(AuthFailedNeedBlockMixin, AuthFailedError):
error = 'block_login'
def __init__(self, username, ip, request):
def __init__(self, username, ip):
self.msg = const.block_user_login_msg.format(settings.SECURITY_LOGIN_LIMIT_TIME)
super().__init__(username=username, ip=ip, request=request)
super().__init__(username=username, ip=ip)
class SessionEmptyError(AuthFailedError):

View File

@@ -50,7 +50,7 @@ class UserLoginForm(forms.Form):
class UserCheckOtpCodeForm(forms.Form):
code = forms.CharField(label=_('MFA Code'), max_length=128, required=False)
mfa_type = forms.CharField(label=_('MFA type'), max_length=128, required=False)
mfa_type = forms.CharField(label=_('MFA type'), max_length=128)
class CustomCaptchaTextInput(CaptchaTextInput):

View File

@@ -38,7 +38,7 @@ class BaseMFA(abc.ABC):
if not ok:
return False, msg
cache.set(cache_key, code, settings.VERIFY_CODE_TTL)
cache.set(cache_key, code, 60)
return True, msg
def is_authenticated(self):
@@ -72,9 +72,10 @@ class BaseMFA(abc.ABC):
def is_active(self):
return False
@classmethod
def global_enabled(cls):
return cls.name in settings.SECURITY_MFA_ENABLED_BACKENDS
@staticmethod
@abc.abstractmethod
def global_enabled():
return False
@abc.abstractmethod
def get_enable_url(self) -> str:
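
The same shape recurs in the MFA backend files that follow: global_enabled moves between a classmethod on the base class and a per-backend staticmethod. A minimal, self-contained sketch of the staticmethod variant; the stub names below are invented for illustration.

import abc

class BaseMFAStub(abc.ABC):
    # Stripped-down stand-in for BaseMFA; the real class carries user and
    # verification state that is omitted here.
    name = ''

    @staticmethod
    @abc.abstractmethod
    def global_enabled():
        return False

class MFADummy(BaseMFAStub):
    name = 'dummy'

    @staticmethod
    def global_enabled():
        # Each concrete backend answers from its own feature flag
        return True

print(MFADummy.global_enabled())  # True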

View File

@@ -39,9 +39,9 @@ class MFACustom(BaseMFA):
def is_active(self):
return True
@classmethod
def global_enabled(cls):
return super().global_enabled() and settings.MFA_CUSTOM and callable(mfa_custom_method)
@staticmethod
def global_enabled():
return settings.MFA_CUSTOM and callable(mfa_custom_method)
def get_enable_url(self) -> str:
return ''

View File

@@ -39,20 +39,19 @@ class MFAEmail(BaseMFA):
def send_challenge(self):
code = random_string(settings.SMS_CODE_LENGTH, lower=False, upper=False)
subject = '%s: %s' % (get_login_title(), _('MFA code'))
tip = _('The validity period of the verification code is {} minute').format(settings.VERIFY_CODE_TTL // 60)
context = {
'user': self.user, 'title': subject, 'code': code, 'tip': tip,
'user': self.user, 'title': subject, 'code': code,
}
message = render_to_string('authentication/_msg_mfa_email_code.html', context)
content = {'subject': subject, 'message': message}
sender_util = SendAndVerifyCodeUtil(
self.user.email, code=code, backend=self.name, **content
self.user.email, code=code, backend=self.name, timeout=60, **content
)
sender_util.gen_and_send_async()
@classmethod
def global_enabled(cls):
return super().global_enabled and settings.SECURITY_MFA_BY_EMAIL
@staticmethod
def global_enabled():
return settings.SECURITY_MFA_BY_EMAIL
def disable(self):
return '/ui/#/profile/index'

View File

@@ -29,10 +29,9 @@ class MFAFace(BaseMFA, AuthFaceMixin):
return True
return bool(self.user.face_vector)
@classmethod
def global_enabled(cls):
@staticmethod
def global_enabled():
return (
super().global_enabled() and
settings.XPACK_LICENSE_IS_VALID and
settings.XPACK_LICENSE_EDITION_ULTIMATE and
settings.FACE_RECOGNITION_ENABLED

View File

@@ -25,6 +25,10 @@ class MFAOtp(BaseMFA):
return True
return self.user.otp_secret_key
@staticmethod
def global_enabled():
return True
def get_enable_url(self) -> str:
return reverse('authentication:user-otp-enable-start')

View File

@@ -23,9 +23,9 @@ class MFAPasskey(BaseMFA):
return False
return self.user.passkey_set.count()
@classmethod
def global_enabled(cls):
return super().global_enabled() and settings.AUTH_PASSKEY
@staticmethod
def global_enabled():
return settings.AUTH_PASSKEY
def get_enable_url(self) -> str:
return '/ui/#/profile/passkeys'

View File

@@ -27,9 +27,9 @@ class MFARadius(BaseMFA):
def is_active(self):
return True
@classmethod
def global_enabled(cls):
return super().global_enabled() and settings.OTP_IN_RADIUS
@staticmethod
def global_enabled():
return settings.OTP_IN_RADIUS
def get_enable_url(self) -> str:
return ''

Some files were not shown because too many files have changed in this diff.