mirror of
https://github.com/jumpserver/jumpserver.git
synced 2025-12-24 13:02:37 +00:00
Compare commits
288 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d1c11eb2d7 | ||
|
|
8a77a7b8b5 | ||
|
|
7eed182627 | ||
|
|
ec847d3ecb | ||
|
|
a0994e2e12 | ||
|
|
17e3ddda05 | ||
|
|
6e2e92be5e | ||
|
|
e90d8c8561 | ||
|
|
cf972942fa | ||
|
|
72e35d5553 | ||
|
|
0ba84e7e18 | ||
|
|
fbc5ae1b9b | ||
|
|
2fcf045826 | ||
|
|
32cba4f2a1 | ||
|
|
b76aa3b259 | ||
|
|
3f9a17347d | ||
|
|
c01d1973d9 | ||
|
|
b216a9abc0 | ||
|
|
c628ba1c4b | ||
|
|
ebbae36c49 | ||
|
|
69ef25666e | ||
|
|
d0475397d0 | ||
|
|
dad45e7ace | ||
|
|
720f9cd397 | ||
|
|
81dee0c403 | ||
|
|
105ef791b8 | ||
|
|
a19c0bde60 | ||
|
|
3996daf4a7 | ||
|
|
ac235f788e | ||
|
|
67e334bf43 | ||
|
|
f7f9fb1bdf | ||
|
|
8979228e0b | ||
|
|
024beca690 | ||
|
|
5c0359e394 | ||
|
|
4ce4bde368 | ||
|
|
809bad271a | ||
|
|
d3bfc03849 | ||
|
|
04c0121b37 | ||
|
|
b97b50ab31 | ||
|
|
d8a8c8153b | ||
|
|
a68ad7be68 | ||
|
|
4041f1aeec | ||
|
|
59388655ea | ||
|
|
ef7463c588 | ||
|
|
7e7d6d94e6 | ||
|
|
6febc104de | ||
|
|
e629e6fb3f | ||
|
|
1b99a7e06f | ||
|
|
733b95ee99 | ||
|
|
a42641ca9a | ||
|
|
90c48d303e | ||
|
|
949166eaed | ||
|
|
b179264127 | ||
|
|
a2b9a5cee3 | ||
|
|
9e80b70c12 | ||
|
|
2454a07d38 | ||
|
|
ad7647c0cc | ||
|
|
18986bc805 | ||
|
|
157d81f117 | ||
|
|
8558204450 | ||
|
|
c18388e27a | ||
|
|
b6f86e8fb6 | ||
|
|
7280c6726e | ||
|
|
6f135c34c1 | ||
|
|
52830db500 | ||
|
|
6411642ced | ||
|
|
791b175465 | ||
|
|
d29e8317e5 | ||
|
|
a868751edd | ||
|
|
e03f49e52b | ||
|
|
2324cdc14e | ||
|
|
f3c90c6bbd | ||
|
|
ac6ffb24ee | ||
|
|
6f3ae4f73f | ||
|
|
1345998acd | ||
|
|
6812cbe314 | ||
|
|
3bba8e4043 | ||
|
|
8abf954015 | ||
|
|
1a77dc44cd | ||
|
|
13ae50f5d0 | ||
|
|
2029e9f8df | ||
|
|
1e97a23bc5 | ||
|
|
262d070f3c | ||
|
|
4ceaba60ed | ||
|
|
40b7331ec0 | ||
|
|
04cda3da0b | ||
|
|
1951c595ed | ||
|
|
c74584f63c | ||
|
|
c295f44d9c | ||
|
|
ead824a03c | ||
|
|
50be7c6fc8 | ||
|
|
bab4562820 | ||
|
|
104dd9721b | ||
|
|
cdcfdeefc5 | ||
|
|
613a7d63b5 | ||
|
|
c6a3a141bb | ||
|
|
93e5a0ba5c | ||
|
|
129c0e1bf4 | ||
|
|
62c57d2fdf | ||
|
|
4711813af8 | ||
|
|
384873b4cb | ||
|
|
33860bb955 | ||
|
|
9e410bb389 | ||
|
|
db2ab1513e | ||
|
|
18e525c943 | ||
|
|
9337463471 | ||
|
|
8fdd89e67c | ||
|
|
c7882a615f | ||
|
|
e6d50cc8b4 | ||
|
|
3bd7410ab8 | ||
|
|
c610ec797f | ||
|
|
188a2846ed | ||
|
|
df99067ee3 | ||
|
|
ca17faaf01 | ||
|
|
a487d30001 | ||
|
|
fae5d07df6 | ||
|
|
df31f47c68 | ||
|
|
d1acab3aa9 | ||
|
|
15363a7f72 | ||
|
|
d573ade525 | ||
|
|
7ac00d5fdf | ||
|
|
2f6c9f8260 | ||
|
|
41732d7a7b | ||
|
|
28d19fd91f | ||
|
|
65269db849 | ||
|
|
df2858470a | ||
|
|
1c8ad40565 | ||
|
|
78de2a2403 | ||
|
|
218f917f69 | ||
|
|
bb25bf7621 | ||
|
|
f6cc7046a2 | ||
|
|
1bc6e50b06 | ||
|
|
1d3135d2d7 | ||
|
|
308d87d021 | ||
|
|
db04f6ca18 | ||
|
|
a7cd0bc0fe | ||
|
|
24708a6c5e | ||
|
|
55a10a8d1d | ||
|
|
32b6a1f1a4 | ||
|
|
c1c70849e9 | ||
|
|
7a6ed91f62 | ||
|
|
497a52a509 | ||
|
|
57e12256e7 | ||
|
|
b8ec60dea1 | ||
|
|
c9afd94714 | ||
|
|
a0c61ab8cb | ||
|
|
567b62516a | ||
|
|
404fadd899 | ||
|
|
ee1ec6aeee | ||
|
|
783bddf2c7 | ||
|
|
5ae49295e9 | ||
|
|
8d6d188ac7 | ||
|
|
912ff3df24 | ||
|
|
995d8cadb9 | ||
|
|
6e5cea49ae | ||
|
|
a33a452434 | ||
|
|
fe2f54fcf6 | ||
|
|
1e3154d9b6 | ||
|
|
a1c09591d3 | ||
|
|
d4e0a51a08 | ||
|
|
bba4c15d6d | ||
|
|
3e33c74b64 | ||
|
|
556d29360e | ||
|
|
9329a1563c | ||
|
|
8bf11c9ade | ||
|
|
bbb802d894 | ||
|
|
8e7226d9dc | ||
|
|
2bd889e505 | ||
|
|
3dcfd0035a | ||
|
|
edfda5825c | ||
|
|
3a196f0814 | ||
|
|
a4a671afd4 | ||
|
|
c337bbff8f | ||
|
|
863140e185 | ||
|
|
ad0d264c2a | ||
|
|
7f85e503d5 | ||
|
|
61ff3db0f1 | ||
|
|
fa08517bea | ||
|
|
f86d045c01 | ||
|
|
1a7fd58abf | ||
|
|
d808256e6a | ||
|
|
305a1b10ed | ||
|
|
8c277e8875 | ||
|
|
ca965aca9e | ||
|
|
061b60ef59 | ||
|
|
c008115888 | ||
|
|
8d1fb84aaf | ||
|
|
43d61b5348 | ||
|
|
c26a786287 | ||
|
|
cb2bd0cf2c | ||
|
|
3048e6311b | ||
|
|
5e16b6387a | ||
|
|
93e1adf376 | ||
|
|
556bd3682e | ||
|
|
6bbbe312a2 | ||
|
|
1ac64db0ba | ||
|
|
fa54a98d6c | ||
|
|
31de9375e7 | ||
|
|
697270e3e6 | ||
|
|
56c324b04e | ||
|
|
984b94c874 | ||
|
|
50df7f1304 | ||
|
|
7bd7be78a4 | ||
|
|
8e5833aef0 | ||
|
|
f20b465ddf | ||
|
|
409d254a2e | ||
|
|
e6d30fa77d | ||
|
|
b25404cac1 | ||
|
|
ef4cc5f646 | ||
|
|
f0dc519423 | ||
|
|
2cb6da3129 | ||
|
|
1819083a25 | ||
|
|
bdeec0d3cb | ||
|
|
8fc5c4cf9e | ||
|
|
89051b2c67 | ||
|
|
9123839b48 | ||
|
|
258c8a30d1 | ||
|
|
af75b5269c | ||
|
|
0a66693a41 | ||
|
|
7151201d58 | ||
|
|
51820f23bf | ||
|
|
8772cd8c71 | ||
|
|
60cb1f8136 | ||
|
|
5f1b7ff8f9 | ||
|
|
37b150bc04 | ||
|
|
1432fe1609 | ||
|
|
8ae98887ee | ||
|
|
24a1738e73 | ||
|
|
188c04c9a6 | ||
|
|
bb4da12366 | ||
|
|
382112ee33 | ||
|
|
3e69e6840b | ||
|
|
a82ed3e924 | ||
|
|
b347acd5ec | ||
|
|
ccd6b01020 | ||
|
|
831b67eae4 | ||
|
|
3ab634d88e | ||
|
|
867ad94a30 | ||
|
|
7d0a19635a | ||
|
|
4642804077 | ||
|
|
d405bae205 | ||
|
|
68841d1f15 | ||
|
|
4cad5affec | ||
|
|
2f8a07e665 | ||
|
|
78133b0c60 | ||
|
|
88d9078c43 | ||
|
|
5559f112db | ||
|
|
9a4b32cb3c | ||
|
|
ddf4b61c9f | ||
|
|
0eaaa7b4f6 | ||
|
|
09160fed5d | ||
|
|
18af5e8c4a | ||
|
|
1ed388459b | ||
|
|
2e944c6898 | ||
|
|
8409523fee | ||
|
|
16634907b4 | ||
|
|
cfa5de13ab | ||
|
|
28c8ec1fab | ||
|
|
a14ebc5f0f | ||
|
|
6af20d298d | ||
|
|
795d6e01dc | ||
|
|
acf8b5798b | ||
|
|
abcd12f645 | ||
|
|
30fe5214c7 | ||
|
|
708a87c903 | ||
|
|
6a30e0739d | ||
|
|
3951b8b080 | ||
|
|
c295f1451a | ||
|
|
c4a94876cc | ||
|
|
dcab934d9f | ||
|
|
4ecb0b760f | ||
|
|
b27b02eb9d | ||
|
|
70cf847cd9 | ||
|
|
2099baaaff | ||
|
|
b22aed0cc3 | ||
|
|
3e7f83d44e | ||
|
|
40f8b99242 | ||
|
|
9ff345747b | ||
|
|
9319c4748c | ||
|
|
e8b4ee5c40 | ||
|
|
429e838973 | ||
|
|
ee1aff243c | ||
|
|
ea7133dea0 | ||
|
|
e7229963bf | ||
|
|
0f7b41d177 | ||
|
|
c4146744e5 | ||
|
|
dc32224294 | ||
|
|
d07a230ba6 |
@@ -7,4 +7,5 @@ django.db
|
||||
celerybeat.pid
|
||||
### Vagrant ###
|
||||
.vagrant/
|
||||
apps/xpack/.git
|
||||
apps/xpack/.git
|
||||
|
||||
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -1,3 +1,4 @@
|
||||
*.mmdb filter=lfs diff=lfs merge=lfs -text
|
||||
*.mo filter=lfs diff=lfs merge=lfs -text
|
||||
*.ipdb filter=lfs diff=lfs merge=lfs -text
|
||||
|
||||
|
||||
3
.github/release-config.yml
vendored
3
.github/release-config.yml
vendored
@@ -41,4 +41,5 @@ version-resolver:
|
||||
default: patch
|
||||
template: |
|
||||
## 版本变化 What’s Changed
|
||||
$CHANGES
|
||||
$CHANGES
|
||||
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -41,3 +41,4 @@ release/*
|
||||
releashe
|
||||
/apps/script.py
|
||||
data/*
|
||||
|
||||
|
||||
@@ -126,3 +126,4 @@ enforcement ladder](https://github.com/mozilla/diversity).
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
|
||||
|
||||
@@ -23,3 +23,4 @@ When reporting issues, always include:
|
||||
|
||||
Because the issues are open to the public, when submitting files, be sure to remove any sensitive information, e.g. user name, password, IP address, and company name. You can
|
||||
replace those parts with "REDACTED" or other strings like "****".
|
||||
|
||||
|
||||
113
Dockerfile
113
Dockerfile
@@ -1,54 +1,66 @@
|
||||
FROM python:3.8-slim as stage-build
|
||||
ARG TARGETARCH
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
ADD . .
|
||||
RUN cd utils && bash -ixeu build.sh
|
||||
|
||||
FROM python:3.8-slim
|
||||
ARG TARGETARCH
|
||||
MAINTAINER JumpServer Team <ibuler@qq.com>
|
||||
|
||||
ARG BUILD_DEPENDENCIES=" \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
sshpass"
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
openssh-client \
|
||||
sshpass"
|
||||
|
||||
ARG TOOLS=" \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iproute2 \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
ca-certificates \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
|
||||
RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& apt update && sleep 1 && apt update \
|
||||
&& apt -y install ${BUILD_DEPENDENCIES} \
|
||||
&& apt -y install ${DEPENDENCIES} \
|
||||
&& apt -y install ${TOOLS} \
|
||||
&& localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
|
||||
&& cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
sed -i 's@http://.*.debian.org@http://mirrors.ustc.edu.cn@g' /etc/apt/sources.list \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update \
|
||||
&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
&& mkdir -p /root/.ssh/ \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
|
||||
&& sed -i "s@# alias l@alias l@g" ~/.bashrc \
|
||||
&& echo "set mouse-=a" > ~/.vimrc \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& mv /bin/sh /bin/sh.bak \
|
||||
&& ln -s /bin/bash /bin/sh
|
||||
&& echo "no" | dpkg-reconfigure dash \
|
||||
&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
ARG TARGETARCH
|
||||
ARG ORACLE_LIB_MAJOR=19
|
||||
ARG ORACLE_LIB_MINOR=10
|
||||
ENV ORACLE_FILE="instantclient-basiclite-linux.${TARGETARCH:-amd64}-${ORACLE_LIB_MAJOR}.${ORACLE_LIB_MINOR}.0.0.0dbru.zip"
|
||||
@@ -65,25 +77,22 @@ RUN mkdir -p /opt/oracle/ \
|
||||
WORKDIR /tmp/build
|
||||
COPY ./requirements ./requirements
|
||||
|
||||
ARG PIP_MIRROR=https://mirrors.aliyun.com/pypi/simple/
|
||||
ARG PIP_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_MIRROR=$PIP_MIRROR
|
||||
ARG PIP_JMS_MIRROR=https://mirrors.aliyun.com/pypi/simple/
|
||||
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR
|
||||
# 因为以 jms 或者 jumpserver 开头的 mirror 上可能没有
|
||||
RUN pip install --upgrade pip==20.2.4 setuptools==49.6.0 wheel==0.34.2 -i ${PIP_MIRROR} \
|
||||
&& pip install --no-cache-dir $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install --no-cache-dir -r requirements/requirements.txt -i ${PIP_MIRROR} \
|
||||
&& rm -rf ~/.cache/pip
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
set -ex \
|
||||
&& pip config set global.index-url ${PIP_MIRROR} \
|
||||
&& pip install --upgrade pip \
|
||||
&& pip install --upgrade setuptools wheel \
|
||||
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install -r requirements/requirements.txt
|
||||
|
||||
ADD . .
|
||||
RUN cd utils \
|
||||
&& bash -ixeu build.sh \
|
||||
&& mv ../release/jumpserver /opt/jumpserver \
|
||||
&& rm -rf /tmp/build \
|
||||
&& echo > /opt/jumpserver/config.yml
|
||||
COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
|
||||
RUN echo > /opt/jumpserver/config.yml \
|
||||
&& rm -rf /tmp/build
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
VOLUME /opt/jumpserver/data
|
||||
|
||||
95
Dockerfile.loong64
Normal file
95
Dockerfile.loong64
Normal file
@@ -0,0 +1,95 @@
|
||||
FROM python:3.8-slim as stage-build
|
||||
ARG TARGETARCH
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
ADD . .
|
||||
RUN cd utils && bash -ixeu build.sh
|
||||
|
||||
FROM python:3.8-slim
|
||||
ARG TARGETARCH
|
||||
MAINTAINER JumpServer Team <ibuler@qq.com>
|
||||
|
||||
ARG BUILD_DEPENDENCIES=" \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
openssh-client \
|
||||
sshpass"
|
||||
|
||||
ARG TOOLS=" \
|
||||
ca-certificates \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
set -ex \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update \
|
||||
&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
&& mkdir -p /root/.ssh/ \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
|
||||
&& sed -i "s@# alias l@alias l@g" ~/.bashrc \
|
||||
&& echo "set mouse-=a" > ~/.vimrc \
|
||||
&& echo "no" | dpkg-reconfigure dash \
|
||||
&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /tmp/build
|
||||
COPY ./requirements ./requirements
|
||||
|
||||
ARG PIP_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_MIRROR=$PIP_MIRROR
|
||||
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
set -ex \
|
||||
&& pip config set global.index-url ${PIP_MIRROR} \
|
||||
&& pip install --upgrade pip \
|
||||
&& pip install --upgrade setuptools wheel \
|
||||
&& pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-36.0.1-cp38-cp38-linux_loongarch64.whl \
|
||||
&& pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp38-cp38-linux_loongarch64.whl \
|
||||
&& pip install $(grep 'PyNaCl' requirements/requirements.txt) \
|
||||
&& GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true pip install grpcio \
|
||||
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install -r requirements/requirements.txt
|
||||
|
||||
COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
|
||||
RUN echo > /opt/jumpserver/config.yml \
|
||||
&& rm -rf /tmp/build
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
VOLUME /opt/jumpserver/data
|
||||
VOLUME /opt/jumpserver/logs
|
||||
|
||||
ENV LANG=zh_CN.UTF-8
|
||||
|
||||
EXPOSE 8070
|
||||
EXPOSE 8080
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
||||
3
LICENSE
3
LICENSE
@@ -671,4 +671,5 @@ into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
||||
|
||||
18
README.md
18
README.md
@@ -16,15 +16,13 @@
|
||||
|
||||
|
||||
|
||||
JumpServer 是全球首款开源的堡垒机,使用 GPLv3 开源协议,是符合 4A 规范的运维安全审计系统。
|
||||
JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。
|
||||
|
||||
JumpServer 使用 Python 开发,配备了业界领先的 Web Terminal 方案,交互界面美观、用户体验好。
|
||||
|
||||
JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向扩展,无资产数量及并发限制。
|
||||
|
||||
改变世界,从一点点开始 ...
|
||||
|
||||
> 如需进一步了解 JumpServer 开源项目,推荐阅读 [JumpServer 的初心和使命](https://mp.weixin.qq.com/s/S6q_2rP_9MwaVwyqLQnXzA)
|
||||
|
||||
### 特色优势
|
||||
|
||||
@@ -95,11 +93,15 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向
|
||||
|
||||
### 案例研究
|
||||
|
||||
- [JumpServer 堡垒机护航顺丰科技超大规模资产安全运维](https://blog.fit2cloud.com/?p=1147)
|
||||
- [JumpServer 堡垒机让“大智慧”的混合 IT 运维更智慧](https://blog.fit2cloud.com/?p=882)
|
||||
- [携程 JumpServer 堡垒机部署与运营实战](https://blog.fit2cloud.com/?p=851)
|
||||
- [小红书的JumpServer堡垒机大规模资产跨版本迁移之路](https://blog.fit2cloud.com/?p=516)
|
||||
- [JumpServer堡垒机助力中手游提升多云环境下安全运维能力](https://blog.fit2cloud.com/?p=732)
|
||||
- [腾讯海外游戏:基于JumpServer构建游戏安全运营能力](https://blog.fit2cloud.com/?p=3704)
|
||||
- [万华化学:通过JumpServer管理全球化分布式IT资产,并且实现与云管平台的联动](https://blog.fit2cloud.com/?p=3504)
|
||||
- [雪花啤酒:JumpServer堡垒机使用体会](https://blog.fit2cloud.com/?p=3412)
|
||||
- [顺丰科技:JumpServer 堡垒机护航顺丰科技超大规模资产安全运维](https://blog.fit2cloud.com/?p=1147)
|
||||
- [沐瞳游戏:通过JumpServer管控多项目分布式资产](https://blog.fit2cloud.com/?p=3213)
|
||||
- [携程:JumpServer 堡垒机部署与运营实战](https://blog.fit2cloud.com/?p=851)
|
||||
- [大智慧:JumpServer 堡垒机让“大智慧”的混合 IT 运维更智慧](https://blog.fit2cloud.com/?p=882)
|
||||
- [小红书:JumpServer 堡垒机大规模资产跨版本迁移之路](https://blog.fit2cloud.com/?p=516)
|
||||
- [中手游:JumpServer堡垒机助力中手游提升多云环境下安全运维能力](https://blog.fit2cloud.com/?p=732)
|
||||
- [中通快递:JumpServer主机安全运维实践](https://blog.fit2cloud.com/?p=708)
|
||||
- [东方明珠:JumpServer高效管控异构化、分布式云端资产](https://blog.fit2cloud.com/?p=687)
|
||||
- [江苏农信:JumpServer堡垒机助力行业云安全运维](https://blog.fit2cloud.com/?p=666)
|
||||
|
||||
@@ -92,4 +92,3 @@ Licensed under The GNU General Public License version 3 (GPLv3) (the "License")
|
||||
https://www.gnu.org/licenses/gpl-3.0.htmll
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
|
||||
|
||||
@@ -18,3 +18,4 @@ All security bugs should be reported to the contact as below:
|
||||
- ibuler@fit2cloud.com
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
|
||||
56
Vagrantfile
vendored
56
Vagrantfile
vendored
@@ -1,56 +0,0 @@
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
# The most common configuration options are documented and commented below.
|
||||
# For a complete reference, please see the online documentation at
|
||||
# https://docs.vagrantup.com.
|
||||
|
||||
# Every Vagrant development environment requires a box. You can search for
|
||||
# boxes at https://vagrantcloud.com/search.
|
||||
config.vm.box_check_update = false
|
||||
config.vm.box = "centos/7"
|
||||
config.vm.hostname = "jumpserver"
|
||||
config.vm.network "private_network", ip: "172.17.8.101"
|
||||
config.vm.provider "virtualbox" do |vb|
|
||||
vb.memory = "4096"
|
||||
vb.cpus = 2
|
||||
vb.name = "jumpserver"
|
||||
end
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", type: "rsync",
|
||||
rsync__verbose: true,
|
||||
rsync__exclude: ['.git*', 'node_modules*','*.log','*.box','Vagrantfile']
|
||||
|
||||
config.vm.provision "shell", inline: <<-SHELL
|
||||
## 设置yum的阿里云源
|
||||
sudo curl -o /etc/yum.repos.d/CentOS-Base.repo http://mirrors.aliyun.com/repo/Centos-7.repo
|
||||
sudo sed -i -e '/mirrors.cloud.aliyuncs.com/d' -e '/mirrors.aliyuncs.com/d' /etc/yum.repos.d/CentOS-Base.repo
|
||||
sudo curl -o /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo
|
||||
sudo yum makecache
|
||||
|
||||
## 安装依赖包
|
||||
sudo yum install -y python36 python36-devel python36-pip \
|
||||
libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel sshpass \
|
||||
openldap-devel mariadb-devel mysql-devel libffi-devel \
|
||||
openssh-clients telnet openldap-clients gcc
|
||||
|
||||
## 配置pip阿里云源
|
||||
mkdir /home/vagrant/.pip
|
||||
cat << EOF | sudo tee -a /home/vagrant/.pip/pip.conf
|
||||
[global]
|
||||
timeout = 6000
|
||||
index-url = https://mirrors.aliyun.com/pypi/simple/
|
||||
|
||||
[install]
|
||||
use-mirrors = true
|
||||
mirrors = https://mirrors.aliyun.com/pypi/simple/
|
||||
trusted-host=mirrors.aliyun.com
|
||||
EOF
|
||||
|
||||
python3.6 -m venv /home/vagrant/venv
|
||||
source /home/vagrant/venv/bin/activate
|
||||
echo 'source /home/vagrant/venv/bin/activate' >> /home/vagrant/.bash_profile
|
||||
SHELL
|
||||
end
|
||||
@@ -44,58 +44,29 @@ class LoginACL(BaseACL):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def action_reject(self):
|
||||
return self.action == self.ActionChoices.reject
|
||||
|
||||
@property
|
||||
def action_allow(self):
|
||||
return self.action == self.ActionChoices.allow
|
||||
def is_action(self, action):
|
||||
return self.action == action
|
||||
|
||||
@classmethod
|
||||
def filter_acl(cls, user):
|
||||
return user.login_acls.all().valid().distinct()
|
||||
|
||||
@staticmethod
|
||||
def allow_user_confirm_if_need(user, ip):
|
||||
acl = LoginACL.filter_acl(user).filter(
|
||||
action=LoginACL.ActionChoices.confirm
|
||||
).first()
|
||||
acl = acl if acl and acl.reviewers.exists() else None
|
||||
if not acl:
|
||||
return False, acl
|
||||
ip_group = acl.rules.get('ip_group')
|
||||
time_periods = acl.rules.get('time_period')
|
||||
is_contain_ip = contains_ip(ip, ip_group)
|
||||
is_contain_time_period = contains_time_period(time_periods)
|
||||
return is_contain_ip and is_contain_time_period, acl
|
||||
def match(user, ip):
|
||||
acls = LoginACL.filter_acl(user)
|
||||
if not acls:
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def allow_user_to_login(user, ip):
|
||||
acl = LoginACL.filter_acl(user).exclude(
|
||||
action=LoginACL.ActionChoices.confirm
|
||||
).first()
|
||||
if not acl:
|
||||
return True, ''
|
||||
ip_group = acl.rules.get('ip_group')
|
||||
time_periods = acl.rules.get('time_period')
|
||||
is_contain_ip = contains_ip(ip, ip_group)
|
||||
is_contain_time_period = contains_time_period(time_periods)
|
||||
|
||||
reject_type = ''
|
||||
if is_contain_ip and is_contain_time_period:
|
||||
# 满足条件
|
||||
allow = acl.action_allow
|
||||
if not allow:
|
||||
reject_type = 'ip' if is_contain_ip else 'time'
|
||||
else:
|
||||
# 不满足条件
|
||||
# 如果acl本身允许,那就拒绝;如果本身拒绝,那就允许
|
||||
allow = not acl.action_allow
|
||||
if not allow:
|
||||
reject_type = 'ip' if not is_contain_ip else 'time'
|
||||
|
||||
return allow, reject_type
|
||||
for acl in acls:
|
||||
if acl.is_action(LoginACL.ActionChoices.confirm) and not acl.reviewers.exists():
|
||||
continue
|
||||
ip_group = acl.rules.get('ip_group')
|
||||
time_periods = acl.rules.get('time_period')
|
||||
is_contain_ip = contains_ip(ip, ip_group)
|
||||
is_contain_time_period = contains_time_period(time_periods)
|
||||
if is_contain_ip and is_contain_time_period:
|
||||
# 满足条件,则返回
|
||||
return acl
|
||||
|
||||
def create_confirm_ticket(self, request):
|
||||
from tickets import const
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
from common.drf.serializers import BulkModelSerializer
|
||||
from common.drf.serializers import MethodSerializer
|
||||
|
||||
@@ -4,6 +4,7 @@ from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
from common.tree import TreeNodeSerializer
|
||||
from common.mixins.api import SuggestionMixin
|
||||
from .. import serializers
|
||||
|
||||
@@ -7,3 +7,7 @@ from django.apps import AppConfig
|
||||
class ApplicationsConfig(AppConfig):
|
||||
name = 'applications'
|
||||
verbose_name = _('Applications')
|
||||
|
||||
def ready(self):
|
||||
from . import signal_handlers
|
||||
super().ready()
|
||||
|
||||
@@ -27,6 +27,7 @@ class AppType(models.TextChoices):
|
||||
sqlserver = 'sqlserver', 'SQLServer'
|
||||
redis = 'redis', 'Redis'
|
||||
mongodb = 'mongodb', 'MongoDB'
|
||||
clickhouse = 'clickhouse', 'ClickHouse'
|
||||
|
||||
# remote-app category
|
||||
chrome = 'chrome', 'Chrome'
|
||||
@@ -42,7 +43,7 @@ class AppType(models.TextChoices):
|
||||
return {
|
||||
AppCategory.db: [
|
||||
cls.mysql, cls.mariadb, cls.oracle, cls.pgsql,
|
||||
cls.sqlserver, cls.redis, cls.mongodb
|
||||
cls.sqlserver, cls.redis, cls.mongodb, cls.clickhouse
|
||||
],
|
||||
AppCategory.remote_app: [
|
||||
cls.chrome, cls.mysql_workbench,
|
||||
@@ -82,10 +83,4 @@ class AppType(models.TextChoices):
|
||||
|
||||
if AppCategory.is_xpack(category):
|
||||
return True
|
||||
return tp in ['oracle', 'postgresql', 'sqlserver']
|
||||
|
||||
|
||||
class OracleVersion(models.TextChoices):
|
||||
version_11g = '11g', '11g'
|
||||
version_12c = '12c', '12c'
|
||||
version_other = 'other', _('Other')
|
||||
return tp in ['oracle', 'postgresql', 'sqlserver', 'clickhouse']
|
||||
|
||||
18
apps/applications/migrations/0024_alter_application_type.py
Normal file
18
apps/applications/migrations/0024_alter_application_type.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('applications', '0023_auto_20220715_1556'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='application',
|
||||
name='type',
|
||||
field=models.CharField(choices=[('mysql', 'MySQL'), ('mariadb', 'MariaDB'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type'),
|
||||
),
|
||||
]
|
||||
@@ -10,9 +10,7 @@ from common.mixins import CommonModelMixin
|
||||
from common.tree import TreeNode
|
||||
from common.utils import is_uuid
|
||||
from assets.models import Asset, SystemUser
|
||||
from ..const import OracleVersion
|
||||
|
||||
from ..utils import KubernetesTree
|
||||
from .. import const
|
||||
|
||||
|
||||
@@ -175,6 +173,7 @@ class ApplicationTreeNodeMixin:
|
||||
return pid
|
||||
|
||||
def as_tree_node(self, pid, k8s_as_tree=False):
|
||||
from ..utils import KubernetesTree
|
||||
if self.type == const.AppType.k8s and k8s_as_tree:
|
||||
node = KubernetesTree(pid).as_tree_node(self)
|
||||
else:
|
||||
@@ -304,15 +303,6 @@ class Application(CommonModelMixin, OrgModelMixin, ApplicationTreeNodeMixin):
|
||||
target_ip = self.attrs.get('host')
|
||||
return target_ip
|
||||
|
||||
def get_target_protocol_for_oracle(self):
|
||||
""" Oracle 类型需要单独处理,因为要携带版本号 """
|
||||
if not self.is_type(self.APP_TYPE.oracle):
|
||||
return
|
||||
version = self.attrs.get('version', OracleVersion.version_12c)
|
||||
if version == OracleVersion.version_other:
|
||||
return
|
||||
return 'oracle_%s' % version
|
||||
|
||||
|
||||
class ApplicationUser(SystemUser):
|
||||
class Meta:
|
||||
|
||||
@@ -16,7 +16,7 @@ from .. import const
|
||||
|
||||
__all__ = [
|
||||
'AppSerializer', 'MiniAppSerializer', 'AppSerializerMixin',
|
||||
'AppAccountSerializer', 'AppAccountSecretSerializer'
|
||||
'AppAccountSerializer', 'AppAccountSecretSerializer', 'AppAccountBackUpSerializer'
|
||||
]
|
||||
|
||||
|
||||
@@ -32,21 +32,23 @@ class AppSerializerMixin(serializers.Serializer):
|
||||
return instance
|
||||
|
||||
def get_attrs_serializer(self):
|
||||
default_serializer = serializers.Serializer(read_only=True)
|
||||
instance = self.app
|
||||
if instance:
|
||||
_type = instance.type
|
||||
_category = instance.category
|
||||
else:
|
||||
_type = self.context['request'].query_params.get('type')
|
||||
_category = self.context['request'].query_params.get('category')
|
||||
if _type:
|
||||
if isinstance(self, AppAccountSecretSerializer):
|
||||
serializer_class = type_secret_serializer_classes_mapping.get(_type)
|
||||
tp = getattr(self, 'tp', None)
|
||||
default_serializer = serializers.Serializer(read_only=True)
|
||||
if not tp:
|
||||
if instance:
|
||||
tp = instance.type
|
||||
category = instance.category
|
||||
else:
|
||||
serializer_class = type_serializer_classes_mapping.get(_type)
|
||||
elif _category:
|
||||
serializer_class = category_serializer_classes_mapping.get(_category)
|
||||
tp = self.context['request'].query_params.get('type')
|
||||
category = self.context['request'].query_params.get('category')
|
||||
if tp:
|
||||
if isinstance(self, AppAccountBackUpSerializer):
|
||||
serializer_class = type_secret_serializer_classes_mapping.get(tp)
|
||||
else:
|
||||
serializer_class = type_serializer_classes_mapping.get(tp)
|
||||
elif category:
|
||||
serializer_class = category_serializer_classes_mapping.get(category)
|
||||
else:
|
||||
serializer_class = default_serializer
|
||||
|
||||
@@ -154,11 +156,6 @@ class AppAccountSerializer(AppSerializerMixin, AuthSerializerMixin, BulkOrgResou
|
||||
|
||||
class AppAccountSecretSerializer(SecretReadableMixin, AppAccountSerializer):
|
||||
class Meta(AppAccountSerializer.Meta):
|
||||
fields_backup = [
|
||||
'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
|
||||
'public_key', 'date_created', 'date_updated', 'version'
|
||||
]
|
||||
|
||||
extra_kwargs = {
|
||||
'password': {'write_only': False},
|
||||
'private_key': {'write_only': False},
|
||||
@@ -166,3 +163,22 @@ class AppAccountSecretSerializer(SecretReadableMixin, AppAccountSerializer):
|
||||
'app_display': {'label': _('Application display')},
|
||||
'systemuser_display': {'label': _('System User')}
|
||||
}
|
||||
|
||||
|
||||
class AppAccountBackUpSerializer(AppAccountSecretSerializer):
|
||||
class Meta(AppAccountSecretSerializer.Meta):
|
||||
fields = [
|
||||
'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
|
||||
'public_key', 'date_created', 'date_updated', 'version'
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.tp = kwargs.pop('tp', None)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
return queryset
|
||||
|
||||
def to_representation(self, instance):
|
||||
return super(AppAccountSerializer, self).to_representation(instance)
|
||||
|
||||
@@ -13,3 +13,14 @@ class DBSerializer(serializers.Serializer):
|
||||
database = serializers.CharField(
|
||||
max_length=128, required=True, allow_null=True, label=_('Database')
|
||||
)
|
||||
use_ssl = serializers.BooleanField(default=False, label=_('Use SSL'))
|
||||
ca_cert = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('CA certificate')
|
||||
)
|
||||
client_cert = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('Client certificate file')
|
||||
)
|
||||
cert_key = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('Certificate key file')
|
||||
)
|
||||
allow_invalid_cert = serializers.BooleanField(default=False, label=_('Allow invalid cert'))
|
||||
|
||||
@@ -6,6 +6,7 @@ from .pgsql import *
|
||||
from .sqlserver import *
|
||||
from .redis import *
|
||||
from .mongodb import *
|
||||
from .clickhouse import *
|
||||
|
||||
from .chrome import *
|
||||
from .mysql_workbench import *
|
||||
|
||||
@@ -23,7 +23,7 @@ class ChromeSerializer(RemoteAppSerializer):
|
||||
)
|
||||
chrome_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Chrome password'), allow_null=True
|
||||
label=_('Chrome password'), allow_null=True, encrypted_key='chrome_password'
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from ..application_category import DBSerializer
|
||||
|
||||
__all__ = ['ClickHouseSerializer']
|
||||
|
||||
|
||||
class ClickHouseSerializer(DBSerializer):
|
||||
port = serializers.IntegerField(
|
||||
default=9000, label=_('Port'), allow_null=True,
|
||||
help_text=_(
|
||||
'Typically, the port is 9000,'
|
||||
'the HTTP interface and the native interface use different ports'
|
||||
),
|
||||
)
|
||||
@@ -2,15 +2,9 @@ from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from ..application_category import DBSerializer
|
||||
from applications.const import OracleVersion
|
||||
|
||||
__all__ = ['OracleSerializer']
|
||||
|
||||
|
||||
class OracleSerializer(DBSerializer):
|
||||
version = serializers.ChoiceField(
|
||||
choices=OracleVersion.choices, default=OracleVersion.version_12c,
|
||||
allow_null=True, label=_('Version'),
|
||||
help_text=_('Magnus currently supports only 11g and 12c connections')
|
||||
)
|
||||
port = serializers.IntegerField(default=1521, label=_('Port'), allow_null=True)
|
||||
|
||||
@@ -31,6 +31,7 @@ type_serializer_classes_mapping = {
|
||||
const.AppType.sqlserver.value: application_type.SQLServerSerializer,
|
||||
const.AppType.redis.value: application_type.RedisSerializer,
|
||||
const.AppType.mongodb.value: application_type.MongoDBSerializer,
|
||||
const.AppType.clickhouse.value: application_type.ClickHouseSerializer,
|
||||
# cloud
|
||||
const.AppType.k8s.value: application_type.K8SSerializer
|
||||
}
|
||||
|
||||
2
apps/applications/signal_handlers.py
Normal file
2
apps/applications/signal_handlers.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
@@ -1,18 +1,14 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from urllib3.exceptions import MaxRetryError
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from kubernetes import client
|
||||
from kubernetes.client import api_client
|
||||
from kubernetes.client.api import core_v1_api
|
||||
from kubernetes import client
|
||||
from kubernetes.client.exceptions import ApiException
|
||||
|
||||
from rest_framework.generics import get_object_or_404
|
||||
|
||||
from common.utils import get_logger
|
||||
from common.tree import TreeNode
|
||||
from assets.models import SystemUser
|
||||
|
||||
from common.tree import TreeNode
|
||||
from common.utils import get_logger
|
||||
from .. import const
|
||||
|
||||
logger = get_logger(__file__)
|
||||
@@ -23,7 +19,8 @@ class KubernetesClient:
|
||||
self.url = url
|
||||
self.token = token
|
||||
|
||||
def get_api(self):
|
||||
@property
|
||||
def api(self):
|
||||
configuration = client.Configuration()
|
||||
configuration.host = self.url
|
||||
configuration.verify_ssl = False
|
||||
@@ -32,63 +29,46 @@ class KubernetesClient:
|
||||
api = core_v1_api.CoreV1Api(c)
|
||||
return api
|
||||
|
||||
def get_namespace_list(self):
|
||||
api = self.get_api()
|
||||
namespace_list = []
|
||||
for ns in api.list_namespace().items:
|
||||
namespace_list.append(ns.metadata.name)
|
||||
return namespace_list
|
||||
def get_namespaces(self):
|
||||
namespaces = []
|
||||
resp = self.api.list_namespace()
|
||||
for ns in resp.items:
|
||||
namespaces.append(ns.metadata.name)
|
||||
return namespaces
|
||||
|
||||
def get_services(self):
|
||||
api = self.get_api()
|
||||
ret = api.list_service_for_all_namespaces(watch=False)
|
||||
for i in ret.items:
|
||||
print("%s \t%s \t%s \t%s \t%s \n" % (
|
||||
i.kind, i.metadata.namespace, i.metadata.name, i.spec.cluster_ip, i.spec.ports))
|
||||
def get_pods(self, namespace):
|
||||
pods = []
|
||||
resp = self.api.list_namespaced_pod(namespace)
|
||||
for pd in resp.items:
|
||||
pods.append(pd.metadata.name)
|
||||
return pods
|
||||
|
||||
def get_pod_info(self, namespace, pod):
|
||||
api = self.get_api()
|
||||
resp = api.read_namespaced_pod(namespace=namespace, name=pod)
|
||||
return resp
|
||||
def get_containers(self, namespace, pod_name):
|
||||
containers = []
|
||||
resp = self.api.read_namespaced_pod(pod_name, namespace)
|
||||
for container in resp.spec.containers:
|
||||
containers.append(container.name)
|
||||
return containers
|
||||
|
||||
def get_pod_logs(self, namespace, pod):
|
||||
api = self.get_api()
|
||||
log_content = api.read_namespaced_pod_log(pod, namespace, pretty=True, tail_lines=200)
|
||||
return log_content
|
||||
@classmethod
|
||||
def run(cls, asset, secret, tp='namespace'):
|
||||
k8s_url = f'{asset.address}'
|
||||
k8s = cls(k8s_url, secret)
|
||||
func_name = f'get_{tp}s'
|
||||
if hasattr(k8s, func_name):
|
||||
return getattr(k8s, func_name)()
|
||||
return []
|
||||
|
||||
def get_pods(self):
|
||||
api = self.get_api()
|
||||
try:
|
||||
ret = api.list_pod_for_all_namespaces(watch=False, _request_timeout=(3, 3))
|
||||
except MaxRetryError:
|
||||
logger.warning('Kubernetes connection timed out')
|
||||
return
|
||||
except ApiException as e:
|
||||
if e.status == 401:
|
||||
logger.warning('Kubernetes User not authenticated')
|
||||
else:
|
||||
logger.warning(e)
|
||||
return
|
||||
data = {}
|
||||
for i in ret.items:
|
||||
namespace = i.metadata.namespace
|
||||
pod_info = {
|
||||
'pod_name': i.metadata.name,
|
||||
'containers': [j.name for j in i.spec.containers]
|
||||
}
|
||||
if namespace in data:
|
||||
data[namespace].append(pod_info)
|
||||
else:
|
||||
data[namespace] = [pod_info, ]
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def get_kubernetes_data(app_id, system_user_id):
|
||||
@classmethod
|
||||
def get_kubernetes_data(cls, app_id, system_user_id, tp, *args):
|
||||
from ..models import Application
|
||||
app = get_object_or_404(Application, id=app_id)
|
||||
system_user = get_object_or_404(SystemUser, id=system_user_id)
|
||||
k8s = KubernetesClient(app.attrs['cluster'], system_user.token)
|
||||
return k8s.get_pods()
|
||||
k8s = cls(app.attrs['cluster'], system_user.token)
|
||||
func_name = f'get_{tp}s'
|
||||
if hasattr(k8s, func_name):
|
||||
return getattr(k8s, func_name)(*args)
|
||||
return []
|
||||
|
||||
|
||||
class KubernetesTree:
|
||||
@@ -118,11 +98,10 @@ class KubernetesTree:
|
||||
)
|
||||
return node
|
||||
|
||||
def as_namespace_pod_tree_node(self, name, meta, type, counts=0, is_container=False):
|
||||
def as_namespace_pod_tree_node(self, name, meta, type, is_container=False):
|
||||
from ..models import ApplicationTreeNodeMixin
|
||||
i = ApplicationTreeNodeMixin.create_tree_id(self.tree_id, type, name)
|
||||
meta.update({type: name})
|
||||
name = name if is_container else f'{name}({counts})'
|
||||
node = self.create_tree_node(
|
||||
i, self.tree_id, name, type, meta, icon='cloud', is_container=is_container
|
||||
)
|
||||
@@ -157,30 +136,30 @@ class KubernetesTree:
|
||||
system_user_id = parent_info.get('system_user_id')
|
||||
|
||||
tree_nodes = []
|
||||
data = KubernetesClient.get_kubernetes_data(app_id, system_user_id)
|
||||
if not data:
|
||||
return tree_nodes
|
||||
|
||||
if pod_name:
|
||||
for container in next(
|
||||
filter(
|
||||
lambda x: x['pod_name'] == pod_name, data[namespace]
|
||||
)
|
||||
)['containers']:
|
||||
tp = 'container'
|
||||
containers = KubernetesClient.get_kubernetes_data(
|
||||
app_id, system_user_id, tp, namespace, pod_name
|
||||
)
|
||||
for container in containers:
|
||||
container_node = self.as_namespace_pod_tree_node(
|
||||
container, parent_info, 'container', is_container=True
|
||||
container, parent_info, tp, is_container=True
|
||||
)
|
||||
tree_nodes.append(container_node)
|
||||
elif namespace:
|
||||
for pod in data[namespace]:
|
||||
pod_nodes = self.as_namespace_pod_tree_node(
|
||||
pod['pod_name'], parent_info, 'pod', len(pod['containers'])
|
||||
tp = 'pod'
|
||||
pods = KubernetesClient.get_kubernetes_data(app_id, system_user_id, tp, namespace)
|
||||
for pod in pods:
|
||||
pod_node = self.as_namespace_pod_tree_node(
|
||||
pod, parent_info, tp
|
||||
)
|
||||
tree_nodes.append(pod_nodes)
|
||||
tree_nodes.append(pod_node)
|
||||
elif system_user_id:
|
||||
for namespace, pods in data.items():
|
||||
tp = 'namespace'
|
||||
namespaces = KubernetesClient.get_kubernetes_data(app_id, system_user_id, tp)
|
||||
for namespace in namespaces:
|
||||
namespace_node = self.as_namespace_pod_tree_node(
|
||||
namespace, parent_info, 'namespace', len(pods)
|
||||
namespace, parent_info, tp
|
||||
)
|
||||
tree_nodes.append(namespace_node)
|
||||
return tree_nodes
|
||||
|
||||
@@ -6,7 +6,7 @@ from django.shortcuts import get_object_or_404
|
||||
from django.db.models import Q
|
||||
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from common.mixins.api import SuggestionMixin
|
||||
from common.mixins.api import SuggestionMixin, RenderToJsonMixin
|
||||
from users.models import User, UserGroup
|
||||
from users.serializers import UserSerializer, UserGroupSerializer
|
||||
from users.filters import UserFilter
|
||||
@@ -88,7 +88,7 @@ class AssetPlatformRetrieveApi(RetrieveAPIView):
|
||||
return asset.platform
|
||||
|
||||
|
||||
class AssetPlatformViewSet(ModelViewSet):
|
||||
class AssetPlatformViewSet(ModelViewSet, RenderToJsonMixin):
|
||||
queryset = Platform.objects.all()
|
||||
serializer_class = serializers.PlatformSerializer
|
||||
filterset_fields = ['name', 'base']
|
||||
|
||||
@@ -24,7 +24,7 @@ class SerializeToTreeNodeMixin:
|
||||
'title': _name(node),
|
||||
'pId': node.parent_key,
|
||||
'isParent': True,
|
||||
'open': node.is_org_root(),
|
||||
'open': True,
|
||||
'meta': {
|
||||
'data': {
|
||||
"id": node.id,
|
||||
|
||||
@@ -101,6 +101,8 @@ class NodeListAsTreeApi(generics.ListAPIView):
|
||||
|
||||
class NodeChildrenApi(generics.ListCreateAPIView):
|
||||
serializer_class = serializers.NodeSerializer
|
||||
search_fields = ('value',)
|
||||
|
||||
instance = None
|
||||
is_initial = False
|
||||
|
||||
@@ -179,8 +181,15 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
|
||||
"""
|
||||
model = Node
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not self.request.GET.get('search'):
|
||||
return queryset
|
||||
queryset = super().filter_queryset(queryset)
|
||||
queryset = self.model.get_ancestor_queryset(queryset)
|
||||
return queryset
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
nodes = self.get_queryset().order_by('value')
|
||||
nodes = self.filter_queryset(self.get_queryset()).order_by('value')
|
||||
nodes = self.serialize_nodes(nodes, with_asset_amount=True)
|
||||
assets = self.get_assets()
|
||||
data = [*nodes, *assets]
|
||||
|
||||
@@ -208,7 +208,7 @@ class SystemUserTaskApi(generics.CreateAPIView):
|
||||
|
||||
class SystemUserCommandFilterRuleListApi(generics.ListAPIView):
|
||||
rbac_perms = {
|
||||
'list': 'assets.view_commandfilterule'
|
||||
'list': 'assets.view_commandfilterule',
|
||||
}
|
||||
|
||||
def get_serializer_class(self):
|
||||
@@ -223,12 +223,14 @@ class SystemUserCommandFilterRuleListApi(generics.ListAPIView):
|
||||
if not system_user:
|
||||
system_user_id = self.request.query_params.get('system_user_id')
|
||||
asset_id = self.request.query_params.get('asset_id')
|
||||
node_id = self.request.query_params.get('node_id')
|
||||
application_id = self.request.query_params.get('application_id')
|
||||
rules = CommandFilterRule.get_queryset(
|
||||
user_id=user_id,
|
||||
user_group_id=user_group_id,
|
||||
system_user_id=system_user_id,
|
||||
asset_id=asset_id,
|
||||
node_id=node_id,
|
||||
application_id=application_id
|
||||
)
|
||||
return rules
|
||||
|
||||
@@ -21,8 +21,8 @@ class Migration(migrations.Migration):
|
||||
('name', models.CharField(max_length=64, verbose_name='Name')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_updated', models.DateTimeField(auto_now=True)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('created_by', models.CharField(blank=True, default='', max_length=128, verbose_name='Created by')),
|
||||
],
|
||||
options={
|
||||
|
||||
@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('is_periodic', models.BooleanField(default=False)),
|
||||
('is_periodic', models.BooleanField(default=False, verbose_name='Periodic perform')),
|
||||
('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
|
||||
('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
|
||||
18
apps/assets/migrations/0092_commandfilter_nodes.py
Normal file
18
apps/assets/migrations/0092_commandfilter_nodes.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.15 on 2022-10-09 09:55
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0091_auto_20220629_1826'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='commandfilter',
|
||||
name='nodes',
|
||||
field=models.ManyToManyField(blank=True, related_name='cmd_filters', to='assets.Node', verbose_name='Nodes'),
|
||||
),
|
||||
]
|
||||
18
apps/assets/migrations/0093_alter_systemuser_protocol.py
Normal file
18
apps/assets/migrations/0093_alter_systemuser_protocol.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0092_commandfilter_nodes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='protocol',
|
||||
field=models.CharField(choices=[('ssh', 'SSH'), ('rdp', 'RDP'), ('telnet', 'Telnet'), ('vnc', 'VNC'), ('mysql', 'MySQL'), ('oracle', 'Oracle'), ('mariadb', 'MariaDB'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('k8s', 'K8S')], default='ssh', max_length=16, verbose_name='Protocol'),
|
||||
),
|
||||
]
|
||||
@@ -116,9 +116,9 @@ class NodesRelationMixin:
|
||||
nodes = []
|
||||
for node in self.get_nodes():
|
||||
_nodes = node.get_ancestors(with_self=True)
|
||||
nodes.append(_nodes)
|
||||
nodes.extend(list(_nodes))
|
||||
if flat:
|
||||
nodes = list(reduce(lambda x, y: set(x) | set(y), nodes))
|
||||
nodes = list(set([node.id for node in nodes]))
|
||||
return nodes
|
||||
|
||||
|
||||
|
||||
@@ -1,28 +1,27 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import io
|
||||
import os
|
||||
import uuid
|
||||
from hashlib import md5
|
||||
|
||||
import sshpubkeys
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.db.models import QuerySet
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from common.utils import random_string, signer
|
||||
from common.db import fields
|
||||
from common.utils import random_string
|
||||
from common.utils import (
|
||||
ssh_key_string_to_obj, ssh_key_gen, get_logger, lazyproperty
|
||||
)
|
||||
from common.utils.encode import ssh_pubkey_gen
|
||||
from common.validators import alphanumeric
|
||||
from common.db import fields
|
||||
from common.utils.encode import (
|
||||
parse_ssh_public_key_str, parse_ssh_private_key_str
|
||||
)
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@@ -64,16 +63,16 @@ class AuthMixin:
|
||||
|
||||
@property
|
||||
def ssh_key_fingerprint(self):
|
||||
public_key = None
|
||||
if self.public_key:
|
||||
public_key = self.public_key
|
||||
elif self.private_key:
|
||||
try:
|
||||
public_key = ssh_pubkey_gen(private_key=self.private_key, password=self.password)
|
||||
public_key = parse_ssh_public_key_str(self.private_key, password=self.password)
|
||||
except IOError as e:
|
||||
return str(e)
|
||||
else:
|
||||
if not public_key:
|
||||
return ''
|
||||
|
||||
public_key_obj = sshpubkeys.SSHKey(public_key)
|
||||
fingerprint = public_key_obj.hash_md5()
|
||||
return fingerprint
|
||||
@@ -88,24 +87,29 @@ class AuthMixin:
|
||||
|
||||
@property
|
||||
def private_key_file(self):
|
||||
if not self.private_key_obj:
|
||||
if not self.private_key:
|
||||
return None
|
||||
private_key_str = self.get_private_key()
|
||||
if not private_key_str:
|
||||
return None
|
||||
project_dir = settings.PROJECT_DIR
|
||||
tmp_dir = os.path.join(project_dir, 'tmp')
|
||||
key_name = '.' + md5(self.private_key.encode('utf-8')).hexdigest()
|
||||
key_path = os.path.join(tmp_dir, key_name)
|
||||
if not os.path.exists(key_path):
|
||||
self.private_key_obj.write_private_key_file(key_path)
|
||||
with open(key_path, 'w') as f:
|
||||
f.write(private_key_str)
|
||||
os.chmod(key_path, 0o400)
|
||||
return key_path
|
||||
|
||||
def get_private_key(self):
|
||||
if not self.private_key_obj:
|
||||
if not self.private_key:
|
||||
return None
|
||||
string_io = io.StringIO()
|
||||
self.private_key_obj.write_private_key(string_io)
|
||||
private_key = string_io.getvalue()
|
||||
return private_key
|
||||
private_key_str = parse_ssh_private_key_str(self.private_key, password=self.password)
|
||||
if not private_key_str and self.password:
|
||||
# 由于历史原因,密码可能是真实的密码,而非私钥的 passphrase,所以这里再尝试一次
|
||||
private_key_str = parse_ssh_private_key_str(self.private_key)
|
||||
return private_key_str
|
||||
|
||||
@property
|
||||
def public_key_obj(self):
|
||||
@@ -234,4 +238,3 @@ class BaseUser(OrgModelMixin, AuthMixin):
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from users.models import User, UserGroup
|
||||
from applications.models import Application
|
||||
from ..models import SystemUser, Asset
|
||||
from ..models import SystemUser, Asset, Node
|
||||
|
||||
from common.utils import lazyproperty, get_logger, get_object_or_none
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
@@ -33,6 +33,10 @@ class CommandFilter(OrgModelMixin):
|
||||
'users.UserGroup', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("User group"),
|
||||
)
|
||||
nodes = models.ManyToManyField(
|
||||
'assets.Node', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("Nodes")
|
||||
)
|
||||
assets = models.ManyToManyField(
|
||||
'assets.Asset', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("Asset")
|
||||
@@ -46,8 +50,8 @@ class CommandFilter(OrgModelMixin):
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
|
||||
comment = models.TextField(blank=True, default='', verbose_name=_("Comment"))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
date_updated = models.DateTimeField(auto_now=True)
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
|
||||
date_updated = models.DateTimeField(auto_now=True, verbose_name=_('Date updated'))
|
||||
created_by = models.CharField(
|
||||
max_length=128, blank=True, default='', verbose_name=_('Created by')
|
||||
)
|
||||
@@ -189,7 +193,8 @@ class CommandFilterRule(OrgModelMixin):
|
||||
|
||||
@classmethod
|
||||
def get_queryset(cls, user_id=None, user_group_id=None, system_user_id=None,
|
||||
asset_id=None, application_id=None, org_id=None):
|
||||
asset_id=None, node_id=None, application_id=None, org_id=None):
|
||||
# user & user_group
|
||||
user_groups = []
|
||||
user = get_object_or_none(User, pk=user_id)
|
||||
if user:
|
||||
@@ -198,8 +203,18 @@ class CommandFilterRule(OrgModelMixin):
|
||||
if user_group:
|
||||
org_id = user_group.org_id
|
||||
user_groups.append(user_group)
|
||||
system_user = get_object_or_none(SystemUser, pk=system_user_id)
|
||||
|
||||
# asset & node
|
||||
nodes = []
|
||||
asset = get_object_or_none(Asset, pk=asset_id)
|
||||
if asset:
|
||||
nodes.extend(asset.get_all_nodes())
|
||||
node = get_object_or_none(Node, pk=node_id)
|
||||
if node:
|
||||
org_id = node.org_id
|
||||
nodes.extend(list(node.get_ancestors(with_self=True)))
|
||||
|
||||
system_user = get_object_or_none(SystemUser, pk=system_user_id)
|
||||
application = get_object_or_none(Application, pk=application_id)
|
||||
q = Q()
|
||||
if user:
|
||||
@@ -212,6 +227,8 @@ class CommandFilterRule(OrgModelMixin):
|
||||
if asset:
|
||||
org_id = asset.org_id
|
||||
q |= Q(assets=asset)
|
||||
if nodes:
|
||||
q |= Q(nodes__in=set(nodes))
|
||||
if application:
|
||||
org_id = application.org_id
|
||||
q |= Q(applications=application)
|
||||
|
||||
@@ -25,7 +25,6 @@ from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from orgs.utils import get_current_org, tmp_to_org, tmp_to_root_org
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
__all__ = ['Node', 'FamilyMixin', 'compute_parent_key', 'NodeQuerySet']
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -98,6 +97,14 @@ class FamilyMixin:
|
||||
q |= Q(key=self.key)
|
||||
return Node.objects.filter(q)
|
||||
|
||||
@classmethod
|
||||
def get_ancestor_queryset(cls, queryset, with_self=True):
|
||||
parent_keys = set()
|
||||
for i in queryset:
|
||||
parent_keys.update(set(i.get_ancestor_keys(with_self=with_self)))
|
||||
queryset = queryset.model.objects.filter(key__in=list(parent_keys)).distinct()
|
||||
return queryset
|
||||
|
||||
@property
|
||||
def children(self):
|
||||
return self.get_children(with_self=False)
|
||||
@@ -396,7 +403,7 @@ class NodeAllAssetsMappingMixin:
|
||||
mapping[ancestor_key].update(asset_ids)
|
||||
|
||||
t3 = time.time()
|
||||
logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2-t1, t3-t2))
|
||||
logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2 - t1, t3 - t2))
|
||||
return mapping
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from django.core.cache import cache
|
||||
|
||||
from common.utils import signer, get_object_or_none
|
||||
from common.utils import signer, get_object_or_none, is_uuid
|
||||
from .base import BaseUser
|
||||
from .asset import Asset
|
||||
from .authbook import AuthBook
|
||||
@@ -34,6 +34,7 @@ class ProtocolMixin:
|
||||
sqlserver = 'sqlserver', 'SQLServer'
|
||||
redis = 'redis', 'Redis'
|
||||
mongodb = 'mongodb', 'MongoDB'
|
||||
clickhouse = 'clickhouse', 'ClickHouse'
|
||||
k8s = 'k8s', 'K8S'
|
||||
|
||||
SUPPORT_PUSH_PROTOCOLS = [Protocol.ssh, Protocol.rdp]
|
||||
@@ -46,7 +47,7 @@ class ProtocolMixin:
|
||||
]
|
||||
APPLICATION_CATEGORY_DB_PROTOCOLS = [
|
||||
Protocol.mysql, Protocol.mariadb, Protocol.oracle,
|
||||
Protocol.postgresql, Protocol.sqlserver,
|
||||
Protocol.postgresql, Protocol.sqlserver, Protocol.clickhouse,
|
||||
Protocol.redis, Protocol.mongodb
|
||||
]
|
||||
APPLICATION_CATEGORY_CLOUD_PROTOCOLS = [
|
||||
@@ -321,9 +322,20 @@ class SystemUser(ProtocolMixin, AuthMixin, BaseUser):
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
return assets
|
||||
|
||||
def filter_contain_protocol_assets(self, assets_or_ids):
|
||||
if not assets_or_ids:
|
||||
return assets_or_ids
|
||||
if is_uuid(assets_or_ids[0]):
|
||||
assets = Asset.objects.filter(id__in=assets_or_ids)
|
||||
else:
|
||||
assets = assets_or_ids
|
||||
assets = [asset for asset in assets if self.protocol in asset.protocols_as_dict]
|
||||
return assets
|
||||
|
||||
def add_related_assets(self, assets_or_ids):
|
||||
self.assets.add(*tuple(assets_or_ids))
|
||||
self.add_related_assets_to_su_from_if_need(assets_or_ids)
|
||||
assets = self.filter_contain_protocol_assets(assets_or_ids)
|
||||
self.assets.add(*tuple(assets))
|
||||
self.add_related_assets_to_su_from_if_need(assets)
|
||||
|
||||
def add_related_assets_to_su_from_if_need(self, assets_or_ids):
|
||||
if self.protocol not in [self.Protocol.ssh.value]:
|
||||
|
||||
@@ -20,6 +20,6 @@ class AccountBackupExecutionTaskMsg(object):
|
||||
"please go to personal information -> file encryption password to set the encryption password").format(name)
|
||||
|
||||
def publish(self, attachment_list=None):
|
||||
send_mail_attachment_async.delay(
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachment_list
|
||||
)
|
||||
|
||||
@@ -76,10 +76,6 @@ class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
|
||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields_backup = [
|
||||
'hostname', 'ip', 'platform', 'protocols', 'username', 'password',
|
||||
'private_key', 'public_key', 'date_created', 'date_updated', 'version'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'password': {'write_only': False},
|
||||
'private_key': {'write_only': False},
|
||||
@@ -88,6 +84,22 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
}
|
||||
|
||||
|
||||
class AccountBackUpSerializer(AccountSecretSerializer):
|
||||
class Meta(AccountSecretSerializer.Meta):
|
||||
fields = [
|
||||
'id', 'hostname', 'ip', 'username', 'password',
|
||||
'private_key', 'public_key', 'date_created',
|
||||
'date_updated', 'version'
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
return queryset
|
||||
|
||||
def to_representation(self, instance):
|
||||
return super(AccountSerializer, self).to_representation(instance)
|
||||
|
||||
|
||||
class AccountTaskSerializer(serializers.Serializer):
|
||||
ACTION_CHOICES = (
|
||||
('test', 'test'),
|
||||
|
||||
@@ -189,6 +189,9 @@ class PlatformSerializer(serializers.ModelSerializer):
|
||||
'id', 'name', 'base', 'charset',
|
||||
'internal', 'meta', 'comment'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'internal': {'read_only': True},
|
||||
}
|
||||
|
||||
|
||||
class AssetSimpleSerializer(serializers.ModelSerializer):
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from io import StringIO
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils import ssh_pubkey_gen, ssh_private_key_gen, validate_ssh_private_key
|
||||
from common.drf.fields import EncryptedField
|
||||
from assets.models import Type
|
||||
from common.drf.fields import EncryptedField
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str, parse_ssh_public_key_str
|
||||
from .utils import validate_password_for_ansible
|
||||
|
||||
|
||||
class AuthSerializer(serializers.ModelSerializer):
|
||||
password = EncryptedField(required=False, allow_blank=True, allow_null=True, max_length=1024, label=_('Password'))
|
||||
private_key = EncryptedField(required=False, allow_blank=True, allow_null=True, max_length=16384, label=_('Private key'))
|
||||
private_key = EncryptedField(required=False, allow_blank=True, allow_null=True, max_length=16384,
|
||||
label=_('Private key'))
|
||||
|
||||
def gen_keys(self, private_key=None, password=None):
|
||||
if private_key is None:
|
||||
return None, None
|
||||
public_key = ssh_pubkey_gen(private_key=private_key, password=password)
|
||||
public_key = parse_ssh_public_key_str(text=private_key, password=password)
|
||||
return private_key, public_key
|
||||
|
||||
def save(self, **kwargs):
|
||||
@@ -57,10 +57,7 @@ class AuthSerializerMixin(serializers.ModelSerializer):
|
||||
if not valid:
|
||||
raise serializers.ValidationError(_("private key invalid or passphrase error"))
|
||||
|
||||
private_key = ssh_private_key_gen(private_key, password=passphrase)
|
||||
string_io = StringIO()
|
||||
private_key.write_private_key(string_io)
|
||||
private_key = string_io.getvalue()
|
||||
private_key = parse_ssh_private_key_str(private_key, password=passphrase)
|
||||
return private_key
|
||||
|
||||
def validate_public_key(self, public_key):
|
||||
|
||||
@@ -21,7 +21,7 @@ class CommandFilterSerializer(BulkOrgResourceModelSerializer):
|
||||
'comment', 'created_by',
|
||||
]
|
||||
fields_fk = ['rules']
|
||||
fields_m2m = ['users', 'user_groups', 'system_users', 'assets', 'applications']
|
||||
fields_m2m = ['users', 'user_groups', 'system_users', 'nodes', 'assets', 'applications']
|
||||
fields = fields_small + fields_fk + fields_m2m
|
||||
extra_kwargs = {
|
||||
'rules': {'read_only': True},
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.db.models import Count
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.mixins.serializers import BulkSerializerMixin
|
||||
from common.utils import ssh_pubkey_gen
|
||||
from common.drf.fields import EncryptedField
|
||||
from common.drf.serializers import SecretReadableMixin
|
||||
from common.mixins.serializers import BulkSerializerMixin
|
||||
from common.utils import parse_ssh_public_key_str
|
||||
from common.validators import alphanumeric_re, alphanumeric_cn_re, alphanumeric_win_re
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from ..models import SystemUser, Asset
|
||||
from .utils import validate_password_for_ansible
|
||||
from .base import AuthSerializerMixin
|
||||
from .utils import validate_password_for_ansible
|
||||
from ..models import SystemUser, Asset
|
||||
|
||||
__all__ = [
|
||||
'SystemUserSerializer', 'MiniSystemUserSerializer',
|
||||
@@ -36,9 +36,6 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
token = EncryptedField(
|
||||
label=_('Token'), required=False, write_only=True, style={'base_template': 'textarea.html'}
|
||||
)
|
||||
applications_amount = serializers.IntegerField(
|
||||
source='apps_amount', read_only=True, label=_('Apps amount')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = SystemUser
|
||||
@@ -53,7 +50,7 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
'su_enabled', 'su_from',
|
||||
'date_created', 'date_updated', 'comment', 'created_by',
|
||||
]
|
||||
fields_m2m = ['cmd_filters', 'assets_amount', 'applications_amount', 'nodes']
|
||||
fields_m2m = ['cmd_filters', 'nodes']
|
||||
fields = fields_small + fields_m2m
|
||||
extra_kwargs = {
|
||||
'cmd_filters': {"required": False, 'label': _('Command filter')},
|
||||
@@ -214,7 +211,7 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
elif attrs.get('private_key'):
|
||||
private_key = attrs['private_key']
|
||||
password = attrs.get('password')
|
||||
public_key = ssh_pubkey_gen(private_key, password=password, username=username)
|
||||
public_key = parse_ssh_public_key_str(private_key, password=password)
|
||||
attrs['public_key'] = public_key
|
||||
return attrs
|
||||
|
||||
@@ -241,7 +238,6 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
def setup_eager_loading(cls, queryset):
|
||||
""" Perform necessary eager loading of data. """
|
||||
queryset = queryset \
|
||||
.annotate(assets_amount=Count("assets")) \
|
||||
.prefetch_related('nodes', 'cmd_filters')
|
||||
return queryset
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str
|
||||
|
||||
|
||||
def validate_password_for_ansible(password):
|
||||
""" 校验 Ansible 不支持的特殊字符 """
|
||||
@@ -15,3 +17,9 @@ def validate_password_for_ansible(password):
|
||||
if '"' in password:
|
||||
raise serializers.ValidationError(_('Password can not contains `"` '))
|
||||
|
||||
|
||||
def validate_ssh_key(ssh_key, passphrase=None):
|
||||
valid = validate_ssh_private_key(ssh_key, password=passphrase)
|
||||
if not valid:
|
||||
raise serializers.ValidationError(_("private key invalid or passphrase error"))
|
||||
return parse_ssh_private_key_str(ssh_key, passphrase)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import os
|
||||
import threading
|
||||
|
||||
from django.db.models.signals import (
|
||||
m2m_changed, post_save, post_delete
|
||||
@@ -9,15 +7,15 @@ from django.db.models.signals import (
|
||||
from django.dispatch import receiver
|
||||
from django.utils.functional import LazyObject
|
||||
|
||||
from common.signals import django_ready
|
||||
from common.utils.connection import RedisPubSub
|
||||
from common.utils import get_logger
|
||||
from assets.models import Asset, Node
|
||||
from common.signals import django_ready
|
||||
from common.utils import get_logger
|
||||
from common.utils.connection import RedisPubSub
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
# clear node assets mapping for memory
|
||||
# ------------------------------------
|
||||
|
||||
@@ -78,9 +76,4 @@ def subscribe_node_assets_mapping_expire(sender, **kwargs):
|
||||
Node.expire_node_all_asset_ids_mapping_from_memory(org_id)
|
||||
Node.expire_node_all_asset_ids_mapping_from_memory(root_org_id)
|
||||
|
||||
def keep_subscribe_node_assets_relation():
|
||||
node_assets_mapping_for_memory_pub_sub.subscribe(handle_node_relation_change)
|
||||
|
||||
t = threading.Thread(target=keep_subscribe_node_assets_relation)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
node_assets_mapping_for_memory_pub_sub.subscribe(handle_node_relation_change)
|
||||
|
||||
@@ -4,15 +4,16 @@ from openpyxl import Workbook
|
||||
from collections import defaultdict, OrderedDict
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import F
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from assets.models import AuthBook
|
||||
from assets.serializers import AccountSecretSerializer
|
||||
from assets.models import AuthBook, SystemUser, Asset
|
||||
from assets.serializers import AccountBackUpSerializer
|
||||
from assets.notifications import AccountBackupExecutionTaskMsg
|
||||
from applications.models import Account
|
||||
from applications.models import Account, Application
|
||||
from applications.const import AppType
|
||||
from applications.serializers import AppAccountSecretSerializer
|
||||
from applications.serializers import AppAccountBackUpSerializer
|
||||
from users.models import User
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import local_now_display
|
||||
@@ -38,7 +39,7 @@ class BaseAccountHandler:
|
||||
@classmethod
|
||||
def get_header_fields(cls, serializer: serializers.Serializer):
|
||||
try:
|
||||
backup_fields = getattr(serializer, 'Meta').fields_backup
|
||||
backup_fields = getattr(serializer, 'Meta').fields
|
||||
except AttributeError:
|
||||
backup_fields = serializer.fields.keys()
|
||||
header_fields = {}
|
||||
@@ -51,17 +52,41 @@ class BaseAccountHandler:
|
||||
header_fields[field] = str(v.label)
|
||||
return header_fields
|
||||
|
||||
@staticmethod
|
||||
def load_auth(tp, value, system_user):
|
||||
if value:
|
||||
return value
|
||||
if system_user:
|
||||
return getattr(system_user, tp, '')
|
||||
return ''
|
||||
|
||||
@classmethod
|
||||
def create_row(cls, account, serializer_cls, header_fields=None):
|
||||
serializer = serializer_cls(account)
|
||||
if not header_fields:
|
||||
header_fields = cls.get_header_fields(serializer)
|
||||
data = cls.unpack_data(serializer.data)
|
||||
def replace_auth(cls, account, system_user_dict):
|
||||
system_user = system_user_dict.get(account.systemuser_id)
|
||||
account.username = cls.load_auth('username', account.username, system_user)
|
||||
account.password = cls.load_auth('password', account.password, system_user)
|
||||
account.private_key = cls.load_auth('private_key', account.private_key, system_user)
|
||||
account.public_key = cls.load_auth('public_key', account.public_key, system_user)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_row(cls, data, header_fields):
|
||||
data = cls.unpack_data(data)
|
||||
row_dict = {}
|
||||
for field, header_name in header_fields.items():
|
||||
row_dict[header_name] = str(data[field])
|
||||
row_dict[header_name] = str(data.get(field, field))
|
||||
return row_dict
|
||||
|
||||
@classmethod
|
||||
def add_rows(cls, data, header_fields, sheet):
|
||||
data_map = defaultdict(list)
|
||||
for i in data:
|
||||
row = cls.create_row(i, header_fields)
|
||||
if sheet not in data_map:
|
||||
data_map[sheet].append(list(row.keys()))
|
||||
data_map[sheet].append(list(row.values()))
|
||||
return data_map
|
||||
|
||||
|
||||
class AssetAccountHandler(BaseAccountHandler):
|
||||
@staticmethod
|
||||
@@ -72,22 +97,27 @@ class AssetAccountHandler(BaseAccountHandler):
|
||||
return filename
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls):
|
||||
data_map = defaultdict(list)
|
||||
def replace_account_info(cls, account, asset_dict, system_user_dict):
|
||||
asset = asset_dict.get(account.asset_id)
|
||||
account.ip = asset.ip if asset else ''
|
||||
account.hostname = asset.hostname if asset else ''
|
||||
account = cls.replace_auth(account, system_user_dict)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls, system_user_dict):
|
||||
sheet_name = AuthBook._meta.verbose_name
|
||||
assets = Asset.objects.only('id', 'hostname', 'ip')
|
||||
asset_dict = {asset.id: asset for asset in assets}
|
||||
accounts = AuthBook.objects.all()
|
||||
if not accounts.exists():
|
||||
return
|
||||
|
||||
accounts = AuthBook.get_queryset().select_related('systemuser')
|
||||
if not accounts.first():
|
||||
return data_map
|
||||
|
||||
header_fields = cls.get_header_fields(AccountSecretSerializer(accounts.first()))
|
||||
header_fields = cls.get_header_fields(AccountBackUpSerializer(accounts.first()))
|
||||
for account in accounts:
|
||||
account.load_auth()
|
||||
row = cls.create_row(account, AccountSecretSerializer, header_fields)
|
||||
if sheet_name not in data_map:
|
||||
data_map[sheet_name].append(list(row.keys()))
|
||||
data_map[sheet_name].append(list(row.values()))
|
||||
|
||||
cls.replace_account_info(account, asset_dict, system_user_dict)
|
||||
data = AccountBackUpSerializer(accounts, many=True).data
|
||||
data_map = cls.add_rows(data, header_fields, sheet_name)
|
||||
logger.info('\n\033[33m- 共收集 {} 条资产账号\033[0m'.format(accounts.count()))
|
||||
return data_map
|
||||
|
||||
@@ -101,18 +131,36 @@ class AppAccountHandler(BaseAccountHandler):
|
||||
return filename
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls):
|
||||
data_map = defaultdict(list)
|
||||
accounts = Account.get_queryset().select_related('systemuser')
|
||||
for account in accounts:
|
||||
account.load_auth()
|
||||
app_type = account.type
|
||||
def replace_account_info(cls, account, app_dict, system_user_dict):
|
||||
app = app_dict.get(account.app_id)
|
||||
account.type = app.type if app else ''
|
||||
account.app_display = app.name if app else ''
|
||||
account.category = app.category if app else ''
|
||||
account = cls.replace_auth(account, system_user_dict)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls, system_user_dict):
|
||||
apps = Application.objects.only('id', 'type', 'name', 'category')
|
||||
app_dict = {app.id: app for app in apps}
|
||||
qs = Account.objects.all().annotate(app_type=F('app__type'))
|
||||
if not qs.exists():
|
||||
return
|
||||
|
||||
account_type_map = defaultdict(list)
|
||||
for i in qs:
|
||||
account_type_map[i.app_type].append(i)
|
||||
data_map = {}
|
||||
for app_type, accounts in account_type_map.items():
|
||||
sheet_name = AppType.get_label(app_type)
|
||||
row = cls.create_row(account, AppAccountSecretSerializer)
|
||||
if sheet_name not in data_map:
|
||||
data_map[sheet_name].append(list(row.keys()))
|
||||
data_map[sheet_name].append(list(row.values()))
|
||||
logger.info('\n\033[33m- 共收集{}条应用账号\033[0m'.format(accounts.count()))
|
||||
header_fields = cls.get_header_fields(AppAccountBackUpSerializer(tp=app_type))
|
||||
if not accounts:
|
||||
continue
|
||||
for account in accounts:
|
||||
cls.replace_account_info(account, app_dict, system_user_dict)
|
||||
data = AppAccountBackUpSerializer(accounts, many=True, tp=app_type).data
|
||||
data_map.update(cls.add_rows(data, header_fields, sheet_name))
|
||||
logger.info('\n\033[33m- 共收集{}条应用账号\033[0m'.format(qs.count()))
|
||||
return data_map
|
||||
|
||||
|
||||
@@ -137,12 +185,16 @@ class AccountBackupHandler:
|
||||
# Print task start date
|
||||
time_start = time.time()
|
||||
files = []
|
||||
system_user_qs = SystemUser.objects.only(
|
||||
'id', 'username', 'password', 'private_key', 'public_key'
|
||||
)
|
||||
system_user_dict = {i.id: i for i in system_user_qs}
|
||||
for account_type in self.execution.types:
|
||||
handler = handler_map.get(account_type)
|
||||
if not handler:
|
||||
continue
|
||||
|
||||
data_map = handler.create_data_map()
|
||||
data_map = handler.create_data_map(system_user_dict)
|
||||
if not data_map:
|
||||
continue
|
||||
|
||||
|
||||
@@ -50,6 +50,10 @@ def clean_ansible_task_hosts(assets, system_user=None):
|
||||
for asset in assets:
|
||||
if not check_asset_can_run_ansible(asset):
|
||||
continue
|
||||
# 资产平台不包含系统用户的协议, 不推送
|
||||
if system_user and system_user.protocol not in asset.protocols_as_dict:
|
||||
logger.info(_('Asset protocol not support system user protocol, skipped: {}').format(system_user.protocol))
|
||||
continue
|
||||
cleaned_assets.append(asset)
|
||||
if not cleaned_assets:
|
||||
logger.info(_("No assets matched, stop task"))
|
||||
|
||||
@@ -1,20 +1,29 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin
|
||||
from importlib import import_module
|
||||
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
|
||||
from django.db.models import F, Value
|
||||
from django.db.models.functions import Concat
|
||||
from django.conf import settings
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework import generics
|
||||
|
||||
from common.drf.api import JMSReadOnlyModelViewSet
|
||||
from common.plugins.es import QuerySet as ESQuerySet
|
||||
from common.drf.filters import DatetimeRangeFilter
|
||||
from common.api import CommonGenericViewSet
|
||||
from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet, OrgRelationMixin
|
||||
from orgs.utils import current_org
|
||||
from ops.models import CommandExecution
|
||||
from . import filters
|
||||
from .backends import TYPE_ENGINE_MAPPING
|
||||
from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog
|
||||
from .serializers import FTPLogSerializer, UserLoginLogSerializer, CommandExecutionSerializer
|
||||
from .serializers import OperateLogSerializer, PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
from .serializers import (
|
||||
OperateLogSerializer, OperateLogActionDetailSerializer,
|
||||
PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
)
|
||||
|
||||
|
||||
class FTPLogViewSet(CreateModelMixin,
|
||||
@@ -67,7 +76,7 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):
|
||||
return qs
|
||||
|
||||
|
||||
class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
|
||||
model = OperateLog
|
||||
serializer_class = OperateLogSerializer
|
||||
extra_filter_backends = [DatetimeRangeFilter]
|
||||
@@ -78,6 +87,22 @@ class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
search_fields = ['resource']
|
||||
ordering = ['-datetime']
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('type') == 'action_detail':
|
||||
return OperateLogActionDetailSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_queryset(self):
|
||||
qs = OperateLog.objects.all()
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
store = engine_mod.OperateLogStore(es_config)
|
||||
if store.ping(timeout=2):
|
||||
qs = ESQuerySet(store)
|
||||
qs.model = OperateLog
|
||||
return qs
|
||||
|
||||
|
||||
class PasswordChangeLogViewSet(ListModelMixin, CommonGenericViewSet):
|
||||
queryset = PasswordChangeLog.objects.all()
|
||||
@@ -126,9 +151,7 @@ class CommandExecutionViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
class CommandExecutionHostRelationViewSet(OrgRelationMixin, OrgBulkModelViewSet):
|
||||
serializer_class = CommandExecutionHostsRelationSerializer
|
||||
m2m_field = CommandExecution.hosts.field
|
||||
filterset_fields = [
|
||||
'id', 'asset', 'commandexecution'
|
||||
]
|
||||
filterset_class = filters.CommandExecutionFilter
|
||||
search_fields = ('asset__hostname', )
|
||||
http_method_names = ['options', 'get']
|
||||
rbac_perms = {
|
||||
|
||||
18
apps/audits/backends/__init__.py
Normal file
18
apps/audits/backends/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from importlib import import_module
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
TYPE_ENGINE_MAPPING = {
|
||||
'db': 'audits.backends.db',
|
||||
'es': 'audits.backends.es',
|
||||
}
|
||||
|
||||
|
||||
def get_operate_log_storage(default=False):
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['db'])
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if not default and es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
storage = engine_mod.OperateLogStore(es_config)
|
||||
return storage
|
||||
38
apps/audits/backends/db.py
Normal file
38
apps/audits/backends/db.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from audits.models import OperateLog
|
||||
|
||||
|
||||
class OperateLogStore(object):
|
||||
def __init__(self, config):
|
||||
self.model = OperateLog
|
||||
self.max_length = 1024
|
||||
self.max_length_tip_msg = _(
|
||||
'The text content is too long. Use Elasticsearch to store operation logs'
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def ping(timeout=None):
|
||||
return True
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
if len(str(before)) > self.max_length:
|
||||
before = {_('Tips'): self.max_length_tip_msg}
|
||||
if len(str(after)) > self.max_length:
|
||||
after = {_('Tips'): self.max_length_tip_msg}
|
||||
|
||||
op_log = self.model.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
self.model.objects.create(**kwargs)
|
||||
85
apps/audits/backends/es.py
Normal file
85
apps/audits/backends/es.py
Normal file
@@ -0,0 +1,85 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import uuid
|
||||
|
||||
from common.utils.timezone import local_now_display
|
||||
from common.utils import get_logger
|
||||
from common.utils.encode import Singleton
|
||||
from common.plugins.es import ES
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class OperateLogStore(ES, metaclass=Singleton):
|
||||
def __init__(self, config):
|
||||
properties = {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"user": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"action": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"resource_type": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"org_id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"datetime": {
|
||||
"type": "date",
|
||||
"format": "yyyy-MM-dd HH:mm:ss"
|
||||
}
|
||||
}
|
||||
exact_fields = {}
|
||||
match_fields = {
|
||||
'id', 'user', 'action', 'resource_type',
|
||||
'resource', 'remote_addr', 'org_id'
|
||||
}
|
||||
keyword_fields = {
|
||||
'id', 'user', 'action', 'resource_type', 'org_id'
|
||||
}
|
||||
if not config.get('INDEX'):
|
||||
config['INDEX'] = 'jumpserver_operate_log'
|
||||
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
|
||||
self.pre_use_check()
|
||||
|
||||
@staticmethod
|
||||
def make_data(data):
|
||||
op_id = data.get('id', str(uuid.uuid4()))
|
||||
datetime_param = data.get('datetime', local_now_display())
|
||||
data = {
|
||||
'id': op_id, 'user': data['user'], 'action': data['action'],
|
||||
'resource_type': data['resource_type'], 'resource': data['resource'],
|
||||
'remote_addr': data['remote_addr'], 'datetime': datetime_param,
|
||||
'before': data['before'], 'after': data['after'], 'org_id': data['org_id']
|
||||
}
|
||||
return data
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
|
||||
op_log = self.get({'id': log_id})
|
||||
if op_log is not None:
|
||||
data = {'doc': {}}
|
||||
raw_after = op_log.get('after') or {}
|
||||
raw_before = op_log.get('before') or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
data['doc']['before'] = raw_before
|
||||
data['doc']['after'] = raw_after
|
||||
self.es.update(
|
||||
index=self.index, doc_type=self.doc_type,
|
||||
id=op_log.get('es_id'), body=data, refresh=True
|
||||
)
|
||||
else:
|
||||
data = self.make_data(kwargs)
|
||||
self.es.index(
|
||||
index=self.index, doc_type=self.doc_type, body=data,
|
||||
refresh=True
|
||||
)
|
||||
@@ -7,19 +7,30 @@ DEFAULT_CITY = _("Unknown")
|
||||
MODELS_NEED_RECORD = (
|
||||
# users
|
||||
'User', 'UserGroup',
|
||||
# authentication
|
||||
'AccessKey', 'TempToken',
|
||||
# acls
|
||||
'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
|
||||
# assets
|
||||
'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
|
||||
'CommandFilter', 'Platform', 'AuthBook',
|
||||
'CommandFilter', 'Platform', 'Label',
|
||||
# applications
|
||||
'Application',
|
||||
# account
|
||||
'AuthBook',
|
||||
# orgs
|
||||
'Organization',
|
||||
# settings
|
||||
'Setting',
|
||||
# perms
|
||||
'AssetPermission', 'ApplicationPermission',
|
||||
# notifications
|
||||
'SystemMsgSubscription', 'UserMsgSubscription',
|
||||
# Terminal
|
||||
'Terminal', 'Endpoint', 'EndpointRule', 'CommandStorage', 'ReplayStorage',
|
||||
# rbac
|
||||
'Role', 'SystemRole', 'OrgRole', 'RoleBinding', 'OrgRoleBinding', 'SystemRoleBinding',
|
||||
# xpack
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'GatherUserTask',
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'ApplicationChangeAuthPlan',
|
||||
'GatherUserTask', 'Interface',
|
||||
)
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
from django.db.models import F, Value
|
||||
from django.db.models.functions import Concat
|
||||
from django_filters.rest_framework import CharFilter
|
||||
from rest_framework import filters
|
||||
from rest_framework.compat import coreapi, coreschema
|
||||
|
||||
from orgs.utils import current_org
|
||||
from ops.models import CommandExecution
|
||||
from common.drf.filters import BaseFilterSet
|
||||
|
||||
|
||||
__all__ = ['CurrentOrgMembersFilter']
|
||||
__all__ = ['CurrentOrgMembersFilter', 'CommandExecutionFilter']
|
||||
|
||||
|
||||
class CurrentOrgMembersFilter(filters.BaseFilterBackend):
|
||||
@@ -30,3 +34,22 @@ class CurrentOrgMembersFilter(filters.BaseFilterBackend):
|
||||
else:
|
||||
queryset = queryset.filter(user__in=self._get_user_list())
|
||||
return queryset
|
||||
|
||||
|
||||
class CommandExecutionFilter(BaseFilterSet):
|
||||
hostname_ip = CharFilter(method='filter_hostname_ip')
|
||||
|
||||
class Meta:
|
||||
model = CommandExecution.hosts.through
|
||||
fields = (
|
||||
'id', 'asset', 'commandexecution', 'hostname_ip'
|
||||
)
|
||||
|
||||
def filter_hostname_ip(self, queryset, name, value):
|
||||
queryset = queryset.annotate(
|
||||
hostname_ip=Concat(
|
||||
F('asset__hostname'), Value('('),
|
||||
F('asset__ip'), Value(')')
|
||||
)
|
||||
).filter(hostname_ip__icontains=value)
|
||||
return queryset
|
||||
|
||||
191
apps/audits/handler.py
Normal file
191
apps/audits/handler.py
Normal file
@@ -0,0 +1,191 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_request_ip, get_logger
|
||||
from common.utils.timezone import as_current_tz
|
||||
from common.utils.encode import Singleton
|
||||
from common.local import encrypted_field_set
|
||||
from settings.serializers import SettingsSerializer
|
||||
from jumpserver.utils import current_request
|
||||
from audits.models import OperateLog
|
||||
from orgs.utils import get_current_org_id
|
||||
|
||||
from .backends import get_operate_log_storage
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ModelClient:
|
||||
@staticmethod
|
||||
def save(**kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
op_log = OperateLog.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
cur_before = kwargs.get('before') or {}
|
||||
cur_after = kwargs.get('after') or {}
|
||||
raw_before.update(cur_before)
|
||||
raw_after.update(cur_after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
OperateLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
class OperatorLogHandler(metaclass=Singleton):
|
||||
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
|
||||
|
||||
def __init__(self):
|
||||
self.log_client = self.get_storage_client()
|
||||
|
||||
@staticmethod
|
||||
def get_storage_client():
|
||||
client = get_operate_log_storage()
|
||||
return client
|
||||
|
||||
@staticmethod
|
||||
def _consistent_type_to_str(value1, value2):
|
||||
if isinstance(value1, datetime):
|
||||
value1 = as_current_tz(value1).strftime('%Y-%m-%d %H:%M:%S')
|
||||
if isinstance(value2, datetime):
|
||||
value2 = as_current_tz(value2).strftime('%Y-%m-%d %H:%M:%S')
|
||||
return value1, value2
|
||||
|
||||
def _look_for_two_dict_change(self, left_dict, right_dict):
|
||||
# 以右边的字典为基础
|
||||
before, after = {}, {}
|
||||
for key, value in right_dict.items():
|
||||
pre_value = left_dict.get(key, '')
|
||||
pre_value, value = self._consistent_type_to_str(pre_value, value)
|
||||
if sorted(str(value)) == sorted(str(pre_value)):
|
||||
continue
|
||||
if pre_value:
|
||||
before[key] = pre_value
|
||||
if value:
|
||||
after[key] = value
|
||||
return before, after
|
||||
|
||||
def cache_instance_before_data(self, instance_dict):
|
||||
instance_id = instance_dict.get('id')
|
||||
if instance_id is None:
|
||||
return
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache.set(key, instance_dict, 3 * 60)
|
||||
|
||||
def get_instance_dict_from_cache(self, instance_id):
|
||||
if instance_id is None:
|
||||
return None
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache_instance = cache.get(key, {})
|
||||
log_id = cache_instance.get('operate_log_id')
|
||||
return log_id, cache_instance
|
||||
|
||||
def get_instance_current_with_cache_diff(self, current_instance):
|
||||
log_id, before, after = None, None, None
|
||||
instance_id = current_instance.get('id')
|
||||
if instance_id is None:
|
||||
return log_id, before, after
|
||||
|
||||
log_id, cache_instance = self.get_instance_dict_from_cache(instance_id)
|
||||
if not cache_instance:
|
||||
return log_id, before, after
|
||||
|
||||
before, after = self._look_for_two_dict_change(
|
||||
cache_instance, current_instance
|
||||
)
|
||||
return log_id, before, after
|
||||
|
||||
@staticmethod
|
||||
def get_resource_display_from_setting(resource):
|
||||
resource_display = None
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
def get_resource_display(self, resource):
|
||||
resource_display = str(resource)
|
||||
return_value = self.get_resource_display_from_setting(resource_display)
|
||||
if return_value is not None:
|
||||
resource_display = return_value
|
||||
return resource_display
|
||||
|
||||
@staticmethod
|
||||
def serialized_value(value: (list, tuple)):
|
||||
if len(value) == 0:
|
||||
return ''
|
||||
if isinstance(value[0], str):
|
||||
return ','.join(value)
|
||||
return ','.join([i['value'] for i in value if i.get('value')])
|
||||
|
||||
def __data_processing(self, dict_item, loop=True):
|
||||
encrypt_value = '******'
|
||||
for key, value in dict_item.items():
|
||||
if isinstance(value, bool):
|
||||
value = _('Yes') if value else _('No')
|
||||
elif isinstance(value, (list, tuple)):
|
||||
value = self.serialized_value(value)
|
||||
elif isinstance(value, dict) and loop:
|
||||
self.__data_processing(value, loop=False)
|
||||
if key in encrypted_field_set:
|
||||
value = encrypt_value
|
||||
dict_item[key] = value
|
||||
return dict_item
|
||||
|
||||
def data_processing(self, before, after):
|
||||
if before:
|
||||
before = self.__data_processing(before)
|
||||
if after:
|
||||
after = self.__data_processing(after)
|
||||
return before, after
|
||||
|
||||
def create_or_update_operate_log(
|
||||
self, action, resource_type, resource=None,
|
||||
force=False, log_id=None, before=None, after=None
|
||||
):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
|
||||
remote_addr = get_request_ip(current_request)
|
||||
resource_display = self.get_resource_display(resource)
|
||||
before, after = self.data_processing(before, after)
|
||||
if not force and not any([before, after]):
|
||||
# 前后都没变化,没必要生成日志,除非手动强制保存
|
||||
return
|
||||
|
||||
data = {
|
||||
'id': log_id, "user": str(user), 'action': action,
|
||||
'resource_type': str(resource_type), 'resource': resource_display,
|
||||
'remote_addr': remote_addr, 'before': before, 'after': after,
|
||||
'org_id': get_current_org_id(),
|
||||
}
|
||||
with transaction.atomic():
|
||||
if self.log_client.ping(timeout=1):
|
||||
client = self.log_client
|
||||
else:
|
||||
logger.info('Switch default operate log storage save.')
|
||||
client = get_operate_log_storage(default=True)
|
||||
|
||||
try:
|
||||
client.save(**data)
|
||||
except Exception as e:
|
||||
error_msg = 'An error occurred saving OperateLog.' \
|
||||
'Error: %s, Data: %s' % (e, data)
|
||||
logger.error(error_msg)
|
||||
|
||||
|
||||
op_handler = OperatorLogHandler()
|
||||
create_or_update_operate_log = op_handler.create_or_update_operate_log
|
||||
cache_instance_before_data = op_handler.cache_instance_before_data
|
||||
get_instance_current_with_cache_diff = op_handler.get_instance_current_with_cache_diff
|
||||
get_instance_dict_from_cache = op_handler.get_instance_dict_from_cache
|
||||
24
apps/audits/migrations/0015_auto_20221011_1745.py
Normal file
24
apps/audits/migrations/0015_auto_20221011_1745.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.14 on 2022-10-11 09:45
|
||||
|
||||
import common.db.encoder
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0014_auto_20220505_1902'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='after',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='before',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
]
|
||||
@@ -4,8 +4,9 @@ from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext, ugettext_lazy as _
|
||||
from django.utils import timezone
|
||||
from common.utils import lazyproperty
|
||||
|
||||
from common.utils import lazyproperty
|
||||
from common.db.encoder import ModelJSONFieldEncoder
|
||||
from orgs.mixins.models import OrgModelMixin, Organization
|
||||
from orgs.utils import current_org
|
||||
|
||||
@@ -65,6 +66,8 @@ class OperateLog(OrgModelMixin):
|
||||
resource = models.CharField(max_length=128, verbose_name=_("Resource"))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
|
||||
before = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
after = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return "<{}> {} <{}>".format(self.user, self.action, self.resource)
|
||||
@@ -78,6 +81,21 @@ class OperateLog(OrgModelMixin):
|
||||
self.org_id = Organization.ROOT_ID
|
||||
return super(OperateLog, self).save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
self = cls()
|
||||
for k, v in d.items():
|
||||
setattr(self, k, v)
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def from_multi_dict(cls, l):
|
||||
operate_logs = []
|
||||
for d in l:
|
||||
operate_log = cls.from_dict(d)
|
||||
operate_logs.append(operate_log)
|
||||
return operate_logs
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Operate log")
|
||||
|
||||
|
||||
@@ -47,6 +47,12 @@ class UserLoginLogSerializer(serializers.ModelSerializer):
|
||||
}
|
||||
|
||||
|
||||
class OperateLogActionDetailSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.OperateLog
|
||||
fields = ('before', 'after')
|
||||
|
||||
|
||||
class OperateLogSerializer(serializers.ModelSerializer):
|
||||
action_display = serializers.CharField(source='get_action_display', label=_('Action'))
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from django.db.models.signals import (
|
||||
post_save, m2m_changed, pre_delete
|
||||
post_save, m2m_changed, pre_delete, pre_save
|
||||
)
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
@@ -16,24 +16,32 @@ from django.utils import translation
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.request import Request
|
||||
|
||||
from assets.models import Asset, SystemUser
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser, CommandFilter
|
||||
from terminal.models import Session, Command
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from rbac.models import Role
|
||||
|
||||
from audits.utils import model_to_dict_for_operate_log as model_to_dict
|
||||
from audits.handler import (
|
||||
get_instance_current_with_cache_diff, cache_instance_before_data,
|
||||
create_or_update_operate_log, get_instance_dict_from_cache
|
||||
)
|
||||
from authentication.signals import post_auth_failed, post_auth_success
|
||||
from authentication.utils import check_different_city_login_if_need
|
||||
from jumpserver.utils import current_request
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from terminal.models import Session, Command
|
||||
from .utils import write_login_log, create_operate_log
|
||||
from .utils import write_login_log
|
||||
from . import models, serializers
|
||||
from .models import OperateLog
|
||||
from orgs.utils import current_org
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from .const import MODELS_NEED_RECORD
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from terminal.serializers import SessionSerializer
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
|
||||
from common.utils import get_request_ip, get_logger, get_syslogger
|
||||
from common.utils.encode import data_to_json
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
sys_logger = get_syslogger(__name__)
|
||||
json_render = JSONRenderer()
|
||||
@@ -62,70 +70,6 @@ class AuthBackendLabelMapping(LazyObject):
|
||||
|
||||
AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
|
||||
|
||||
|
||||
M2M_NEED_RECORD = {
|
||||
User.groups.through._meta.object_name: (
|
||||
_('User and Group'),
|
||||
_('{User} JOINED {UserGroup}'),
|
||||
_('{User} LEFT {UserGroup}')
|
||||
),
|
||||
SystemUser.assets.through._meta.object_name: (
|
||||
_('Asset and SystemUser'),
|
||||
_('{Asset} ADD {SystemUser}'),
|
||||
_('{Asset} REMOVE {SystemUser}')
|
||||
),
|
||||
Asset.nodes.through._meta.object_name: (
|
||||
_('Node and Asset'),
|
||||
_('{Node} ADD {Asset}'),
|
||||
_('{Node} REMOVE {Asset}')
|
||||
),
|
||||
AssetPermission.users.through._meta.object_name: (
|
||||
_('User asset permissions'),
|
||||
_('{AssetPermission} ADD {User}'),
|
||||
_('{AssetPermission} REMOVE {User}'),
|
||||
),
|
||||
AssetPermission.user_groups.through._meta.object_name: (
|
||||
_('User group asset permissions'),
|
||||
_('{AssetPermission} ADD {UserGroup}'),
|
||||
_('{AssetPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
AssetPermission.assets.through._meta.object_name: (
|
||||
_('Asset permission'),
|
||||
_('{AssetPermission} ADD {Asset}'),
|
||||
_('{AssetPermission} REMOVE {Asset}'),
|
||||
),
|
||||
AssetPermission.nodes.through._meta.object_name: (
|
||||
_('Node permission'),
|
||||
_('{AssetPermission} ADD {Node}'),
|
||||
_('{AssetPermission} REMOVE {Node}'),
|
||||
),
|
||||
AssetPermission.system_users.through._meta.object_name: (
|
||||
_('Asset permission and SystemUser'),
|
||||
_('{AssetPermission} ADD {SystemUser}'),
|
||||
_('{AssetPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
ApplicationPermission.users.through._meta.object_name: (
|
||||
_('User application permissions'),
|
||||
_('{ApplicationPermission} ADD {User}'),
|
||||
_('{ApplicationPermission} REMOVE {User}'),
|
||||
),
|
||||
ApplicationPermission.user_groups.through._meta.object_name: (
|
||||
_('User group application permissions'),
|
||||
_('{ApplicationPermission} ADD {UserGroup}'),
|
||||
_('{ApplicationPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
ApplicationPermission.applications.through._meta.object_name: (
|
||||
_('Application permission'),
|
||||
_('{ApplicationPermission} ADD {Application}'),
|
||||
_('{ApplicationPermission} REMOVE {Application}'),
|
||||
),
|
||||
ApplicationPermission.system_users.through._meta.object_name: (
|
||||
_('Application permission and SystemUser'),
|
||||
_('{ApplicationPermission} ADD {SystemUser}'),
|
||||
_('{ApplicationPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
}
|
||||
|
||||
M2M_ACTION = {
|
||||
POST_ADD: OperateLog.ACTION_CREATE,
|
||||
POST_REMOVE: OperateLog.ACTION_DELETE,
|
||||
@@ -137,60 +81,115 @@ M2M_ACTION = {
|
||||
def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
|
||||
if action not in M2M_ACTION:
|
||||
return
|
||||
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
if not instance:
|
||||
return
|
||||
|
||||
sender_name = sender._meta.object_name
|
||||
if sender_name in M2M_NEED_RECORD:
|
||||
org_id = current_org.id
|
||||
remote_addr = get_request_ip(current_request)
|
||||
user = str(user)
|
||||
resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[sender_name]
|
||||
action = M2M_ACTION[action]
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
resource_tmpl = resource_tmpl_add
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
resource_tmpl = resource_tmpl_remove
|
||||
resource_type = instance._meta.verbose_name
|
||||
current_instance = model_to_dict(instance, include_model_fields=False)
|
||||
|
||||
to_create = []
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
instance_id = current_instance.get('id')
|
||||
log_id, before_instance = get_instance_dict_from_cache(instance_id)
|
||||
|
||||
instance_name = instance._meta.object_name
|
||||
instance_value = str(instance)
|
||||
field_name = str(model._meta.verbose_name)
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
objs_display = [str(o) for o in objs]
|
||||
action = M2M_ACTION[action]
|
||||
changed_field = current_instance.get(field_name, [])
|
||||
|
||||
model_name = model._meta.object_name
|
||||
after, before, before_value = None, None, None
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
before_value = list(set(changed_field) - set(objs_display))
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
before_value = list(
|
||||
set(changed_field).symmetric_difference(set(objs_display))
|
||||
)
|
||||
|
||||
for obj in objs:
|
||||
resource = resource_tmpl.format(**{
|
||||
instance_name: instance_value,
|
||||
model_name: str(obj)
|
||||
})[:128] # `resource` 字段只有 128 个字符长 😔
|
||||
if changed_field:
|
||||
after = {field_name: changed_field}
|
||||
if before_value:
|
||||
before = {field_name: before_value}
|
||||
|
||||
to_create.append(OperateLog(
|
||||
user=user, action=action, resource_type=resource_type,
|
||||
resource=resource, remote_addr=remote_addr, org_id=org_id
|
||||
))
|
||||
OperateLog.objects.bulk_create(to_create)
|
||||
if sorted(str(before)) == sorted(str(after)):
|
||||
return
|
||||
|
||||
create_or_update_operate_log(
|
||||
OperateLog.ACTION_UPDATE, resource_type,
|
||||
resource=instance, log_id=log_id, before=before, after=after
|
||||
)
|
||||
|
||||
|
||||
def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None):
|
||||
condition = True
|
||||
if not instance:
|
||||
condition = False
|
||||
if instance and getattr(instance, SKIP_SIGNAL, False):
|
||||
condition = False
|
||||
# 终端模型的 create 事件由系统产生,不记录
|
||||
if instance._meta.object_name == 'Terminal' and created:
|
||||
condition = False
|
||||
# last_login 改变是最后登录日期, 每次登录都会改变
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
condition = False
|
||||
# 不在记录白名单中,跳过
|
||||
if sender._meta.object_name not in MODELS_NEED_RECORD:
|
||||
condition = False
|
||||
return condition
|
||||
|
||||
|
||||
@receiver(pre_save)
|
||||
def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None, update_fields=None, **kwargs):
|
||||
ok = signal_of_operate_log_whether_continue(
|
||||
sender, instance, False, update_fields
|
||||
)
|
||||
if not ok:
|
||||
return
|
||||
instance_before_data = {'id': instance.id}
|
||||
raw_instance = type(instance).objects.filter(pk=instance.id).first()
|
||||
if raw_instance:
|
||||
instance_before_data = model_to_dict(raw_instance)
|
||||
operate_log_id = str(uuid.uuid4())
|
||||
instance_before_data['operate_log_id'] = operate_log_id
|
||||
setattr(instance, 'operate_log_id', operate_log_id)
|
||||
cache_instance_before_data(instance_before_data)
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
|
||||
# last_login 改变是最后登录日期, 每次登录都会改变
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
ok = signal_of_operate_log_whether_continue(
|
||||
sender, instance, created, update_fields
|
||||
)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
log_id, before, after = None, None, None
|
||||
if created:
|
||||
action = models.OperateLog.ACTION_CREATE
|
||||
after = model_to_dict(instance)
|
||||
log_id = getattr(instance, 'operate_log_id', None)
|
||||
else:
|
||||
action = models.OperateLog.ACTION_UPDATE
|
||||
create_operate_log(action, sender, instance)
|
||||
current_instance = model_to_dict(instance)
|
||||
log_id, before, after = get_instance_current_with_cache_diff(current_instance)
|
||||
|
||||
resource_type = sender._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
action, resource_type, resource=instance,
|
||||
log_id=log_id, before=before, after=after
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def on_object_delete(sender, instance=None, **kwargs):
|
||||
create_operate_log(models.OperateLog.ACTION_DELETE, sender, instance)
|
||||
ok = signal_of_operate_log_whether_continue(sender, instance, False)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
resource_type = sender._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
models.OperateLog.ACTION_DELETE, resource_type,
|
||||
resource=instance, before=model_to_dict(instance)
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_user_change_password, sender=User)
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
import csv
|
||||
import codecs
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.db import transaction
|
||||
from django.utils import translation
|
||||
from itertools import chain
|
||||
from datetime import datetime
|
||||
|
||||
from audits.models import OperateLog
|
||||
from common.utils import validate_ip, get_ip_city, get_request_ip, get_logger
|
||||
from jumpserver.utils import current_request
|
||||
from .const import DEFAULT_CITY, MODELS_NEED_RECORD
|
||||
from django.http import HttpResponse
|
||||
from django.db import models
|
||||
|
||||
from settings.serializers import SettingsSerializer
|
||||
from common.utils.timezone import as_current_tz
|
||||
from common.utils import validate_ip, get_ip_city, get_logger
|
||||
from .const import DEFAULT_CITY
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -46,23 +48,59 @@ def write_login_log(*args, **kwargs):
|
||||
UserLoginLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
def create_operate_log(action, sender, resource):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
model_name = sender._meta.object_name
|
||||
if model_name not in MODELS_NEED_RECORD:
|
||||
return
|
||||
with translation.override('en'):
|
||||
resource_type = sender._meta.verbose_name
|
||||
remote_addr = get_request_ip(current_request)
|
||||
def get_resource_display(resource):
|
||||
resource_display = str(resource)
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource_display)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
data = {
|
||||
"user": str(user), 'action': action, 'resource_type': resource_type,
|
||||
'resource': str(resource), 'remote_addr': remote_addr,
|
||||
}
|
||||
with transaction.atomic():
|
||||
try:
|
||||
OperateLog.objects.create(**data)
|
||||
except Exception as e:
|
||||
logger.error("Create operate log error: {}".format(e))
|
||||
|
||||
def model_to_dict_for_operate_log(
|
||||
instance, include_model_fields=True, include_related_fields=True
|
||||
):
|
||||
model_need_continue_fields = ['date_updated']
|
||||
m2m_need_continue_fields = ['history_passwords']
|
||||
opts = instance._meta
|
||||
data = {}
|
||||
for f in chain(opts.concrete_fields, opts.private_fields):
|
||||
if isinstance(f, (models.FileField, models.ImageField)):
|
||||
continue
|
||||
|
||||
if getattr(f, 'attname', None) in model_need_continue_fields:
|
||||
continue
|
||||
|
||||
value = getattr(instance, f.name) or getattr(instance, f.attname)
|
||||
if not isinstance(value, bool) and not value:
|
||||
continue
|
||||
|
||||
if getattr(f, 'primary_key', False):
|
||||
f.verbose_name = 'id'
|
||||
elif isinstance(value, list):
|
||||
value = [str(v) for v in value]
|
||||
elif isinstance(value, datetime):
|
||||
value = as_current_tz(value).strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
if include_model_fields or getattr(f, 'primary_key', False):
|
||||
data[str(f.verbose_name)] = value
|
||||
|
||||
if include_related_fields:
|
||||
for f in chain(opts.many_to_many, opts.related_objects):
|
||||
value = []
|
||||
if instance.pk is not None:
|
||||
related_name = getattr(f, 'attname', '') or getattr(f, 'related_name', '')
|
||||
if not related_name or related_name in m2m_need_continue_fields:
|
||||
continue
|
||||
try:
|
||||
value = [str(i) for i in getattr(instance, related_name).all()]
|
||||
except:
|
||||
pass
|
||||
if not value:
|
||||
continue
|
||||
try:
|
||||
field_key = getattr(f, 'verbose_name', None) or f.related_model._meta.verbose_name
|
||||
data[str(field_key)] = value
|
||||
except:
|
||||
pass
|
||||
return data
|
||||
|
||||
@@ -1,27 +1,27 @@
|
||||
import abc
|
||||
import os
|
||||
import json
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import urllib.parse
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.drf.api import JMSModelViewSet
|
||||
from common.http import is_true
|
||||
from orgs.mixins.api import RootOrgViewMixin
|
||||
from perms.models.base import Action
|
||||
from terminal.models import EndpointRule
|
||||
from ..models import ConnectionToken
|
||||
from ..serializers import (
|
||||
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
||||
SuperConnectionTokenSerializer, ConnectionTokenDisplaySerializer,
|
||||
)
|
||||
from ..models import ConnectionToken
|
||||
|
||||
|
||||
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']
|
||||
|
||||
@@ -63,12 +63,15 @@ class ConnectionTokenMixin:
|
||||
|
||||
def get_smart_endpoint(self, protocol, asset=None, application=None):
|
||||
if asset:
|
||||
target_instance = asset
|
||||
target_ip = asset.get_target_ip()
|
||||
elif application:
|
||||
target_instance = application
|
||||
target_ip = application.get_target_ip()
|
||||
else:
|
||||
target_instance = None
|
||||
target_ip = ''
|
||||
endpoint = EndpointRule.match_endpoint(target_ip, protocol, self.request)
|
||||
endpoint = EndpointRule.match_endpoint(target_instance, target_ip, protocol, self.request)
|
||||
return endpoint
|
||||
|
||||
@staticmethod
|
||||
@@ -86,7 +89,7 @@ class ConnectionTokenMixin:
|
||||
filename, ssh_token = self.get_ssh_token(token)
|
||||
else:
|
||||
raise ValueError('Protocol not support: {}'.format(protocol))
|
||||
|
||||
filename = urllib.parse.unquote(filename)
|
||||
return {
|
||||
"filename": filename,
|
||||
"protocol": protocol,
|
||||
@@ -163,6 +166,9 @@ class ConnectionTokenMixin:
|
||||
rdp_options['session bpp:i'] = os.getenv('JUMPSERVER_COLOR_DEPTH', '32')
|
||||
rdp_options['audiomode:i'] = self.parse_env_bool('JUMPSERVER_DISABLE_AUDIO', 'false', '2', '0')
|
||||
|
||||
if token.asset and token.asset.platform.meta.get('console', None) == 'true':
|
||||
rdp_options['administrative session:i:'] = '1'
|
||||
|
||||
if token.asset:
|
||||
name = token.asset.hostname
|
||||
elif token.application and token.application.category_remote_app:
|
||||
@@ -174,9 +180,8 @@ class ConnectionTokenMixin:
|
||||
rdp_options['remoteapplicationname:s'] = name
|
||||
else:
|
||||
name = '*'
|
||||
|
||||
filename = "{}-{}-jumpserver".format(token.user.username, name)
|
||||
filename = urllib.parse.quote(filename)
|
||||
prefix_name = f'{token.user.username}-{name}'
|
||||
filename = self.get_connect_filename(prefix_name)
|
||||
|
||||
content = ''
|
||||
for k, v in rdp_options.items():
|
||||
@@ -184,6 +189,15 @@ class ConnectionTokenMixin:
|
||||
|
||||
return filename, content
|
||||
|
||||
@staticmethod
|
||||
def get_connect_filename(prefix_name):
|
||||
prefix_name = prefix_name.replace('/', '_')
|
||||
prefix_name = prefix_name.replace('\\', '_')
|
||||
prefix_name = prefix_name.replace('.', '_')
|
||||
filename = f'{prefix_name}-jumpserver'
|
||||
filename = urllib.parse.quote(filename)
|
||||
return filename
|
||||
|
||||
def get_ssh_token(self, token: ConnectionToken):
|
||||
if token.asset:
|
||||
name = token.asset.hostname
|
||||
@@ -191,7 +205,8 @@ class ConnectionTokenMixin:
|
||||
name = token.application.name
|
||||
else:
|
||||
name = '*'
|
||||
filename = f'{token.user.username}-{name}-jumpserver'
|
||||
prefix_name = f'{token.user.username}-{name}'
|
||||
filename = self.get_connect_filename(prefix_name)
|
||||
|
||||
endpoint = self.get_smart_endpoint(
|
||||
protocol='ssh', asset=token.asset, application=token.application
|
||||
@@ -326,4 +341,3 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||
'msg': f'Token is renewed, date expired: {date_expired}'
|
||||
}
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@@ -6,6 +6,8 @@ from rest_framework.permissions import AllowAny
|
||||
|
||||
from common.utils import get_logger
|
||||
from .. import errors, mixins
|
||||
from django.contrib.auth import logout as auth_logout
|
||||
|
||||
|
||||
__all__ = ['TicketStatusApi']
|
||||
logger = get_logger(__name__)
|
||||
@@ -17,7 +19,16 @@ class TicketStatusApi(mixins.AuthMixin, APIView):
|
||||
def get(self, request, *args, **kwargs):
|
||||
try:
|
||||
self.check_user_login_confirm()
|
||||
self.request.session['auth_third_party_done'] = 1
|
||||
self.request.session.pop('auth_third_party_required', '')
|
||||
return Response({"msg": "ok"})
|
||||
except errors.LoginConfirmOtherError as e:
|
||||
reason = e.msg
|
||||
username = e.username
|
||||
self.send_auth_signal(success=False, username=username, reason=reason)
|
||||
# 若为三方登录,此时应退出登录
|
||||
auth_logout(request)
|
||||
return Response(e.as_data(), status=200)
|
||||
except errors.NeedMoreInfoError as e:
|
||||
return Response(e.as_data(), status=200)
|
||||
|
||||
|
||||
@@ -1,13 +1,71 @@
|
||||
from rest_framework.generics import CreateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import AllowAny
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.template.loader import render_to_string
|
||||
from django.core.cache import cache
|
||||
from django.shortcuts import reverse
|
||||
|
||||
from authentication.serializers import PasswordVerifySerializer
|
||||
from common.utils.verify_code import SendAndVerifyCodeUtil
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils.random import random_string
|
||||
from common.utils import get_object_or_none
|
||||
from authentication.serializers import (
|
||||
PasswordVerifySerializer, ResetPasswordCodeSerializer
|
||||
)
|
||||
from settings.utils import get_login_title
|
||||
from users.models import User
|
||||
from authentication.mixins import authenticate
|
||||
from authentication.errors import PasswordInvalid
|
||||
from authentication.mixins import AuthMixin
|
||||
|
||||
|
||||
class UserResetPasswordSendCodeApi(CreateAPIView):
|
||||
permission_classes = (AllowAny,)
|
||||
serializer_class = ResetPasswordCodeSerializer
|
||||
|
||||
@staticmethod
|
||||
def is_valid_user(**kwargs):
|
||||
user = get_object_or_none(User, **kwargs)
|
||||
if not user:
|
||||
err_msg = _('User does not exist: {}').format(_("No user matched"))
|
||||
return None, err_msg
|
||||
if not user.is_local:
|
||||
err_msg = _(
|
||||
'The user is from {}, please go to the corresponding system to change the password'
|
||||
).format(user.get_source_display())
|
||||
return None, err_msg
|
||||
return user, None
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
token = request.GET.get('token')
|
||||
userinfo = cache.get(token)
|
||||
if not userinfo:
|
||||
return reverse('authentication:forgot-previewing')
|
||||
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
username = userinfo.get('username')
|
||||
form_type = serializer.validated_data['form_type']
|
||||
code = random_string(6, lower=False, upper=False)
|
||||
other_args = {}
|
||||
|
||||
target = serializer.validated_data[form_type]
|
||||
query_key = 'phone' if form_type == 'sms' else form_type
|
||||
user, err = self.is_valid_user(username=username, **{query_key: target})
|
||||
if not user:
|
||||
return Response({'error': err}, status=400)
|
||||
|
||||
subject = '%s: %s' % (get_login_title(), _('Forgot password'))
|
||||
context = {
|
||||
'user': user, 'title': subject, 'code': code,
|
||||
}
|
||||
message = render_to_string('authentication/_msg_reset_password_code.html', context)
|
||||
other_args['subject'], other_args['message'] = subject, message
|
||||
SendAndVerifyCodeUtil(target, code, backend=form_type, **other_args).gen_and_send_async()
|
||||
return Response({'data': 'ok'}, status=200)
|
||||
|
||||
|
||||
class UserPasswordVerifyApi(AuthMixin, CreateAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
serializer_class = PasswordVerifySerializer
|
||||
|
||||
@@ -49,7 +49,7 @@ class JMSBaseAuthBackend:
|
||||
if not allow:
|
||||
info = 'User {} skip authentication backend {}, because it not in {}'
|
||||
info = info.format(username, backend_name, ','.join(allowed_backend_names))
|
||||
logger.debug(info)
|
||||
logger.info(info)
|
||||
return allow
|
||||
|
||||
|
||||
|
||||
@@ -3,9 +3,10 @@
|
||||
from django.urls import path
|
||||
import django_cas_ng.views
|
||||
|
||||
from .views import CASLoginView
|
||||
|
||||
urlpatterns = [
|
||||
path('login/', django_cas_ng.views.LoginView.as_view(), name='cas-login'),
|
||||
path('login/', CASLoginView.as_view(), name='cas-login'),
|
||||
path('logout/', django_cas_ng.views.LogoutView.as_view(), name='cas-logout'),
|
||||
path('callback/', django_cas_ng.views.CallbackView.as_view(), name='cas-proxy-callback'),
|
||||
]
|
||||
|
||||
15
apps/authentication/backends/cas/views.py
Normal file
15
apps/authentication/backends/cas/views.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from django_cas_ng.views import LoginView
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.http import HttpResponseRedirect
|
||||
|
||||
__all__ = ['LoginView']
|
||||
|
||||
|
||||
class CASLoginView(LoginView):
|
||||
def get(self, request):
|
||||
try:
|
||||
return super().get(request)
|
||||
except PermissionDenied:
|
||||
return HttpResponseRedirect('/')
|
||||
|
||||
|
||||
61
apps/authentication/backends/custom.py
Normal file
61
apps/authentication/backends/custom.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from django.conf import settings
|
||||
from django.utils.module_loading import import_string
|
||||
from common.utils import get_logger
|
||||
from django.contrib.auth import get_user_model
|
||||
from authentication.signals import user_auth_failed, user_auth_success
|
||||
|
||||
from .base import JMSModelBackend
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
custom_authenticate_method = None
|
||||
|
||||
if settings.AUTH_CUSTOM:
|
||||
""" 保证自定义认证方法在服务运行时不能被更改,只在第一次调用时加载一次 """
|
||||
try:
|
||||
custom_auth_method_path = 'data.auth.main.authenticate'
|
||||
custom_authenticate_method = import_string(custom_auth_method_path)
|
||||
except Exception as e:
|
||||
logger.warning('Import custom auth method failed: {}, Maybe not enabled'.format(e))
|
||||
|
||||
|
||||
class CustomAuthBackend(JMSModelBackend):
|
||||
|
||||
def is_enabled(self):
|
||||
return settings.AUTH_CUSTOM and callable(custom_authenticate_method)
|
||||
|
||||
@staticmethod
|
||||
def get_or_create_user_from_userinfo(userinfo: dict):
|
||||
username = userinfo['username']
|
||||
attrs = ['name', 'username', 'email', 'is_active']
|
||||
defaults = {attr: userinfo[attr] for attr in attrs}
|
||||
user, created = get_user_model().objects.get_or_create(
|
||||
username=username, defaults=defaults
|
||||
)
|
||||
return user, created
|
||||
|
||||
def authenticate(self, request, username=None, password=None, **kwargs):
|
||||
try:
|
||||
userinfo: dict = custom_authenticate_method(
|
||||
username=username, password=password, **kwargs
|
||||
)
|
||||
user, created = self.get_or_create_user_from_userinfo(userinfo)
|
||||
except Exception as e:
|
||||
logger.error('Custom authenticate error: {}'.format(e))
|
||||
return None
|
||||
|
||||
if self.user_can_authenticate(user):
|
||||
logger.info(f'Custom authenticate success: {user.username}')
|
||||
user_auth_success.send(
|
||||
sender=self.__class__, request=request, user=user,
|
||||
backend=settings.AUTH_BACKEND_CUSTOM
|
||||
)
|
||||
return user
|
||||
else:
|
||||
logger.info(f'Custom authenticate failed: {user.username}')
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=user.username,
|
||||
reason=_('User invalid, disabled or expired'),
|
||||
backend=settings.AUTH_BACKEND_CUSTOM
|
||||
)
|
||||
return None
|
||||
4
apps/authentication/backends/oauth2/__init__.py
Normal file
4
apps/authentication/backends/oauth2/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
from .backends import *
|
||||
170
apps/authentication/backends/oauth2/backends.py
Normal file
170
apps/authentication/backends/oauth2/backends.py
Normal file
@@ -0,0 +1,170 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import requests
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.utils.http import urlencode
|
||||
from django.conf import settings
|
||||
from django.urls import reverse
|
||||
|
||||
from common.utils import get_logger
|
||||
from users.utils import construct_user_email
|
||||
from authentication.utils import build_absolute_uri
|
||||
from authentication.signals import user_auth_failed, user_auth_success
|
||||
from common.exceptions import JMSException
|
||||
|
||||
from .signals import (
|
||||
oauth2_create_or_update_user
|
||||
)
|
||||
from ..base import JMSModelBackend
|
||||
|
||||
|
||||
__all__ = ['OAuth2Backend']
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class OAuth2Backend(JMSModelBackend):
    """OAuth2 authorization-code authentication backend.

    Exchanges the provider callback ``code`` for an access token, fetches the
    provider's userinfo endpoint, and maps the returned attributes onto a
    local user via ``settings.AUTH_OAUTH2_USER_ATTR_MAP``.
    """

    @staticmethod
    def is_enabled():
        # Backend participates in authentication only when OAuth2 is switched on.
        return settings.AUTH_OAUTH2

    @staticmethod
    def _build_url(base_url, query_dict):
        # Append query_dict to base_url, preserving any query string already
        # present on the configured endpoint (hence '&' vs '?').
        separator = '&' if '?' in base_url else '?'
        return '{url}{separator}{query}'.format(
            url=base_url, separator=separator, query=urlencode(query_dict)
        )

    def get_or_create_user_from_userinfo(self, request, userinfo):
        """Map provider userinfo onto a local user, creating it on first login.

        :param request: current HttpRequest, forwarded to the signal.
        :param userinfo: dict returned by the provider's userinfo endpoint.
        :return: tuple ``(user, created)``.
        :raises JMSException: when the mapped username attribute is missing.
        """
        log_prompt = "Get or Create user [OAuth2Backend]: {}"
        logger.debug(log_prompt.format('start'))

        # Construct user attrs value from the configured attribute mapping.
        user_attrs = {}
        for field, attr in settings.AUTH_OAUTH2_USER_ATTR_MAP.items():
            user_attrs[field] = userinfo.get(attr, '')

        username = user_attrs.get('username')
        if not username:
            error_msg = 'username is missing'
            logger.error(log_prompt.format(error_msg))
            raise JMSException(error_msg)

        # Fall back to a constructed email when the provider did not supply one.
        email = user_attrs.get('email', '')
        email = construct_user_email(username, email)
        user_attrs.update({'email': email})

        logger.debug(log_prompt.format(user_attrs))
        user, created = get_user_model().objects.get_or_create(
            username=username, defaults=user_attrs
        )
        logger.debug(log_prompt.format("user: {}|created: {}".format(user, created)))
        logger.debug(log_prompt.format("Send signal => oauth2 create or update user"))
        oauth2_create_or_update_user.send(
            sender=self.__class__, request=request, user=user, created=created,
            attrs=user_attrs
        )
        return user, created

    @staticmethod
    def get_response_data(response_data):
        # Some providers nest the real payload under a top-level "data" key.
        if response_data.get('data') is not None:
            response_data = response_data['data']
        return response_data

    @staticmethod
    def get_query_dict(response_data, query_dict):
        # Carry uid/access_token over into the userinfo request's query string.
        query_dict.update({
            'uid': response_data.get('uid', ''),
            'access_token': response_data.get('access_token', '')
        })
        return query_dict

    def authenticate(self, request, code=None, **kwargs):
        """Authenticate via the OAuth2 authorization-code flow.

        Returns the local ``User`` on success, or ``None`` on any failure so
        Django can fall through to the next configured backend.
        """
        log_prompt = "Process authenticate [OAuth2Backend]: {}"
        logger.debug(log_prompt.format('Start'))
        if code is None:
            logger.error(log_prompt.format('code is missing'))
            return None

        query_dict = {
            'client_id': settings.AUTH_OAUTH2_CLIENT_ID,
            'client_secret': settings.AUTH_OAUTH2_CLIENT_SECRET,
            'grant_type': 'authorization_code',
            'code': code,
            'redirect_uri': build_absolute_uri(
                request, path=reverse(settings.AUTH_OAUTH2_AUTH_LOGIN_CALLBACK_URL_NAME)
            )
        }
        access_token_url = self._build_url(
            settings.AUTH_OAUTH2_ACCESS_TOKEN_ENDPOINT, query_dict
        )
        token_method = settings.AUTH_OAUTH2_ACCESS_TOKEN_METHOD.lower()
        # Fall back to GET when the configured HTTP method is not a requests attr.
        requests_func = getattr(requests, token_method, requests.get)
        logger.debug(log_prompt.format('Call the access token endpoint[method: %s]' % token_method))
        headers = {
            'Accept': 'application/json'
        }
        access_token_response = requests_func(access_token_url, headers=headers)
        try:
            access_token_response.raise_for_status()
            access_token_response_data = access_token_response.json()
            response_data = self.get_response_data(access_token_response_data)
        except Exception as e:
            error = "Json access token response error, access token response " \
                    "content is: {}, error is: {}".format(access_token_response.content, str(e))
            logger.error(log_prompt.format(error))
            return None

        query_dict = self.get_query_dict(response_data, query_dict)

        headers = {
            'Accept': 'application/json',
            'Authorization': 'Bearer {}'.format(response_data.get('access_token', ''))
        }

        logger.debug(log_prompt.format('Get userinfo endpoint'))
        userinfo_url = self._build_url(
            settings.AUTH_OAUTH2_PROVIDER_USERINFO_ENDPOINT, query_dict
        )
        userinfo_response = requests.get(userinfo_url, headers=headers)
        try:
            userinfo_response.raise_for_status()
            userinfo_response_data = userinfo_response.json()
            # Same optional "data" nesting as the token response.
            if 'data' in userinfo_response_data:
                userinfo = userinfo_response_data['data']
            else:
                userinfo = userinfo_response_data
        except Exception as e:
            error = "Json userinfo response error, userinfo response " \
                    "content is: {}, error is: {}".format(userinfo_response.content, str(e))
            logger.error(log_prompt.format(error))
            return None

        try:
            logger.debug(log_prompt.format('Update or create oauth2 user'))
            user, created = self.get_or_create_user_from_userinfo(request, userinfo)
        except JMSException:
            return None

        if self.user_can_authenticate(user):
            logger.debug(log_prompt.format('OAuth2 user login success'))
            logger.debug(log_prompt.format('Send signal => oauth2 user login success'))
            user_auth_success.send(
                sender=self.__class__, request=request, user=user,
                backend=settings.AUTH_BACKEND_OAUTH2
            )
            return user
        else:
            logger.debug(log_prompt.format('OAuth2 user login failed'))
            logger.debug(log_prompt.format('Send signal => oauth2 user login failed'))
            user_auth_failed.send(
                sender=self.__class__, request=request, username=user.username,
                reason=_('User invalid, disabled or expired'),
                backend=settings.AUTH_BACKEND_OAUTH2
            )
            return None
|
||||
7
apps/authentication/backends/oauth2/signals.py
Normal file
7
apps/authentication/backends/oauth2/signals.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from django.dispatch import Signal
|
||||
|
||||
|
||||
# Fired after an OAuth2 login has mapped provider userinfo onto a local user;
# ``created`` tells listeners whether the account was just created.
# NOTE(review): ``providing_args`` is documentation-only and was removed in
# Django 4.0 — drop the argument when upgrading.
oauth2_create_or_update_user = Signal(
    providing_args=['request', 'user', 'created', 'name', 'username', 'email']
)
|
||||
|
||||
12
apps/authentication/backends/oauth2/urls.py
Normal file
12
apps/authentication/backends/oauth2/urls.py
Normal file
@@ -0,0 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
|
||||
# OAuth2 login-flow endpoints: kick-off redirect, provider callback, and logout.
urlpatterns = [
    path('login/', views.OAuth2AuthRequestView.as_view(), name='login'),
    path('callback/', views.OAuth2AuthCallbackView.as_view(), name='login-callback'),
    path('logout/', views.OAuth2EndSessionView.as_view(), name='logout')
]
|
||||
95
apps/authentication/backends/oauth2/views.py
Normal file
95
apps/authentication/backends/oauth2/views.py
Normal file
@@ -0,0 +1,95 @@
|
||||
from django.views import View
|
||||
from django.conf import settings
|
||||
from django.contrib import auth
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse
|
||||
from django.utils.http import urlencode
|
||||
|
||||
from authentication.utils import build_absolute_uri
|
||||
from common.utils import get_logger
|
||||
from authentication.mixins import authenticate
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class OAuth2AuthRequestView(View):
    """Start the OAuth2 authorization-code flow by redirecting to the provider."""

    def get(self, request):
        log_prompt = "Process OAuth2 GET requests: {}"
        logger.debug(log_prompt.format('Start'))

        callback_uri = build_absolute_uri(
            request, path=reverse(settings.AUTH_OAUTH2_AUTH_LOGIN_CALLBACK_URL_NAME)
        )
        query_dict = {
            'client_id': settings.AUTH_OAUTH2_CLIENT_ID, 'response_type': 'code',
            'scope': settings.AUTH_OAUTH2_SCOPE,
            'redirect_uri': callback_uri
        }

        # Respect any query string already present on the configured endpoint.
        base_url = settings.AUTH_OAUTH2_PROVIDER_AUTHORIZATION_ENDPOINT
        separator = '&' if '?' in base_url else '?'
        redirect_url = '{url}{separator}{query}'.format(
            url=base_url, separator=separator, query=urlencode(query_dict)
        )
        logger.debug(log_prompt.format('Redirect login url'))
        return HttpResponseRedirect(redirect_url)
|
||||
|
||||
|
||||
class OAuth2AuthCallbackView(View):
    """Handle the provider's redirect back with ``code`` and log the user in."""
    http_method_names = ['get', ]

    def get(self, request):
        """ Processes GET requests. """
        log_prompt = "Process GET requests [OAuth2AuthCallbackView]: {}"
        logger.debug(log_prompt.format('Start'))
        code = request.GET.get('code')

        if code is not None:
            logger.debug(log_prompt.format('Process authenticate'))
            user = authenticate(code=code, request=request)
            if user and user.is_valid:
                logger.debug(log_prompt.format('Login: {}'.format(user)))
                auth.login(self.request, user)
                logger.debug(log_prompt.format('Redirect'))
                return HttpResponseRedirect(
                    settings.AUTH_OAUTH2_AUTHENTICATION_REDIRECT_URI
                )

        logger.debug(log_prompt.format('Redirect'))
        # The OAuth2 provider accepted the login but the local user is disabled
        # (or no code arrived) — send the browser to the provider's logout so
        # the remote session is terminated as well.
        redirect_url = settings.AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT
        return HttpResponseRedirect(redirect_url)
|
||||
|
||||
|
||||
class OAuth2EndSessionView(View):
    """Terminate the local session and, when configured, the provider session too."""
    http_method_names = ['get', 'post', ]

    def get(self, request):
        """ Processes GET requests. """
        log_prompt = "Process GET requests [OAuth2EndSessionView]: {}"
        logger.debug(log_prompt.format('Start'))
        # GET logout is handled identically to POST.
        return self.post(request)

    def post(self, request):
        """ Processes POST requests. """
        log_prompt = "Process POST requests [OAuth2EndSessionView]: {}"
        logger.debug(log_prompt.format('Start'))

        fallback_url = settings.LOGOUT_REDIRECT_URL or '/'

        # Nothing to do for anonymous users — just redirect home.
        if not request.user.is_authenticated:
            logger.debug(log_prompt.format('Redirect'))
            return HttpResponseRedirect(fallback_url)

        logger.debug(log_prompt.format('Log out the current user: {}'.format(request.user)))
        auth.logout(request)

        # Optionally end the session on the OAuth2 provider as well.
        if settings.AUTH_OAUTH2_LOGOUT_COMPLETELY:
            logger.debug(log_prompt.format('Log out OAUTH2 platform user session synchronously'))
            return HttpResponseRedirect(settings.AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT)

        logger.debug(log_prompt.format('Redirect'))
        return HttpResponseRedirect(fallback_url)
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
import base64
|
||||
import requests
|
||||
|
||||
from rest_framework.exceptions import ParseError
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.backends import ModelBackend
|
||||
@@ -18,14 +19,16 @@ from django.urls import reverse
|
||||
from django.conf import settings
|
||||
|
||||
from common.utils import get_logger
|
||||
from authentication.utils import build_absolute_uri_for_oidc
|
||||
from users.utils import construct_user_email
|
||||
|
||||
from ..base import JMSBaseAuthBackend
|
||||
from .utils import validate_and_return_id_token, build_absolute_uri
|
||||
from .utils import validate_and_return_id_token
|
||||
from .decorator import ssl_verification
|
||||
from .signals import (
|
||||
openid_create_or_update_user, openid_user_login_failed, openid_user_login_success
|
||||
openid_create_or_update_user
|
||||
)
|
||||
from authentication.signals import user_auth_success, user_auth_failed
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
@@ -85,7 +88,7 @@ class OIDCAuthCodeBackend(OIDCBaseBackend):
|
||||
"""
|
||||
|
||||
@ssl_verification
|
||||
def authenticate(self, request, nonce=None, **kwargs):
|
||||
def authenticate(self, request, nonce=None, code_verifier=None, **kwargs):
|
||||
""" Authenticates users in case of the OpenID Connect Authorization code flow. """
|
||||
log_prompt = "Process authenticate [OIDCAuthCodeBackend]: {}"
|
||||
logger.debug(log_prompt.format('start'))
|
||||
@@ -127,10 +130,12 @@ class OIDCAuthCodeBackend(OIDCBaseBackend):
|
||||
token_payload = {
|
||||
'grant_type': 'authorization_code',
|
||||
'code': code,
|
||||
'redirect_uri': build_absolute_uri(
|
||||
'redirect_uri': build_absolute_uri_for_oidc(
|
||||
request, path=reverse(settings.AUTH_OPENID_AUTH_LOGIN_CALLBACK_URL_NAME)
|
||||
)
|
||||
}
|
||||
if settings.AUTH_OPENID_PKCE and code_verifier:
|
||||
token_payload['code_verifier'] = code_verifier
|
||||
if settings.AUTH_OPENID_CLIENT_AUTH_METHOD == 'client_secret_post':
|
||||
token_payload.update({
|
||||
'client_id': settings.AUTH_OPENID_CLIENT_ID,
|
||||
@@ -211,14 +216,18 @@ class OIDCAuthCodeBackend(OIDCBaseBackend):
|
||||
if self.user_can_authenticate(user):
|
||||
logger.debug(log_prompt.format('OpenID user login success'))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login success'))
|
||||
openid_user_login_success.send(sender=self.__class__, request=request, user=user)
|
||||
user_auth_success.send(
|
||||
sender=self.__class__, request=request, user=user,
|
||||
backend=settings.AUTH_BACKEND_OIDC_CODE
|
||||
)
|
||||
return user
|
||||
else:
|
||||
logger.debug(log_prompt.format('OpenID user login failed'))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login failed'))
|
||||
openid_user_login_failed.send(
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=user.username,
|
||||
reason="User is invalid"
|
||||
reason="User is invalid", backend=settings.AUTH_BACKEND_OIDC_CODE
|
||||
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -269,8 +278,9 @@ class OIDCAuthPasswordBackend(OIDCBaseBackend):
|
||||
"content is: {}, error is: {}".format(token_response.content, str(e))
|
||||
logger.debug(log_prompt.format(error))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login failed'))
|
||||
openid_user_login_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason=error
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason=error,
|
||||
backend=settings.AUTH_BACKEND_OIDC_PASSWORD
|
||||
)
|
||||
return
|
||||
|
||||
@@ -297,8 +307,9 @@ class OIDCAuthPasswordBackend(OIDCBaseBackend):
|
||||
"content is: {}, error is: {}".format(claims_response.content, str(e))
|
||||
logger.debug(log_prompt.format(error))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login failed'))
|
||||
openid_user_login_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason=error
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason=error,
|
||||
backend=settings.AUTH_BACKEND_OIDC_PASSWORD
|
||||
)
|
||||
return
|
||||
|
||||
@@ -310,13 +321,16 @@ class OIDCAuthPasswordBackend(OIDCBaseBackend):
|
||||
if self.user_can_authenticate(user):
|
||||
logger.debug(log_prompt.format('OpenID user login success'))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login success'))
|
||||
openid_user_login_success.send(
|
||||
sender=self.__class__, request=request, user=user
|
||||
user_auth_success.send(
|
||||
sender=self.__class__, request=request, user=user,
|
||||
backend=settings.AUTH_BACKEND_OIDC_PASSWORD
|
||||
)
|
||||
return user
|
||||
else:
|
||||
logger.debug(log_prompt.format('OpenID user login failed'))
|
||||
logger.debug(log_prompt.format('Send signal => openid user login failed'))
|
||||
openid_user_login_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason="User is invalid"
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=username, reason="User is invalid",
|
||||
backend=settings.AUTH_BACKEND_OIDC_PASSWORD
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -13,6 +13,4 @@ from django.dispatch import Signal
|
||||
openid_create_or_update_user = Signal(
|
||||
providing_args=['request', 'user', 'created', 'name', 'username', 'email']
|
||||
)
|
||||
openid_user_login_success = Signal(providing_args=['request', 'user'])
|
||||
openid_user_login_failed = Signal(providing_args=['request', 'username', 'reason'])
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
|
||||
import datetime as dt
|
||||
from calendar import timegm
|
||||
from urllib.parse import urlparse, urljoin
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.utils.encoding import force_bytes, smart_bytes
|
||||
@@ -110,17 +110,3 @@ def _validate_claims(id_token, nonce=None, validate_nonce=True):
|
||||
raise SuspiciousOperation('Incorrect id_token: nonce')
|
||||
|
||||
logger.debug(log_prompt.format('End'))
|
||||
|
||||
|
||||
def build_absolute_uri(request, path=None):
|
||||
"""
|
||||
Build absolute redirect uri
|
||||
"""
|
||||
if path is None:
|
||||
path = '/'
|
||||
|
||||
if settings.BASE_SITE_URL:
|
||||
redirect_uri = urljoin(settings.BASE_SITE_URL, path)
|
||||
else:
|
||||
redirect_uri = request.build_absolute_uri(path)
|
||||
return redirect_uri
|
||||
|
||||
@@ -9,7 +9,10 @@
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import time
|
||||
import secrets
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib import auth
|
||||
@@ -20,7 +23,8 @@ from django.utils.crypto import get_random_string
|
||||
from django.utils.http import is_safe_url, urlencode
|
||||
from django.views.generic import View
|
||||
|
||||
from .utils import get_logger, build_absolute_uri
|
||||
from authentication.utils import build_absolute_uri_for_oidc
|
||||
from .utils import get_logger
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
@@ -37,6 +41,19 @@ class OIDCAuthRequestView(View):
|
||||
|
||||
http_method_names = ['get', ]
|
||||
|
||||
@staticmethod
|
||||
def gen_code_verifier(length=128):
|
||||
# length range 43 ~ 128
|
||||
return secrets.token_urlsafe(length-32)
|
||||
|
||||
@staticmethod
|
||||
def gen_code_challenge(code_verifier, code_challenge_method):
|
||||
if code_challenge_method == 'plain':
|
||||
return code_verifier
|
||||
h = hashlib.sha256(code_verifier.encode('ascii')).digest()
|
||||
b = base64.urlsafe_b64encode(h)
|
||||
return b.decode('ascii')[:-1]
|
||||
|
||||
def get(self, request):
|
||||
""" Processes GET requests. """
|
||||
|
||||
@@ -50,11 +67,21 @@ class OIDCAuthRequestView(View):
|
||||
'scope': settings.AUTH_OPENID_SCOPES,
|
||||
'response_type': 'code',
|
||||
'client_id': settings.AUTH_OPENID_CLIENT_ID,
|
||||
'redirect_uri': build_absolute_uri(
|
||||
'redirect_uri': build_absolute_uri_for_oidc(
|
||||
request, path=reverse(settings.AUTH_OPENID_AUTH_LOGIN_CALLBACK_URL_NAME)
|
||||
)
|
||||
})
|
||||
|
||||
if settings.AUTH_OPENID_PKCE:
|
||||
code_verifier = self.gen_code_verifier()
|
||||
code_challenge_method = settings.AUTH_OPENID_CODE_CHALLENGE_METHOD or 'S256'
|
||||
code_challenge = self.gen_code_challenge(code_verifier, code_challenge_method)
|
||||
authentication_request_params.update({
|
||||
'code_challenge_method': code_challenge_method,
|
||||
'code_challenge': code_challenge
|
||||
})
|
||||
request.session['oidc_auth_code_verifier'] = code_verifier
|
||||
|
||||
# States should be used! They are recommended in order to maintain state between the
|
||||
# authentication request and the callback.
|
||||
if settings.AUTH_OPENID_USE_STATE:
|
||||
@@ -137,8 +164,9 @@ class OIDCAuthCallbackView(View):
|
||||
|
||||
# Authenticates the end-user.
|
||||
next_url = request.session.get('oidc_auth_next_url', None)
|
||||
code_verifier = request.session.get('oidc_auth_code_verifier', None)
|
||||
logger.debug(log_prompt.format('Process authenticate'))
|
||||
user = auth.authenticate(nonce=nonce, request=request)
|
||||
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
|
||||
if user and user.is_valid:
|
||||
logger.debug(log_prompt.format('Login: {}'.format(user)))
|
||||
auth.login(self.request, user)
|
||||
@@ -216,7 +244,7 @@ class OIDCEndSessionView(View):
|
||||
""" Returns the end-session URL. """
|
||||
q = QueryDict(mutable=True)
|
||||
q[settings.AUTH_OPENID_PROVIDER_END_SESSION_REDIRECT_URI_PARAMETER] = \
|
||||
build_absolute_uri(self.request, path=settings.LOGOUT_REDIRECT_URL or '/')
|
||||
build_absolute_uri_for_oidc(self.request, path=settings.LOGOUT_REDIRECT_URL or '/')
|
||||
q[settings.AUTH_OPENID_PROVIDER_END_SESSION_ID_TOKEN_PARAMETER] = \
|
||||
self.request.session['oidc_auth_id_token']
|
||||
return '{}?{}'.format(settings.AUTH_OPENID_PROVIDER_END_SESSION_ENDPOINT, q.urlencode())
|
||||
|
||||
@@ -7,9 +7,9 @@ from django.db import transaction
|
||||
from common.utils import get_logger
|
||||
from authentication.errors import reason_choices, reason_user_invalid
|
||||
from .signals import (
|
||||
saml2_user_authenticated, saml2_user_authentication_failed,
|
||||
saml2_create_or_update_user
|
||||
)
|
||||
from authentication.signals import user_auth_failed, user_auth_success
|
||||
from ..base import JMSModelBackend
|
||||
|
||||
__all__ = ['SAML2Backend']
|
||||
@@ -39,7 +39,7 @@ class SAML2Backend(JMSModelBackend):
|
||||
return user, created
|
||||
|
||||
def authenticate(self, request, saml_user_data=None, **kwargs):
|
||||
log_prompt = "Process authenticate [SAML2AuthCodeBackend]: {}"
|
||||
log_prompt = "Process authenticate [SAML2Backend]: {}"
|
||||
logger.debug(log_prompt.format('Start'))
|
||||
if saml_user_data is None:
|
||||
logger.error(log_prompt.format('saml_user_data is missing'))
|
||||
@@ -48,21 +48,23 @@ class SAML2Backend(JMSModelBackend):
|
||||
logger.debug(log_prompt.format('saml data, {}'.format(saml_user_data)))
|
||||
username = saml_user_data.get('username')
|
||||
if not username:
|
||||
logger.debug(log_prompt.format('username is missing'))
|
||||
logger.warning(log_prompt.format('username is missing'))
|
||||
return None
|
||||
|
||||
user, created = self.get_or_create_from_saml_data(request, **saml_user_data)
|
||||
|
||||
if self.user_can_authenticate(user):
|
||||
logger.debug(log_prompt.format('SAML2 user login success'))
|
||||
saml2_user_authenticated.send(
|
||||
sender=self, request=request, user=user, created=created
|
||||
user_auth_success.send(
|
||||
sender=self.__class__, request=request, user=user, created=created,
|
||||
backend=settings.AUTH_BACKEND_SAML2
|
||||
)
|
||||
return user
|
||||
else:
|
||||
logger.debug(log_prompt.format('SAML2 user login failed'))
|
||||
saml2_user_authentication_failed.send(
|
||||
sender=self, request=request, username=username,
|
||||
reason=reason_choices.get(reason_user_invalid)
|
||||
user_auth_failed.send(
|
||||
sender=self.__class__, request=request, username=username,
|
||||
reason=reason_choices.get(reason_user_invalid),
|
||||
backend=settings.AUTH_BACKEND_SAML2
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -2,5 +2,3 @@ from django.dispatch import Signal
|
||||
|
||||
|
||||
saml2_create_or_update_user = Signal(providing_args=('user', 'created', 'request', 'attrs'))
|
||||
saml2_user_authenticated = Signal(providing_args=('user', 'created', 'request'))
|
||||
saml2_user_authentication_failed = Signal(providing_args=('request', 'username', 'reason'))
|
||||
|
||||
@@ -3,7 +3,7 @@ import copy
|
||||
from urllib import parse
|
||||
|
||||
from django.views import View
|
||||
from django.contrib import auth as auth
|
||||
from django.contrib import auth
|
||||
from django.urls import reverse
|
||||
from django.conf import settings
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
@@ -271,7 +271,10 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin):
|
||||
auth.login(self.request, user)
|
||||
|
||||
logger.debug(log_prompt.format('Redirect'))
|
||||
next_url = saml_instance.redirect_to(post_data.get('RelayState', '/'))
|
||||
redir = post_data.get('RelayState')
|
||||
if not redir or len(redir) == 0:
|
||||
redir = "/"
|
||||
next_url = saml_instance.redirect_to(redir)
|
||||
return HttpResponseRedirect(next_url)
|
||||
|
||||
@csrf_exempt
|
||||
|
||||
@@ -2,7 +2,7 @@ from django.db.models import TextChoices
|
||||
|
||||
from authentication.confirm import CONFIRM_BACKENDS
|
||||
from .confirm import ConfirmMFA, ConfirmPassword, ConfirmReLogin
|
||||
from .mfa import MFAOtp, MFASms, MFARadius
|
||||
from .mfa import MFAOtp, MFASms, MFARadius, MFACustom
|
||||
|
||||
RSA_PRIVATE_KEY = 'rsa_private_key'
|
||||
RSA_PUBLIC_KEY = 'rsa_public_key'
|
||||
@@ -35,3 +35,4 @@ class MFAType(TextChoices):
|
||||
OTP = MFAOtp.name, MFAOtp.display_name
|
||||
SMS = MFASms.name, MFASms.display_name
|
||||
Radius = MFARadius.name, MFARadius.display_name
|
||||
Custom = MFACustom.name, MFACustom.display_name
|
||||
|
||||
@@ -12,12 +12,13 @@ class AuthFailedNeedLogMixin:
|
||||
username = ''
|
||||
request = None
|
||||
error = ''
|
||||
msg = ''
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
post_auth_failed.send(
|
||||
sender=self.__class__, username=self.username,
|
||||
request=self.request, reason=self.error
|
||||
request=self.request, reason=self.msg
|
||||
)
|
||||
|
||||
|
||||
@@ -55,7 +56,8 @@ class BlockGlobalIpLoginError(AuthFailedError):
|
||||
error = 'block_global_ip_login'
|
||||
|
||||
def __init__(self, username, ip, **kwargs):
|
||||
self.msg = const.block_ip_login_msg.format(settings.SECURITY_LOGIN_IP_LIMIT_TIME)
|
||||
if not self.msg:
|
||||
self.msg = const.block_ip_login_msg.format(settings.SECURITY_LOGIN_IP_LIMIT_TIME)
|
||||
LoginIpBlockUtil(ip).set_block_if_need()
|
||||
super().__init__(username=username, ip=ip, **kwargs)
|
||||
|
||||
@@ -65,22 +67,21 @@ class CredentialError(
|
||||
BlockGlobalIpLoginError, AuthFailedError
|
||||
):
|
||||
def __init__(self, error, username, ip, request):
|
||||
super().__init__(error=error, username=username, ip=ip, request=request)
|
||||
util = LoginBlockUtil(username, ip)
|
||||
times_remainder = util.get_remainder_times()
|
||||
block_time = settings.SECURITY_LOGIN_LIMIT_TIME
|
||||
|
||||
if times_remainder < 1:
|
||||
self.msg = const.block_user_login_msg.format(settings.SECURITY_LOGIN_LIMIT_TIME)
|
||||
return
|
||||
|
||||
default_msg = const.invalid_login_msg.format(
|
||||
times_try=times_remainder, block_time=block_time
|
||||
)
|
||||
if error == const.reason_password_failed:
|
||||
self.msg = default_msg
|
||||
else:
|
||||
self.msg = const.reason_choices.get(error, default_msg)
|
||||
default_msg = const.invalid_login_msg.format(
|
||||
times_try=times_remainder, block_time=block_time
|
||||
)
|
||||
if error == const.reason_password_failed:
|
||||
self.msg = default_msg
|
||||
else:
|
||||
self.msg = const.reason_choices.get(error, default_msg)
|
||||
# 先处理 msg 在 super,记录日志时原因才准确
|
||||
super().__init__(error=error, username=username, ip=ip, request=request)
|
||||
|
||||
|
||||
class MFAFailedError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
@@ -138,18 +139,11 @@ class ACLError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
}
|
||||
|
||||
|
||||
class LoginIPNotAllowed(ACLError):
|
||||
class LoginACLIPAndTimePeriodNotAllowed(ACLError):
|
||||
def __init__(self, username, request, **kwargs):
|
||||
self.username = username
|
||||
self.request = request
|
||||
super().__init__(_("IP is not allowed"), **kwargs)
|
||||
|
||||
|
||||
class TimePeriodNotAllowed(ACLError):
|
||||
def __init__(self, username, request, **kwargs):
|
||||
self.username = username
|
||||
self.request = request
|
||||
super().__init__(_("Time Period is not allowed"), **kwargs)
|
||||
super().__init__(_("Current IP and Time period is not allowed"), **kwargs)
|
||||
|
||||
|
||||
class MFACodeRequiredError(AuthFailedError):
|
||||
|
||||
@@ -69,10 +69,16 @@ class LoginConfirmWaitError(LoginConfirmBaseError):
|
||||
class LoginConfirmOtherError(LoginConfirmBaseError):
|
||||
error = 'login_confirm_error'
|
||||
|
||||
def __init__(self, ticket_id, status):
|
||||
def __init__(self, ticket_id, status, username):
|
||||
self.username = username
|
||||
msg = const.login_confirm_error_msg.format(status)
|
||||
super().__init__(ticket_id=ticket_id, msg=msg)
|
||||
|
||||
def as_data(self):
|
||||
ret = super().as_data()
|
||||
ret['data']['username'] = self.username
|
||||
return ret
|
||||
|
||||
|
||||
class PasswordTooSimple(NeedRedirectError):
|
||||
default_code = 'passwd_too_simple'
|
||||
|
||||
@@ -62,7 +62,7 @@ class CustomCaptchaTextInput(CaptchaTextInput):
|
||||
|
||||
|
||||
class CaptchaMixin(forms.Form):
|
||||
captcha = CaptchaField(widget=CustomCaptchaTextInput)
|
||||
captcha = CaptchaField(widget=CustomCaptchaTextInput, label=_('Captcha'))
|
||||
|
||||
|
||||
class ChallengeMixin(forms.Form):
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from .otp import MFAOtp, otp_failed_msg
|
||||
from .sms import MFASms
|
||||
from .radius import MFARadius
|
||||
|
||||
MFA_BACKENDS = [MFAOtp, MFASms, MFARadius]
|
||||
from .custom import MFACustom
|
||||
|
||||
59
apps/authentication/mfa/custom.py
Normal file
59
apps/authentication/mfa/custom.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from django.conf import settings
|
||||
from django.utils.module_loading import import_string
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from .base import BaseMFA
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
# Resolved once at import time; stays ``None`` when custom MFA is disabled
# or the site-provided hook module cannot be imported.
mfa_custom_method = None

if settings.MFA_CUSTOM:
    """ Ensure the custom auth method cannot be changed while the service is running; it is loaded only once, on first import. """
    try:
        # Fixed lookup path: the deployment supplies data/mfa/main.py exposing check_code().
        mfa_custom_method_path = 'data.mfa.main.check_code'
        mfa_custom_method = import_string(mfa_custom_method_path)
    except Exception as e:
        # Broad catch is deliberate: a broken/absent hook must not stop startup.
        logger.warning('Import custom auth method failed: {}, Maybe not enabled'.format(e))

custom_failed_msg = _("MFA Custom code invalid")
|
||||
|
||||
|
||||
class MFACustom(BaseMFA):
    """MFA backend that delegates code verification to a site-provided hook."""
    name = 'mfa_custom'
    display_name = 'Custom'
    placeholder = _("MFA custom verification code")

    def check_code(self, code):
        """Run the external hook; any exception or falsy result counts as invalid."""
        assert self.is_authenticated()
        ok = False
        try:
            ok = mfa_custom_method(user=self.user, code=code)
        except Exception as err:
            logger.error('Custom authenticate error: {}'.format(err))
        if ok:
            return ok, ''
        return ok, custom_failed_msg

    def is_active(self):
        # Custom MFA has no per-user enrollment — always active.
        return True

    @staticmethod
    def global_enabled():
        # Enabled only when the setting is on AND the hook resolved to a callable.
        return settings.MFA_CUSTOM and callable(mfa_custom_method)

    def get_enable_url(self) -> str:
        # No per-user enable page exists for this backend.
        return ''

    def can_disable(self):
        # Globally enforced; individual users cannot opt out.
        return False

    def disable(self):
        return ''

    @staticmethod
    def help_text_of_disable():
        return _("MFA custom global enabled, cannot disable")

    def get_disable_url(self) -> str:
        return ''
|
||||
@@ -2,7 +2,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
|
||||
from .base import BaseMFA
|
||||
from common.sdk.sms import SendAndVerifySMSUtil
|
||||
from common.utils.verify_code import SendAndVerifyCodeUtil
|
||||
|
||||
sms_failed_msg = _("SMS verify code invalid")
|
||||
|
||||
@@ -15,7 +15,7 @@ class MFASms(BaseMFA):
|
||||
def __init__(self, user):
|
||||
super().__init__(user)
|
||||
phone = user.phone if self.is_authenticated() else ''
|
||||
self.sms = SendAndVerifySMSUtil(phone)
|
||||
self.sms = SendAndVerifyCodeUtil(phone, backend=self.name)
|
||||
|
||||
def check_code(self, code):
|
||||
assert self.is_authenticated()
|
||||
@@ -37,7 +37,7 @@ class MFASms(BaseMFA):
|
||||
return True
|
||||
|
||||
def send_challenge(self):
|
||||
self.sms.gen_and_send()
|
||||
self.sms.gen_and_send_async()
|
||||
|
||||
@staticmethod
|
||||
def global_enabled():
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
import base64
|
||||
|
||||
from django.shortcuts import redirect, reverse
|
||||
from django.shortcuts import redirect, reverse, render
|
||||
from django.utils.deprecation import MiddlewareMixin
|
||||
from django.http import HttpResponse
|
||||
from django.conf import settings
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.contrib.auth import logout as auth_logout
|
||||
|
||||
from apps.authentication import mixins
|
||||
from common.utils import gen_key_pair
|
||||
from common.utils import get_request_ip
|
||||
from .signals import post_auth_failed
|
||||
|
||||
|
||||
class MFAMiddleware:
|
||||
@@ -13,6 +18,7 @@ class MFAMiddleware:
|
||||
这个 中间件 是用来全局拦截开启了 MFA 却没有认证的,如 OIDC, CAS,使用第三方库做的登录,直接 login 了,
|
||||
所以只能在 Middleware 中控制
|
||||
"""
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
@@ -42,6 +48,61 @@ class MFAMiddleware:
|
||||
return redirect(url)
|
||||
|
||||
|
||||
class ThirdPartyLoginMiddleware(mixins.AuthMixin):
|
||||
"""OpenID、CAS、SAML2登录规则设置验证"""
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
response = self.get_response(request)
|
||||
# 没有认证过,证明不是从 第三方 来的
|
||||
if request.user.is_anonymous:
|
||||
return response
|
||||
if not request.session.get('auth_third_party_required'):
|
||||
return response
|
||||
|
||||
white_urls = [
|
||||
'jsi18n/', '/static/',
|
||||
'login/guard', 'login/wait-confirm',
|
||||
'login-confirm-ticket/status',
|
||||
'settings/public/open',
|
||||
'core/auth/login', 'core/auth/logout'
|
||||
]
|
||||
for url in white_urls:
|
||||
if request.path.find(url) > -1:
|
||||
return response
|
||||
|
||||
ip = get_request_ip(request)
|
||||
try:
|
||||
self.request = request
|
||||
self._check_login_acl(request.user, ip)
|
||||
except Exception as e:
|
||||
post_auth_failed.send(
|
||||
sender=self.__class__, username=request.user.username,
|
||||
request=self.request, reason=e.msg
|
||||
)
|
||||
auth_logout(request)
|
||||
context = {
|
||||
'title': _('Authentication failed'),
|
||||
'message': _('Authentication failed (before login check failed): {}').format(e),
|
||||
'interval': 10,
|
||||
'redirect_url': reverse('authentication:login'),
|
||||
'auto_redirect': True,
|
||||
}
|
||||
response = render(request, 'authentication/auth_fail_flash_message_standalone.html', context)
|
||||
else:
|
||||
if not self.request.session['auth_confirm_required']:
|
||||
return response
|
||||
guard_url = reverse('authentication:login-guard')
|
||||
args = request.META.get('QUERY_STRING', '')
|
||||
if args:
|
||||
guard_url = "%s?%s" % (guard_url, args)
|
||||
response = redirect(guard_url)
|
||||
finally:
|
||||
return response
|
||||
|
||||
|
||||
class SessionCookieMiddleware(MiddlewareMixin):
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-28 08:07
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
@@ -27,7 +28,7 @@ class Migration(migrations.Migration):
|
||||
models.UUIDField(default=uuid.uuid4, editable=False,
|
||||
verbose_name='AccessKeySecret')),
|
||||
('user', models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
on_delete=common.db.models.CASCADE_SIGNAL_SKIP,
|
||||
related_name='access_keys',
|
||||
to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
|
||||
@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='accesskey',
|
||||
name='date_created',
|
||||
field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2019, 7, 29, 6, 23, 54, 115123, tzinfo=utc)),
|
||||
field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2019, 7, 29, 6, 23, 54, 115123, tzinfo=utc), verbose_name='Date created'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Generated by Django 3.1.13 on 2021-12-27 02:59
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='ssotoken',
|
||||
name='user',
|
||||
field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='User'),
|
||||
field=models.ForeignKey(db_constraint=False, on_delete=common.db.models.CASCADE_SIGNAL_SKIP, to=settings.AUTH_USER_MODEL, verbose_name='User'),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -225,6 +225,7 @@ class MFAMixin:
|
||||
self.request.session['auth_mfa_time'] = time.time()
|
||||
self.request.session['auth_mfa_required'] = 0
|
||||
self.request.session['auth_mfa_type'] = mfa_type
|
||||
MFABlockUtils(self.request.user.username, self.get_request_ip()).clean_failed_count()
|
||||
|
||||
def clean_mfa_mark(self):
|
||||
keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type']
|
||||
@@ -328,13 +329,59 @@ class AuthACLMixin:
|
||||
|
||||
def _check_login_acl(self, user, ip):
|
||||
# ACL 限制用户登录
|
||||
is_allowed, limit_type = LoginACL.allow_user_to_login(user, ip)
|
||||
if is_allowed:
|
||||
acl = LoginACL.match(user, ip)
|
||||
if not acl:
|
||||
return
|
||||
if limit_type == 'ip':
|
||||
raise errors.LoginIPNotAllowed(username=user.username, request=self.request)
|
||||
elif limit_type == 'time':
|
||||
raise errors.TimePeriodNotAllowed(username=user.username, request=self.request)
|
||||
|
||||
acl: LoginACL
|
||||
if acl.is_action(acl.ActionChoices.allow):
|
||||
return
|
||||
|
||||
if acl.is_action(acl.ActionChoices.reject):
|
||||
raise errors.LoginACLIPAndTimePeriodNotAllowed(user.username, request=self.request)
|
||||
|
||||
if acl.is_action(acl.ActionChoices.confirm):
|
||||
self.request.session['auth_confirm_required'] = '1'
|
||||
self.request.session['auth_acl_id'] = str(acl.id)
|
||||
return
|
||||
|
||||
def check_user_login_confirm_if_need(self, user):
|
||||
if not self.request.session.get("auth_confirm_required"):
|
||||
return
|
||||
acl_id = self.request.session.get('auth_acl_id')
|
||||
logger.debug('Login confirm acl id: {}'.format(acl_id))
|
||||
if not acl_id:
|
||||
return
|
||||
acl = LoginACL.filter_acl(user).filter(id=acl_id).first()
|
||||
if not acl:
|
||||
return
|
||||
if not acl.is_action(acl.ActionChoices.confirm):
|
||||
return
|
||||
self.get_ticket_or_create(acl)
|
||||
self.check_user_login_confirm()
|
||||
|
||||
def get_ticket_or_create(self, acl):
|
||||
ticket = self.get_ticket()
|
||||
if not ticket or ticket.is_state(ticket.State.closed):
|
||||
ticket = acl.create_confirm_ticket(self.request)
|
||||
self.request.session['auth_ticket_id'] = str(ticket.id)
|
||||
return ticket
|
||||
|
||||
def check_user_login_confirm(self):
|
||||
ticket = self.get_ticket()
|
||||
if not ticket:
|
||||
raise errors.LoginConfirmOtherError('', "Not found", '')
|
||||
elif ticket.is_state(ticket.State.approved):
|
||||
self.request.session["auth_confirm_required"] = ''
|
||||
return
|
||||
elif ticket.is_status(ticket.Status.open):
|
||||
raise errors.LoginConfirmWaitError(ticket.id)
|
||||
else:
|
||||
# rejected, closed
|
||||
ticket_id = ticket.id
|
||||
status = ticket.get_state_display()
|
||||
username = ticket.applicant.username
|
||||
raise errors.LoginConfirmOtherError(ticket_id, status, username)
|
||||
|
||||
def get_ticket(self):
|
||||
from tickets.models import ApplyLoginTicket
|
||||
@@ -346,44 +393,6 @@ class AuthACLMixin:
|
||||
ticket = ApplyLoginTicket.all().filter(id=ticket_id).first()
|
||||
return ticket
|
||||
|
||||
def get_ticket_or_create(self, confirm_setting):
|
||||
ticket = self.get_ticket()
|
||||
if not ticket or ticket.is_status(ticket.Status.closed):
|
||||
ticket = confirm_setting.create_confirm_ticket(self.request)
|
||||
self.request.session['auth_ticket_id'] = str(ticket.id)
|
||||
return ticket
|
||||
|
||||
def check_user_login_confirm(self):
|
||||
ticket = self.get_ticket()
|
||||
if not ticket:
|
||||
raise errors.LoginConfirmOtherError('', "Not found")
|
||||
|
||||
if ticket.is_status(ticket.Status.open):
|
||||
raise errors.LoginConfirmWaitError(ticket.id)
|
||||
elif ticket.is_state(ticket.State.approved):
|
||||
self.request.session["auth_confirm"] = "1"
|
||||
return
|
||||
elif ticket.is_state(ticket.State.rejected):
|
||||
raise errors.LoginConfirmOtherError(
|
||||
ticket.id, ticket.get_state_display()
|
||||
)
|
||||
elif ticket.is_state(ticket.State.closed):
|
||||
raise errors.LoginConfirmOtherError(
|
||||
ticket.id, ticket.get_state_display()
|
||||
)
|
||||
else:
|
||||
raise errors.LoginConfirmOtherError(
|
||||
ticket.id, ticket.get_status_display()
|
||||
)
|
||||
|
||||
def check_user_login_confirm_if_need(self, user):
|
||||
ip = self.get_request_ip()
|
||||
is_allowed, confirm_setting = LoginACL.allow_user_confirm_if_need(user, ip)
|
||||
if self.request.session.get('auth_confirm') or not is_allowed:
|
||||
return
|
||||
self.get_ticket_or_create(confirm_setting)
|
||||
self.check_user_login_confirm()
|
||||
|
||||
|
||||
class AuthMixin(CommonMixin, AuthPreCheckMixin, AuthACLMixin, MFAMixin, AuthPostCheckMixin):
|
||||
request = None
|
||||
@@ -482,7 +491,9 @@ class AuthMixin(CommonMixin, AuthPreCheckMixin, AuthACLMixin, MFAMixin, AuthPost
|
||||
return self.check_user_auth(valid_data)
|
||||
|
||||
def clear_auth_mark(self):
|
||||
keys = ['auth_password', 'user_id', 'auth_confirm', 'auth_ticket_id']
|
||||
keys = [
|
||||
'auth_password', 'user_id', 'auth_confirm_required', 'auth_ticket_id', 'auth_acl_id'
|
||||
]
|
||||
for k in keys:
|
||||
self.request.session.pop(k, '')
|
||||
|
||||
@@ -502,4 +513,20 @@ class AuthMixin(CommonMixin, AuthPreCheckMixin, AuthACLMixin, MFAMixin, AuthPost
|
||||
args = self.request.META.get('QUERY_STRING', '')
|
||||
if args:
|
||||
guard_url = "%s?%s" % (guard_url, args)
|
||||
return redirect(guard_url)
|
||||
response = redirect(guard_url)
|
||||
self.set_browser_default_language_if_need(response)
|
||||
return response
|
||||
|
||||
def set_browser_default_language_if_need(self, response):
|
||||
# en, ja, zh-CN,zh;q=0.9
|
||||
browser_lang = self.request.headers.get('Accept-Language', '')
|
||||
# 浏览器首选语言
|
||||
if browser_lang.startswith('en'):
|
||||
browser_lang = 'en'
|
||||
elif browser_lang.startswith('ja'):
|
||||
browser_lang = 'ja'
|
||||
else:
|
||||
browser_lang = 'zh'
|
||||
request_lang = self.request.LANGUAGE_CODE
|
||||
lang = request_lang or browser_lang
|
||||
response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang)
|
||||
|
||||
@@ -16,10 +16,10 @@ class AccessKey(models.Model):
|
||||
default=uuid.uuid4, editable=False)
|
||||
secret = models.UUIDField(verbose_name='AccessKeySecret',
|
||||
default=uuid.uuid4, editable=False)
|
||||
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='User',
|
||||
on_delete=models.CASCADE, related_name='access_keys')
|
||||
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'),
|
||||
on_delete=models.CASCADE_SIGNAL_SKIP, related_name='access_keys')
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
|
||||
|
||||
def get_id(self):
|
||||
return str(self.id)
|
||||
@@ -51,7 +51,7 @@ class SSOToken(models.JMSBaseModel):
|
||||
"""
|
||||
authkey = models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name=_('Token'))
|
||||
expired = models.BooleanField(default=False, verbose_name=_('Expired'))
|
||||
user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name=_('User'), db_constraint=False)
|
||||
user = models.ForeignKey('users.User', on_delete=models.CASCADE_SIGNAL_SKIP, verbose_name=_('User'), db_constraint=False)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('SSO token')
|
||||
|
||||
@@ -1,15 +1,38 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
|
||||
__all__ = [
|
||||
'MFAChallengeSerializer', 'MFASelectTypeSerializer',
|
||||
'PasswordVerifySerializer',
|
||||
'PasswordVerifySerializer', 'ResetPasswordCodeSerializer',
|
||||
]
|
||||
|
||||
|
||||
class ResetPasswordCodeSerializer(serializers.Serializer):
|
||||
form_type = serializers.ChoiceField(
|
||||
choices=[('sms', _('SMS')), ('email', _('Email'))], default='email'
|
||||
)
|
||||
email = serializers.CharField(allow_blank=True)
|
||||
sms = serializers.CharField(allow_blank=True)
|
||||
|
||||
def create(self, attrs):
|
||||
error = []
|
||||
validate_backends = {
|
||||
'email': _('Email'), 'sms': _('SMS')
|
||||
}
|
||||
form_type = attrs.get('form_type', 'email')
|
||||
validate_backend_input = attrs.get(form_type)
|
||||
if not validate_backend_input:
|
||||
error.append(_('The {} cannot be empty').format(
|
||||
validate_backends.get(validate_backend_input))
|
||||
)
|
||||
if error:
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
|
||||
class PasswordVerifySerializer(serializers.Serializer):
|
||||
password = EncryptedField()
|
||||
|
||||
|
||||
@@ -6,13 +6,8 @@ from django.core.cache import cache
|
||||
from django.dispatch import receiver
|
||||
from django_cas_ng.signals import cas_user_authenticated
|
||||
|
||||
from authentication.backends.oidc.signals import (
|
||||
openid_user_login_failed, openid_user_login_success
|
||||
)
|
||||
from authentication.backends.saml2.signals import (
|
||||
saml2_user_authenticated, saml2_user_authentication_failed
|
||||
)
|
||||
from .signals import post_auth_success, post_auth_failed
|
||||
from apps.jumpserver.settings.auth import AUTHENTICATION_BACKENDS_THIRD_PARTY
|
||||
from .signals import post_auth_success, post_auth_failed, user_auth_failed, user_auth_success
|
||||
|
||||
|
||||
@receiver(user_logged_in)
|
||||
@@ -25,7 +20,9 @@ def on_user_auth_login_success(sender, user, request, **kwargs):
|
||||
and user.mfa_enabled \
|
||||
and not request.session.get('auth_mfa'):
|
||||
request.session['auth_mfa_required'] = 1
|
||||
|
||||
if not request.session.get("auth_third_party_done") and \
|
||||
request.session.get('auth_backend') in AUTHENTICATION_BACKENDS_THIRD_PARTY:
|
||||
request.session['auth_third_party_required'] = 1
|
||||
# 单点登录,超过了自动退出
|
||||
if settings.USER_LOGIN_SINGLE_MACHINE_ENABLED:
|
||||
lock_key = 'single_machine_login_' + str(user.id)
|
||||
@@ -39,31 +36,19 @@ def on_user_auth_login_success(sender, user, request, **kwargs):
|
||||
request.session['auth_session_expiration_required'] = 1
|
||||
|
||||
|
||||
@receiver(openid_user_login_success)
|
||||
def on_oidc_user_login_success(sender, request, user, create=False, **kwargs):
|
||||
request.session['auth_backend'] = settings.AUTH_BACKEND_OIDC_CODE
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
|
||||
|
||||
@receiver(openid_user_login_failed)
|
||||
def on_oidc_user_login_failed(sender, username, request, reason, **kwargs):
|
||||
request.session['auth_backend'] = settings.AUTH_BACKEND_OIDC_CODE
|
||||
post_auth_failed.send(sender, username=username, request=request, reason=reason)
|
||||
|
||||
|
||||
@receiver(cas_user_authenticated)
|
||||
def on_cas_user_login_success(sender, request, user, **kwargs):
|
||||
request.session['auth_backend'] = settings.AUTH_BACKEND_CAS
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
|
||||
|
||||
@receiver(saml2_user_authenticated)
|
||||
def on_saml2_user_login_success(sender, request, user, **kwargs):
|
||||
request.session['auth_backend'] = settings.AUTH_BACKEND_SAML2
|
||||
@receiver(user_auth_success)
|
||||
def on_user_login_success(sender, request, user, backend, create=False, **kwargs):
|
||||
request.session['auth_backend'] = backend
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
|
||||
|
||||
@receiver(saml2_user_authentication_failed)
|
||||
def on_saml2_user_login_failed(sender, request, username, reason, **kwargs):
|
||||
request.session['auth_backend'] = settings.AUTH_BACKEND_SAML2
|
||||
@receiver(user_auth_failed)
|
||||
def on_user_login_failed(sender, username, request, reason, backend, **kwargs):
|
||||
request.session['auth_backend'] = backend
|
||||
post_auth_failed.send(sender, username=username, request=request, reason=reason)
|
||||
|
||||
@@ -3,3 +3,7 @@ from django.dispatch import Signal
|
||||
|
||||
post_auth_success = Signal(providing_args=('user', 'request'))
|
||||
post_auth_failed = Signal(providing_args=('username', 'request', 'reason'))
|
||||
|
||||
|
||||
user_auth_success = Signal(providing_args=('user', 'request', 'backend', 'create'))
|
||||
user_auth_failed = Signal(providing_args=('username', 'request', 'reason', 'backend'))
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user