Compare commits

..

21 Commits

Author SHA1 Message Date
Bai
a474d9be3e perf: serialize_nodes/assets as tree-node, if not nodes or assets, return 2025-12-24 16:52:24 +08:00
Bai
3a4e93af2f refactor: finished AssetTree API. support global and real org; support asset-tree of AssetPage and PermPage; support search tree re-initial 2025-12-24 15:45:39 +08:00
Bai
9c2ddbba7e refactor: while search tree, re-initial ZTree use API data. 2025-12-23 18:51:26 +08:00
Bai
39129cecbe refactor: finished NodeChildrenAsTreeApi. But, need TODO support GLOBAL org logic. 2025-12-23 18:46:18 +08:00
Bai
88819bbf26 perf: Modify AssetViewSet filter by node 2025-12-22 17:13:05 +08:00
Bai
a88e35156a perf: AssetTree if with_assets pre fetch asset attrs 2025-12-22 16:51:08 +08:00
Bai
22a27946a7 perf: AssetTree support with_assets and full_tree kwargs 2025-12-22 16:32:48 +08:00
Bai
4983465a23 perf: UserPermTree support with_assets params 2025-12-22 13:28:32 +08:00
Bai
4d9fc9dfd6 perf: UserPermUtil support get_node_assets and get_node_all_assets 2025-12-22 12:32:27 +08:00
Bai
c7cb83fa1d perf: split UserPermUtil from UserPermTree 2025-12-22 11:23:13 +08:00
Bai
ee92c72b50 perf: add UserPermTree, finished 2025-12-19 19:53:08 +08:00
Bai
6a05fbe0fe perf: add AssetSearchTree, move remove_zero_assets_node from asset-tree to asset-search-tree 2025-12-19 15:19:54 +08:00
Bai
0284be169a perf: add AssetSearchTree, modify Node-Model get_all_assets function use node not nodes 2025-12-19 14:44:17 +08:00
Bai
a4e9d4f815 perf: add AssetSearchTree, supported: category search; modify fake generate asset set node_id 2025-12-19 14:25:16 +08:00
Bai
bbe549696a perf: add AssetSearchTree, not yet supported: category search 2025-12-19 10:54:07 +08:00
Bai
56f720271a refactor: format tree.print 2025-12-18 18:30:36 +08:00
Bai
9755076f7f refactor: add tree.py and asset_tree.py, finished build AssetTree. 2025-12-18 15:37:18 +08:00
Bai
8d7abef191 perf: add migrations - migrate asset node_id field 2025-12-16 18:50:15 +08:00
Bai
aaa40722c4 perf: add util - cleanup and keep one node for Multi-Parent-Nodes Assets and generate report 2025-12-16 16:29:24 +08:00
Bai
ca39344937 perf: add util - cleanup and keep one node for Multi-Parent-Nodes Assets and generate report 2025-12-16 16:28:37 +08:00
Bai
4b9a8227c9 perf: add util - find Multi-Parent Assets and generate report 2025-12-16 15:32:41 +08:00
20 changed files with 3598 additions and 2543 deletions

View File

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20251225_092635 AS stage-build
FROM jumpserver/core-base:20251128_025056 AS stage-build
ARG VERSION

View File

@@ -5,6 +5,7 @@ from rest_framework.request import Request
from assets.models import Node, Platform, Protocol, MyAsset
from assets.utils import get_node_from_request, is_query_node_all_assets
from common.utils import lazyproperty, timeit
from assets.tree.asset_tree import AssetTreeNode
class SerializeToTreeNodeMixin:
@@ -19,22 +20,22 @@ class SerializeToTreeNodeMixin:
return False
@timeit
def serialize_nodes(self, nodes: List[Node], with_asset_amount=False):
if with_asset_amount:
def _name(node: Node):
return '{} ({})'.format(node.value, node.assets_amount)
else:
def _name(node: Node):
return node.value
def serialize_nodes(self, nodes: List[AssetTreeNode], with_asset_amount=False, expand_level=1, with_assets=False):
if not nodes:
return []
def _open(node):
if not self.is_sync:
# 异步加载资产树时,默认展开节点
return True
if not node.parent_key:
return True
def _name(node: AssetTreeNode):
v = node.value
if not with_asset_amount:
return v
v = f'{v} ({node.assets_amount_total})'
return v
def is_parent(node: AssetTreeNode):
if with_assets:
return node.assets_amount > 0 or not node.is_leaf
else:
return False
return not node.is_leaf
data = [
{
@@ -42,15 +43,17 @@ class SerializeToTreeNodeMixin:
'name': _name(node),
'title': _name(node),
'pId': node.parent_key,
'isParent': True,
'open': _open(node),
'isParent': is_parent(node),
'open': node.level <= expand_level,
'meta': {
'type': 'node',
'data': {
"id": node.id,
"key": node.key,
"value": node.value,
"assets_amount": node.assets_amount,
"assets_amount_total": node.assets_amount_total,
},
'type': 'node'
}
}
for node in nodes
@@ -72,6 +75,9 @@ class SerializeToTreeNodeMixin:
@timeit
def serialize_assets(self, assets, node_key=None, get_pid=None):
if not assets:
return []
if not get_pid and not node_key:
get_pid = lambda asset, platform: getattr(asset, 'parent_key', '')

View File

@@ -1,6 +1,6 @@
# ~*~ coding: utf-8 ~*~
from django.db.models import Q
from django.db.models import Q, Count
from django.utils.translation import gettext_lazy as _
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
@@ -11,12 +11,16 @@ from common.tree import TreeNodeSerializer
from common.utils import get_logger
from orgs.mixins import generics
from orgs.utils import current_org
from orgs.models import Organization
from .mixin import SerializeToTreeNodeMixin
from .. import serializers
from ..const import AllTypes
from ..models import Node, Platform, Asset
from assets.tree.asset_tree import AssetTree
logger = get_logger(__file__)
__all__ = [
'NodeChildrenApi',
'NodeChildrenAsTreeApi',
@@ -25,14 +29,13 @@ __all__ = [
class NodeChildrenApi(generics.ListCreateAPIView):
"""
节点的增删改查
"""
''' 节点的增删改查 '''
serializer_class = serializers.NodeSerializer
search_fields = ('value',)
instance = None
is_initial = False
perm_model = Node
def initial(self, request, *args, **kwargs):
super().initial(request, *args, **kwargs)
@@ -65,42 +68,16 @@ class NodeChildrenApi(generics.ListCreateAPIView):
else:
node = Node.org_root()
return node
if pk:
node = get_object_or_404(Node, pk=pk)
else:
node = get_object_or_404(Node, key=key)
return node
def get_org_root_queryset(self, query_all):
if query_all:
return Node.objects.all()
else:
return Node.org_root_nodes()
def get_queryset(self):
query_all = self.request.query_params.get("all", "0") == "all"
if self.is_initial and current_org.is_root():
return self.get_org_root_queryset(query_all)
if self.is_initial:
with_self = True
else:
with_self = False
if not self.instance:
return Node.objects.none()
if query_all:
queryset = self.instance.get_all_children(with_self=with_self)
else:
queryset = self.instance.get_children(with_self=with_self)
return queryset
class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
"""
节点子节点作为树返回,
''' 节点子节点作为树返回,
[
{
"id": "",
@@ -109,51 +86,96 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
"meta": ""
}
]
'''
"""
model = Node
def filter_queryset(self, queryset):
""" queryset is Node queryset """
if not self.request.GET.get('search'):
return queryset
queryset = super().filter_queryset(queryset)
queryset = self.model.get_ancestor_queryset(queryset)
return queryset
def get_queryset_for_assets(self):
query_all = self.request.query_params.get("all", "0") == "all"
include_assets = self.request.query_params.get('assets', '0') == '1'
if not self.instance or not include_assets:
return Asset.objects.none()
if not self.request.GET.get('search') and self.instance.is_org_root():
return Asset.objects.none()
if query_all:
assets = self.instance.get_all_assets()
else:
assets = self.instance.get_assets()
return assets.only(
"id", "name", "address", "platform_id",
"org_id", "is_active", 'comment'
).prefetch_related('platform')
def filter_queryset_for_assets(self, assets):
search = self.request.query_params.get('search')
if search:
q = Q(name__icontains=search) | Q(address__icontains=search)
assets = assets.filter(q)
return assets
def list(self, request, *args, **kwargs):
nodes = self.filter_queryset(self.get_queryset()).order_by('value')
search = request.query_params.get('search')
with_assets = request.query_params.get('assets', '0') == '1'
with_asset_amount = request.query_params.get('asset_amount', '1') == '1'
nodes = self.serialize_nodes(nodes, with_asset_amount=with_asset_amount)
assets = self.filter_queryset_for_assets(self.get_queryset_for_assets())
node_key = self.instance.key if self.instance else None
assets = self.serialize_assets(assets, node_key=node_key)
with_asset_amount = True
nodes, assets, expand_level = self.get_nodes_assets(search, with_assets)
nodes = self.serialize_nodes(nodes, with_asset_amount=with_asset_amount, expand_level=expand_level)
assets = self.serialize_assets(assets)
data = [*nodes, *assets]
return Response(data=data)
def get_nodes_assets(self, search, with_assets):
#
# 资产管理-节点树
#
# 全局组织: 初始化节点树, 返回所有节点, 不包含资产, 不展开节点
# 实体组织: 初始化节点树, 返回所有节点, 不包含资产, 展开一级节点
# 前端搜索
if not with_assets:
if current_org.is_root():
orgs = Organization.objects.all()
expand_level = 0
else:
orgs = [current_org]
expand_level = 1
nodes = []
assets = []
for org in orgs:
tree = AssetTree(org=org)
org_nodes = tree.get_nodes()
nodes.extend(org_nodes)
return nodes, assets, expand_level
#
# 权限管理、账号发现、风险检测 - 资产节点树
#
# 全局组织: 搜索资产, 生成资产节点树, 过滤每个组织前 1000 个资产, 展开所有节点
# 实体组织: 搜索资产, 生成资产节点树, 过滤前 1000 个资产, 展开所有节点
if search:
if current_org.is_root():
orgs = list(Organization.objects.all())
else:
orgs = [current_org]
nodes = []
assets = []
assets_q_object = Q(name__icontains=search) | Q(address__icontains=search)
with_assets_limit = 1000 / len(orgs)
for org in orgs:
tree = AssetTree(
assets_q_object=assets_q_object, org=org,
with_assets=True, with_assets_limit=with_assets_limit, full_tree=False
)
nodes.extend(tree.get_nodes())
assets.extend(tree.get_assets())
expand_level = 10000 # search 时展开所有节点
return nodes, assets, expand_level
# 全局组织: 展开某个节点及其资产
# 实体组织: 展开某个节点及其资产
# 实体组织: 初始化资产节点树, 自动展开根节点及其资产, 所以节点要包含自己 (特殊情况)
if self.instance:
nodes = []
tree = AssetTree(with_assets_node_id=self.instance.id, org=self.instance.org)
nodes_with_self = False
if not current_org.is_root() and self.instance.is_org_root():
nodes_with_self = True
nodes = tree.get_node_children(key=self.instance.key, with_self=nodes_with_self)
assets = tree.get_assets()
expand_level = 1 # 默认只展开第一级
return nodes, assets, expand_level
# 全局组织: 初始化资产节点树, 仅返回各组织根节点, 不展开
orgs = Organization.objects.all()
nodes = []
assets = []
for org in orgs:
tree = AssetTree(org=org, with_assets=False)
if not tree.root:
continue
nodes.append(tree.root)
expand_level = 0 # 默认不展开节点
return nodes, assets, expand_level
class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
serializer_class = TreeNodeSerializer

View File

@@ -63,11 +63,11 @@ class NodeFilterBackend(filters.BaseFilterBackend):
query_all = is_query_node_all_assets(request)
if query_all:
return queryset.filter(
Q(nodes__key__startswith=f'{node.key}:') |
Q(nodes__key=node.key)
Q(node__key__startswith=f'{node.key}:') |
Q(node__key=node.key)
).distinct()
else:
return queryset.filter(nodes__key=node.key).distinct()
return queryset.filter(node__key=node.key).distinct()
class IpInFilterBackend(filters.BaseFilterBackend):

View File

@@ -0,0 +1,126 @@
# Generated by Django 4.1.13 on 2025-12-16 09:14
from django.db import migrations, models, transaction
import django.db.models.deletion
def log(msg=''):
    """Print *msg* with a uniform ' -> ' prefix for migration progress output."""
    prefix = ' -> '
    print(prefix + str(msg))
def ensure_asset_single_node(apps, schema_editor):
    """Abort the migration if any asset is attached to more than one node.

    The new Asset.node foreign key can only hold a single node, so the
    M2M through table must contain at most one row per asset.
    """
    print('')
    log('Checking that all assets are linked to only one node...')
    Asset = apps.get_model('assets', 'Asset')
    through = Asset.nodes.through
    multi_node_assets = (
        through.objects
        .values('asset_id')
        .annotate(node_count=models.Count('node_id'))
        .filter(node_count__gt=1)
        .count()
    )
    if multi_node_assets > 0:
        raise Exception(
            f'There are {multi_node_assets} assets associated with more than one node. '
            'Please ensure each asset is linked to only one node before applying this migration.'
        )
    log('All assets are linked to only one node. Proceeding with the migration.')
def ensure_asset_has_node(apps, schema_editor):
    """Abort the migration if some assets are not linked to any node.

    The new Asset.node foreign key ends up non-nullable, so every asset
    must have at least one row in the M2M through table.
    NOTE(review): the through-table count assumes one row per asset — the
    preceding single-node check guarantees that; confirm ordering.
    """
    log('Checking that all assets are linked to at least one node...')
    Asset = apps.get_model('assets', 'Asset')
    through = Asset.nodes.through
    total_assets = Asset.objects.count()
    linked_assets = through.objects.values('asset_id').count()
    missing = total_assets - linked_assets
    if missing > 0:
        raise Exception(
            f'Some assets ({missing}) are not associated with any node. '
            'Please ensure all assets are linked to a node before applying this migration.'
        )
    log('All assets are linked to a node. Proceeding with the migration.')
def migrate_asset_node_id_field(apps, schema_editor):
    """Populate Asset.node_id from the legacy Asset.nodes M2M through table.

    Raises if any asset has no mapping or if any node_id is still null
    afterwards, so the migration fails loudly rather than leaving the
    non-nullable FK step to blow up later.
    """
    log('Migrating node_id field for all assets...')
    Asset = apps.get_model('assets', 'Asset')
    Through = Asset.nodes.through
    # Materialize once: the node_id values assigned below must survive the
    # batching step (re-slicing a queryset would re-query the DB).
    assets = list(Asset.objects.filter(node_id__isnull=True))
    log(f'Found {len(assets)} assets to migrate.')
    # Keys are always str(uuid), never None, so no None-key cleanup is needed
    # (the old `pop(None, None)` was dead code).
    asset_node_mapper = {
        str(asset_id): str(node_id)
        for asset_id, node_id in Through.objects.values_list('asset_id', 'node_id')
    }
    for asset in assets:
        node_id = asset_node_mapper.get(str(asset.id))
        if not node_id:
            raise Exception(
                f'Asset (ID: {asset.id}) is not associated with any node. '
                'Cannot migrate node_id field.'
            )
        asset.node_id = node_id
    total = len(assets)
    batch_size = 5000
    with transaction.atomic():
        for i in range(0, total, batch_size):
            batch = assets[i:i + batch_size]
            # bulk_update: one UPDATE per batch instead of one per asset.
            Asset.objects.bulk_update(batch, ['node_id'], batch_size=batch_size)
            log(f"Migrated {i + 1}-{min(i + batch_size, total)}/{total} assets")
    count = Asset.objects.filter(node_id__isnull=True).count()
    if count > 0:
        log('Warning: Some assets still have null node_id after migration.')
        raise Exception('Migration failed: Some assets have null node_id.')
    count = Asset.objects.filter(node_id__isnull=False).count()
    log(f'Successfully migrated node_id for {count} assets.')
class Migration(migrations.Migration):
    # Moves assets from the M2M Asset.nodes relation to a single Asset.node
    # foreign key: validate preconditions, add a nullable FK, copy the data,
    # then tighten the FK to non-nullable.

    dependencies = [
        ('assets', '0019_alter_asset_connectivity'),
    ]

    operations = [
        # Fail fast if any asset is linked to more than one node.
        migrations.RunPython(
            ensure_asset_single_node,
            reverse_code=migrations.RunPython.noop
        ),
        # Fail fast if any asset has no node at all.
        migrations.RunPython(
            ensure_asset_has_node,
            reverse_code=migrations.RunPython.noop
        ),
        # Add the FK as nullable first so existing rows stay valid.
        migrations.AddField(
            model_name='asset',
            name='node',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='direct_assets', to='assets.node', verbose_name='Node'),
        ),
        # Copy node ids from the M2M through table into the new FK.
        migrations.RunPython(
            migrate_asset_node_id_field,
            reverse_code=migrations.RunPython.noop
        ),
        # Every asset now has a node: make the FK non-nullable.
        migrations.AlterField(
            model_name='asset',
            name='node',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='direct_assets', to='assets.node', verbose_name='Node'),
        ),
    ]

View File

@@ -172,6 +172,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
"assets.Zone", null=True, blank=True, related_name='assets',
verbose_name=_("Zone"), on_delete=models.SET_NULL
)
node = models.ForeignKey(
'assets.Node', null=False, blank=False, on_delete=models.PROTECT,
related_name='direct_assets', verbose_name=_("Node")
)
# TODO: 删除完代码中所有使用的地方后,再删除 nodes 字段,并将 node 字段的 related_name 改为 'assets'
nodes = models.ManyToManyField(
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
)

View File

@@ -394,7 +394,7 @@ class NodeAssetsMixin(NodeAllAssetsMappingMixin):
def get_all_assets(self):
from .asset import Asset
q = Q(nodes__key__startswith=f'{self.key}:') | Q(nodes__key=self.key)
q = Q(node__key__startswith=f'{self.key}:') | Q(node__key=self.key)
return Asset.objects.filter(q).distinct()
def get_assets_amount(self):
@@ -416,8 +416,8 @@ class NodeAssetsMixin(NodeAllAssetsMappingMixin):
def get_assets(self):
from .asset import Asset
assets = Asset.objects.filter(nodes=self)
return assets.distinct()
assets = Asset.objects.filter(node=self)
return assets
def get_valid_assets(self):
return self.get_assets().valid()
@@ -531,6 +531,15 @@ class SomeNodesMixin:
root_nodes = cls.objects.filter(parent_key='', key__regex=r'^[0-9]+$') \
.exclude(key__startswith='-').order_by('key')
return root_nodes
@classmethod
def get_or_create_org_root(cls, org):
org_root = cls.org_root_nodes().filter(org_id=org.id).first()
if org_root:
return org_root
with tmp_to_org(org):
org_root = cls.create_org_root_node()
return org_root
class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):

View File

@@ -0,0 +1 @@
from .asset_tree import *

View File

@@ -0,0 +1,246 @@
from collections import defaultdict
from django.db.models import Count, Q
from orgs.utils import current_org
from orgs.models import Organization
from assets.models import Asset, Node, Platform
from assets.const.category import Category
from common.utils import get_logger, timeit, lazyproperty
from .tree import TreeNode, Tree
logger = get_logger(__name__)
__all__ = ['AssetTree', 'AssetTreeNode']
class AssetTreeNodeAsset:
    """Lightweight asset record attached to an AssetTreeNode.

    Built from the plain dicts produced by Asset.objects.values(...), with
    parent_key injected by the owning node.
    """

    def __init__(self, id, node_id, parent_key, name, address,
                 platform_id, is_active, comment, org_id):
        self.id = id
        self.name = name
        self.address = address
        self.comment = comment
        self.is_active = is_active
        self.node_id = node_id
        self.parent_key = parent_key
        self.platform_id = platform_id
        self.org_id = org_id

    @lazyproperty
    def org(self):
        # Cached: the organization lookup may hit the DB/cache layer.
        return Organization.get_instance(self.org_id)

    @property
    def org_name(self) -> str:
        return self.org.name
class AssetTreeNode(TreeNode):
    """Tree node that also tracks asset amounts and optionally carries assets."""

    def __init__(self, _id, key, value, assets_amount=0, assets=None):
        super().__init__(_id, key, value)
        self.assets_amount = assets_amount
        # Filled in later by the tree: direct amount plus all descendants.
        self.assets_amount_total = 0
        self.assets: list[AssetTreeNodeAsset] = []
        self.init_assets(assets)

    def init_assets(self, assets):
        # *assets* is a list of dicts from Asset.objects.values(...);
        # each gets this node's key injected as its parent_key.
        if not assets:
            return
        for item in assets:
            item['parent_key'] = self.key
            self.assets.append(AssetTreeNodeAsset(**item))

    def get_assets(self):
        return self.assets

    def as_dict(self, simple=True):
        data = super().as_dict(simple=simple)
        extra = {
            'assets_amount_total': self.assets_amount_total,
            'assets_amount': self.assets_amount,
            'assets': len(self.assets),
        }
        data.update(extra)
        return data
class AssetTree(Tree):
    # Node class to instantiate; subclasses (e.g. UserPermTree) override it.
    TreeNode = AssetTreeNode

    def __init__(self, assets_q_object: Q = None, category=None, org=None,
                 with_assets=False, with_assets_node_id=None, with_assets_limit=1000,
                 full_tree=True):
        '''
        :param assets_q_object: only build the tree for assets matching this Q
        :param category: only build the tree for assets of this category
        :param org: only build the asset node tree of this organization
        :param with_assets_node_id: only this node carries its assets
        :param with_assets: every node carries its assets
        :param with_assets_limit: max number of assets carried in total
        :param full_tree: keep all nodes; otherwise drop nodes whose total
            asset amount is 0
        '''
        super().__init__()
        # Build the node tree from assets; supports Q, category, org filters.
        self._assets_q_object: Q = assets_q_object or Q()
        self._category = self._check_category(category)
        self._category_platform_ids = set()
        self._org: Organization = org or current_org
        # Full node attribute mapping for the org; the tree is always built
        # from the complete node set so parents exist before children.
        self._nodes_attr_mapper = defaultdict(dict)
        # Direct asset amount per node, used to compute per-subtree totals.
        self._nodes_assets_amount_mapper = defaultdict(int)
        # Whether nodes carry their assets
        self._with_assets = with_assets  # every node carries assets
        self._with_assets_node_id = with_assets_node_id  # only this node carries assets; takes precedence over with_assets
        self._with_assets_limit = with_assets_limit
        # { node_id -> { asset_id -> asset dict } }
        self._node_assets_mapper = defaultdict(dict)
        # Whether to keep nodes whose total asset amount is 0
        self._full_tree = full_tree
        # The tree is fully built at construction time
        self.build()

    def _check_category(self, category):
        # Invalid categories fall back to None (no category filter).
        if category is None:
            return None
        if category in Category.values:
            return category
        logger.warning(f"Invalid category '{category}' for AssetSearchTree.")
        return None

    @timeit
    def build(self):
        # Order matters: attrs and amounts must be loaded before the tree is
        # assembled; totals and pruning run on the assembled tree.
        self._load_nodes_attr_mapper()
        self._load_category_platforms_if_needed()
        self._load_nodes_assets_amount()
        self._load_nodes_assets_if_needed()
        self._init_tree()
        self._compute_assets_amount_total()
        self._remove_nodes_with_zero_assets_if_needed()

    @timeit
    def _load_category_platforms_if_needed(self):
        # Resolve the category filter to a set of platform ids.
        if self._category is None:
            return
        ids = Platform.objects.filter(category=self._category).values_list('id', flat=True)
        ids = self._uuids_to_string(ids)
        self._category_platform_ids = ids

    @timeit
    def _load_nodes_attr_mapper(self):
        nodes = Node.objects.filter(org_id=self._org.id).values('id', 'key', 'value')
        # Sort by key segments so parents always precede children when the
        # tree is built (add_node requires the parent to exist).
        nodes = sorted(nodes, key=lambda n: [int(i) for i in n['key'].split(':')])
        for node in list(nodes):
            node['id'] = str(node['id'])
            self._nodes_attr_mapper[node['id']] = node

    @timeit
    def _load_nodes_assets_amount(self):
        # Direct (non-recursive) asset count per node, after all filters.
        q = self._make_assets_q_object()
        nodes_amount = Asset.objects.filter(q).values('node_id').annotate(
            amount=Count('id')
        ).values('node_id', 'amount')
        for nc in list(nodes_amount):
            nid = str(nc['node_id'])
            self._nodes_assets_amount_mapper[nid] = nc['amount']

    @timeit
    def _load_nodes_assets_if_needed(self):
        need_load = self._with_assets or self._with_assets_node_id
        if not need_load:
            return
        q = self._make_assets_q_object()
        if self._with_assets_node_id:
            # Only the given node carries assets; overrides with_assets.
            q &= Q(node_id=self._with_assets_node_id)
        assets = Asset.objects.filter(q).values(
            'node_id', 'id', 'platform_id', 'name', 'address', 'is_active', 'comment', 'org_id'
        )
        # Order by node key so earlier nodes get the limited asset budget.
        # NOTE(review): a float limit (e.g. 1000 / len(orgs)) would break this
        # slice — callers should pass an int.
        assets = assets.order_by('node__key')[:self._with_assets_limit]
        for asset in list(assets):
            nid = asset['node_id'] = str(asset['node_id'])
            aid = asset['id'] = str(asset['id'])
            self._node_assets_mapper[nid][aid] = asset

    @timeit
    def _make_assets_q_object(self) -> Q:
        # Base asset filter: org, then optional category platforms, then the
        # caller-supplied Q object.
        q = Q(org_id=self._org.id)
        if self._category_platform_ids:
            q &= Q(platform_id__in=self._category_platform_ids)
        if self._assets_q_object:
            q &= self._assets_q_object
        return q

    @timeit
    def _init_tree(self):
        # Mapper is insertion-ordered by key, so parents are added first.
        for nid in self._nodes_attr_mapper.keys():
            data = self._get_tree_node_data(nid)
            node = self.TreeNode(**data)
            self.add_node(node)

    def _get_tree_node_data(self, node_id):
        # Kwargs for self.TreeNode(**data); subclasses extend this dict.
        attr = self._nodes_attr_mapper[node_id]
        assets_amount = self._nodes_assets_amount_mapper.get(node_id, 0)
        data = {
            '_id': node_id,
            'key': attr['key'],
            'value': attr['value'],
            'assets_amount': assets_amount,
        }
        assets = self._node_assets_mapper[node_id].values()
        if assets:
            assets = list(assets)
            data.update({ 'assets': assets })
        return data

    @timeit
    def _compute_assets_amount_total(self):
        # Nodes were inserted parents-first, so iterating in reverse visits
        # children before their parents; each total is direct + child totals.
        for node in reversed(list(self.nodes.values())):
            total = node.assets_amount
            for child in node.children:
                child: AssetTreeNode
                total += child.assets_amount_total
            node: AssetTreeNode
            node.assets_amount_total = total

    @timeit
    def _remove_nodes_with_zero_assets_if_needed(self):
        # Prune empty subtrees unless a full tree was requested; a node with
        # zero total implies all of its descendants also have zero totals.
        if self._full_tree:
            return
        nodes: list[AssetTreeNode] = list(self.nodes.values())
        nodes_to_remove = [
            node for node in nodes if not node.is_root and node.assets_amount_total == 0
        ]
        for node in nodes_to_remove:
            self.remove_node(node)

    def get_assets(self):
        # Flatten the per-node asset lists into a single list.
        assets = []
        for node in self.nodes.values():
            node: AssetTreeNode
            _assets = node.get_assets()
            assets.extend(_assets)
        return assets

    def _uuids_to_string(self, uuids):
        return [ str(u) for u in uuids ]

    def print(self, count=20, simple=True):
        # Debug helper: org header plus the base tree summary.
        print('org_name: ', getattr(self._org, 'name', 'No-org'))
        print(f'asset_category: {self._category}')
        super().print(count=count, simple=simple)

164
apps/assets/tree/tree.py Normal file
View File

@@ -0,0 +1,164 @@
from common.utils import get_logger, lazyproperty
__all__ = ['TreeNode', 'Tree']
logger = get_logger(__name__)
class TreeNode(object):
    """A single node in a key-addressed tree.

    Keys are colon-separated integer segments (e.g. "1:2:3"); a key with
    no colon (all digits) denotes a root node.
    """

    def __init__(self, _id, key, value):
        self.id = _id
        self.key = key
        self.value = value
        self.children = []
        self.parent = None

    @lazyproperty
    def parent_key(self):
        # Roots have no parent; otherwise drop the last key segment.
        if self.is_root:
            return None
        return self.key.rsplit(':', 1)[0]

    @property
    def is_root(self):
        return self.key.isdigit()

    def add_child(self, child_node: 'TreeNode'):
        child_node.parent = self
        self.children.append(child_node)

    def remove_child(self, child_node: 'TreeNode'):
        self.children.remove(child_node)
        child_node.parent = None

    @property
    def is_leaf(self):
        return not self.children

    @lazyproperty
    def level(self):
        # Depth equals the number of key segments.
        return len(self.key.split(':'))

    @property
    def children_count(self):
        return len(self.children)

    def as_dict(self, simple=True):
        if simple:
            return {'key': self.key}
        return {
            'key': self.key,
            'id': self.id,
            'value': self.value,
            'level': self.level,
            'children_count': self.children_count,
            'is_root': self.is_root,
            'is_leaf': self.is_leaf,
        }

    def print(self, simple=True, is_print_keys=False):
        """Print this node as a padded table row; optionally print a header first."""
        info = self.as_dict(simple=simple)

        def as_row(values):
            return ' | '.join(str(v).ljust(25) for v in values)

        if is_print_keys:
            header = as_row(info.keys())
            print('-' * len(header))
            print(header)
            print('-' * len(header))
        row = as_row(info.values())
        print(row)
        print('-' * len(row))
class Tree(object):
    """A tree of TreeNode objects indexed by key.

    Nodes must be added parents-first; add_node wires each node to its
    parent via parent_key.
    """

    def __init__(self):
        self.root = None
        # { key -> TreeNode }  (fixed annotation: was dict[TreeNode])
        self.nodes: dict[str, TreeNode] = {}

    @property
    def size(self):
        """Number of nodes currently indexed."""
        return len(self.nodes)

    @property
    def is_empty(self):
        return self.size == 0

    @property
    def depth(self):
        """Return the maximum node level (0 for an empty tree).

        Also prints the deepest node's key as a debug aid.
        Fixed: the empty case previously returned a (0, 0) tuple while the
        normal case returned an int.
        """
        if self.is_empty:
            return 0
        node = max(self.nodes.values(), key=lambda n: n.level)
        print(f"max_depth_node_key: {node.key}")
        return node.level

    @property
    def width(self):
        """Return the maximum children count of any node (0 for an empty tree).

        Also prints the level of those children as a debug aid.
        Fixed: same tuple-vs-int inconsistency as `depth`.
        """
        if self.is_empty:
            return 0
        node = max(self.nodes.values(), key=lambda n: n.children_count)
        print(f"max_width_level: {node.level + 1}")
        return node.children_count

    def add_node(self, node: TreeNode):
        """Insert *node*; non-root nodes require their parent to exist already.

        :raises ValueError: when the parent key is not in the tree.
        """
        if node.is_root:
            self.root = node
            self.nodes[node.key] = node
            return
        parent = self.get_node(node.parent_key)
        if not parent:
            raise ValueError(
                f'Cannot add node {node.key}: parent key {node.parent_key} not found. '
                'Please ensure parent nodes are added before child nodes.'
            )
        parent.add_child(node)
        self.nodes[node.key] = node

    def get_node(self, key: str) -> TreeNode:
        return self.nodes.get(key)

    def remove_node(self, node: TreeNode):
        """Detach *node* from its parent and drop it from the index.

        NOTE(review): children of the removed node stay in ``self.nodes``
        with a dangling parent pointer — current callers remove whole
        zero-total subtrees, where every descendant is removed too; confirm
        before reusing for arbitrary nodes.
        """
        if node.is_root:
            self.root = None
        else:
            parent: TreeNode = node.parent
            parent.remove_child(node)
        self.nodes.pop(node.key, None)

    def get_nodes(self):
        return list(self.nodes.values())

    def get_node_children(self, key, with_self=False):
        """Return the direct children of the node at *key*; [] if the key is unknown."""
        node = self.get_node(key)
        if not node:
            return []
        nodes = []
        if with_self:
            nodes.append(node)
        nodes.extend(node.children)
        return nodes

    def print(self, count=10, simple=True):
        """Print a diagnostic summary plus the first *count* nodes."""
        print('tree_root_key: ', getattr(self.root, 'key', 'No-root'))
        print('tree_size: ', self.size)
        print('tree_depth: ', self.depth)
        print('tree_width: ', self.width)
        is_print_key = True
        for n in list(self.nodes.values())[:count]:
            n.print(simple=simple, is_print_keys=is_print_key)
            is_print_key = False

View File

@@ -12,7 +12,7 @@ from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_bytes
from django.utils.encoding import force_bytes, smart_bytes
from jwkest import JWKESTException
from jwkest.jwk import KEYS
from jwkest.jws import JWS
@@ -58,7 +58,7 @@ def _get_jwks_keys(shared_key):
# Adds the shared key (which can correspond to the client_secret) as an oct key so it can be
# used for HMAC signatures.
logger.debug(log_prompt.format('Add key'))
jwks_keys.add({'key': force_bytes(shared_key), 'kty': 'oct'})
jwks_keys.add({'key': smart_bytes(shared_key), 'kty': 'oct'})
logger.debug(log_prompt.format('End'))
return jwks_keys

View File

@@ -28,7 +28,7 @@ is_available:
sample: true
'''
import telnetlib3
import telnetlib
from ansible.module_utils.basic import AnsibleModule
@@ -57,9 +57,9 @@ def main():
port = module.params['login_port']
timeout = module.params['timeout']
try:
client = telnetlib3.Telnet(host, port, timeout=timeout)
client = telnetlib.Telnet(host, port, timeout=timeout)
client.close()
except Exception as err: # noqa
except Exception as err: # noqa
result['is_available'] = False
module.fail_json(msg='Unable to connect to asset: %s' % err)

73
apps/perms/tree.py Normal file
View File

@@ -0,0 +1,73 @@
from collections import defaultdict
from django.db.models import Q, Count
from common.utils import get_logger
from users.models import User
from assets.tree.asset_tree import AssetTree, AssetTreeNode
from perms.utils.utils import UserPermUtil
__all__ = ['UserPermTree']
logger = get_logger(__name__)
class PermTreeNode(AssetTreeNode):
    """Asset-tree node annotated with how the user's permission reaches it."""

    class Type:
        # Neither a permission node nor a node with direct permission assets
        BRIDGE = 'bridge'
        # Node with direct permission
        DN = 'dn'
        # Node with only direct permission assets
        DA = 'da'

    def __init__(self, tp, _id, key, value, assets_amount=0, assets=None):
        # Fixed: param renamed from `assets_count` to `assets_amount` —
        # nodes are instantiated via self.TreeNode(**data) and the data dict
        # from AssetTree._get_tree_node_data uses the 'assets_amount' key.
        # Fixed: `assets` was accepted but never forwarded to the parent.
        super().__init__(_id, key, value, assets_amount, assets)
        self.type = tp or self.Type.BRIDGE

    def as_dict(self, simple=True):
        data = super().as_dict(simple=simple)
        data.update({
            'type': self.type,
        })
        return data
class UserPermTree(AssetTree):
    """Asset tree restricted to the assets and nodes *user* is granted."""
    TreeNode = PermTreeNode

    def __init__(self, user=None, assets_q_object=None, category=None, org=None, with_assets=False):
        # Fixed: _user/_util must be assigned BEFORE super().__init__() —
        # AssetTree.__init__ calls build(), which invokes the overridden
        # _make_assets_q_object() / _get_tree_node_data() that read _util.
        # UserPermUtil itself falls back to current_org when org is None.
        self._user: User = user
        self._util = UserPermUtil(user, org=org)
        super().__init__(
            assets_q_object=assets_q_object,
            category=category,
            org=org,
            with_assets=with_assets,
            full_tree=False
        )

    def _make_assets_q_object(self):
        # Restrict the base filter to directly granted assets plus assets
        # under any granted node (or its descendants).
        q = super()._make_assets_q_object()
        q_perm_assets = Q(id__in=self._util._user_direct_asset_ids)
        q_perm_nodes = Q(node_id__in=self._util._user_direct_node_all_children_ids)
        q = q & (q_perm_assets | q_perm_nodes)
        return q

    def _get_tree_node_data(self, node_id):
        data = super()._get_tree_node_data(node_id)
        if node_id in self._util._user_direct_node_all_children_ids:
            tp = PermTreeNode.Type.DN
        elif self._nodes_assets_amount_mapper.get(node_id, 0) > 0:
            # Fixed: the mapper is named _nodes_assets_amount_mapper on
            # AssetTree; `_nodes_assets_count_mapper` did not exist.
            tp = PermTreeNode.Type.DA
        else:
            tp = PermTreeNode.Type.BRIDGE
        data.update({'tp': tp})
        return data

    def print(self, simple=True, count=10):
        # Debug helper: permission summary first, then the tree itself.
        self._util.print()
        super().print(simple=simple, count=count)

138
apps/perms/utils/utils.py Normal file
View File

@@ -0,0 +1,138 @@
from django.db.models import Q
from common.utils import timeit, lazyproperty, get_logger, is_uuid
from orgs.utils import current_org
from users.models import User
from assets.models import Node, Asset
from perms.models import AssetPermission
logger = get_logger(__name__)
__all__ = ['UserPermUtil']
class UserPermUtil(object):
UserGroupThrough = User.groups.through
PermUserThrough = AssetPermission.users.through
PermUserGroupThrough = AssetPermission.user_groups.through
PermAssetThrough = AssetPermission.assets.through
PermNodeThrough = AssetPermission.nodes.through
def __init__(self, user, org=None):
self._user: User = user
self._org = org or current_org
self._user_permission_ids = set()
self._user_group_ids = set()
self._user_group_permission_ids = set()
self._user_all_permission_ids = set()
self._user_direct_asset_ids = set()
self._user_direct_node_ids = set()
self._user_direct_node_all_children_ids = set()
self._init()
def _init(self):
self._load_user_permission_ids()
self._load_user_group_ids()
self._load_user_group_permission_ids()
self._load_user_direct_asset_ids()
self._load_user_direct_node_ids()
self._load_user_direct_node_all_children_ids()
@timeit
def _load_user_permission_ids(self):
perm_ids = self.PermUserThrough.objects.filter(
user_id=self._user.id
).distinct('assetpermission_id').values_list('assetpermission_id', flat=True)
perm_ids = self._uuids_to_string(perm_ids)
self._user_permission_ids.update(perm_ids)
self._user_all_permission_ids.update(perm_ids)
@timeit
def _load_user_group_ids(self):
group_ids = self.UserGroupThrough.objects.filter(
user_id=self._user.id
).distinct('usergroup_id').values_list('usergroup_id', flat=True)
group_ids = self._uuids_to_string(group_ids)
self._user_group_ids.update(group_ids)
@timeit
def _load_user_group_permission_ids(self):
perm_ids = self.PermUserGroupThrough.objects.filter(
usergroup_id__in=self._user_group_ids
).distinct('assetpermission_id').values_list('assetpermission_id', flat=True)
perm_ids = self._uuids_to_string(perm_ids)
self._user_group_permission_ids.update(perm_ids)
self._user_all_permission_ids.update(perm_ids)
@timeit
def _load_user_direct_asset_ids(self):
asset_ids = self.PermAssetThrough.objects.filter(
assetpermission_id__in=self._user_all_permission_ids
).distinct('asset_id').values_list('asset_id', flat=True)
asset_ids = self._uuids_to_string(asset_ids)
self._user_direct_asset_ids.update(asset_ids)
@timeit
def _load_user_direct_node_ids(self):
node_ids = self.PermNodeThrough.objects.filter(
assetpermission_id__in=self._user_all_permission_ids
).distinct('node_id').values_list('node_id', flat=True)
node_ids = self._uuids_to_string(node_ids)
self._user_direct_node_ids.update(node_ids)
@timeit
def _load_user_direct_node_all_children_ids(self):
    """Expand the directly-granted node ids to include every descendant node.

    Scans all nodes of the current org and keeps those whose key has an
    ancestor key among the directly-granted nodes ('a:b:c' has ancestors
    'a' and 'a:b'; the node itself is excluded, but direct node ids are
    added back explicitly at the end).
    """
    nid_key_pairs = Node.objects.filter(org_id=self._org.id).values_list('id', 'key')
    nid_key_mapper = {str(nid): key for nid, key in nid_key_pairs}
    # NOTE(review): assumes every direct node id belongs to self._org —
    # a stale id from another org would raise KeyError here; confirm.
    # Build the ancestor-lookup set ONCE; the original rebuilt set(dn_keys)
    # for every node, making the scan accidentally O(n^2).
    dn_keys = {nid_key_mapper[nid] for nid in self._user_direct_node_ids}

    def has_ancestor_in_direct_nodes(key: str) -> bool:
        # O(depth) membership tests against the prebuilt set.
        parts = key.split(':')
        return any(':'.join(parts[:i]) in dn_keys for i in range(1, len(parts)))

    dn_children_ids = [
        nid for nid, key in nid_key_mapper.items()
        if has_ancestor_in_direct_nodes(key)
    ]
    self._user_direct_node_all_children_ids.update(self._user_direct_node_ids)
    self._user_direct_node_all_children_ids.update(dn_children_ids)
def get_node_assets(self, node: Node):
    """Directly-attached authorized assets under *node* (Luna tree expand)."""
    fully_granted = str(node.id) in self._user_direct_node_all_children_ids
    query = Q(node_id=node.id)
    if not fully_granted:
        # Node itself isn't granted, so only directly-authorized assets count.
        query &= Q(id__in=self._user_direct_asset_ids)
    return Asset.objects.filter(query)
def get_node_all_assets(self, node: Node):
    """All authorized assets under *node* and its descendants (used in tests)."""
    if str(node.id) in self._user_direct_node_all_children_ids:
        # Whole subtree is granted: everything under the node is authorized.
        return node.get_all_assets()
    descendant_ids = set(self._uuids_to_string(
        node.get_all_children(with_self=True).values_list('id', flat=True)))
    granted_node_ids = descendant_ids & self._user_direct_node_all_children_ids
    ungranted_node_ids = descendant_ids - granted_node_ids
    # Granted subtrees contribute all assets; other nodes only those assets
    # that are directly authorized.
    query = Q(node_id__in=granted_node_ids)
    query |= Q(node_id__in=ungranted_node_ids) & Q(id__in=self._user_direct_asset_ids)
    return Asset.objects.filter(query)
def _uuids_to_string(self, uuids):
return [ str(u) for u in uuids ]
def print(self):
    """Dump the loaded counters to stdout for debugging/inspection."""
    print('user_perm_tree:', self._user.username)
    print('user_permission_ids_count:', len(self._user_permission_ids))
    print('user_group_ids_count:', len(self._user_group_ids))
    # BUGFIX: previously printed len(_user_permission_ids) - len(_user_group_ids),
    # which is neither the group-permission count nor any meaningful value.
    print('user_group_permission_ids_count:', len(self._user_group_permission_ids))
    print('user_all_permission_ids_count:', len(self._user_all_permission_ids))
    print('user_direct_asset_ids_count:', len(self._user_direct_asset_ids))
    print('user_direct_node_ids_count:', len(self._user_direct_node_ids))
    print('user_direct_node_all_children_ids_count:', len(self._user_direct_node_all_children_ids))

View File

@@ -2,8 +2,7 @@
#
import asyncio
import socket
import telnetlib3
import telnetlib
from settings.utils import generate_ips
@@ -13,7 +12,7 @@ PROMPT_REGEX = r'[\<|\[](.*)[\>|\]]'
async def telnet(dest_addr, port_number=23, timeout=10):
loop = asyncio.get_running_loop()
try:
connection = await loop.run_in_executor(None, telnetlib3.Telnet, dest_addr, port_number, timeout)
connection = await loop.run_in_executor(None, telnetlib.Telnet, dest_addr, port_number, timeout)
except asyncio.TimeoutError:
return False, 'Timeout'
except (ConnectionRefusedError, socket.timeout, socket.gaierror) as e:

View File

@@ -40,11 +40,11 @@ dependencies = [
'pyyaml==6.0.1',
'requests==2.32.4',
'simplejson==3.19.1',
'six==1.17.0',
'six==1.16.0',
'sshtunnel==0.4.0',
'sshpubkeys==3.3.1',
'uritemplate==4.1.1',
'vine==5.1.0',
'vine==5.0.0',
'werkzeug==3.0.6',
'unicodecsv==0.14.1',
'httpsig==1.3.0',
@@ -68,32 +68,32 @@ dependencies = [
'ipip-ipdb==1.6.1',
'pywinrm==0.4.3',
'python-nmap==0.7.1',
'django==5.2.9',
'django==4.1.13',
'django-bootstrap3==23.4',
'django-filter==24.3',
'django-filter==23.2',
'django-formtools==2.5.1',
'django-ranged-response==0.2.0',
'django-simple-captcha==0.5.18',
'django-timezone-field==7.1',
'djangorestframework==3.15.0',
'django-timezone-field==5.1',
'djangorestframework==3.14.0',
'djangorestframework-bulk==0.2.1',
'django-simple-history==3.6.0',
'django-private-storage==3.1.3',
'django-private-storage==3.1',
'drf-nested-routers==0.93.4',
'drf-writable-nested==0.7.0',
'rest-condition==1.0.3',
'drf-spectacular==0.29.0',
'pillow==12.0.0',
'drf-spectacular==0.28.0',
'pillow==10.2.0',
'pytz==2025.2',
'django-proxy==1.2.2',
'python-daemon==3.0.1',
'eventlet==0.40.3',
'greenlet==3.2.4',
'gunicorn==23.0.0',
'celery==5.6.0',
'celery==5.3.1',
'flower==2.0.1',
'django-celery-beat==2.8.1',
'kombu==5.6.0',
'django-celery-beat==2.6.0',
'kombu==5.3.5',
'uvicorn==0.22.0',
'websockets==11.0.3',
'python-ldap==3.4.5',
@@ -103,10 +103,10 @@ dependencies = [
'python-cas==1.6.0',
'django-auth-ldap==4.4.0',
'mysqlclient==2.2.4',
'pymssql==2.3.10',
'django-redis==5.4.0',
'pymssql==2.3.4',
'django-redis==5.3.0',
'python-redis-lock==4.0.0',
'pyopenssl==24.0.0',
'pyopenssl==23.2.0',
'redis',
'pymongo==4.6.3',
'pyfreerdp==0.0.2',
@@ -153,7 +153,6 @@ dependencies = [
'pdf2image==1.17.0',
'drf-spectacular-sidecar==2025.8.1',
"django-oauth-toolkit==2.4.0",
"telnetlib3==2.0.8",
]
[project.urls]

View File

@@ -0,0 +1,358 @@
import os
import sys
import django
import random
from datetime import datetime
if os.path.exists('../../apps'):
sys.path.insert(0, '../../apps')
if os.path.exists('../apps'):
sys.path.insert(0, '../apps')
elif os.path.exists('./apps'):
sys.path.insert(0, './apps')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jumpserver.settings")
django.setup()
from assets.models import Asset, Node
from orgs.models import Organization
from django.db.models import Count
# Report destination (appended to by write_report, recreated by generate_report).
OUTPUT_FILE = 'report_cleanup_and_keep_one_node_for_multi_parent_nodes_assets.txt'
# Special organization IDs and names
# Hard-coded names for built-in orgs; get_org_name() checks these before
# falling back to the Organization rows loaded from the DB.
SPECIAL_ORGS = {
    '00000000-0000-0000-0000-000000000000': 'GLOBAL',
    '00000000-0000-0000-0000-000000000002': 'DEFAULT',
    '00000000-0000-0000-0000-000000000004': 'SYSTEM',
}
# Resolve the M2M "through" model of Asset.nodes; fail fast with a hint if
# the field definition changed (e.g. the relation was removed or renamed).
try:
    AssetNodeThrough = Asset.nodes.through
except Exception as e:
    print("Failed to get AssetNodeThrough model. Check Asset.nodes field definition.")
    raise e
def log(msg=''):
    """Print log with timestamp to console"""
    timestamp = datetime.now().strftime('%H:%M:%S')
    print(f"[{timestamp}] {msg}")
def write_report(content):
    """Append *content* to the report file (created on first write)."""
    with open(OUTPUT_FILE, 'a', encoding='utf-8') as report:
        report.write(content)
def get_org_name(org_id, orgs_map):
    """Resolve an org id to a display name: special orgs first, then *orgs_map*,
    falling back to 'Unknown'."""
    key = str(org_id)
    special = SPECIAL_ORGS.get(key)
    if special is not None:
        return special
    org = orgs_map.get(org_id)
    return org.name if org else 'Unknown'
def find_and_cleanup_multi_parent_assets():
    """Find assets linked to more than one node and choose, per asset, one
    relationship to keep (picked at random) and the rest to remove.

    Returns org_id -> {asset_id -> cleanup-info dict}. An empty dict means
    nothing to clean. Read-only: actual deletion happens in perform_cleanup().
    """
    log("Searching for assets with multiple parent nodes...")
    # Find all asset_ids that belong to multiple node_ids
    multi_parent_assets = AssetNodeThrough.objects.values('asset_id').annotate(
        node_count=Count('node_id', distinct=True)
    ).filter(node_count__gt=1).order_by('-node_count')
    total_count = multi_parent_assets.count()
    log(f"Found {total_count:,} assets with multiple parent nodes\n")
    if total_count == 0:
        log("✓ All assets already have single parent node")
        return {}
    # Collect all asset_ids and node_ids
    asset_ids = [item['asset_id'] for item in multi_parent_assets]
    # Get all through records
    all_through_records = AssetNodeThrough.objects.filter(asset_id__in=asset_ids)
    node_ids = list(set(through.node_id for through in all_through_records))
    # Batch fetch all objects (avoids one query per asset/node below)
    log("Batch loading Asset objects...")
    assets_map = {asset.id: asset for asset in Asset.objects.filter(id__in=asset_ids)}
    log("Batch loading Node objects...")
    nodes_map = {node.id: node for node in Node.objects.filter(id__in=node_ids)}
    # Batch fetch all Organization objects
    # NOTE(review): orgs_map is built but never read in this function —
    # candidate for removal (generate_report() builds its own org map).
    org_ids = list(set(asset.org_id for asset in assets_map.values())) + \
              list(set(node.org_id for node in nodes_map.values()))
    org_ids = list(set(org_ids))
    log("Batch loading Organization objects...")
    orgs_map = {org.id: org for org in Organization.objects.filter(id__in=org_ids)}
    # Build mapping of asset_id -> list of through_records
    asset_nodes_map = {}
    for through in all_through_records:
        if through.asset_id not in asset_nodes_map:
            asset_nodes_map[through.asset_id] = []
        asset_nodes_map[through.asset_id].append(through)
    # Organize by organization
    org_cleanup_data = {}  # org_id -> { asset_id -> { keep_node_id, remove_node_ids } }
    for item in multi_parent_assets:
        asset_id = item['asset_id']
        # Get Asset object
        asset = assets_map.get(asset_id)
        if not asset:
            log(f"⚠ Asset {asset_id} not found in map, skipping")
            continue
        org_id = asset.org_id
        # Initialize org data if not exists
        if org_id not in org_cleanup_data:
            org_cleanup_data[org_id] = {}
        # Get all nodes for this asset
        through_records = asset_nodes_map.get(asset_id, [])
        if len(through_records) < 2:
            continue
        # Randomly select one node to keep; everything else is scheduled
        # for deletion (compared by through-record id, not node id).
        keep_through = random.choice(through_records)
        remove_throughs = [t for t in through_records if t.id != keep_through.id]
        org_cleanup_data[org_id][asset_id] = {
            'asset_name': asset.name,
            'keep_node_id': keep_through.node_id,
            'keep_node': nodes_map.get(keep_through.node_id),
            'remove_records': remove_throughs,
            'remove_nodes': [nodes_map.get(t.node_id) for t in remove_throughs]
        }
    return org_cleanup_data
def perform_cleanup(org_cleanup_data, dry_run=False):
    """Delete the surplus asset-node relationships chosen earlier.

    With dry_run=True nothing is deleted; in both modes the return value is
    the number of relationships (that would be) removed.
    """
    if dry_run:
        log("DRY RUN: Simulating cleanup process (no data will be deleted)...")
    else:
        log("\nStarting cleanup process...")
    total_deleted = 0
    for org_assets in org_cleanup_data.values():
        for cleanup_info in org_assets.values():
            for surplus_record in cleanup_info['remove_records']:
                if not dry_run:
                    surplus_record.delete()
                total_deleted += 1
    return total_deleted
def verify_cleanup():
    """Re-scan the through table to confirm no asset still has >1 parent node.

    Returns True when clean; otherwise logs up to 10 offenders and returns False.
    """
    log("\n" + "="*80)
    log("VERIFICATION: Checking for remaining assets with multiple parent nodes...")
    log("="*80)
    # Same detection query as the cleanup pass: asset_ids linked to more
    # than one distinct node_id.
    multi_parent_assets = AssetNodeThrough.objects.values('asset_id').annotate(
        node_count=Count('node_id', distinct=True)
    ).filter(node_count__gt=1).order_by('-node_count')
    remaining_count = multi_parent_assets.count()
    if remaining_count == 0:
        log(f"✓ Verification successful: No assets with multiple parent nodes remaining\n")
        return True
    log(f"✗ Verification failed: Found {remaining_count:,} assets still with multiple parent nodes\n")
    # Show some details (first 10 only)
    for item in multi_parent_assets[:10]:
        asset_id = item['asset_id']
        node_count = item['node_count']
        try:
            asset = Asset.objects.get(id=asset_id)
            log(f" - Asset: {asset.name} ({asset_id}) has {node_count} parent nodes")
        except Exception:
            # BUGFIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed so those propagate.
            # Any lookup failure falls back to printing the raw id.
            log(f" - Asset ID: {asset_id} has {node_count} parent nodes")
    if remaining_count > 10:
        log(f" ... and {remaining_count - 10} more")
    return False
def generate_report(org_cleanup_data, total_deleted):
    """Write the cleanup report (overview, per-org summary, per-asset detail)
    to OUTPUT_FILE, replacing any previous report file."""
    # Clear previous report
    if os.path.exists(OUTPUT_FILE):
        os.remove(OUTPUT_FILE)
    # Write header
    write_report(f"Multi-Parent Assets Cleanup Report\n")
    write_report(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
    write_report(f"{'='*80}\n\n")
    # Get all organizations that appear in the cleanup data
    all_org_ids = list(set(org_id for org_id in org_cleanup_data.keys()))
    all_orgs = {org.id: org for org in Organization.objects.filter(id__in=all_org_ids)}
    # Calculate statistics
    total_orgs = Organization.objects.count()
    orgs_processed = len(org_cleanup_data)
    orgs_no_issues = total_orgs - orgs_processed
    total_assets_cleaned = sum(len(assets) for assets in org_cleanup_data.values())
    # Overview
    write_report("OVERVIEW\n")
    write_report(f"{'-'*80}\n")
    write_report(f"Total organizations: {total_orgs:,}\n")
    write_report(f"Organizations processed: {orgs_processed:,}\n")
    write_report(f"Organizations without issues: {orgs_no_issues:,}\n")
    write_report(f"Total assets cleaned: {total_assets_cleaned:,}\n")
    # NOTE(review): counted at report time — when called after the real
    # cleanup this reflects the post-deletion relationship count.
    total_relationships = AssetNodeThrough.objects.count()
    write_report(f"Total relationships (through records): {total_relationships:,}\n")
    write_report(f"Total relationships deleted: {total_deleted:,}\n\n")
    # Summary by organization
    write_report("Summary by Organization:\n")
    for org_id in sorted(org_cleanup_data.keys()):
        org_name = get_org_name(org_id, all_orgs)
        asset_count = len(org_cleanup_data[org_id])
        write_report(f" - {org_name} ({org_id}): {asset_count:,} assets cleaned\n")
    write_report(f"\n{'='*80}\n\n")
    # Detailed cleanup information grouped by organization
    for org_id in sorted(org_cleanup_data.keys()):
        org_name = get_org_name(org_id, all_orgs)
        asset_count = len(org_cleanup_data[org_id])
        write_report(f"ORGANIZATION: {org_name} ({org_id})\n")
        write_report(f"Total assets cleaned: {asset_count:,}\n")
        write_report(f"{'-'*80}\n\n")
        for asset_id, cleanup_info in org_cleanup_data[org_id].items():
            write_report(f"Asset: {cleanup_info['asset_name']} ({asset_id})\n")
            # Kept node (may be None if the Node row vanished)
            keep_node = cleanup_info['keep_node']
            if keep_node:
                write_report(f" ✓ Kept: {keep_node.name} (key: {keep_node.key}) (id: {keep_node.id})\n")
            else:
                write_report(f" ✓ Kept: Unknown (id: {cleanup_info['keep_node_id']})\n")
            # Removed nodes
            write_report(f" ✗ Removed: {len(cleanup_info['remove_nodes'])} node(s)\n")
            for node in cleanup_info['remove_nodes']:
                if node:
                    write_report(f" - {node.name} (key: {node.key}) (id: {node.id})\n")
                else:
                    write_report(f" - Unknown\n")
            write_report(f"\n")
        write_report(f"{'='*80}\n\n")
    log(f"✓ Report written to {OUTPUT_FILE}")
def main():
    """Interactive entry point: warn, preview (dry run + report), confirm three
    times, then delete surplus relationships and verify.

    Exit codes: 0 = success or cancelled at first prompt, 1 = cancelled at
    final confirmation, 2 = unexpected error.
    """
    try:
        # Display warning banner
        warning_message = """
╔══════════════════════════════════════════════════════════════════════════════╗
║ ⚠️ WARNING ⚠️ ║
║ ║
║ This script is designed for TEST/FAKE DATA ONLY! ║
║ DO NOT run this script in PRODUCTION environment! ║
║ ║
║ This script will DELETE asset-node relationships from the database. ║
║ Use only for data cleanup in development/testing environments. ║
║ ║
╚══════════════════════════════════════════════════════════════════════════════╝
"""
        print(warning_message)
        # Ask user to confirm before proceeding
        confirm = input("Do you understand the warning and want to continue? (yes/no): ").strip().lower()
        if confirm not in ['yes', 'y']:
            log("✗ Operation cancelled by user")
            sys.exit(0)
        log("✓ Proceeding with operation\n")
        org_cleanup_data = find_and_cleanup_multi_parent_assets()
        if not org_cleanup_data:
            log("✓ Cleanup complete, no assets to process")
            sys.exit(0)
        total_assets = sum(len(assets) for assets in org_cleanup_data.values())
        log(f"\nProcessing {total_assets:,} assets across {len(org_cleanup_data):,} organizations...")
        # First, do a dry-run to show what will be deleted
        log("\n" + "="*80)
        log("PREVIEW: Simulating cleanup process...")
        log("="*80)
        total_deleted_preview = perform_cleanup(org_cleanup_data, dry_run=True)
        log(f"✓ Dry-run complete: {total_deleted_preview:,} relationships would be deleted\n")
        # Generate preview report
        generate_report(org_cleanup_data, total_deleted_preview)
        log(f"✓ Preview report written to {OUTPUT_FILE}\n")
        # Ask for confirmation 3 times before actual deletion
        log("="*80)
        log("FINAL CONFIRMATION: Do you want to proceed with actual cleanup?")
        log("="*80)
        confirmation_count = 3
        for attempt in range(1, confirmation_count + 1):
            response = input(f"Confirm cleanup (attempt {attempt}/{confirmation_count})? (yes/no): ").strip().lower()
            if response not in ['yes', 'y']:
                log(f"✗ Cleanup cancelled by user at attempt {attempt}")
                sys.exit(1)
        log("✓ All confirmations received, proceeding with actual cleanup")
        # Perform cleanup (not a dry run this time)
        total_deleted = perform_cleanup(org_cleanup_data)
        log(f"✓ Deleted {total_deleted:,} relationships")
        # Generate final report (overwrites the preview report)
        generate_report(org_cleanup_data, total_deleted)
        # Verify cleanup by checking for remaining multi-parent assets
        verify_cleanup()
        log(f"✓ Cleanup complete: processed {total_assets:,} assets")
        sys.exit(0)
    except Exception as e:
        # sys.exit raises SystemExit (BaseException) and is NOT caught here.
        log(f"✗ Error occurred: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(2)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,270 @@
import os
import sys
import django
from datetime import datetime
if os.path.exists('../../apps'):
sys.path.insert(0, '../../apps')
if os.path.exists('../apps'):
sys.path.insert(0, '../apps')
elif os.path.exists('./apps'):
sys.path.insert(0, './apps')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jumpserver.settings")
django.setup()
from assets.models import Asset, Node
from orgs.models import Organization
from django.db.models import Count
# Report destination (appended to by write_report, recreated by generate_report).
OUTPUT_FILE = 'report_find_multi_parent_nodes_assets.txt'
# Special organization IDs and names
# Hard-coded names for built-in orgs; get_org_name() checks these before
# falling back to the Organization rows loaded from the DB.
SPECIAL_ORGS = {
    '00000000-0000-0000-0000-000000000000': 'GLOBAL',
    '00000000-0000-0000-0000-000000000002': 'DEFAULT',
    '00000000-0000-0000-0000-000000000004': 'SYSTEM',
}
# Resolve the M2M "through" model of Asset.nodes; fail fast with a hint if
# the field definition changed (e.g. the relation was removed or renamed).
try:
    AssetNodeThrough = Asset.nodes.through
except Exception as e:
    print("Failed to get AssetNodeThrough model. Check Asset.nodes field definition.")
    raise e
def log(msg=''):
    """Print log with timestamp"""
    now = datetime.now().strftime('%H:%M:%S')
    print(f"[{now}] {msg}")
def get_org_name(org_id, orgs_map):
    """Map an organization id to its display name.

    Special built-in orgs are resolved first, then *orgs_map*; anything
    unresolved yields 'Unknown'.
    """
    special = SPECIAL_ORGS.get(str(org_id))
    if special is not None:
        return special
    org = orgs_map.get(org_id)
    if org:
        return org.name
    return 'Unknown'
def write_report(content):
    """Append *content* to OUTPUT_FILE (created on first write)."""
    with open(OUTPUT_FILE, 'a', encoding='utf-8') as fh:
        fh.write(content)
def find_assets_multiple_parents():
    """Find assets belonging to multiple node_ids organized by organization.

    Returns org_id -> {node_count -> [asset detail dicts]}; an empty dict
    means every asset has at most one parent node. Read-only: nothing is
    modified in the database.
    """
    log("Searching for assets with multiple parent nodes...")
    # Find all asset_ids that belong to multiple node_ids
    multi_parent_assets = AssetNodeThrough.objects.values('asset_id').annotate(
        node_count=Count('node_id', distinct=True)
    ).filter(node_count__gt=1).order_by('-node_count')
    total_count = multi_parent_assets.count()
    log(f"Found {total_count:,} assets with multiple parent nodes\n")
    if total_count == 0:
        log("✓ All assets belong to only one node")
        return {}
    # Collect all asset_ids and node_ids that need to be fetched
    asset_ids = [item['asset_id'] for item in multi_parent_assets]
    # Get all through records for these assets
    all_through_records = AssetNodeThrough.objects.filter(asset_id__in=asset_ids)
    node_ids = list(set(through.node_id for through in all_through_records))
    # Batch fetch all Asset and Node objects (avoids per-row queries below)
    log("Batch loading Asset objects...")
    assets_map = {asset.id: asset for asset in Asset.objects.filter(id__in=asset_ids)}
    log("Batch loading Node objects...")
    nodes_map = {node.id: node for node in Node.objects.filter(id__in=node_ids)}
    # Batch fetch all Organization objects
    # NOTE(review): orgs_map is built but never read below — candidate for
    # removal (generate_report() builds its own org map).
    org_ids = list(set(asset.org_id for asset in assets_map.values())) + \
              list(set(node.org_id for node in nodes_map.values()))
    org_ids = list(set(org_ids))  # Remove duplicates
    log("Batch loading Organization objects...")
    orgs_map = {org.id: org for org in Organization.objects.filter(id__in=org_ids)}
    # Build mapping of asset_id -> list of through_records
    asset_nodes_map = {}
    for through in all_through_records:
        if through.asset_id not in asset_nodes_map:
            asset_nodes_map[through.asset_id] = []
        asset_nodes_map[through.asset_id].append(through)
    # Organize by organization first, then by node count, then by asset
    org_assets_data = {}  # org_id -> { node_count -> [asset_data] }
    for item in multi_parent_assets:
        asset_id = item['asset_id']
        node_count = item['node_count']
        # Get Asset object from map
        asset = assets_map.get(asset_id)
        if not asset:
            log(f"⚠ Asset {asset_id} not found in map, skipping")
            continue
        org_id = asset.org_id
        # Initialize org data if not exists
        if org_id not in org_assets_data:
            org_assets_data[org_id] = {}
        # Get all nodes for this asset
        through_records = asset_nodes_map.get(asset_id, [])
        node_details = []
        for through in through_records:
            # Get Node object from map
            node = nodes_map.get(through.node_id)
            if not node:
                log(f"⚠ Node {through.node_id} not found in map, skipping")
                continue
            node_details.append({
                'id': node.id,
                'name': node.name,
                'key': node.key,
                # full_value may not exist on every Node version; fall back to ''
                'path': node.full_value if hasattr(node, 'full_value') else ''
            })
        if not node_details:
            continue
        if node_count not in org_assets_data[org_id]:
            org_assets_data[org_id][node_count] = []
        org_assets_data[org_id][node_count].append({
            'asset_id': asset.id,
            'asset_name': asset.name,
            'nodes': node_details
        })
    return org_assets_data
def generate_report(org_assets_data):
    """Write the detection report (overview, per-org summary, detail sections
    grouped by parent-node count) to OUTPUT_FILE, replacing any previous one."""
    # Clear previous report
    if os.path.exists(OUTPUT_FILE):
        os.remove(OUTPUT_FILE)
    # Write header
    write_report(f"Multi-Parent Assets Report\n")
    write_report(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
    write_report(f"{'='*80}\n\n")
    # Get all organizations that appear in the detection data
    all_org_ids = list(set(org_id for org_id in org_assets_data.keys()))
    all_orgs = {org.id: org for org in Organization.objects.filter(id__in=all_org_ids)}
    # Calculate statistics
    total_orgs = Organization.objects.count()
    orgs_with_issues = len(org_assets_data)
    orgs_without_issues = total_orgs - orgs_with_issues
    total_assets_with_issues = sum(
        len(assets)
        for org_id in org_assets_data
        for assets in org_assets_data[org_id].values()
    )
    # Overview
    write_report("OVERVIEW\n")
    write_report(f"{'-'*80}\n")
    write_report(f"Total organizations: {total_orgs:,}\n")
    write_report(f"Organizations with multiple-parent assets: {orgs_with_issues:,}\n")
    write_report(f"Organizations without issues: {orgs_without_issues:,}\n")
    write_report(f"Total assets with multiple parent nodes: {total_assets_with_issues:,}\n\n")
    # Summary by organization
    write_report("Summary by Organization:\n")
    for org_id in sorted(org_assets_data.keys()):
        org_name = get_org_name(org_id, all_orgs)
        org_asset_count = sum(
            len(assets)
            for assets in org_assets_data[org_id].values()
        )
        write_report(f" - {org_name} ({org_id}): {org_asset_count:,} assets\n")
    write_report(f"\n{'='*80}\n\n")
    # Detailed sections grouped by organization, then node count
    for org_id in sorted(org_assets_data.keys()):
        org_name = get_org_name(org_id, all_orgs)
        org_asset_count = sum(
            len(assets)
            for assets in org_assets_data[org_id].values()
        )
        write_report(f"ORGANIZATION: {org_name} ({org_id})\n")
        write_report(f"Total assets with issues: {org_asset_count:,}\n")
        write_report(f"{'-'*80}\n\n")
        # Group by node count within this organization (largest first)
        for node_count in sorted(org_assets_data[org_id].keys(), reverse=True):
            assets = org_assets_data[org_id][node_count]
            write_report(f" Section: {node_count} Parent Nodes ({len(assets):,} assets)\n")
            write_report(f" {'-'*76}\n\n")
            for asset in assets:
                write_report(f" {asset['asset_name']} ({asset['asset_id']})\n")
                for node in asset['nodes']:
                    write_report(f" {node['name']} ({node['key']}) ({node['path']}) ({node['id']})\n")
                write_report(f"\n")
            write_report(f"\n")
        write_report(f"{'='*80}\n\n")
    log(f"✓ Report written to {OUTPUT_FILE}")
def main():
    """Entry point: detect multi-parent assets and write the report.

    Exit codes: 0 = clean, 1 = issues found (report written), 2 = error.
    """
    try:
        org_assets_data = find_assets_multiple_parents()
        if not org_assets_data:
            log("✓ Detection complete, no issues found")
            sys.exit(0)
        # Total assets across all orgs and node-count buckets
        total_assets = sum(
            len(assets)
            for org_id in org_assets_data
            for assets in org_assets_data[org_id].values()
        )
        log(f"Generating report for {total_assets:,} assets across {len(org_assets_data):,} organizations...")
        generate_report(org_assets_data)
        log(f"✗ Detected {total_assets:,} assets with multiple parent nodes")
        sys.exit(1)
    except Exception as e:
        # sys.exit raises SystemExit (BaseException) and is NOT caught here.
        log(f"✗ Error occurred: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(2)


if __name__ == "__main__":
    main()

View File

@@ -3,7 +3,7 @@ from random import choice
import forgery_py
from assets.const import AllTypes
from assets.const import AllTypes, Category
from assets.models import *
from .base import FakeDataGenerator
@@ -48,12 +48,12 @@ class AssetsGenerator(FakeDataGenerator):
def pre_generate(self):
self.node_ids = list(Node.objects.all().values_list('id', flat=True))
self.platform_ids = list(Platform.objects.filter(category='host').values_list('id', flat=True))
self.platform_ids = list(Platform.objects.filter(category=Category.DATABASE).values_list('id', flat=True))
def set_assets_nodes(self, assets):
for asset in assets:
nodes_id_add_to = random.sample(self.node_ids, 3)
asset.nodes.add(*nodes_id_add_to)
nodes_id_add_to = random.choice(self.node_ids)
asset.node_id = nodes_id_add_to
def do_generate(self, batch, batch_size):
assets = []

4483
uv.lock generated

File diff suppressed because it is too large Load Diff