perf: move storage sdk to core (#14318)

* perf: move storage sdk to core

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: Bai <baijiangjie@gmail.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
fit2bot
2024-10-22 15:20:10 +08:00
committed by GitHub
parent 8434d8d5ba
commit cc1fcd2b98
17 changed files with 976 additions and 345 deletions

View File

@@ -1,9 +1,9 @@
 import os
-import jms_storage
 from django.conf import settings
 from django.core.files.storage import default_storage
+from common.storage import jms_storage
 from common.utils import get_logger, make_dirs
 from terminal.models import ReplayStorage

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2018
#
__version__ = '0.0.59'
from .ftp import FTPStorage
from .oss import OSSStorage
from .obs import OBSStorage
from .s3 import S3Storage
from .azure import AzureStorage
from .ceph import CEPHStorage
from .jms import JMSReplayStorage, JMSCommandStorage
from .multi import MultiObjectStorage
from .sftp import SFTPStorage
def get_object_storage(config):
if config.get("TYPE") in ["s3", "ceph", "swift", "cos"]:
return S3Storage(config)
elif config.get("TYPE") == "oss":
return OSSStorage(config)
elif config.get("TYPE") == "server":
return JMSReplayStorage(config)
elif config.get("TYPE") == "azure":
return AzureStorage(config)
elif config.get("TYPE") == "ceph":
return CEPHStorage(config)
elif config.get("TYPE") == "ftp":
return FTPStorage(config)
elif config.get("TYPE") == "obs":
return OBSStorage(config)
elif config.get("TYPE") == "sftp":
return SFTPStorage(config)
else:
return JMSReplayStorage(config)
def get_multi_object_storage(configs):
return MultiObjectStorage(configs)
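
For reference, a minimal usage sketch of the factory above. It assumes the package is importable as common.storage.jms_storage, matching the changed import in the first hunk; the bucket, region, and credential values are placeholders. Note that a TYPE of "ceph" matches the first branch and is served by S3Storage, so the later CEPHStorage branch is unreachable through this factory.

# Sketch only: dispatch a config dict to a concrete backend via the factory.
from common.storage import jms_storage

config = {
    "TYPE": "s3",                    # s3/ceph/swift/cos -> S3Storage; oss, obs, azure, ftp, sftp, server -> other backends
    "BUCKET": "jumpserver-replay",   # values below are placeholders
    "REGION": "us-east-1",
    "ACCESS_KEY": "xxx",
    "SECRET_KEY": "xxx",
    "ENDPOINT": "https://s3.amazonaws.com",
}

storage = jms_storage.get_object_storage(config)
ok, err = storage.upload("/tmp/replay.tar", "replay/2024/10/session.tar")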

View File

@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
#
import os
from azure.storage.blob import BlobServiceClient
from .base import ObjectStorage
class AzureStorage(ObjectStorage):
def __init__(self, config):
self.account_name = config.get("ACCOUNT_NAME", None)
self.account_key = config.get("ACCOUNT_KEY", None)
self.container_name = config.get("CONTAINER_NAME", None)
self.endpoint_suffix = config.get("ENDPOINT_SUFFIX", 'core.chinacloudapi.cn')
if self.account_name and self.account_key:
self.service_client = BlobServiceClient(
account_url=f'https://{self.account_name}.blob.{self.endpoint_suffix}',
credential={'account_name': self.account_name, 'account_key': self.account_key}
)
self.client = self.service_client.get_container_client(self.container_name)
else:
self.client = None
def upload(self, src, target):
try:
self.client.upload_blob(target, src)
return True, None
except Exception as e:
return False, e
def download(self, src, target):
try:
blob_data = self.client.download_blob(blob=src)
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
with open(target, 'wb') as writer:
writer.write(blob_data.readall())
return True, None
except Exception as e:
return False, e
def delete(self, path):
try:
self.client.delete_blob(path)
            return True, None
except Exception as e:
return False, e
def exists(self, path):
resp = self.client.list_blobs(name_starts_with=path)
return len(list(resp)) != 0
def list_buckets(self):
return list(self.service_client.list_containers())
@property
def type(self):
return 'azure'
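
A short sketch of using AzureStorage directly with the config keys its constructor reads; the account, key, and container values are placeholders, and ENDPOINT_SUFFIX defaults to core.chinacloudapi.cn when omitted.

# Sketch only: AzureStorage with placeholder credentials.
from common.storage.jms_storage import AzureStorage

storage = AzureStorage({
    "ACCOUNT_NAME": "myaccount",
    "ACCOUNT_KEY": "base64-account-key",
    "CONTAINER_NAME": "replays",
    "ENDPOINT_SUFFIX": "core.windows.net",
})
ok, err = storage.upload("/tmp/replay.tar", "replay/2024/10/session.tar")
if not ok:
    print("upload failed:", err)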

View File

@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
#
import abc
class ObjectStorage(metaclass=abc.ABCMeta):
@abc.abstractmethod
def upload(self, src, target):
return None, None
@abc.abstractmethod
def download(self, src, target):
pass
@abc.abstractmethod
def delete(self, path):
pass
@abc.abstractmethod
def exists(self, path):
pass
def is_valid(self, src, target):
ok, msg = self.upload(src=src, target=target)
if not ok:
return False
self.delete(path=target)
return True
class LogStorage(metaclass=abc.ABCMeta):
@abc.abstractmethod
def save(self, command):
pass
@abc.abstractmethod
def bulk_save(self, command_set, raise_on_error=True):
pass
@abc.abstractmethod
def filter(self, date_from=None, date_to=None,
user=None, asset=None, account=None,
input=None, session=None):
pass
@abc.abstractmethod
def count(self, date_from=None, date_to=None,
user=None, asset=None, account=None,
input=None, session=None):
pass
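
To illustrate the contract defined above, here is a minimal local-filesystem backend implementing the four abstract methods with the (ok, error) return convention used by the concrete backends in this package. It is illustrative only and not part of the SDK; the module path in the import is assumed from the relative imports used elsewhere in the package.

# Illustrative only: a hypothetical LocalStorage backend built on ObjectStorage.
import os
import shutil

from common.storage.jms_storage.base import ObjectStorage  # assumed module path

class LocalStorage(ObjectStorage):
    def __init__(self, config):
        self.root = config.get("ROOT", "/tmp/storage")

    def upload(self, src, target):
        try:
            dst = os.path.join(self.root, target)
            os.makedirs(os.path.dirname(dst), 0o755, exist_ok=True)
            shutil.copyfile(src, dst)
            return True, None
        except Exception as e:
            return False, e

    def download(self, src, target):
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            shutil.copyfile(os.path.join(self.root, src), target)
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        try:
            os.remove(os.path.join(self.root, path))
            return True, None
        except Exception as e:
            return False, e

    def exists(self, path):
        return os.path.exists(os.path.join(self.root, path))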

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
#
import os
import boto
import boto.s3.connection
from .base import ObjectStorage
class CEPHStorage(ObjectStorage):
def __init__(self, config):
self.bucket = config.get("BUCKET", None)
self.region = config.get("REGION", None)
self.access_key = config.get("ACCESS_KEY", None)
self.secret_key = config.get("SECRET_KEY", None)
self.hostname = config.get("HOSTNAME", None)
self.port = config.get("PORT", 7480)
if self.hostname and self.access_key and self.secret_key:
self.conn = boto.connect_s3(
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key,
host=self.hostname,
port=self.port,
is_secure=False,
calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)
try:
self.client = self.conn.get_bucket(bucket_name=self.bucket)
except Exception:
self.client = None
def upload(self, src, target):
try:
key = self.client.new_key(target)
key.set_contents_from_filename(src)
return True, None
except Exception as e:
return False, e
def download(self, src, target):
try:
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
key = self.client.get_key(src)
key.get_contents_to_filename(target)
return True, None
except Exception as e:
return False, e
def delete(self, path):
try:
self.client.delete_key(path)
return True, None
except Exception as e:
return False, e
def exists(self, path):
try:
return self.client.get_key(path)
except Exception:
return False
@property
def type(self):
return 'ceph'
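
A minimal sketch of CEPHStorage pointed at a RADOS Gateway, using the config keys its constructor reads; hostname and credentials are placeholders, and the port defaults to 7480.

# Sketch only: CEPHStorage via boto against a RADOS Gateway.
from common.storage.jms_storage import CEPHStorage

storage = CEPHStorage({
    "BUCKET": "jumpserver-replay",
    "HOSTNAME": "rgw.example.com",
    "PORT": 7480,
    "ACCESS_KEY": "xxx",
    "SECRET_KEY": "xxx",
})
ok, err = storage.upload("/tmp/replay.tar", "replay/2024/10/session.tar")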

View File

@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*-
#
import os
from ftplib import FTP, error_perm
from .base import ObjectStorage
class FTPStorage(ObjectStorage):
def __init__(self, config):
self.host = config.get("HOST", None)
self.port = int(config.get("PORT", 21))
self.username = config.get("USERNAME", None)
self.password = config.get("PASSWORD", None)
self.pasv = bool(config.get("PASV", False))
self.dir = config.get("DIR", "replay")
self.client = FTP()
self.client.encoding = 'utf-8'
self.client.set_pasv(self.pasv)
self.pwd = '.'
self.connect()
def connect(self, timeout=-999, source_address=None):
self.client.connect(self.host, self.port, timeout, source_address)
self.client.login(self.username, self.password)
if not self.check_dir_exist(self.dir):
self.mkdir(self.dir)
self.client.cwd(self.dir)
self.pwd = self.client.pwd()
def confirm_connected(self):
try:
self.client.pwd()
except Exception:
self.connect()
def upload(self, src, target):
self.confirm_connected()
target_dir = os.path.dirname(target)
exist = self.check_dir_exist(target_dir)
if not exist:
ok = self.mkdir(target_dir)
if not ok:
                raise PermissionError('Dir create error: %s' % target_dir)
try:
with open(src, 'rb') as f:
self.client.storbinary('STOR '+target, f)
return True, None
except Exception as e:
return False, e
def download(self, src, target):
self.confirm_connected()
try:
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
with open(target, 'wb') as f:
self.client.retrbinary('RETR ' + src, f.write)
return True, None
except Exception as e:
return False, e
def delete(self, path):
self.confirm_connected()
if not self.exists(path):
raise FileNotFoundError('File not exist error(%s)' % path)
try:
self.client.delete(path)
return True, None
except Exception as e:
return False, e
def check_dir_exist(self, d):
pwd = self.client.pwd()
try:
self.client.cwd(d)
self.client.cwd(pwd)
return True
except error_perm:
return False
def mkdir(self, dirs):
self.confirm_connected()
        # Create nested directories: ftplib cannot create a multi-level path in one call
dir_list = dirs.split('/')
pwd = self.client.pwd()
try:
for d in dir_list:
if not d or d in ['.']:
continue
                # Try to change into the directory first
try:
self.client.cwd(d)
continue
                except Exception:
                    pass
                # cwd failed: create the directory, then change into it
try:
self.client.mkd(d)
self.client.cwd(d)
                except Exception:
                    return False
return True
finally:
self.client.cwd(pwd)
def exists(self, target):
self.confirm_connected()
try:
self.client.size(target)
return True
        except Exception:
            return False
def close(self):
self.client.close()
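
A minimal sketch of FTPStorage; the constructor connects immediately and changes into DIR, so the host and credentials below (placeholders) must be reachable at construction time.

# Sketch only: FTPStorage with passive mode and placeholder credentials.
from common.storage.jms_storage import FTPStorage

storage = FTPStorage({
    "HOST": "ftp.example.com",
    "PORT": 21,
    "USERNAME": "jumpserver",
    "PASSWORD": "secret",
    "PASV": True,
    "DIR": "replay",
})
ok, err = storage.upload("/tmp/replay.tar", "2024/10/session.tar")
storage.close()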

View File

@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
#
import os
from .base import ObjectStorage, LogStorage
class JMSReplayStorage(ObjectStorage):
def __init__(self, config):
self.client = config.get("SERVICE")
def upload(self, src, target):
session_id = os.path.basename(target).split('.')[0]
ok = self.client.push_session_replay(src, session_id)
return ok, None
def delete(self, path):
        return False, Exception("Not supported")
def exists(self, path):
return False
def download(self, src, target):
        return False, Exception("Not supported")
@property
def type(self):
return 'jms'
class JMSCommandStorage(LogStorage):
def __init__(self, config):
self.client = config.get("SERVICE")
if not self.client:
raise Exception("Not found app service")
def save(self, command):
return self.client.push_session_command([command])
def bulk_save(self, command_set, raise_on_error=True):
return self.client.push_session_command(command_set)
def filter(self, date_from=None, date_to=None,
user=None, asset=None, account=None,
input=None, session=None):
pass
def count(self, date_from=None, date_to=None,
user=None, asset=None, account=None,
input=None, session=None):
pass

View File

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
#
from .base import ObjectStorage, LogStorage
class MultiObjectStorage(ObjectStorage):
def __init__(self, configs):
self.configs = configs
self.storage_list = []
self.init_storage_list()
def init_storage_list(self):
from . import get_object_storage
if isinstance(self.configs, dict):
configs = self.configs.values()
else:
configs = self.configs
for config in configs:
try:
storage = get_object_storage(config)
self.storage_list.append(storage)
except Exception:
pass
def upload(self, src, target):
success = []
msg = []
for storage in self.storage_list:
ok, err = storage.upload(src, target)
success.append(ok)
msg.append(err)
return success, msg
def download(self, src, target):
success = False
msg = None
for storage in self.storage_list:
try:
if not storage.exists(src):
continue
ok, msg = storage.download(src, target)
if ok:
success = True
msg = ''
break
            except Exception:
                pass
return success, msg
def delete(self, path):
success = True
msg = None
for storage in self.storage_list:
try:
if storage.exists(path):
ok, msg = storage.delete(path)
if not ok:
success = False
            except Exception:
                pass
return success, msg
def exists(self, path):
for storage in self.storage_list:
try:
if storage.exists(path):
return True
            except Exception:
                pass
return False
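
A minimal sketch of fanning out to several backends through MultiObjectStorage: upload returns one (ok, error) pair per backend, while download and exists stop at the first backend that holds the object. All values below are placeholders.

# Sketch only: multiple backends behind one MultiObjectStorage.
from common.storage import jms_storage

configs = {
    "s3-primary": {
        "TYPE": "s3", "BUCKET": "jumpserver-replay", "REGION": "us-east-1",
        "ACCESS_KEY": "xxx", "SECRET_KEY": "xxx", "ENDPOINT": None,
    },
    "ftp-backup": {
        "TYPE": "ftp", "HOST": "ftp.example.com", "PORT": 21,
        "USERNAME": "jumpserver", "PASSWORD": "secret", "DIR": "replay",
    },
}

multi = jms_storage.get_multi_object_storage(configs)
oks, errs = multi.upload("/tmp/replay.tar", "2024/10/session.tar")
found = multi.exists("2024/10/session.tar")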

View File

@@ -0,0 +1,70 @@
# -*- coding: utf-8 -*-
#
import os
from obs.client import ObsClient
from .base import ObjectStorage
class OBSStorage(ObjectStorage):
def __init__(self, config):
self.endpoint = config.get("ENDPOINT", None)
self.bucket = config.get("BUCKET", None)
self.access_key = config.get("ACCESS_KEY", None)
self.secret_key = config.get("SECRET_KEY", None)
if self.access_key and self.secret_key and self.endpoint:
proxy_host = os.getenv("proxy_host")
proxy_port = os.getenv("proxy_port")
proxy_username = os.getenv("proxy_username")
proxy_password = os.getenv("proxy_password")
            self.obsClient = ObsClient(
                access_key_id=self.access_key, secret_access_key=self.secret_key,
                server=self.endpoint, proxy_host=proxy_host, proxy_port=proxy_port,
                proxy_username=proxy_username, proxy_password=proxy_password
            )
else:
self.obsClient = None
def upload(self, src, target):
try:
resp = self.obsClient.putFile(self.bucket, target, src)
if resp.status < 300:
return True, None
else:
return False, resp.reason
except Exception as e:
return False, e
def exists(self, path):
resp = self.obsClient.getObjectMetadata(self.bucket, path)
if resp.status < 300:
return True
return False
def delete(self, path):
try:
resp = self.obsClient.deleteObject(self.bucket, path)
if resp.status < 300:
return True, None
else:
return False, resp.reason
except Exception as e:
return False, e
def download(self, src, target):
try:
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
resp = self.obsClient.getObject(self.bucket, src, target)
if resp.status < 300:
return True, None
else:
return False, resp.reason
except Exception as e:
return False, e
def list_buckets(self):
resp = self.obsClient.listBuckets()
if resp.status < 300:
return [b.name for b in resp.body.buckets]
else:
raise RuntimeError(resp.status, str(resp.reason))
@property
def type(self):
return 'obs'
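
A minimal sketch of OBSStorage with the keys its constructor reads; the endpoint and credentials are placeholders, and any proxy_host/proxy_port/proxy_username/proxy_password environment variables are picked up automatically.

# Sketch only: OBSStorage against Huawei OBS with placeholder values.
from common.storage.jms_storage import OBSStorage

storage = OBSStorage({
    "ENDPOINT": "https://obs.cn-north-4.myhuaweicloud.com",
    "BUCKET": "jumpserver-replay",
    "ACCESS_KEY": "xxx",
    "SECRET_KEY": "xxx",
})
ok, err = storage.upload("/tmp/replay.tar", "replay/2024/10/session.tar")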

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-
#
import os
import time
import oss2
from .base import ObjectStorage
class OSSStorage(ObjectStorage):
def __init__(self, config):
self.endpoint = config.get("ENDPOINT", None)
self.bucket = config.get("BUCKET", None)
self.access_key = config.get("ACCESS_KEY", None)
self.secret_key = config.get("SECRET_KEY", None)
if self.access_key and self.secret_key:
self.auth = oss2.Auth(self.access_key, self.secret_key)
else:
self.auth = None
if self.auth and self.endpoint and self.bucket:
self.client = oss2.Bucket(self.auth, self.endpoint, self.bucket)
else:
self.client = None
def upload(self, src, target):
try:
self.client.put_object_from_file(target, src)
return True, None
except Exception as e:
return False, e
def exists(self, path):
try:
return self.client.object_exists(path)
        except Exception:
return False
def delete(self, path):
try:
self.client.delete_object(path)
return True, None
except Exception as e:
return False, e
def restore(self, path):
meta = self.client.head_object(path)
if meta.resp.headers['x-oss-storage-class'] == oss2.BUCKET_STORAGE_CLASS_ARCHIVE:
self.client.restore_object(path)
while True:
meta = self.client.head_object(path)
if meta.resp.headers['x-oss-restore'] == 'ongoing-request="true"':
time.sleep(5)
else:
break
def download(self, src, target):
try:
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
self.restore(src)
self.client.get_object_to_file(src, target)
return True, None
except Exception as e:
return False, e
def list_buckets(self):
        service = oss2.Service(self.auth, self.endpoint)
        return [b.name for b in oss2.BucketIterator(service)]
@property
def type(self):
return 'oss'
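
A minimal sketch of OSSStorage; note that download() calls restore() first, so an object in the Archive storage class is thawed (polling every 5 seconds) before it is fetched. Endpoint and credentials are placeholders.

# Sketch only: OSSStorage with placeholder Alibaba Cloud credentials.
from common.storage.jms_storage import OSSStorage

storage = OSSStorage({
    "ENDPOINT": "https://oss-cn-hangzhou.aliyuncs.com",
    "BUCKET": "jumpserver-replay",
    "ACCESS_KEY": "xxx",
    "SECRET_KEY": "xxx",
})
ok, err = storage.download("replay/2024/10/session.tar", "/tmp/session.tar")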

View File

@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
#
import os
import boto3
from .base import ObjectStorage
class S3Storage(ObjectStorage):
def __init__(self, config):
self.bucket = config.get("BUCKET", "jumpserver")
self.region = config.get("REGION", None)
self.access_key = config.get("ACCESS_KEY", None)
self.secret_key = config.get("SECRET_KEY", None)
self.endpoint = config.get("ENDPOINT", None)
try:
self.client = boto3.client(
's3', region_name=self.region,
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key,
endpoint_url=self.endpoint
)
except ValueError:
pass
def upload(self, src, target):
try:
self.client.upload_file(Filename=src, Bucket=self.bucket, Key=target)
return True, None
except Exception as e:
return False, e
def exists(self, path):
try:
self.client.head_object(Bucket=self.bucket, Key=path)
return True
        except Exception:
return False
def download(self, src, target):
try:
os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
self.client.download_file(self.bucket, src, target)
return True, None
except Exception as e:
return False, e
def delete(self, path):
try:
self.client.delete_object(Bucket=self.bucket, Key=path)
return True, None
except Exception as e:
return False, e
def generate_presigned_url(self, path, expire=3600):
try:
return self.client.generate_presigned_url(
ClientMethod='get_object',
Params={'Bucket': self.bucket, 'Key': path},
ExpiresIn=expire,
HttpMethod='GET'), None
except Exception as e:
return False, e
def list_buckets(self):
response = self.client.list_buckets()
buckets = response.get('Buckets', [])
result = [b['Name'] for b in buckets if b.get('Name')]
return result
@property
def type(self):
return 's3'
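
A minimal sketch of S3Storage, including the presigned-URL helper, which returns a (url, error) pair; bucket, region, and credentials are placeholders.

# Sketch only: S3Storage upload plus a short-lived presigned GET URL.
from common.storage.jms_storage import S3Storage

storage = S3Storage({
    "BUCKET": "jumpserver-replay",
    "REGION": "us-east-1",
    "ACCESS_KEY": "xxx",
    "SECRET_KEY": "xxx",
    "ENDPOINT": "https://s3.amazonaws.com",
})
ok, err = storage.upload("/tmp/replay.tar", "replay/2024/10/session.tar")
url, err = storage.generate_presigned_url("replay/2024/10/session.tar", expire=600)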

View File

@@ -0,0 +1,107 @@
# -*- coding: utf-8 -*-
import os
import io
import paramiko
from .base import ObjectStorage
class SFTPStorage(ObjectStorage):
def __init__(self, config):
self.sftp = None
self.sftp_host = config.get('SFTP_HOST', None)
self.sftp_port = int(config.get('SFTP_PORT', 22))
self.sftp_username = config.get('SFTP_USERNAME', '')
self.sftp_secret_type = config.get('STP_SECRET_TYPE', 'password')
self.sftp_password = config.get('SFTP_PASSWORD', '')
self.sftp_private_key = config.get('STP_PRIVATE_KEY', '')
self.sftp_passphrase = config.get('STP_PASSPHRASE', '')
self.sftp_root_path = config.get('SFTP_ROOT_PATH', '/tmp')
self.ssh = paramiko.SSHClient()
self.connect()
def connect(self):
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if self.sftp_secret_type == 'password':
self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, self.sftp_password)
elif self.sftp_secret_type == 'ssh_key':
pkey = paramiko.RSAKey.from_private_key(io.StringIO(self.sftp_private_key))
self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, pkey=pkey,
passphrase=self.sftp_passphrase)
self.sftp = self.ssh.open_sftp()
def confirm_connected(self):
try:
self.sftp.getcwd()
except Exception as e:
self.connect()
def upload(self, src, target):
local_file = src
remote_file = os.path.join(self.sftp_root_path, target)
try:
self.confirm_connected()
mode = os.stat(local_file).st_mode
remote_dir = os.path.dirname(remote_file)
if not self.exists(remote_dir):
self.sftp.mkdir(remote_dir)
self.sftp.put(local_file, remote_file)
self.sftp.chmod(remote_file, mode)
return True, None
except Exception as e:
return False, e
def download(self, src, target):
remote_file = src
local_file = target
self.confirm_connected()
try:
local_dir = os.path.dirname(local_file)
if not os.path.exists(local_dir):
os.makedirs(local_dir)
mode = self.sftp.stat(remote_file).st_mode
self.sftp.get(remote_file, local_file)
os.chmod(local_file, mode)
return True, None
except Exception as e:
return False, e
def delete(self, path):
path = os.path.join(self.sftp_root_path, path)
self.confirm_connected()
if not self.exists(path):
raise FileNotFoundError('File not exist error(%s)' % path)
try:
self.sftp.remove(path)
return True, None
except Exception as e:
return False, e
def check_dir_exist(self, d):
self.confirm_connected()
try:
self.sftp.stat(d)
return True
except Exception:
return False
def mkdir(self, dirs):
self.confirm_connected()
try:
if not self.exists(dirs):
self.sftp.mkdir(dirs)
return True, None
except Exception as e:
return False, e
def exists(self, target):
self.confirm_connected()
try:
self.sftp.stat(target)
return True
        except Exception:
            return False
def close(self):
self.sftp.close()
self.ssh.close()
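
Finally, a minimal sketch of SFTPStorage with password authentication, using the config keys read in its constructor (the key-based settings use the STP_-prefixed names exactly as written above); host and credentials are placeholders, and targets are stored under SFTP_ROOT_PATH.

# Sketch only: SFTPStorage over password auth with placeholder values.
from common.storage.jms_storage import SFTPStorage

storage = SFTPStorage({
    "SFTP_HOST": "sftp.example.com",
    "SFTP_PORT": 22,
    "SFTP_USERNAME": "jumpserver",
    "SFTP_PASSWORD": "secret",
    "SFTP_ROOT_PATH": "/data/replay",
})
ok, err = storage.upload("/tmp/replay.tar", "2024/session.tar")
storage.close()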