mirror of https://github.com/jumpserver/jumpserver.git

merge: with v3
@@ -1,325 +1,144 @@
-# ~*~ coding: utf-8 ~*~
-
-import datetime
-import json
-import os
-from collections import defaultdict
-
-import ansible.constants as C
-from ansible.plugins.callback import CallbackBase
-from ansible.plugins.callback.default import CallbackModule
-from ansible.plugins.callback.minimal import CallbackModule as CMDCallBackModule
-
-from common.utils.strings import safe_str
-
-
-class CallbackMixin:
-    def __init__(self, display=None):
-        # results_raw example: {
-        #     "ok": {"hostname": {"task_name": {}, ...}, ...},
-        #     "failed": {"hostname": {"task_name": {}, ...}, ...},
-        #     "unreachable": {"hostname": {"task_name": {}, ...}, ...},
-        #     "skipped": {"hostname": {"task_name": {}, ...}, ...},
-        # }
-        # results_summary example: {
-        #     "contacted": {"hostname": {"task_name": {}}, "hostname": {}},
-        #     "dark": {"hostname": {"task_name": {}, "task_name": {}}, ...},
-        #     "success": True
-        # }
-        self.results_raw = dict(
-            ok=defaultdict(dict),
-            failed=defaultdict(dict),
-            unreachable=defaultdict(dict),
-            skipped=defaultdict(dict),
-        )
-        self.results_summary = dict(
-            contacted=defaultdict(dict),
-            dark=defaultdict(dict),
-            success=True
-        )
-        self.results = {
-            'raw': self.results_raw,
-            'summary': self.results_summary,
-        }
-        super().__init__()
-        if display:
-            self._display = display
-
-        cols = os.environ.get("TERM_COLS", None)
-        self._display.columns = 79
-        if cols and cols.isdigit():
-            self._display.columns = int(cols) - 1
-
-    def display(self, msg):
-        self._display.display(msg)
-
-    def gather_result(self, t, result):
-        self._clean_results(result._result, result._task.action)
-        host = result._host.get_name()
-        task_name = result.task_name
-        task_result = result._result
-
-        self.results_raw[t][host][task_name] = task_result
-        self.clean_result(t, host, task_name, task_result)
-
-    def close(self):
-        if hasattr(self._display, 'close'):
-            self._display.close()
-
-
-class AdHocResultCallback(CallbackMixin, CallbackModule, CMDCallBackModule):
-    """
-    Task result Callback
-    """
-    context = None
-
-    def clean_result(self, t, host, task_name, task_result):
-        contacted = self.results_summary["contacted"]
-        dark = self.results_summary["dark"]
-
-        if task_result.get('rc') is not None:
-            cmd = task_result.get('cmd')
-            if isinstance(cmd, list):
-                cmd = " ".join(cmd)
-            else:
-                cmd = str(cmd)
-            detail = {
-                'cmd': cmd,
-                'stderr': task_result.get('stderr'),
-                'stdout': safe_str(str(task_result.get('stdout', ''))),
-                'rc': task_result.get('rc'),
-                'delta': task_result.get('delta'),
-                'msg': task_result.get('msg', '')
-            }
-        else:
-            detail = {
-                "changed": task_result.get('changed', False),
-                "msg": task_result.get('msg', '')
-            }
-
-        if t in ("ok", "skipped"):
-            contacted[host][task_name] = detail
-        else:
-            dark[host][task_name] = detail
-
-    def v2_runner_on_failed(self, result, ignore_errors=False):
-        self.results_summary['success'] = False
-        self.gather_result("failed", result)
-
-        if result._task.action in C.MODULE_NO_JSON:
-            CMDCallBackModule.v2_runner_on_failed(
-                self, result, ignore_errors=ignore_errors
-            )
-        else:
-            super().v2_runner_on_failed(
-                result, ignore_errors=ignore_errors
-            )
-
-    def v2_runner_on_ok(self, result):
-        self.gather_result("ok", result)
-        if result._task.action in C.MODULE_NO_JSON:
-            CMDCallBackModule.v2_runner_on_ok(self, result)
-        else:
-            super().v2_runner_on_ok(result)
-
-    def v2_runner_on_skipped(self, result):
-        self.gather_result("skipped", result)
-        super().v2_runner_on_skipped(result)
-
-    def v2_runner_on_unreachable(self, result):
-        self.results_summary['success'] = False
-        self.gather_result("unreachable", result)
-        super().v2_runner_on_unreachable(result)
-
-    def v2_runner_on_start(self, *args, **kwargs):
-        pass
-
-    def display_skipped_hosts(self):
-        pass
-
-    def display_ok_hosts(self):
-        pass
-
-    def display_failed_stderr(self):
-        pass
-
-    def set_play_context(self, context):
-        # for k, v in context._attributes.items():
-        #     print("{} ==> {}".format(k, v))
-        if self.context and isinstance(self.context, dict):
-            for k, v in self.context.items():
-                setattr(context, k, v)
-
-
-class CommandResultCallback(AdHocResultCallback):
-    """
-    Command result callback
-
-    results_command: {
-        "cmd": "",
-        "stderr": "",
-        "stdout": "",
-        "rc": 0,
-        "delta": 0:0:0.123
-    }
-    """
-    def __init__(self, display=None, **kwargs):
-        self.results_command = dict()
-        super().__init__(display)
-
-    def gather_result(self, t, res):
-        super().gather_result(t, res)
-        self.gather_cmd(t, res)
-
-    def v2_playbook_on_play_start(self, play):
-        now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-        msg = '$ {} ({})'.format(play.name, now)
-        self._play = play
-        self._display.banner(msg)
-
-    def v2_runner_on_unreachable(self, result):
-        self.results_summary['success'] = False
-        self.gather_result("unreachable", result)
-        msg = result._result.get("msg")
-        if not msg:
-            msg = json.dumps(result._result, indent=4)
-        self._display.display("%s | FAILED! => \n%s" % (
-            result._host.get_name(),
-            msg,
-        ), color=C.COLOR_ERROR)
-
-    def v2_runner_on_failed(self, result, ignore_errors=False):
-        self.results_summary['success'] = False
-        self.gather_result("failed", result)
-        msg = result._result.get("msg", '')
-        stderr = result._result.get("stderr")
-        if stderr:
-            msg += '\n' + stderr
-        module_stdout = result._result.get("module_stdout")
-        if module_stdout:
-            msg += '\n' + module_stdout
-        if not msg:
-            msg = json.dumps(result._result, indent=4)
-        self._display.display("%s | FAILED! => \n%s" % (
-            result._host.get_name(),
-            msg,
-        ), color=C.COLOR_ERROR)
-
-    def v2_playbook_on_stats(self, stats):
-        pass
-
-    def _print_task_banner(self, task):
-        pass
-
-    def gather_cmd(self, t, res):
-        host = res._host.get_name()
-        cmd = {}
-        if t == "ok":
-            cmd['cmd'] = res._result.get('cmd')
-            cmd['stderr'] = res._result.get('stderr')
-            cmd['stdout'] = safe_str(str(res._result.get('stdout', '')))
-            cmd['rc'] = res._result.get('rc')
-            cmd['delta'] = res._result.get('delta')
-        else:
-            cmd['err'] = "Error: {}".format(res)
-        self.results_command[host] = cmd
-
-
-class PlaybookResultCallBack(CallbackBase):
-    """
-    Custom callback model for handling the output data of
-    executing a playbook file,
-    based on the built-in callback plugin of ansible named `json`.
-    """
-
-    CALLBACK_VERSION = 2.0
-    CALLBACK_TYPE = 'stdout'
-    CALLBACK_NAME = 'Dict'
-
-    def __init__(self, display=None):
-        super(PlaybookResultCallBack, self).__init__(display)
-        self.results = []
-        self.output = ""
-        self.item_results = {}  # {"host": []}
-
-    def _new_play(self, play):
-        return {
-            'play': {
-                'name': play.name,
-                'id': str(play._uuid)
-            },
-            'tasks': []
-        }
-
-    def _new_task(self, task):
-        return {
-            'task': {
-                'name': task.get_name(),
-            },
-            'hosts': {}
-        }
-
-    def v2_playbook_on_no_hosts_matched(self):
-        self.output = "skipping: No match hosts."
-
-    def v2_playbook_on_no_hosts_remaining(self):
-        pass
-
-    def v2_playbook_on_task_start(self, task, is_conditional):
-        self.results[-1]['tasks'].append(self._new_task(task))
-
-    def v2_playbook_on_play_start(self, play):
-        self.results.append(self._new_play(play))
-
-    def v2_playbook_on_stats(self, stats):
-        hosts = sorted(stats.processed.keys())
-        summary = {}
-        for h in hosts:
-            s = stats.summarize(h)
-            summary[h] = s
-
-        if self.output:
-            pass
-        else:
-            self.output = {
-                'plays': self.results,
-                'stats': summary
-            }
-
-    def gather_result(self, res):
-        if res._task.loop and "results" in res._result and res._host.name in self.item_results:
-            res._result.update({"results": self.item_results[res._host.name]})
-            del self.item_results[res._host.name]
-
-        self.results[-1]['tasks'][-1]['hosts'][res._host.name] = res._result
-
-    def v2_runner_on_ok(self, res, **kwargs):
-        if "ansible_facts" in res._result:
-            del res._result["ansible_facts"]
-
-        self.gather_result(res)
-
-    def v2_runner_on_failed(self, res, **kwargs):
-        self.gather_result(res)
-
-    def v2_runner_on_unreachable(self, res, **kwargs):
-        self.gather_result(res)
-
-    def v2_runner_on_skipped(self, res, **kwargs):
-        self.gather_result(res)
-
-    def gather_item_result(self, res):
-        self.item_results.setdefault(res._host.name, []).append(res._result)
-
-    def v2_runner_item_on_ok(self, res):
-        self.gather_item_result(res)
-
-    def v2_runner_item_on_failed(self, res):
-        self.gather_item_result(res)
-
-    def v2_runner_item_on_skipped(self, res):
-        self.gather_item_result(res)
+from collections import defaultdict
+
+
+class DefaultCallback:
+    STATUS_MAPPER = {
+        'successful': 'success',
+        'failure': 'failed',
+        'failed': 'failed',
+        'running': 'running',
+        'pending': 'pending',
+        'unknown': 'unknown'
+    }
+
+    def __init__(self):
+        self.result = dict(
+            ok=defaultdict(dict),
+            failures=defaultdict(dict),
+            dark=defaultdict(dict),
+            skipped=defaultdict(dict),
+        )
+        self.summary = dict(
+            ok=[],
+            failures={},
+            dark={},
+            skipped=[],
+        )
+        self.status = 'running'
+        self.finished = False
+
+    @property
+    def host_results(self):
+        results = {}
+        for state, hosts in self.result.items():
+            for host, items in hosts.items():
+                results[host] = items
+        return results
+
+    def is_success(self):
+        return self.status != 'success'
+
+    def event_handler(self, data, **kwargs):
+        event = data.get('event', None)
+        if not event:
+            return
+        event_data = data.get('event_data', {})
+        host = event_data.get('remote_addr', '')
+        task = event_data.get('task', '')
+        res = event_data.get('res', {})
+        handler = getattr(self, event, self.on_any)
+        handler(event_data, host=host, task=task, res=res)
+
+    def runner_on_ok(self, event_data, host=None, task=None, res=None):
+        detail = {
+            'action': event_data.get('task_action', ''),
+            'res': res,
+            'rc': res.get('rc', 0),
+            'stdout': res.get('stdout', ''),
+        }
+        self.result['ok'][host][task] = detail
+
+    def runner_on_failed(self, event_data, host=None, task=None, res=None, **kwargs):
+        detail = {
+            'action': event_data.get('task_action', ''),
+            'res': res,
+            'rc': res.get('rc', 0),
+            'stdout': res.get('stdout', ''),
+            'stderr': ';'.join([res.get('stderr', ''), res.get('msg', '')]).strip(';')
+        }
+        self.result['failures'][host][task] = detail
+
+    def runner_on_skipped(self, event_data, host=None, task=None, **kwargs):
+        detail = {
+            'action': event_data.get('task_action', ''),
+            'res': {},
+            'rc': 0,
+        }
+        self.result['skipped'][host][task] = detail
+
+    def runner_on_unreachable(self, event_data, host=None, task=None, res=None, **kwargs):
+        detail = {
+            'action': event_data.get('task_action', ''),
+            'res': res,
+            'rc': 255,
+            'stderr': ';'.join([res.get('stderr', ''), res.get('msg', '')]).strip(';')
+        }
+        self.result['dark'][host][task] = detail
+
+    def runner_on_start(self, event_data, **kwargs):
+        pass
+
+    def runner_retry(self, event_data, **kwargs):
+        pass
+
+    def runner_on_file_diff(self, event_data, **kwargs):
+        pass
+
+    def runner_item_on_failed(self, event_data, **kwargs):
+        pass
+
+    def runner_item_on_skipped(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_play_start(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_stats(self, event_data, **kwargs):
+        failed = []
+        for i in ['dark', 'failures']:
+            for host, tasks in self.result[i].items():
+                failed.append(host)
+                error = ''
+                for task, detail in tasks.items():
+                    error += f'{task}: {detail["stderr"]};'
+                self.summary[i][host] = error.strip(';')
+        self.summary['ok'] = list(set(self.result['ok'].keys()) - set(failed))
+        self.summary['skipped'] = list(set(self.result['skipped'].keys()) - set(failed))
+
+    def playbook_on_include(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_notify(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_vars_prompt(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_handler_task_start(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_no_hosts_matched(self, event_data, **kwargs):
+        pass
+
+    def playbook_on_no_hosts_remaining(self, event_data, **kwargs):
+        pass
+
+    def warning(self, event_data, **kwargs):
+        pass
+
+    def on_any(self, event_data, **kwargs):
+        pass
+
+    def status_handler(self, data, **kwargs):
+        status = data.get('status', '')
+        self.status = self.STATUS_MAPPER.get(status, 'unknown')
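For context: the removed classes hook Ansible's in-process callback API (the v2_runner_on_* methods), while the new DefaultCallback is driven by ansible-runner style event dictionaries. A minimal sketch of that flow follows; the event payloads and field values are invented for illustration only:

    cb = DefaultCallback()
    # Hypothetical ansible-runner events: {'event': ..., 'event_data': {...}}
    cb.event_handler({
        'event': 'runner_on_ok',
        'event_data': {
            'remote_addr': 'host1',
            'task': 'Gather facts',
            'task_action': 'setup',
            'res': {'rc': 0, 'stdout': 'ok'},
        },
    })
    cb.event_handler({
        'event': 'runner_on_failed',
        'event_data': {
            'remote_addr': 'host2',
            'task': 'Run command',
            'task_action': 'shell',
            'res': {'rc': 1, 'stderr': 'boom', 'msg': 'non-zero return code'},
        },
    })
    cb.playbook_on_stats({})
    print(cb.summary)  # e.g. {'ok': ['host1'], 'failures': {'host2': 'Run command: boom;non-zero return code'}, ...}
    cb.status_handler({'status': 'failed'})
    print(cb.status)   # 'failed'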
@@ -1,69 +0,0 @@
import errno
import sys
import os

from ansible.utils.display import Display
from ansible.utils.color import stringc
from ansible.utils.singleton import Singleton

from .utils import get_ansible_task_log_path


class UnSingleton(Singleton):
    def __init__(cls, name, bases, dct):
        type.__init__(cls, name, bases, dct)

    def __call__(cls, *args, **kwargs):
        return type.__call__(cls, *args, **kwargs)


class AdHocDisplay(Display, metaclass=UnSingleton):
    def __init__(self, execution_id, verbosity=0):
        super().__init__(verbosity=verbosity)
        if execution_id:
            log_path = get_ansible_task_log_path(execution_id)
        else:
            log_path = os.devnull
        self.log_file = open(log_path, mode='a')

    def close(self):
        self.log_file.close()

    def set_cowsay_info(self):
        # Skip the cowsay check; it would frequently spawn subprocesses.
        return

    def _write_to_screen(self, msg, stderr):
        if not stderr:
            screen = sys.stdout
        else:
            screen = sys.stderr

        screen.write(msg)

        try:
            screen.flush()
        except IOError as e:
            # Ignore EPIPE in case fileobj has been prematurely closed, eg.
            # when piping to "head -n1"
            if e.errno != errno.EPIPE:
                raise

    def _write_to_log_file(self, msg):
        # Do not flush here; the log file does not need to be written that promptly.
        self.log_file.write(msg)

    def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False, newline=True):
        if log_only:
            return

        if color:
            msg = stringc(msg, color)

        if not msg.endswith(u'\n'):
            msg2 = msg + u'\n'
        else:
            msg2 = msg

        self._write_to_log_file(msg2)
        self._write_to_screen(msg2, stderr)
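The point of UnSingleton in this now-deleted file: ansible.utils.display.Display uses a Singleton metaclass, so every Display() call returns the same shared object, while UnSingleton restores normal construction so each ad-hoc execution gets its own display bound to its own log file. A sketch of the difference, assuming the classes above are importable:

    from ansible.utils.display import Display

    d1 = Display()
    d2 = Display()
    assert d1 is d2          # Singleton metaclass: one shared instance

    a1 = AdHocDisplay(execution_id=None)
    a2 = AdHocDisplay(execution_id=None)
    assert a1 is not a2      # UnSingleton: a fresh instance per call
    a1.close()
    a2.close()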
@@ -1,156 +1,211 @@
-# ~*~ coding: utf-8 ~*~
-from ansible.inventory.host import Host
-from ansible.vars.manager import VariableManager
-from ansible.inventory.manager import InventoryManager
-from ansible.parsing.dataloader import DataLoader
-
-__all__ = [
-    'BaseHost', 'BaseInventory'
-]
-
-
-class BaseHost(Host):
-    def __init__(self, host_data):
-        """
-        Initialize the host.
-        :param host_data: {
-            "hostname": "",
-            "ip": "",
-            "port": "",
-            # the fields below are optional
-            "username": "",
-            "password": "",
-            "private_key": "",
-            "become": {
-                "method": "",
-                "user": "",
-                "pass": "",
-            }
-            "groups": [],
-            "vars": {},
-        }
-        """
-        self.host_data = host_data
-        hostname = host_data.get('hostname') or host_data.get('ip')
-        port = host_data.get('port') or 22
-        super().__init__(hostname, port)
-        self.__set_required_variables()
-        self.__set_extra_variables()
-
-    def __set_required_variables(self):
-        host_data = self.host_data
-        self.set_variable('ansible_host', host_data['ip'])
-        self.set_variable('ansible_port', host_data['port'])
-
-        if host_data.get('username'):
-            self.set_variable('ansible_user', host_data['username'])
-
-        # Add password and private key
-        if host_data.get('password'):
-            self.set_variable('ansible_ssh_pass', host_data['password'])
-        if host_data.get('private_key'):
-            self.set_variable('ansible_ssh_private_key_file', host_data['private_key'])
-
-        # Add become support
-        become = host_data.get("become", False)
-        if become:
-            self.set_variable("ansible_become", True)
-            self.set_variable("ansible_become_method", become.get('method', 'sudo'))
-            self.set_variable("ansible_become_user", become.get('user', 'root'))
-            self.set_variable("ansible_become_pass", become.get('pass', ''))
-        else:
-            self.set_variable("ansible_become", False)
-
-    def __set_extra_variables(self):
-        for k, v in self.host_data.get('vars', {}).items():
-            self.set_variable(k, v)
-
-    def __repr__(self):
-        return self.name
-
-
-class BaseInventory(InventoryManager):
-    """
-    Provides methods to generate Ansible inventory objects.
-    """
-    loader_class = DataLoader
-    variable_manager_class = VariableManager
-    host_manager_class = BaseHost
-
-    def __init__(self, host_list=None, group_list=None):
-        """
-        Dynamically builds an Ansible inventory; super().__init__ triggers parsing automatically.
-        host_list: [{
-            "hostname": "",
-            "ip": "",
-            "port": "",
-            "username": "",
-            "password": "",
-            "private_key": "",
-            "become": {
-                "method": "",
-                "user": "",
-                "pass": "",
-            },
-            "groups": [],
-            "vars": {},
-          },
-        ]
-        group_list: [
-            {"name": "", "children": [""]},
-        ]
-        :param host_list:
-        :param group_list
-        """
-        self.host_list = host_list or []
-        self.group_list = group_list or []
-        assert isinstance(host_list, list)
-        self.loader = self.loader_class()
-        self.variable_manager = self.variable_manager_class()
-        super().__init__(self.loader)
-
-    def get_groups(self):
-        return self._inventory.groups
-
-    def get_group(self, name):
-        return self._inventory.groups.get(name, None)
-
-    def get_or_create_group(self, name):
-        group = self.get_group(name)
-        if not group:
-            self.add_group(name)
-            return self.get_or_create_group(name)
-        else:
-            return group
-
-    def parse_groups(self):
-        for g in self.group_list:
-            parent = self.get_or_create_group(g.get("name"))
-            children = [self.get_or_create_group(n) for n in g.get('children', [])]
-            for child in children:
-                parent.add_child_group(child)
-
-    def parse_hosts(self):
-        group_all = self.get_or_create_group('all')
-        ungrouped = self.get_or_create_group('ungrouped')
-        for host_data in self.host_list:
-            host = self.host_manager_class(host_data=host_data)
-            self.hosts[host_data['hostname']] = host
-            groups_data = host_data.get('groups')
-            if groups_data:
-                for group_name in groups_data:
-                    group = self.get_or_create_group(group_name)
-                    group.add_host(host)
-            else:
-                ungrouped.add_host(host)
-            group_all.add_host(host)
-
-    def parse_sources(self, cache=False):
-        self.parse_groups()
-        self.parse_hosts()
-
-    def get_matched_hosts(self, pattern):
-        return self.get_hosts(pattern)
+import json
+import os
+from collections import defaultdict
+
+from django.utils.translation import gettext as _
+
+__all__ = ['JMSInventory']
+
+
+class JMSInventory:
+    def __init__(self, assets, account_policy='privileged_first',
+                 account_prefer='root,Administrator', host_callback=None):
+        """
+        :param assets:
+        :param account_prefer: preferred account usernames; if none match, fall back to account_policy
+        :param account_policy: privileged_only, privileged_first, skip
+        """
+        self.assets = self.clean_assets(assets)
+        self.account_prefer = account_prefer
+        self.account_policy = account_policy
+        self.host_callback = host_callback
+
+    @staticmethod
+    def clean_assets(assets):
+        from assets.models import Asset
+        asset_ids = [asset.id for asset in assets]
+        assets = Asset.objects.filter(id__in=asset_ids, is_active=True) \
+            .prefetch_related('platform', 'domain', 'accounts')
+        return assets
+
+    @staticmethod
+    def group_by_platform(assets):
+        groups = defaultdict(list)
+        for asset in assets:
+            groups[asset.platform].append(asset)
+        return groups
+
+    @staticmethod
+    def make_proxy_command(gateway):
+        proxy_command_list = [
+            "ssh", "-o", "Port={}".format(gateway.port),
+            "-o", "StrictHostKeyChecking=no",
+            "{}@{}".format(gateway.username, gateway.address),
+            "-W", "%h:%p", "-q",
+        ]
+
+        if gateway.password:
+            proxy_command_list.insert(
+                0, "sshpass -p '{}'".format(gateway.password)
+            )
+        if gateway.private_key:
+            proxy_command_list.append("-i {}".format(gateway.private_key_path))
+
+        proxy_command = "'-o ProxyCommand={}'".format(
+            " ".join(proxy_command_list)
+        )
+        return {"ansible_ssh_common_args": proxy_command}
+
+    @staticmethod
+    def make_account_ansible_vars(account):
+        var = {
+            'ansible_user': account.username,
+        }
+        if not account.secret:
+            return var
+        if account.secret_type == 'password':
+            var['ansible_password'] = account.secret
+        elif account.secret_type == 'ssh_key':
+            var['ansible_ssh_private_key_file'] = account.private_key_path
+        return var
+
+    def make_ssh_account_vars(self, host, asset, account, automation, protocols, platform, gateway):
+        if not account:
+            host['error'] = _("No account available")
+            return host
+
+        ssh_protocol_matched = list(filter(lambda x: x.name == 'ssh', protocols))
+        ssh_protocol = ssh_protocol_matched[0] if ssh_protocol_matched else None
+        host['ansible_host'] = asset.address
+        host['ansible_port'] = ssh_protocol.port if ssh_protocol else 22
+
+        su_from = account.su_from
+        if platform.su_enabled and su_from:
+            host.update(self.make_account_ansible_vars(su_from))
+            become_method = 'sudo' if platform.su_method != 'su' else 'su'
+            host['ansible_become'] = True
+            host['ansible_become_method'] = become_method
+            host['ansible_become_user'] = account.username
+            if become_method == 'sudo':
+                host['ansible_become_password'] = su_from.secret
+            else:
+                host['ansible_become_password'] = account.secret
+        else:
+            host.update(self.make_account_ansible_vars(account))
+
+        if gateway:
+            host.update(self.make_proxy_command(gateway))
+
+    def asset_to_host(self, asset, account, automation, protocols, platform):
+        host = {
+            'name': '{}'.format(asset.name),
+            'jms_asset': {
+                'id': str(asset.id), 'name': asset.name, 'address': asset.address,
+                'type': asset.type, 'category': asset.category,
+                'protocol': asset.protocol, 'port': asset.port,
+                'specific': asset.specific,
+                'protocols': [{'name': p.name, 'port': p.port} for p in protocols],
+            },
+            'jms_account': {
+                'id': str(account.id), 'username': account.username,
+                'secret': account.secret, 'secret_type': account.secret_type
+            } if account else None
+        }
+        if host['jms_account'] and asset.platform.type == 'oracle':
+            host['jms_account']['mode'] = 'sysdba' if account.privileged else None
+
+        ansible_config = dict(automation.ansible_config)
+        ansible_connection = ansible_config.get('ansible_connection', 'ssh')
+        host.update(ansible_config)
+
+        gateway = None
+        if asset.domain:
+            gateway = asset.domain.select_gateway()
+
+        if ansible_connection == 'local':
+            if gateway:
+                host['ansible_host'] = gateway.address
+                host['ansible_port'] = gateway.port
+                host['ansible_user'] = gateway.username
+                host['ansible_password'] = gateway.password
+                host['ansible_connection'] = 'smart'
+            else:
+                host['ansible_connection'] = 'local'
+        else:
+            self.make_ssh_account_vars(host, asset, account, automation, protocols, platform, gateway)
+        return host
+
+    def select_account(self, asset):
+        accounts = list(asset.accounts.all())
+        account_selected = None
+        account_usernames = self.account_prefer
+
+        if isinstance(self.account_prefer, str):
+            account_usernames = self.account_prefer.split(',')
+
+        # Prefer accounts whose username is in the provided list
+        if account_usernames:
+            account_matched = list(filter(lambda account: account.username in account_usernames, accounts))
+            account_selected = account_matched[0] if account_matched else None
+
+        if account_selected or self.account_policy == 'skip':
+            return account_selected
+
+        if self.account_policy in ['privileged_only', 'privileged_first']:
+            account_matched = list(filter(lambda account: account.privileged, accounts))
+            account_selected = account_matched[0] if account_matched else None
+
+        if account_selected:
+            return account_selected
+
+        if self.account_policy == 'privileged_first':
+            account_selected = accounts[0] if accounts else None
+        return account_selected
+
+    def generate(self, path_dir):
+        hosts = []
+        platform_assets = self.group_by_platform(self.assets)
+        for platform, assets in platform_assets.items():
+            automation = platform.automation
+
+            for asset in assets:
+                protocols = asset.protocols.all()
+                account = self.select_account(asset)
+                host = self.asset_to_host(asset, account, automation, protocols, platform)
+
+                if not automation.ansible_enabled:
+                    host['error'] = _('Ansible disabled')
+
+                if self.host_callback is not None:
+                    host = self.host_callback(
+                        host, asset=asset, account=account,
+                        platform=platform, automation=automation,
+                        path_dir=path_dir
+                    )
+
+                if isinstance(host, list):
+                    hosts.extend(host)
+                else:
+                    hosts.append(host)
+
+        exclude_hosts = list(filter(lambda x: x.get('error'), hosts))
+        if exclude_hosts:
+            print(_("Skip hosts below:"))
+            for i, host in enumerate(exclude_hosts, start=1):
+                print("{}: [{}] \t{}".format(i, host['name'], host['error']))
+
+        hosts = list(filter(lambda x: not x.get('error'), hosts))
+        data = {'all': {'hosts': {}}}
+        for host in hosts:
+            name = host.pop('name')
+            data['all']['hosts'][name] = host
+        return data
+
+    def write_to_file(self, path):
+        path_dir = os.path.dirname(path)
+        if not os.path.exists(path_dir):
+            os.makedirs(path_dir, 0o700, True)
+        data = self.generate(path_dir)
+        with open(path, 'w') as f:
+            f.write(json.dumps(data, indent=4))
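Putting the pieces together, JMSInventory renders JumpServer assets into a static JSON inventory that ansible or ansible-runner can consume. A minimal usage sketch; the asset iterable, output path, and the callback hook are hypothetical:

    def attach_extra_vars(host, asset=None, **kwargs):
        # Illustrative hook: tag every generated host with its asset category.
        host['jms_category'] = asset.category if asset else ''
        return host

    inventory = JMSInventory(
        assets,                                  # iterable of assets.models.Asset
        account_policy='privileged_first',
        account_prefer='root,Administrator',
        host_callback=attach_extra_vars,
    )
    inventory.write_to_file('/tmp/inventory/hosts.json')
    # The file contains {"all": {"hosts": {"<asset name>": {...ansible vars...}}}}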
apps/ops/ansible/modules/__init__.py | 0 (new file)
apps/ops/ansible/modules/mongodb_ping.py | 126 (new file)
@@ -0,0 +1,126 @@
#!/usr/bin/python

from __future__ import absolute_import, division, print_function

__metaclass__ = type

DOCUMENTATION = '''
---
module: mongodb_ping
short_description: Check remote MongoDB server availability
description:
- Simple module to check remote MongoDB server availability.

requirements:
  - "pymongo"
'''

EXAMPLES = '''
- name: >
    Ping MongoDB server using non-default credentials and SSL
    registering the return values into the result variable for future use
  mongodb_ping:
    login_db: test_db
    login_host: jumpserver
    login_user: jms
    login_password: secret_pass
    ssl: True
    ssl_ca_certs: "/tmp/ca.crt"
    ssl_certfile: "/tmp/tls.key"  # cert and key in one file
    connection_options:
      - "tlsAllowInvalidHostnames=true"
'''

RETURN = '''
is_available:
  description: MongoDB server availability.
  returned: always
  type: bool
  sample: true
server_version:
  description: MongoDB server version.
  returned: always
  type: str
  sample: '4.0.0'
conn_err_msg:
  description: Connection error message.
  returned: always
  type: str
  sample: ''
'''

from pymongo.errors import PyMongoError
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible_collections.community.mongodb.plugins.module_utils.mongodb_common import (
    mongodb_common_argument_spec,
    mongo_auth,
    get_mongodb_client,
)


class MongoDBPing(object):
    def __init__(self, module, client):
        self.module = module
        self.client = client
        self.is_available = False
        self.conn_err_msg = ''
        self.version = ''

    def do(self):
        self.get_mongodb_version()
        return self.is_available, self.version

    def get_err(self):
        return self.conn_err_msg

    def get_mongodb_version(self):
        try:
            server_info = self.client.server_info()
            self.is_available = True
            self.version = server_info.get('version', '')
        except PyMongoError as err:
            self.is_available = False
            self.version = ''
            self.conn_err_msg = err


# =========================================
# Module execution.
#


def main():
    argument_spec = mongodb_common_argument_spec()
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    client = None
    result = {
        'changed': False, 'is_available': False, 'server_version': ''
    }
    try:
        client = get_mongodb_client(module, directConnection=True)
        client = mongo_auth(module, client, directConnection=True)
    except Exception as e:
        module.fail_json(msg='Unable to connect to database: %s' % to_native(e))

    mongodb_ping = MongoDBPing(module, client)
    result["is_available"], result["server_version"] = mongodb_ping.do()
    conn_err_msg = mongodb_ping.get_err()
    if conn_err_msg:
        module.fail_json(msg='Unable to connect to database: %s' % conn_err_msg)

    try:
        client.close()
    except Exception:
        pass

    return module.exit_json(**result)


if __name__ == '__main__':
    main()
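Because the module follows the standard AnsibleModule contract, it can be exercised outside a playbook with Ansible's documented args-file convention. A hedged sketch; the host, credentials, and paths are placeholders, and the community.mongodb collection plus pymongo must be installed for the imports above to resolve:

    import json
    import subprocess

    # AnsibleModule reads its parameters from a JSON file wrapped in
    # ANSIBLE_MODULE_ARGS when invoked directly.
    args = {'ANSIBLE_MODULE_ARGS': {
        'login_host': '127.0.0.1',
        'login_port': 27017,
        'login_user': 'jms',
        'login_password': 'secret_pass',
    }}
    with open('/tmp/mongodb_ping_args.json', 'w') as f:
        json.dump(args, f)

    # Prints a JSON result such as {"changed": false, "is_available": true, ...}
    subprocess.run(['python', 'mongodb_ping.py', '/tmp/mongodb_ping_args.json'])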
apps/ops/ansible/modules/mongodb_user.py | 426 (new file)
@@ -0,0 +1,426 @@
#!/usr/bin/python

# Modified from ansible_collections.community.mongodb.plugins.modules.mongodb_user

from __future__ import absolute_import, division, print_function
__metaclass__ = type


DOCUMENTATION = '''
---
module: mongodb_user
short_description: Adds or removes a user from a MongoDB database
description:
    - Adds or removes a user from a MongoDB database.
version_added: "1.0.0"

extends_documentation_fragment:
  - community.mongodb.login_options
  - community.mongodb.ssl_options

options:
  replica_set:
    description:
    - Replica set to connect to (automatically connects to primary for writes).
    type: str
  database:
    description:
    - The name of the database to add/remove the user from.
    required: true
    type: str
    aliases: [db]
  name:
    description:
    - The name of the user to add or remove.
    required: true
    aliases: [user]
    type: str
  password:
    description:
    - The password to use for the user.
    type: str
    aliases: [pass]
  roles:
    type: list
    elements: raw
    description:
    - >
        The database user roles valid values could either be one or more of the following strings:
        'read', 'readWrite', 'dbAdmin', 'userAdmin', 'clusterAdmin', 'readAnyDatabase', 'readWriteAnyDatabase', 'userAdminAnyDatabase',
        'dbAdminAnyDatabase'
    - "Or the following dictionary '{ db: DATABASE_NAME, role: ROLE_NAME }'."
    - "This param requires pymongo 2.5+. If it is a string, mongodb 2.4+ is also required. If it is a dictionary, mongo 2.6+ is required."
  state:
    description:
    - The database user state.
    default: present
    choices: [absent, present]
    type: str
  update_password:
    default: always
    choices: [always, on_create]
    description:
      - C(always) will always update passwords and cause the module to return changed.
      - C(on_create) will only set the password for newly created users.
      - This must be C(always) to use the localhost exception when adding the first admin user.
      - This option is effectively ignored when using x.509 certs. It is defaulted to 'on_create' to maintain a \
        specific module behaviour when the login_database is '$external'.
    type: str
  create_for_localhost_exception:
    type: path
    description:
      - This parameter is only useful for handling special treatment around the localhost exception.
      - If C(login_user) is defined, then the localhost exception is not active and this parameter has no effect.
      - If this file is NOT present (and C(login_user) is not defined), then touch this file after successfully adding the user.
      - If this file is present (and C(login_user) is not defined), then skip this task.

notes:
    - Requires the pymongo Python package on the remote host, version 2.4.2+. This
      can be installed using pip or the OS package manager. Newer mongo server versions require newer
      pymongo versions. @see http://api.mongodb.org/python/current/installation.html
requirements:
  - "pymongo"
author:
    - "Elliott Foster (@elliotttf)"
    - "Julien Thebault (@Lujeni)"
'''

EXAMPLES = '''
- name: Create 'burgers' database user with name 'bob' and password '12345'.
  community.mongodb.mongodb_user:
    database: burgers
    name: bob
    password: 12345
    state: present

- name: Create a database user via SSL (MongoDB must be compiled with the SSL option and configured properly)
  community.mongodb.mongodb_user:
    database: burgers
    name: bob
    password: 12345
    state: present
    ssl: True

- name: Delete 'burgers' database user with name 'bob'.
  community.mongodb.mongodb_user:
    database: burgers
    name: bob
    state: absent

- name: Define more users with various specific roles (if not defined, no roles is assigned, and the user will be added via pre mongo 2.2 style)
  community.mongodb.mongodb_user:
    database: burgers
    name: ben
    password: 12345
    roles: read
    state: present

- name: Define roles
  community.mongodb.mongodb_user:
    database: burgers
    name: jim
    password: 12345
    roles: readWrite,dbAdmin,userAdmin
    state: present

- name: Define roles
  community.mongodb.mongodb_user:
    database: burgers
    name: joe
    password: 12345
    roles: readWriteAnyDatabase
    state: present

- name: Add a user to database in a replica set, the primary server is automatically discovered and written to
  community.mongodb.mongodb_user:
    database: burgers
    name: bob
    replica_set: belcher
    password: 12345
    roles: readWriteAnyDatabase
    state: present

# add a user 'oplog_reader' with read only access to the 'local' database on the replica_set 'belcher'. This is useful for oplog access (MONGO_OPLOG_URL).
# please notice the credentials must be added to the 'admin' database because the 'local' database is not synchronized and can't receive user credentials
# To login with such user, the connection string should be MONGO_OPLOG_URL="mongodb://oplog_reader:oplog_reader_password@server1,server2/local?authSource=admin"
# This syntax requires mongodb 2.6+ and pymongo 2.5+
- name: Roles as a dictionary
  community.mongodb.mongodb_user:
    login_user: root
    login_password: root_password
    database: admin
    user: oplog_reader
    password: oplog_reader_password
    state: present
    replica_set: belcher
    roles:
      - db: local
        role: read

- name: Adding a user with X.509 Member Authentication
  community.mongodb.mongodb_user:
    login_host: "mongodb-host.test"
    login_port: 27001
    login_database: "$external"
    database: "admin"
    name: "admin"
    password: "test"
    roles:
      - dbAdminAnyDatabase
    ssl: true
    ssl_ca_certs: "/tmp/ca.crt"
    ssl_certfile: "/tmp/tls.key"  # cert and key in one file
    state: present
    auth_mechanism: "MONGODB-X509"
    connection_options:
      - "tlsAllowInvalidHostnames=true"
'''

RETURN = '''
user:
  description: The name of the user to add or remove.
  returned: success
  type: str
'''

import os
import traceback
from operator import itemgetter


from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils._text import to_native, to_bytes
from ansible_collections.community.mongodb.plugins.module_utils.mongodb_common import (
    missing_required_lib,
    mongodb_common_argument_spec,
    mongo_auth,
    PYMONGO_IMP_ERR,
    pymongo_found,
    get_mongodb_client,
)


def user_find(client, user, db_name):
    """Check if the user exists.

    Args:
        client (cursor): Mongodb cursor on admin database.
        user (str): User to check.
        db_name (str): User's database.

    Returns:
        dict: when user exists, False otherwise.
    """
    try:
        for mongo_user in client[db_name].command('usersInfo')['users']:
            if mongo_user['user'] == user:
                # NOTE: there is no 'db' field in mongo 2.4.
                if 'db' not in mongo_user:
                    return mongo_user
                # Workaround to make the condition works with AWS DocumentDB,
                # since all users are in the admin database.
                if mongo_user["db"] in [db_name, "admin"]:
                    return mongo_user
    except Exception as excep:
        if hasattr(excep, 'code') and excep.code == 11:  # 11=UserNotFound
            pass  # Allow return False
        else:
            raise
    return False


def user_add(module, client, db_name, user, password, roles):
    # pymongo's user_add is a _create_or_update_user so we won't know if it was changed or updated
    # without reproducing a lot of the logic in database.py of pymongo
    db = client[db_name]

    try:
        exists = user_find(client, user, db_name)
    except Exception as excep:
        # We get this exception: "not authorized on admin to execute command"
        # when auth is enabled on a new instance. The localhost exception should
        # allow us to create the first user. If the localhost exception does not apply,
        # then user creation will also fail with unauthorized. So, ignore Unauthorized here.
        if hasattr(excep, 'code') and excep.code == 13:  # 13=Unauthorized
            exists = False
        else:
            raise

    if exists:
        user_add_db_command = 'updateUser'
        if not roles:
            roles = None
    else:
        user_add_db_command = 'createUser'

    user_dict = {}

    if password is not None:
        user_dict["pwd"] = password
    if roles is not None:
        user_dict["roles"] = roles

    db.command(user_add_db_command, user, **user_dict)


def user_remove(module, client, db_name, user):
    exists = user_find(client, user, db_name)
    if exists:
        if module.check_mode:
            module.exit_json(changed=True, user=user)
        db = client[db_name]
        db.command("dropUser", user)
    else:
        module.exit_json(changed=False, user=user)


def check_if_roles_changed(uinfo, roles, db_name):
    # We must be aware of users which can read the oplog on a replicaset
    # Such users must have access to the local DB, but since this DB does not store users credentials
    # and is not synchronized among replica sets, the user must be stored on the admin db
    # Therefore their structure is the following :
    # {
    #     "_id" : "admin.oplog_reader",
    #     "user" : "oplog_reader",
    #     "db" : "admin",                    # <-- admin DB
    #     "roles" : [
    #         {
    #             "role" : "read",
    #             "db" : "local"             # <-- local DB
    #         }
    #     ]
    # }

    def make_sure_roles_are_a_list_of_dict(roles, db_name):
        output = list()
        for role in roles:
            if isinstance(role, (binary_type, text_type)):
                new_role = {"role": role, "db": db_name}
                output.append(new_role)
            else:
                output.append(role)
        return output

    roles_as_list_of_dict = make_sure_roles_are_a_list_of_dict(roles, db_name)
    uinfo_roles = uinfo.get('roles', [])

    if sorted(roles_as_list_of_dict, key=itemgetter('db')) == sorted(uinfo_roles, key=itemgetter('db')):
        return False
    return True


# =========================================
# Module execution.
#

def main():
    argument_spec = mongodb_common_argument_spec()
    argument_spec.update(
        database=dict(required=True, aliases=['db']),
        name=dict(required=True, aliases=['user']),
        password=dict(aliases=['pass'], no_log=True),
        replica_set=dict(default=None),
        roles=dict(default=None, type='list', elements='raw'),
        state=dict(default='present', choices=['absent', 'present']),
        update_password=dict(default="always", choices=["always", "on_create"], no_log=False),
        create_for_localhost_exception=dict(default=None, type='path'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    login_user = module.params['login_user']

    # Certs don't have a password but we want this module behaviour
    if module.params['login_database'] == '$external':
        module.params['update_password'] = 'on_create'

    if not pymongo_found:
        module.fail_json(msg=missing_required_lib('pymongo'),
                         exception=PYMONGO_IMP_ERR)

    create_for_localhost_exception = module.params['create_for_localhost_exception']
    b_create_for_localhost_exception = (
        to_bytes(create_for_localhost_exception, errors='surrogate_or_strict')
        if create_for_localhost_exception is not None else None
    )

    db_name = module.params['database']
    user = module.params['name']
    password = module.params['password']
    roles = module.params['roles'] or []
    state = module.params['state']
    update_password = module.params['update_password']

    try:
        directConnection = False
        if module.params['replica_set'] is None:
            directConnection = True
        client = get_mongodb_client(module, directConnection=directConnection)
        client = mongo_auth(module, client, directConnection=directConnection)
    except Exception as e:
        module.fail_json(msg='Unable to connect to database: %s' % to_native(e))

    if state == 'present':
        if password is None and update_password == 'always':
            module.fail_json(msg='password parameter required when adding a user unless update_password is set to on_create')

        if login_user is None and create_for_localhost_exception is not None:
            if os.path.exists(b_create_for_localhost_exception):
                try:
                    client.close()
                except Exception:
                    pass
                module.exit_json(changed=False, user=user, skipped=True, msg="The path in create_for_localhost_exception exists.")

        try:
            if update_password != 'always':
                uinfo = user_find(client, user, db_name)
                if uinfo:
                    password = None
                    if not check_if_roles_changed(uinfo, roles, db_name):
                        module.exit_json(changed=False, user=user)

            if module.check_mode:
                module.exit_json(changed=True, user=user)
            user_add(module, client, db_name, user, password, roles)
        except Exception as e:
            module.fail_json(msg='Unable to add or update user: %s' % to_native(e), exception=traceback.format_exc())
        finally:
            try:
                client.close()
            except Exception:
                pass
        # Here we can check password change if mongo provide a query for that : https://jira.mongodb.org/browse/SERVER-22848
        # newuinfo = user_find(client, user, db_name)
        # if uinfo['role'] == newuinfo['role'] and CheckPasswordHere:
        #     module.exit_json(changed=False, user=user)

        if login_user is None and create_for_localhost_exception is not None:
            # localhost exception applied.
            try:
                # touch the file
                open(b_create_for_localhost_exception, 'wb').close()
            except Exception as e:
                module.fail_json(
                    changed=True,
                    msg='Added user but unable to touch create_for_localhost_exception file %s: %s' % (create_for_localhost_exception, to_native(e)),
                    exception=traceback.format_exc()
                )

    elif state == 'absent':
        try:
            user_remove(module, client, db_name, user)
        except Exception as e:
            module.fail_json(msg='Unable to remove user: %s' % to_native(e), exception=traceback.format_exc())
        finally:
            try:
                client.close()
            except Exception:
                pass
    module.exit_json(changed=True, user=user)


if __name__ == '__main__':
    main()
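The role comparison above normalizes string roles into {role, db} dictionaries before comparing against the server's usersInfo output, so the two spellings of the same role compare equal. A small illustration of check_if_roles_changed under that normalization:

    # usersInfo-style record as returned by the server.
    uinfo = {'roles': [{'role': 'readWrite', 'db': 'burgers'}]}

    # Same role expressed as a bare string: no change detected.
    assert check_if_roles_changed(uinfo, ['readWrite'], 'burgers') is False

    # Different role (or different db): an update is required.
    assert check_if_roles_changed(uinfo, ['read'], 'burgers') is True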
apps/ops/ansible/modules/oracle_info.py | 261 (new file)
@@ -0,0 +1,261 @@
#!/usr/bin/python

from __future__ import absolute_import, division, print_function
__metaclass__ = type

DOCUMENTATION = r'''
---
module: oracle_info
short_description: Gather information about Oracle servers
description:
- Gathers information about Oracle servers.

options:
  filter:
    description:
    - Limit the collected information by comma separated string or YAML list.
    - Allowable values are C(version), C(databases), C(settings), C(users).
    - By default, collects all subsets.
    - You can use '!' before value (for example, C(!users)) to exclude it from the information.
    - If you pass including and excluding values to the filter, for example, I(filter=!settings,version),
      the excluding values, C(!settings) in this case, will be ignored.
    type: list
    elements: str
  login_db:
    description:
    - Database name to connect to.
    - It makes sense if I(login_user) is allowed to connect to a specific database only.
    type: str
  exclude_fields:
    description:
    - List of fields which are not needed to collect.
    - "Supports elements: C(db_size). Unsupported elements will be ignored."
    type: list
    elements: str
'''

EXAMPLES = r'''
- name: Get Oracle version with non-default credentials
  oracle_info:
    login_user: mysuperuser
    login_password: mysuperpass
    login_database: service_name
    filter: version

- name: Collect all info except settings and users by sys
  oracle_info:
    login_user: sys
    login_password: sys_pass
    login_database: service_name
    filter: "!settings,!users"
    exclude_fields: db_size
'''

RETURN = r'''
version:
  description: Database server version.
  returned: if not excluded by filter
  type: dict
  sample: { "version": {"full": "11.2.0.1.0"} }
  contains:
    full:
      description: Full server version.
      returned: if not excluded by filter
      type: str
      sample: "11.2.0.1.0"
databases:
  description: Information about databases.
  returned: if not excluded by filter
  type: dict
  sample:
  - { "USERS": { "size": 5242880 }, "EXAMPLE": { "size": 104857600 } }
  contains:
    size:
      description: Database size in bytes.
      returned: if not excluded by filter
      type: dict
      sample: { 'size': 656594 }
settings:
  description: Global settings (variables) information.
  returned: if not excluded by filter
  type: dict
  sample:
  - { "result_cache_mode": "MANUAL", "instance_type": "RDBMS" }
users:
  description: Users information.
  returned: if not excluded by filter
  type: dict
  sample:
  - { "USERS": { "TEST": { "USERNAME": "TEST", "ACCOUNT_STATUS": "OPEN" } } }
'''

from ansible.module_utils.basic import AnsibleModule

from ops.ansible.modules_utils.oracle_common import (
    OracleClient, oracle_common_argument_spec
)


class OracleInfo(object):
    def __init__(self, module, oracle_client):
        self.module = module
        self.oracle_client = oracle_client
        self.info = {
            'version': {}, 'databases': {},
            'settings': {}, 'users': {},
        }

    def get_info(self, filter_, exclude_fields):
        include_list = []
        exclude_list = []

        if filter_:
            partial_info = {}

            for fi in filter_:
                if fi.lstrip('!') not in self.info:
                    self.module.warn('filter element: %s is not allowable, ignored' % fi)
                    continue

                if fi[0] == '!':
                    exclude_list.append(fi.lstrip('!'))
                else:
                    include_list.append(fi)

            if include_list:
                self.__collect(exclude_fields, set(include_list))

                for i in self.info:
                    if i in include_list:
                        partial_info[i] = self.info[i]
            else:
                not_in_exclude_list = list(set(self.info) - set(exclude_list))
                self.__collect(exclude_fields, set(not_in_exclude_list))

                for i in self.info:
                    if i not in exclude_list:
                        partial_info[i] = self.info[i]
            return partial_info
        else:
            self.__collect(exclude_fields, set(self.info))
            return self.info

    def __collect(self, exclude_fields, wanted):
        """Collect all possible subsets."""
        if 'version' in wanted:
            self.__get_version()

        if 'settings' in wanted:
            self.__get_settings()

        if 'databases' in wanted:
            self.__get_databases(exclude_fields)

        if 'users' in wanted:
            self.__get_users()

    def __get_version(self):
        version_sql = 'SELECT VERSION FROM PRODUCT_COMPONENT_VERSION where ROWNUM=1'
        rtn, err = self.oracle_client.execute(version_sql, exception_to_fail=True)
        self.info['version'] = {'full': rtn.get('version')}

    def __get_settings(self):
        """Get global variables (instance settings)."""
        def _set_settings_value(item_dict):
            try:
                self.info['settings'][item_dict['name']] = item_dict['value']
            except KeyError:
                pass

        settings_sql = "SELECT name, value FROM V$PARAMETER"
        rtn, err = self.oracle_client.execute(settings_sql, exception_to_fail=True)

        if isinstance(rtn, dict):
            _set_settings_value(rtn)
        elif isinstance(rtn, list):
            for i in rtn:
                _set_settings_value(i)

    def __get_users(self):
        """Get user info."""
        def _set_users_value(item_dict):
            try:
                tablespace = item_dict.pop('default_tablespace')
                username = item_dict.pop('username')
                partial_users = self.info['users'].get(tablespace, {})
                partial_users[username] = item_dict
                self.info['users'][tablespace] = partial_users
            except KeyError:
                pass

        users_sql = "SELECT * FROM dba_users"
        rtn, err = self.oracle_client.execute(users_sql, exception_to_fail=True)
        if isinstance(rtn, dict):
            _set_users_value(rtn)
        elif isinstance(rtn, list):
            for i in rtn:
                _set_users_value(i)

    def __get_databases(self, exclude_fields):
        """Get info about databases."""
        def _set_databases_value(item_dict):
            try:
                tablespace_name = item_dict.pop('tablespace_name')
                size = item_dict.get('size')
                partial_params = {}
                if size:
                    partial_params['size'] = size
                self.info['databases'][tablespace_name] = partial_params
            except KeyError:
                pass

        database_sql = 'SELECT ' \
                       ' tablespace_name, sum(bytes) as "size" ' \
                       'FROM dba_data_files GROUP BY tablespace_name'
        if exclude_fields and 'db_size' in exclude_fields:
            database_sql = "SELECT " \
                           " tablespace_name " \
                           "FROM dba_data_files GROUP BY tablespace_name"

        rtn, err = self.oracle_client.execute(database_sql, exception_to_fail=True)
        if isinstance(rtn, dict):
            _set_databases_value(rtn)
        elif isinstance(rtn, list):
            for i in rtn:
                _set_databases_value(i)


# ===========================================
# Module execution.
#


def main():
    argument_spec = oracle_common_argument_spec()
    argument_spec.update(
        filter=dict(type='list'),
        exclude_fields=dict(type='list'),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    filter_ = module.params['filter']
    exclude_fields = module.params['exclude_fields']

    if filter_:
        filter_ = [f.strip() for f in filter_]

    if exclude_fields:
        exclude_fields = set([f.strip() for f in exclude_fields])

    oracle_client = OracleClient(module)
    oracle = OracleInfo(module, oracle_client)

    module.exit_json(changed=False, **oracle.get_info(filter_, exclude_fields))


if __name__ == '__main__':
    main()
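As the DOCUMENTATION block notes, include entries in filter take precedence and '!' exclusions are then ignored. A short sketch of the resulting behaviour of get_info; the module and client objects are placeholders:

    info = OracleInfo(module, oracle_client)

    info.get_info(['version'], None)               # -> {'version': {...}} only
    info.get_info(['!settings', '!users'], None)   # -> {'version': ..., 'databases': ...}
    info.get_info(['!settings', 'version'], None)  # include wins -> {'version': {...}}
    info.get_info(None, None)                      # all four subsets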
apps/ops/ansible/modules/oracle_ping.py | 107 (new file)
@@ -0,0 +1,107 @@
#!/usr/bin/python

from __future__ import absolute_import, division, print_function

__metaclass__ = type

DOCUMENTATION = '''
---
module: oracle_ping
short_description: Check remote Oracle server availability
description:
- Simple module to check remote Oracle server availability.

requirements:
  - "oracledb"
'''

EXAMPLES = '''
- name: >
    Ping Oracle server using non-default credentials and SSL,
    registering the return values into the result variable for future use
  oracle_ping:
    login_host: jumpserver
    login_port: 1521
    login_user: jms
    login_password: secret_pass
    login_database: test_db
'''

RETURN = '''
is_available:
  description: Oracle server availability.
  returned: always
  type: bool
  sample: true
server_version:
  description: Oracle server version.
  returned: always
  type: str
  sample: '4.0.0'
conn_err_msg:
  description: Connection error message.
  returned: always
  type: str
  sample: ''
'''

from ansible.module_utils.basic import AnsibleModule
from ops.ansible.modules_utils.oracle_common import (
    OracleClient, oracle_common_argument_spec
)


class OracleDBPing(object):
    def __init__(self, module, oracle_client):
        self.module = module
        self.oracle_client = oracle_client
        self.is_available = False
        self.conn_err_msg = ''
        self.version = ''

    def do(self):
        self.get_oracle_version()
        return self.is_available, self.version

    def get_err(self):
        return self.conn_err_msg

    def get_oracle_version(self):
        version_sql = 'SELECT VERSION FROM PRODUCT_COMPONENT_VERSION where ROWNUM=1'
        rtn, err = self.oracle_client.execute(version_sql)
        if err:
            self.conn_err_msg = err
        else:
            self.version = rtn.get('version')
            self.is_available = True


# =========================================
# Module execution.
#


def main():
    argument_spec = oracle_common_argument_spec()
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    result = {
        'changed': False, 'is_available': False, 'server_version': ''
    }
    oracle_client = OracleClient(module)

    oracle_ping = OracleDBPing(module, oracle_client)
    result["is_available"], result["server_version"] = oracle_ping.do()
    conn_err_msg = oracle_ping.get_err()
    oracle_client.close()
    if conn_err_msg:
        module.fail_json(msg='Unable to connect to database: %s' % conn_err_msg)

    return module.exit_json(**result)


if __name__ == '__main__':
    main()
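# Illustrative usage (assumed, not part of this commit): the module's
# always-returned fields let callers gate later tasks on availability:
#
#   - oracle_ping:
#       login_host: jumpserver
#       login_user: jms
#       login_password: secret_pass
#     register: oracle_status
#   - debug: msg="Oracle {{ oracle_status.server_version }} is reachable"
#     when: oracle_status.is_available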
apps/ops/ansible/modules/oracle_user.py (new file, 215 lines)
@@ -0,0 +1,215 @@
#!/usr/bin/python

from __future__ import absolute_import, division, print_function
__metaclass__ = type


DOCUMENTATION = '''
---
module: oracle_user
short_description: Adds or removes a user from an Oracle database
description:
- Adds or removes a user from an Oracle database.

options:
  authentication_type:
    description:
      - Authentication type of the user (default is password)
    required: false
    type: str
    choices: ['external', 'global', 'no_authentication', 'password']
  default_tablespace:
    description:
      - The default tablespace for the user
      - If not provided, the default is used
    required: false
    type: str
  oracle_home:
    description:
      - Define the directory into which all Oracle software is installed.
      - Define ORACLE_HOME environment variable if set.
    type: str
  state:
    description:
      - The database user state.
    default: present
    choices: [absent, present]
    type: str
  update_password:
    default: always
    choices: [always, on_create]
    description:
      - C(always) will always update passwords and cause the module to return changed.
      - C(on_create) will only set the password for newly created users.
    type: str
  temporary_tablespace:
    description:
      - The default temporary tablespace for the user
      - If not provided, the default is used
    required: false
    type: str
  name:
    description:
      - The name of the user to add or remove.
    required: true
    aliases: [user]
    type: str
  password:
    description:
      - The password to use for the user.
    type: str
    aliases: [pass]

requirements:
  - "oracledb"
'''

EXAMPLES = '''
- name: Create default tablespace user with name 'jms' and password '123456'.
  oracle_user:
    hostname: "remote server"
    login_database: "helowin"
    login_username: "system"
    login_password: "123456"
    name: "jms"
    password: "123456"

- name: Delete user with name 'jms'.
  oracle_user:
    hostname: "remote server"
    login_database: "helowin"
    login_username: "system"
    login_password: "123456"
    name: "jms"
    state: "absent"
'''

RETURN = '''
name:
  description: The name of the user to add or remove.
  returned: success
  type: str
'''

from ansible.module_utils.basic import AnsibleModule

from ops.ansible.modules_utils.oracle_common import (
    OracleClient, oracle_common_argument_spec
)


def user_find(oracle_client, username):
    user = None
    username = username.upper()
    user_find_sql = "select username, " \
                    " authentication_type, " \
                    " default_tablespace, " \
                    " temporary_tablespace " \
                    "from dba_users where username='%s'" % username
    rtn, err = oracle_client.execute(user_find_sql)
    if isinstance(rtn, dict):
        user = rtn
    return user


def user_add(
        module, oracle_client, username, password, auth_type,
        default_tablespace, temporary_tablespace
):
    username = username.upper()
    extend_sql = None
    user = user_find(oracle_client, username)
    auth_type = auth_type.lower()
    identified_suffix_map = {
        'external': 'identified externally ',
        'global': 'identified globally ',
        'password': 'identified by "%s" ',
    }
    # Only the 'password' suffix carries a '%s' placeholder; interpolating
    # the password into the other suffixes would raise TypeError.
    identified_suffix = identified_suffix_map.get(auth_type, 'no authentication ')
    if auth_type == 'password':
        identified_suffix = identified_suffix % password
    if user:
        user_sql = "alter user %s " % username
        user_sql += identified_suffix

        if default_tablespace and default_tablespace.lower() != user['default_tablespace'].lower():
            user_sql += 'default tablespace %s quota unlimited on %s ' % (default_tablespace, default_tablespace)
        if temporary_tablespace and temporary_tablespace.lower() != user['temporary_tablespace'].lower():
            user_sql += 'temporary tablespace %s ' % temporary_tablespace
    else:
        user_sql = "create user %s " % username
        user_sql += identified_suffix
        if default_tablespace:
            user_sql += 'default tablespace %s quota unlimited on %s ' % (default_tablespace, default_tablespace)
        if temporary_tablespace:
            user_sql += 'temporary tablespace %s ' % temporary_tablespace
        extend_sql = 'grant connect to %s' % username

    rtn, err = oracle_client.execute(user_sql)
    if err:
        module.fail_json(msg='Cannot add/edit user %s: %s' % (username, err), changed=False)
    else:
        if extend_sql:
            oracle_client.execute(extend_sql)
        module.exit_json(msg='User %s has been created.' % username, changed=True, name=username)


def user_remove(module, oracle_client, username):
    user = user_find(oracle_client, username)

    if user:
        rtn, err = oracle_client.execute('drop user %s cascade' % username)
        if err:
            module.fail_json(msg='Cannot drop user %s: %s' % (username, err), changed=False)
        else:
            module.exit_json(msg='User %s dropped.' % username, changed=True, name=username)
    else:
        module.exit_json(msg="User %s doesn't exist." % username, changed=False, name=username)


# =========================================
# Module execution.
#

def main():
    argument_spec = oracle_common_argument_spec()
    argument_spec.update(
        authentication_type=dict(
            type='str', required=False,
            choices=['external', 'global', 'no_authentication', 'password']
        ),
        default_tablespace=dict(required=False, aliases=['db']),
        name=dict(required=True, aliases=['user']),
        password=dict(aliases=['pass'], no_log=True),
        state=dict(type='str', default='present', choices=['absent', 'present']),
        update_password=dict(default="always", choices=["always", "on_create"], no_log=False),
        temporary_tablespace=dict(type='str', default=None),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    authentication_type = module.params['authentication_type'] or 'password'
    default_tablespace = module.params['default_tablespace']
    user = module.params['name']
    password = module.params['password']
    state = module.params['state']
    update_password = module.params['update_password']
    temporary_tablespace = module.params['temporary_tablespace']

    oracle_client = OracleClient(module)
    if state == 'present':
        if password is None and update_password == 'always':
            module.fail_json(
                msg='password parameter required when adding a user unless update_password is set to on_create'
            )
        user_add(
            module, oracle_client, username=user, password=password,
            auth_type=authentication_type, default_tablespace=default_tablespace,
            temporary_tablespace=temporary_tablespace
        )
    elif state == 'absent':
        # user_remove() takes (module, oracle_client, username).
        user_remove(module, oracle_client, username=user)
    module.exit_json(changed=True, user=user)


if __name__ == '__main__':
    main()
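# Illustrative trace (derived from user_add() above): for name=jms,
# password=123456, auth_type=password on a user that does not yet exist,
# the module issues roughly:
#   create user JMS identified by "123456"
#   grant connect to JMS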
apps/ops/ansible/modules_utils/__init__.py (new file, 0 lines)
apps/ops/ansible/modules_utils/oracle_common.py (new file, 94 lines)
@@ -0,0 +1,94 @@
import os

import oracledb

from oracledb.exceptions import DatabaseError
from ansible.module_utils._text import to_native


def oracle_common_argument_spec():
    """
    Returns a dict containing common options shared across the Oracle modules.
    """
    options = dict(
        login_user=dict(type='str', required=False),
        login_password=dict(type='str', required=False, no_log=True),
        login_database=dict(type='str', required=False, default='test'),
        login_host=dict(type='str', required=False, default='localhost'),
        login_port=dict(type='int', required=False, default=1521),
        oracle_home=dict(type='str', required=False),
        mode=dict(type='str', required=False),
    )
    return options


class OracleClient(object):
    def __init__(self, module):
        self.module = module
        self._conn = None
        self._cursor = None
        self.connect_params = {}

        self.init_params()

    def init_params(self):
        params = self.module.params
        hostname = params['login_host']
        port = params['login_port']
        service_name = params['login_database']
        username = params['login_user']
        password = params['login_password']
        oracle_home = params['oracle_home']
        mode = params['mode']

        if oracle_home:
            os.environ.setdefault('ORACLE_HOME', oracle_home)
        if mode == 'sysdba':
            self.connect_params['mode'] = oracledb.SYSDBA

        self.connect_params['host'] = hostname
        self.connect_params['port'] = port
        self.connect_params['user'] = username
        self.connect_params['password'] = password
        self.connect_params['service_name'] = service_name

    @property
    def cursor(self):
        if self._cursor is None:
            try:
                # Enable thick mode; without lib_dir, oracledb searches the
                # standard Instant Client locations (configure ORACLE_HOME or
                # LD_LIBRARY_PATH as needed) rather than a hard-coded path.
                oracledb.init_oracle_client()
                self._conn = oracledb.connect(**self.connect_params)
                self._cursor = self._conn.cursor()
            except DatabaseError as err:
                self.module.fail_json(
                    msg="Unable to connect to database: %s, %s" % (to_native(err), self.connect_params)
                )
        return self._cursor

    def execute(self, sql, exception_to_fail=False):
        sql = sql[:-1] if sql.endswith(';') else sql
        result, error = None, None
        try:
            self.cursor.execute(sql)
            sql_header = self.cursor.description or []
            column_names = [description[0].lower() for description in sql_header]
            if column_names:
                result = [dict(zip(column_names, row)) for row in self.cursor]
                result = result[0] if len(result) == 1 else result
            else:
                result = None
        except DatabaseError as err:
            error = err
        if exception_to_fail and error:
            self.module.fail_json(msg='Cannot execute sql: %s' % to_native(error))
        return result, error

    def close(self):
        try:
            if self._cursor:
                self._cursor.close()
            if self._conn:
                self._conn.close()
        except Exception:
            pass
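# Illustrative sketch (assumed, not part of this commit): how a module body
# typically consumes OracleClient. execute() yields a single dict for a
# one-row result set, a list of dicts for several rows, and None otherwise:
#
#   client = OracleClient(module)
#   rtn, err = client.execute("SELECT 1 AS ok FROM dual")
#   if err:
#       module.fail_json(msg=to_native(err))
#   client.close()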
@@ -1,261 +1,87 @@
# ~*~ coding: utf-8 ~*~

import uuid
import os

import shutil
from collections import namedtuple
import ansible_runner
from django.conf import settings

from ansible import context
from ansible.playbook import Playbook
from ansible.module_utils.common.collections import ImmutableDict
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.vars.manager import VariableManager
from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play
import ansible.constants as C

from .callback import (
    AdHocResultCallback, PlaybookResultCallBack, CommandResultCallback
)
from common.utils import get_logger
from .exceptions import AnsibleError
from .display import AdHocDisplay


__all__ = ["AdHocRunner", "PlayBookRunner", "CommandRunner"]
C.HOST_KEY_CHECKING = False
logger = get_logger(__name__)


Options = namedtuple('Options', [
    'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
    'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
    'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
    'scp_extra_args', 'become', 'become_method', 'become_user',
    'verbosity', 'check', 'extra_vars', 'playbook_path', 'passwords',
    'diff', 'gathering', 'remote_tmp',
])


def get_default_options():
    options = dict(
        syntax=False,
        timeout=30,
        connection='ssh',
        forks=10,
        remote_user='root',
        private_key_file=None,
        become=None,
        become_method=None,
        become_user=None,
        verbosity=1,
        check=False,
        diff=False,
        gathering='implicit',
        remote_tmp='/tmp/.ansible'
    )
    return options


# JumpServer not use playbook
class PlayBookRunner:
    """
    Interface for executing an Ansible playbook; simplifies use of the
    Playbook object.
    """

    # Default results callback
    results_callback_class = PlaybookResultCallBack
    loader_class = DataLoader
    variable_manager_class = VariableManager
    options = get_default_options()

    def __init__(self, inventory=None, options=None):
        """
        :param options: Ansible options like ansible.cfg
        :param inventory: Ansible inventory
        """
        if options:
            self.options = options
        C.RETRY_FILES_ENABLED = False
        self.inventory = inventory
        self.loader = self.loader_class()
        self.results_callback = self.results_callback_class()
        self.playbook_path = options.playbook_path
        self.variable_manager = self.variable_manager_class(
            loader=self.loader, inventory=self.inventory
        )
        self.passwords = options.passwords
        self.__check()

    def __check(self):
        if self.options.playbook_path is None or \
                not os.path.exists(self.options.playbook_path):
            raise AnsibleError(
                "Playbook file not found: {}.".format(self.options.playbook_path)
            )
        if not self.inventory.list_hosts('all'):
            raise AnsibleError('Inventory is empty')

    def run(self):
        executor = PlaybookExecutor(
            playbooks=[self.playbook_path],
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            passwords={"conn_pass": self.passwords}
        )
        context.CLIARGS = ImmutableDict(self.options)

        if executor._tqm:
            executor._tqm._stdout_callback = self.results_callback
        executor.run()
        executor._tqm.cleanup()
        return self.results_callback.output
from .callback import DefaultCallback


class AdHocRunner:
    """
    Ad-hoc runner interface
    """
    results_callback_class = AdHocResultCallback
    results_callback = None
    loader_class = DataLoader
    variable_manager_class = VariableManager
    default_options = get_default_options()
    command_modules_choices = ('shell', 'raw', 'command', 'script', 'win_shell')
    cmd_modules_choices = ('shell', 'raw', 'command', 'script', 'win_shell')
    cmd_blacklist = [
        "reboot", 'shutdown', 'poweroff', 'halt', 'dd', 'half', 'top'
    ]

    def __init__(self, inventory, options=None):
        self.options = self.update_options(options)
    def __init__(self, inventory, module, module_args='', pattern='*', project_dir='/tmp/', extra_vars={}):
        self.id = uuid.uuid4()
        self.inventory = inventory
        self.loader = DataLoader()
        self.variable_manager = VariableManager(
            loader=self.loader, inventory=self.inventory
        )
        self.pattern = pattern
        self.module = module
        self.module_args = module_args
        self.project_dir = project_dir
        self.cb = DefaultCallback()
        self.runner = None
        self.extra_vars = extra_vars

    def get_result_callback(self, execution_id=None):
        return self.__class__.results_callback_class(display=AdHocDisplay(execution_id))
    def check_module(self):
        if self.module not in self.cmd_modules_choices:
            return
        if self.module_args and self.module_args.split()[0] in self.cmd_blacklist:
            # Report the offending command word, not its first character.
            raise Exception("command not allowed: {}".format(self.module_args.split()[0]))

    @staticmethod
    def check_module_args(module_name, module_args=''):
        if module_name in C.MODULE_REQUIRE_ARGS and not module_args:
            err = "No argument passed to '%s' module." % module_name
            raise AnsibleError(err)
    def run(self, verbosity=0, **kwargs):
        self.check_module()
        if verbosity is None and settings.DEBUG:
            verbosity = 1

    def check_pattern(self, pattern):
        if not pattern:
            raise AnsibleError("Pattern `{}` is not valid!".format(pattern))
        if not self.inventory.list_hosts("all"):
            raise AnsibleError("Inventory is empty.")
        if not self.inventory.list_hosts(pattern):
            raise AnsibleError(
                "pattern: %s does not match any hosts." % pattern
            )
        if not os.path.exists(self.project_dir):
            os.mkdir(self.project_dir, 0o755)

    def clean_args(self, module, args):
        if not args:
            return ''
        if module not in self.command_modules_choices:
            return args
        if isinstance(args, str):
            if args.startswith('executable='):
                _args = args.split(' ')
                executable, command = _args[0].split('=')[1], ' '.join(_args[1:])
                args = {'executable': executable, '_raw_params': command}
            else:
                args = {'_raw_params': args}
            return args
        else:
            return args

    def clean_tasks(self, tasks):
        cleaned_tasks = []
        for task in tasks:
            module = task['action']['module']
            args = task['action'].get('args')
            cleaned_args = self.clean_args(module, args)
            task['action']['args'] = cleaned_args
            self.check_module_args(module, cleaned_args)
            cleaned_tasks.append(task)
        return cleaned_tasks

    def update_options(self, options):
        _options = {k: v for k, v in self.default_options.items()}
        if options and isinstance(options, dict):
            _options.update(options)
        return _options

    def set_control_master_if_need(self, cleaned_tasks):
        modules = [task.get('action', {}).get('module') for task in cleaned_tasks]
        if {'ping', 'win_ping'} & set(modules):
            self.results_callback.context = {
                'ssh_args': '-C -o ControlMaster=no'
            }

    def run(self, tasks, pattern, play_name='Ansible Ad-hoc', gather_facts='no', execution_id=None):
        """
        :param tasks: [{'action': {'module': 'shell', 'args': 'ls'}, ...}, ]
        :param pattern: all, *, or others
        :param play_name: The play name
        :param gather_facts:
        :return:
        """
        self.check_pattern(pattern)
        self.results_callback = self.get_result_callback(execution_id)
        cleaned_tasks = self.clean_tasks(tasks)
        self.set_control_master_if_need(cleaned_tasks)
        context.CLIARGS = ImmutableDict(self.options)

        play_source = dict(
            name=play_name,
            hosts=pattern,
            gather_facts=gather_facts,
            tasks=cleaned_tasks
        )

        play = Play().load(
            play_source,
            variable_manager=self.variable_manager,
            loader=self.loader,
        )
        loader = DataLoader()
        # used in start callback
        playbook = Playbook(loader)
        playbook._entries.append(play)
        playbook._file_name = '__adhoc_playbook__'

        tqm = TaskQueueManager(
        ansible_runner.run(
            extravars=self.extra_vars,
            host_pattern=self.pattern,
            private_data_dir=self.project_dir,
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            stdout_callback=self.results_callback,
            passwords={"conn_pass": self.options.get("password", "")}
            module=self.module,
            module_args=self.module_args,
            verbosity=verbosity,
            event_handler=self.cb.event_handler,
            status_handler=self.cb.status_handler,
            **kwargs
        )
        try:
            tqm.send_callback('v2_playbook_on_start', playbook)
            tqm.run(play)
            tqm.send_callback('v2_playbook_on_stats', tqm._stats)
            return self.results_callback
        except Exception as e:
            raise AnsibleError(e)
        finally:
            if tqm is not None:
                tqm.cleanup()
                shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
        return self.cb

        self.results_callback.close()


class PlaybookRunner:
    def __init__(self, inventory, playbook, project_dir='/tmp/', callback=None):
        self.id = uuid.uuid4()
        self.inventory = inventory
        self.playbook = playbook
        self.project_dir = project_dir
        if not callback:
            callback = DefaultCallback()
        self.cb = callback

    def run(self, verbosity=0, **kwargs):
        if verbosity is None and settings.DEBUG:
            verbosity = 1

        ansible_runner.run(
            private_data_dir=self.project_dir,
            inventory=self.inventory,
            playbook=self.playbook,
            verbosity=verbosity,
            event_handler=self.cb.event_handler,
            status_handler=self.cb.status_handler,
            **kwargs
        )
        return self.cb


class CommandRunner(AdHocRunner):
    results_callback_class = CommandResultCallback
    modules_choices = ('shell', 'raw', 'command', 'script', 'win_shell')

    def execute(self, cmd, pattern, module='shell'):
        if module and module not in self.modules_choices:
            raise AnsibleError("Module should be in {}".format(self.modules_choices))

        tasks = [
            {"action": {"module": module, "args": cmd}}
        ]
        return self.run(tasks, pattern, play_name=cmd)
    def __init__(self, inventory, command, pattern='*', project_dir='/tmp/'):
        super().__init__(inventory, 'shell', command, pattern, project_dir)

    def run(self, verbosity=0, **kwargs):
        return super().run(verbosity, **kwargs)
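# Illustrative sketch (assumed from the rewritten AdHocRunner above): the
# ansible_runner-based API reduces ad-hoc execution to a couple of calls;
# CommandRunner is simply AdHocRunner pinned to the 'shell' module:
#
#   runner = AdHocRunner(inventory, module='shell', module_args='uptime')
#   cb = runner.run(verbosity=0)   # returns the DefaultCallback instance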
@@ -1,63 +0,0 @@
# -*- coding: utf-8 -*-
#

import sys
import unittest


sys.path.insert(0, '../..')
from ops.ansible.inventory import BaseInventory


class TestJMSInventory(unittest.TestCase):
    def setUp(self):
        host_list = [{
            "hostname": "testserver1",
            "ip": "102.1.1.1",
            "port": 22,
            "username": "root",
            "password": "password",
            "private_key": "/tmp/private_key",
            "become": {
                "method": "sudo",
                "user": "root",
                "pass": None,
            },
            "groups": ["group1", "group2"],
            "vars": {"sexy": "yes"},
        }, {
            "hostname": "testserver2",
            "ip": "8.8.8.8",
            "port": 2222,
            "username": "root",
            "password": "password",
            "private_key": "/tmp/private_key",
            "become": {
                "method": "su",
                "user": "root",
                "pass": "123",
            },
            "groups": ["group3", "group4"],
            "vars": {"love": "yes"},
        }]

        self.inventory = BaseInventory(host_list=host_list)

    def test_hosts(self):
        print("#" * 10 + "Hosts" + "#" * 10)
        for host in self.inventory.hosts:
            print(host)

    def test_groups(self):
        print("#" * 10 + "Groups" + "#" * 10)
        for group in self.inventory.groups:
            print(group)

    def test_group_all(self):
        print("#" * 10 + "all group hosts" + "#" * 10)
        group = self.inventory.get_group('all')
        print(group.hosts)


if __name__ == '__main__':
    unittest.main()
@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
#

import unittest
import sys

sys.path.insert(0, "../..")

from ops.ansible.runner import AdHocRunner, CommandRunner
from ops.ansible.inventory import BaseInventory


class TestAdHocRunner(unittest.TestCase):
    def setUp(self):
        host_data = [
            {
                "hostname": "testserver",
                "ip": "192.168.244.185",
                "port": 22,
                "username": "root",
                "password": "redhat",
            },
        ]
        inventory = BaseInventory(host_data)
        self.runner = AdHocRunner(inventory)

    def test_run(self):
        tasks = [
            {"action": {"module": "shell", "args": "ls"}, "name": "run_cmd"},
            {"action": {"module": "shell", "args": "whoami"}, "name": "run_whoami"},
        ]
        ret = self.runner.run(tasks, "all")
        print(ret.results_summary)
        print(ret.results_raw)


class TestCommandRunner(unittest.TestCase):
    def setUp(self):
        host_data = [
            {
                "hostname": "testserver",
                "ip": "192.168.244.168",
                "port": 22,
                "username": "root",
                "password": "redhat",
            },
        ]
        inventory = BaseInventory(host_data)
        self.runner = CommandRunner(inventory)

    def test_execute(self):
        res = self.runner.execute('ls', 'all')
        print(res.results_command)
        print(res.results_raw)


if __name__ == "__main__":
    unittest.main()
@@ -3,4 +3,4 @@ from django.conf import settings

def get_ansible_task_log_path(task_id):
    from ops.utils import get_task_log_path
    return get_task_log_path(settings.ANSIBLE_LOG_DIR, task_id, level=3)
    return get_task_log_path(settings.CELERY_LOG_DIR, task_id, level=2)
@@ -2,4 +2,5 @@
#
from .adhoc import *
from .celery import *
from .command import *
from .job import *
from .playbook import *
|
# -*- coding: utf-8 -*-
#

from django.shortcuts import get_object_or_404
from rest_framework import viewsets, generics
from rest_framework.views import Response
from rest_framework import viewsets

from common.drf.serializers import CeleryTaskSerializer
from ..models import Task, AdHoc, AdHocExecution
from ..serializers import (
    TaskSerializer,
    AdHocSerializer,
    AdHocExecutionSerializer,
    TaskDetailSerializer,
    AdHocDetailSerializer,
)
from ..tasks import run_ansible_task
from orgs.mixins.api import OrgBulkModelViewSet
from ..models import AdHoc
from ..serializers import (
    AdHocSerializer
)

__all__ = [
    'TaskViewSet', 'TaskRun', 'AdHocViewSet', 'AdHocRunHistoryViewSet'
    'AdHocViewSet'
]


class TaskViewSet(OrgBulkModelViewSet):
    model = Task
    filterset_fields = ("name",)
    search_fields = filterset_fields
    serializer_class = TaskSerializer

    def get_serializer_class(self):
        if self.action == 'retrieve':
            return TaskDetailSerializer
        return super().get_serializer_class()

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.select_related('latest_execution')
        return queryset


class TaskRun(generics.RetrieveAPIView):
    queryset = Task.objects.all()
    serializer_class = CeleryTaskSerializer
    rbac_perms = {
        'retrieve': 'ops.add_adhoc'
    }

    def retrieve(self, request, *args, **kwargs):
        task = self.get_object()
        t = run_ansible_task.delay(str(task.id))
        return Response({"task": t.id})


class AdHocViewSet(viewsets.ModelViewSet):
    queryset = AdHoc.objects.all()
class AdHocViewSet(OrgBulkModelViewSet):
    serializer_class = AdHocSerializer

    def get_serializer_class(self):
        if self.action == 'retrieve':
            return AdHocDetailSerializer
        return super().get_serializer_class()

    def get_queryset(self):
        task_id = self.request.query_params.get('task')
        if task_id:
            task = get_object_or_404(Task, id=task_id)
            self.queryset = self.queryset.filter(task=task)
        return self.queryset


class AdHocRunHistoryViewSet(viewsets.ModelViewSet):
    queryset = AdHocExecution.objects.all()
    serializer_class = AdHocExecutionSerializer

    def get_queryset(self):
        task_id = self.request.query_params.get('task')
        adhoc_id = self.request.query_params.get('adhoc')
        if task_id:
            task = get_object_or_404(Task, id=task_id)
            adhocs = task.adhoc.all()
            self.queryset = self.queryset.filter(adhoc__in=adhocs)

        if adhoc_id:
            adhoc = get_object_or_404(AdHoc, id=adhoc_id)
            self.queryset = self.queryset.filter(adhoc=adhoc)
        return self.queryset


    permission_classes = ()
    model = AdHoc
@@ -4,6 +4,7 @@
import os
import re

from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from rest_framework import viewsets
from celery.result import AsyncResult
@@ -12,20 +13,21 @@ from django_celery_beat.models import PeriodicTask

from common.permissions import IsValidUser
from common.api import LogTailApi
from ..models import CeleryTask
from ..models import CeleryTaskExecution, CeleryTask
from ..serializers import CeleryResultSerializer, CeleryPeriodTaskSerializer
from ..celery.utils import get_celery_task_log_path
from ..ansible.utils import get_ansible_task_log_path
from common.mixins.api import CommonApiMixin


__all__ = [
    'CeleryTaskLogApi', 'CeleryResultApi', 'CeleryPeriodTaskViewSet',
    'AnsibleTaskLogApi',
    'CeleryTaskExecutionLogApi', 'CeleryResultApi', 'CeleryPeriodTaskViewSet',
    'AnsibleTaskLogApi', 'CeleryTaskViewSet', 'CeleryTaskExecutionViewSet'
]

from ..serializers.celery import CeleryTaskSerializer, CeleryTaskExecutionSerializer

class CeleryTaskLogApi(LogTailApi):

class CeleryTaskExecutionLogApi(LogTailApi):
    permission_classes = (IsValidUser,)
    task = None
    task_id = ''
@@ -46,8 +48,8 @@ class CeleryTaskLogApi(LogTailApi):
        if new_path and os.path.isfile(new_path):
            return new_path
        try:
            task = CeleryTask.objects.get(id=self.task_id)
        except CeleryTask.DoesNotExist:
            task = CeleryTaskExecution.objects.get(id=self.task_id)
        except CeleryTaskExecution.DoesNotExist:
            return None
        return task.full_log_path

@@ -94,3 +96,29 @@ class CeleryPeriodTaskViewSet(CommonApiMixin, viewsets.ModelViewSet):
        queryset = super().get_queryset()
        queryset = queryset.exclude(description='')
        return queryset


class CelerySummaryAPIView(generics.RetrieveAPIView):
    def get(self, request, *args, **kwargs):
        pass


class CeleryTaskViewSet(CommonApiMixin, viewsets.ReadOnlyModelViewSet):
    serializer_class = CeleryTaskSerializer
    http_method_names = ('get', 'head', 'options',)

    def get_queryset(self):
        return CeleryTask.objects.exclude(name__startswith='celery')


class CeleryTaskExecutionViewSet(CommonApiMixin, viewsets.ReadOnlyModelViewSet):
    serializer_class = CeleryTaskExecutionSerializer
    http_method_names = ('get', 'head', 'options',)
    queryset = CeleryTaskExecution.objects.all()

    def get_queryset(self):
        task_id = self.request.query_params.get('task_id')
        if task_id:
            task = get_object_or_404(CeleryTask, id=task_id)
            self.queryset = self.queryset.filter(name=task.name)
        return self.queryset
@@ -1,76 +0,0 @@
# -*- coding: utf-8 -*-
#
from rest_framework import viewsets
from rest_framework.exceptions import ValidationError
from django.db import transaction
from django.db.models import Q
from django.utils.translation import ugettext as _
from django.conf import settings

from assets.models import Asset, Node
from orgs.mixins.api import RootOrgViewMixin
from common.permissions import IsValidUser
from rbac.permissions import RBACPermission
from ..models import CommandExecution
from ..serializers import CommandExecutionSerializer
from ..tasks import run_command_execution


class CommandExecutionViewSet(RootOrgViewMixin, viewsets.ModelViewSet):
    serializer_class = CommandExecutionSerializer
    permission_classes = (RBACPermission,)

    def get_queryset(self):
        return CommandExecution.objects.filter(user_id=str(self.request.user.id))

    def check_hosts(self, serializer):
        data = serializer.validated_data
        assets = data["hosts"]
        system_user = data["run_as"]
        user = self.request.user

        q = Q(granted_by_permissions__system_users__id=system_user.id) & (
            Q(granted_by_permissions__users=user) |
            Q(granted_by_permissions__user_groups__users=user)
        )

        permed_assets = set()
        permed_assets.update(Asset.objects.filter(id__in=[a.id for a in assets]).filter(q).distinct())
        node_keys = Node.objects.filter(q).distinct().values_list('key', flat=True)

        nodes_assets_q = Q()
        for _key in node_keys:
            nodes_assets_q |= Q(nodes__key__startswith=f'{_key}:')
            nodes_assets_q |= Q(nodes__key=_key)

        permed_assets.update(
            Asset.objects.filter(
                id__in=[a.id for a in assets]
            ).filter(
                nodes_assets_q
            ).distinct()
        )

        invalid_assets = set(assets) - set(permed_assets)
        if invalid_assets:
            msg = _("Not has host {} permission").format(
                [str(a.id) for a in invalid_assets]
            )
            raise ValidationError({"hosts": msg})

    def check_permissions(self, request):
        if not settings.SECURITY_COMMAND_EXECUTION:
            return self.permission_denied(request, "Command execution disabled")
        return super().check_permissions(request)

    def perform_create(self, serializer):
        self.check_hosts(serializer)
        instance = serializer.save()
        instance.user = self.request.user
        instance.save()
        cols = self.request.query_params.get("cols", '80')
        rows = self.request.query_params.get("rows", '24')
        transaction.on_commit(lambda: run_command_execution.apply_async(
            args=(instance.id,), kwargs={"cols": cols, "rows": rows},
            task_id=str(instance.id)
        ))
apps/ops/api/job.py (new file, 64 lines)
@@ -0,0 +1,64 @@
from rest_framework import viewsets

from ops.models import Job, JobExecution
from ops.serializers.job import JobSerializer, JobExecutionSerializer

__all__ = ['JobViewSet', 'JobExecutionViewSet']

from ops.tasks import run_ops_job_execution
from orgs.mixins.api import OrgBulkModelViewSet


def set_task_to_serializer_data(serializer, task):
    data = getattr(serializer, "_data", {})
    data["task_id"] = task.id
    setattr(serializer, "_data", data)


class JobViewSet(OrgBulkModelViewSet):
    serializer_class = JobSerializer
    model = Job
    permission_classes = ()

    def get_queryset(self):
        query_set = super().get_queryset()
        if self.action != 'retrieve':
            return query_set.filter(instant=False)
        return query_set

    def perform_create(self, serializer):
        instance = serializer.save()
        run_after_save = serializer.validated_data.get('run_after_save', False)
        if instance.instant or run_after_save:
            self.run_job(instance, serializer)

    def perform_update(self, serializer):
        instance = serializer.save()
        run_after_save = serializer.validated_data.get('run_after_save', False)
        if run_after_save:
            self.run_job(instance, serializer)

    @staticmethod
    def run_job(job, serializer):
        execution = job.create_execution()
        task = run_ops_job_execution.delay(execution.id)
        set_task_to_serializer_data(serializer, task)


class JobExecutionViewSet(OrgBulkModelViewSet):
    serializer_class = JobExecutionSerializer
    http_method_names = ('get', 'post', 'head', 'options',)
    permission_classes = ()
    model = JobExecution

    def perform_create(self, serializer):
        instance = serializer.save()
        task = run_ops_job_execution.delay(instance.id)
        set_task_to_serializer_data(serializer, task)

    def get_queryset(self):
        query_set = super().get_queryset()
        job_id = self.request.query_params.get('job_id')
        if job_id:
            query_set = query_set.filter(job_id=job_id)
        return query_set
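# Illustrative note (inferred from JobViewSet above): POSTing a Job with
# instant=true, or saving with run_after_save=true, both persists the job
# and dispatches run_ops_job_execution via Celery; the Celery task id is
# surfaced to the API client as "task_id" in the response payload.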
apps/ops/api/playbook.py (new file, 35 lines)
@@ -0,0 +1,35 @@
import os
import shutil
import zipfile

from django.conf import settings

from orgs.mixins.api import OrgBulkModelViewSet
from ..exception import PlaybookNoValidEntry
from ..models import Playbook
from ..serializers.playbook import PlaybookSerializer

__all__ = ["PlaybookViewSet"]


def unzip_playbook(src, dist):
    fz = zipfile.ZipFile(src, 'r')
    for file in fz.namelist():
        fz.extract(file, dist)


class PlaybookViewSet(OrgBulkModelViewSet):
    serializer_class = PlaybookSerializer
    permission_classes = ()
    model = Playbook

    def perform_create(self, serializer):
        instance = serializer.save()
        src_path = os.path.join(settings.MEDIA_ROOT, instance.path.name)
        dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
        unzip_playbook(src_path, dest_path)
        valid_entry = ('main.yml', 'main.yaml', 'main')
        for f in os.listdir(dest_path):
            if f in valid_entry:
                return
        # dest_path is a directory, so remove the extracted tree before
        # failing (os.remove cannot delete a directory).
        shutil.rmtree(dest_path)
        raise PlaybookNoValidEntry
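# Illustrative note (assumption based on valid_entry above): an uploaded
# playbook archive is expected to carry its entry file at the top level,
# e.g. myplaybook.zip containing main.yml (or main.yaml / main) alongside
# any roles/ and files/ it references.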
@@ -15,4 +15,5 @@ class OpsConfig(AppConfig):
        from .celery import signal_handler
        from . import signal_handlers
        from . import notifications
        from . import tasks
        super().ready()
apps/ops/const.py (new file, 29 lines)
@@ -0,0 +1,29 @@
from django.db import models
from django.utils.translation import ugettext_lazy as _


class StrategyChoice(models.TextChoices):
    push = 'push', _('Push')
    verify = 'verify', _('Verify')
    collect = 'collect', _('Collect')
    change_secret = 'change_secret', _('Change password')


class SSHKeyStrategy(models.TextChoices):
    add = 'add', _('Append SSH KEY')
    set = 'set', _('Empty and append SSH KEY')
    set_jms = 'set_jms', _('Replace (The key generated by JumpServer) ')


class PasswordStrategy(models.TextChoices):
    custom = 'custom', _('Custom password')
    random_one = 'random_one', _('All assets use the same random password')
    random_all = 'random_all', _('All assets use different random password')


string_punctuation = '!#$%&()*+,-.:;<=>?@[]^_~'
DEFAULT_PASSWORD_LENGTH = 30
DEFAULT_PASSWORD_RULES = {
    'length': DEFAULT_PASSWORD_LENGTH,
    'symbol_set': string_punctuation
}
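# Illustrative sketch (hypothetical helper, not in this commit): drawing a
# password that satisfies DEFAULT_PASSWORD_RULES defined above:
#
#   import secrets
#   import string
#   chars = string.ascii_letters + string.digits + string_punctuation
#   password = ''.join(secrets.choice(chars) for _ in range(DEFAULT_PASSWORD_LENGTH))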
apps/ops/exception.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from common.exceptions import JMSException
from django.utils.translation import gettext_lazy as _


class PlaybookNoValidEntry(JMSException):
    default_detail = _('no valid program entry found.')
@@ -1,149 +0,0 @@
# -*- coding: utf-8 -*-
#

from django.conf import settings
from .ansible.inventory import BaseInventory

from common.utils import get_logger

__all__ = [
    'JMSInventory', 'JMSCustomInventory',
]


logger = get_logger(__file__)


class JMSBaseInventory(BaseInventory):
    def convert_to_ansible(self, asset, run_as_admin=False):
        info = {
            'id': asset.id,
            'hostname': asset.hostname,
            'ip': asset.ip,
            'port': asset.ssh_port,
            'vars': dict(),
            'groups': [],
        }
        if asset.domain and asset.domain.has_gateway():
            info["vars"].update(self.make_proxy_command(asset))
        if run_as_admin:
            info.update(asset.get_auth_info(with_become=True))
        if asset.is_windows():
            info["vars"].update({
                "ansible_connection": "ssh",
                "ansible_shell_type": settings.WINDOWS_SSH_DEFAULT_SHELL,
            })
        for label in asset.labels.all():
            info["vars"].update({
                label.name: label.value
            })
        if asset.domain:
            info["vars"].update({
                "domain": asset.domain.name,
            })
        return info

    @staticmethod
    def make_proxy_command(asset):
        gateway = asset.domain.random_gateway()
        proxy_command_list = [
            "ssh", "-o", "Port={}".format(gateway.port),
            "-o", "StrictHostKeyChecking=no",
            "{}@{}".format(gateway.username, gateway.ip),
            "-W", "%h:%p", "-q",
        ]

        if gateway.password:
            proxy_command_list.insert(
                0, "sshpass -p '{}'".format(gateway.password)
            )
        if gateway.private_key:
            proxy_command_list.append("-i {}".format(gateway.private_key_file))

        proxy_command = "'-o ProxyCommand={}'".format(
            " ".join(proxy_command_list)
        )
        return {"ansible_ssh_common_args": proxy_command}
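# Illustrative output (derived from make_proxy_command above): for a gateway
# proxy@10.0.0.1 listening on port 2222, the generated host var is roughly:
#   ansible_ssh_common_args: '-o ProxyCommand=ssh -o Port=2222
#     -o StrictHostKeyChecking=no proxy@10.0.0.1 -W %h:%p -q'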


class JMSInventory(JMSBaseInventory):
    """
    JMS Inventory is the inventory with jumpserver assets, so you can
    write your own inventory and construct it as you need;
    user_info is obtained from admin_user or asset_user
    """
    def __init__(self, assets, run_as_admin=False, run_as=None, become_info=None, system_user=None):
        """
        :param assets: assets
        :param run_as_admin: whether to execute as the admin user; the admin user may differ per server
        :param run_as: username (to be switched to username once the unified AssetUserManager is in place)
        :param become_info: whether to become a given user for execution
        """
        self.assets = assets
        self.using_admin = run_as_admin
        self.run_as = run_as
        self.system_user = system_user
        self.become_info = become_info

        host_list = []

        for asset in assets:
            host = self.convert_to_ansible(asset, run_as_admin=run_as_admin)
            if run_as is not None:
                run_user_info = self.get_run_user_info(host)
                host.update(run_user_info)
            if become_info and asset.is_unixlike():
                host.update(become_info)
            host_list.append(host)

        super().__init__(host_list=host_list)

    def get_run_user_info(self, host):
        if not self.run_as and not self.system_user:
            return {}

        asset_id = host.get('id', '')
        asset = self.assets.filter(id=asset_id).first()
        if not asset:
            logger.error('Host not found: %s', asset_id)
            return {}

        if self.system_user:
            self.system_user.load_asset_special_auth(asset=asset, username=self.run_as)
            return self.system_user._to_secret_json()
        else:
            return {}


class JMSCustomInventory(JMSBaseInventory):
    """
    JMS Custom Inventory is the inventory with jumpserver assets;
    user_info is obtained from custom parameters
    """

    def __init__(self, assets, username, password=None, public_key=None, private_key=None):
        """
        """
        self.assets = assets
        self.username = username
        self.password = password
        self.public_key = public_key
        self.private_key = private_key

        host_list = []

        for asset in assets:
            host = self.convert_to_ansible(asset)
            run_user_info = self.get_run_user_info()
            host.update(run_user_info)
            host_list.append(host)

        super().__init__(host_list=host_list)

    def get_run_user_info(self):
        return {
            'username': self.username,
            'password': self.password,
            'public_key': self.public_key,
            'private_key': self.private_key
        }
apps/ops/migrations/0022_auto_20220817_1346.py (new file, 47 lines)
@@ -0,0 +1,47 @@
# Generated by Django 3.2.14 on 2022-08-17 05:46

from django.db import migrations, models


def migrate_run_system_user_to_account(apps, schema_editor):
    execution_model = apps.get_model('ops', 'CommandExecution')
    count = 0
    bulk_size = 1000

    while True:
        # Page through executions in bulk_size chunks; a fixed upper bound
        # of bulk_size would stop after the first window.
        executions = execution_model.objects.all() \
            .prefetch_related('run_as')[count:count + bulk_size]
        if not executions:
            break
        count += len(executions)
        updated = []
        for obj in executions:
            run_as = obj.run_as
            if not run_as:
                continue
            obj.account = run_as.username
            updated.append(obj)
        execution_model.objects.bulk_update(updated, ['account'])


class Migration(migrations.Migration):

    dependencies = [
        ('ops', '0021_auto_20211130_1037'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='adhoc',
            name='run_system_user',
        ),
        migrations.AddField(
            model_name='commandexecution',
            name='account',
            field=models.CharField(default='', max_length=128, verbose_name='account'),
        ),
        migrations.RunPython(migrate_run_system_user_to_account),
        migrations.RemoveField(
            model_name='commandexecution',
            name='run_as',
        ),
    ]
apps/ops/migrations/0023_auto_20220929_2025.py (new file, 44 lines)
@@ -0,0 +1,44 @@
# Generated by Django 3.2.14 on 2022-09-29 12:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ops', '0022_auto_20220817_1346'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='celerytask',
            name='log_path',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='status',
        ),
        migrations.AddField(
            model_name='celerytask',
            name='args',
            field=models.JSONField(default=[], verbose_name='Args'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='celerytask',
            name='is_finished',
            field=models.BooleanField(default=False, verbose_name='Finished'),
        ),
        migrations.AddField(
            model_name='celerytask',
            name='kwargs',
            field=models.JSONField(default={}, verbose_name='Kwargs'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='celerytask',
            name='state',
            field=models.CharField(default='SUCCESS', max_length=16, verbose_name='State'),
            preserve_default=False,
        ),
    ]
apps/ops/migrations/0024_auto_20221008_1514.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# Generated by Django 3.2.14 on 2022-10-08 07:19

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0106_auto_20220916_1556'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('ops', '0023_auto_20220929_2025'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='adhocexecution',
            name='adhoc',
        ),
        migrations.RemoveField(
            model_name='adhocexecution',
            name='task',
        ),
        migrations.RemoveField(
            model_name='commandexecution',
            name='hosts',
        ),
        migrations.RemoveField(
            model_name='commandexecution',
            name='user',
        ),
        migrations.AlterUniqueTogether(
            name='task',
            unique_together=None,
        ),
        migrations.RemoveField(
            model_name='task',
            name='latest_adhoc',
        ),
        migrations.RemoveField(
            model_name='task',
            name='latest_execution',
        ),
        migrations.DeleteModel(
            name='AdHoc',
        ),
        migrations.DeleteModel(
            name='AdHocExecution',
        ),
        migrations.DeleteModel(
            name='CommandExecution',
        ),
        migrations.DeleteModel(
            name='Task',
        ),
    ]
apps/ops/migrations/0025_auto_20221008_1631.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# Generated by Django 3.2.14 on 2022-10-08 08:31

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0106_auto_20220916_1556'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('ops', '0024_auto_20221008_1514'),
    ]

    operations = [
        migrations.CreateModel(
            name='AdHoc',
            fields=[
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('is_periodic', models.BooleanField(default=False)),
                ('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
                ('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
                ('account', models.CharField(default='root', max_length=128, verbose_name='Account')),
                ('account_policy', models.CharField(default='root', max_length=128, verbose_name='Account policy')),
                ('date_last_run', models.DateTimeField(null=True, verbose_name='Date last run')),
                ('pattern', models.CharField(default='all', max_length=1024, verbose_name='Pattern')),
                ('module', models.CharField(default='shell', max_length=128, verbose_name='Module')),
                ('args', models.CharField(default='', max_length=1024, verbose_name='Args')),
                ('assets', models.ManyToManyField(to='assets.Asset', verbose_name='Assets')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AdHocExecution',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('status', models.CharField(default='running', max_length=16, verbose_name='Status')),
                ('result', models.JSONField(blank=True, null=True, verbose_name='Result')),
                ('summary', models.JSONField(default=dict, verbose_name='Summary')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('date_start', models.DateTimeField(db_index=True, null=True, verbose_name='Date start')),
                ('date_finished', models.DateTimeField(null=True)),
                ('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='ops.adhoc', verbose_name='Adhoc')),
            ],
            options={
                'verbose_name': 'AdHoc execution',
                'db_table': 'ops_adhoc_execution',
                'get_latest_by': 'date_start',
            },
        ),
        migrations.AddField(
            model_name='adhoc',
            name='last_execution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='ops.adhocexecution', verbose_name='Last execution'),
        ),
        migrations.AddField(
            model_name='adhoc',
            name='owner',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Creator'),
        ),
    ]
apps/ops/migrations/0026_auto_20221009_2050.py (new file, 100 lines)
@@ -0,0 +1,100 @@
# Generated by Django 3.2.14 on 2022-10-09 12:50

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('assets', '0106_auto_20220916_1556'),
        ('ops', '0025_auto_20221008_1631'),
    ]

    operations = [
        migrations.CreateModel(
            name='Playbook',
            fields=[
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('is_periodic', models.BooleanField(default=False)),
                ('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
                ('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
                ('account', models.CharField(default='root', max_length=128, verbose_name='Account')),
                ('account_policy', models.CharField(default='root', max_length=128, verbose_name='Account policy')),
                ('date_last_run', models.DateTimeField(null=True, verbose_name='Date last run')),
                ('path', models.FilePathField(max_length=1024, verbose_name='Playbook')),
                ('comment', models.TextField(blank=True, verbose_name='Comment')),
                ('assets', models.ManyToManyField(to='assets.Asset', verbose_name='Assets')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AlterField(
            model_name='adhocexecution',
            name='date_finished',
            field=models.DateTimeField(null=True, verbose_name='Date finished'),
        ),
        migrations.CreateModel(
            name='PlaybookTemplate',
            fields=[
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('path', models.FilePathField(verbose_name='Path')),
                ('comment', models.TextField(blank=True, verbose_name='Comment')),
            ],
            options={
                'verbose_name': 'Playbook template',
                'ordering': ['name'],
                'unique_together': {('org_id', 'name')},
            },
        ),
        migrations.CreateModel(
            name='PlaybookExecution',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('status', models.CharField(default='running', max_length=16, verbose_name='Status')),
                ('result', models.JSONField(blank=True, null=True, verbose_name='Result')),
                ('summary', models.JSONField(default=dict, verbose_name='Summary')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('date_start', models.DateTimeField(db_index=True, null=True, verbose_name='Date start')),
                ('date_finished', models.DateTimeField(null=True, verbose_name='Date finished')),
                ('path', models.FilePathField(max_length=1024, verbose_name='Run dir')),
                ('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ops.playbook', verbose_name='Task')),
            ],
            options={
                'ordering': ['-date_start'],
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='playbook',
            name='last_execution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='ops.playbookexecution', verbose_name='Last execution'),
        ),
        migrations.AddField(
            model_name='playbook',
            name='owner',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Owner'),
        ),
        migrations.AddField(
            model_name='playbook',
            name='template',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='ops.playbooktemplate', verbose_name='Template'),
        ),
    ]
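
As a side note, these migration modules are applied in `dependencies` order; a minimal sketch of exercising them programmatically with the standard Django management API (nothing project-specific assumed):

    # Apply all pending ops migrations programmatically (standard Django API).
    from django.core.management import call_command

    call_command('migrate', 'ops', verbosity=1)
    # Roll back to just before this migration if needed:
    call_command('migrate', 'ops', '0025_auto_20221008_1631')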
apps/ops/migrations/0027_auto_20221024_1709.py (new file, +273)
@@ -0,0 +1,273 @@
# Generated by Django 3.2.14 on 2022-12-05 03:23

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('assets', '0112_gateway_to_asset'),
        ('ops', '0026_auto_20221009_2050'),
    ]

    operations = [
        migrations.CreateModel(
            name='CeleryTaskExecution',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=1024)),
                ('args', models.JSONField(verbose_name='Args')),
                ('kwargs', models.JSONField(verbose_name='Kwargs')),
                ('state', models.CharField(max_length=16, verbose_name='State')),
                ('is_finished', models.BooleanField(default=False, verbose_name='Finished')),
                ('date_published', models.DateTimeField(auto_now_add=True, verbose_name='Date published')),
                ('date_start', models.DateTimeField(null=True, verbose_name='Date start')),
                ('date_finished', models.DateTimeField(null=True, verbose_name='Date finished')),
            ],
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('org_id',
                 models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('is_periodic', models.BooleanField(default=False)),
                ('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
                ('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=128, null=True, verbose_name='Name')),
                ('instant', models.BooleanField(default=False)),
                ('args', models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Args')),
                ('module', models.CharField(choices=[('shell', 'Shell'), ('win_shell', 'Powershell')], default='shell',
                                            max_length=128, null=True, verbose_name='Module')),
                ('chdir', models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Chdir')),
                ('timeout', models.IntegerField(default=60, verbose_name='Timeout (Seconds)')),
                ('type', models.CharField(choices=[('adhoc', 'Adhoc'), ('playbook', 'Playbook')], default='adhoc',
                                          max_length=128, verbose_name='Type')),
                ('runas', models.CharField(default='root', max_length=128, verbose_name='Runas')),
                ('runas_policy', models.CharField(
                    choices=[('privileged_only', 'Privileged Only'), ('privileged_first', 'Privileged First'),
                             ('skip', 'Skip')], default='skip', max_length=128, verbose_name='Runas policy')),
                ('use_parameter_define', models.BooleanField(default=False, verbose_name='Use Parameter Define')),
                ('parameters_define', models.JSONField(default=dict, verbose_name='Parameters define')),
                ('comment',
                 models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Comment')),
                ('assets', models.ManyToManyField(to='assets.Asset', verbose_name='Assets')),
                ('owner',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL,
                                   verbose_name='Creator')),
            ],
            options={
                'ordering': ['date_created'],
            },
        ),
        migrations.CreateModel(
            name='JobExecution',
            fields=[
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('org_id',
                 models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('task_id', models.UUIDField(null=True)),
                ('status', models.CharField(default='running', max_length=16, verbose_name='Status')),
                ('parameters', models.JSONField(default=dict, verbose_name='Parameters')),
                ('result', models.JSONField(blank=True, null=True, verbose_name='Result')),
                ('summary', models.JSONField(default=dict, verbose_name='Summary')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('date_start', models.DateTimeField(db_index=True, null=True, verbose_name='Date start')),
                ('date_finished', models.DateTimeField(null=True, verbose_name='Date finished')),
                ('creator',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL,
                                   verbose_name='Creator')),
                ('job',
                 models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions',
                                   to='ops.job')),
            ],
            options={
                'ordering': ['-date_created'],
            },
        ),
        migrations.RemoveField(
            model_name='playbookexecution',
            name='creator',
        ),
        migrations.RemoveField(
            model_name='playbookexecution',
            name='task',
        ),
        migrations.AlterUniqueTogether(
            name='playbooktemplate',
            unique_together=None,
        ),
        migrations.AlterModelOptions(
            name='celerytask',
            options={'ordering': ('name',)},
        ),
        migrations.RenameField(
            model_name='adhoc',
            old_name='owner',
            new_name='creator',
        ),
        migrations.RenameField(
            model_name='celerytask',
            old_name='date_finished',
            new_name='last_published_time',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='account',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='account_policy',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='assets',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='crontab',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='date_last_run',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='interval',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='is_periodic',
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='last_execution',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='args',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='date_published',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='date_start',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='is_finished',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='kwargs',
        ),
        migrations.RemoveField(
            model_name='celerytask',
            name='state',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='account',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='account_policy',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='assets',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='crontab',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='date_last_run',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='interval',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='is_periodic',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='last_execution',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='owner',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='template',
        ),
        migrations.AddField(
            model_name='adhoc',
            name='comment',
            field=models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Comment'),
        ),
        migrations.AddField(
            model_name='playbook',
            name='creator',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL,
                                    to=settings.AUTH_USER_MODEL, verbose_name='Creator'),
        ),
        migrations.AlterField(
            model_name='adhoc',
            name='module',
            field=models.CharField(choices=[('shell', 'Shell'), ('win_shell', 'Powershell')], default='shell',
                                   max_length=128, verbose_name='Module'),
        ),
        migrations.AlterField(
            model_name='celerytask',
            name='name',
            field=models.CharField(max_length=1024, verbose_name='Name'),
        ),
        migrations.AlterField(
            model_name='playbook',
            name='comment',
            field=models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Comment'),
        ),
        migrations.AlterField(
            model_name='playbook',
            name='name',
            field=models.CharField(max_length=128, null=True, verbose_name='Name'),
        ),
        migrations.AlterField(
            model_name='playbook',
            name='path',
            field=models.FileField(upload_to='playbooks/'),
        ),
        migrations.DeleteModel(
            name='AdHocExecution',
        ),
        migrations.DeleteModel(
            name='PlaybookExecution',
        ),
        migrations.DeleteModel(
            name='PlaybookTemplate',
        ),
        migrations.AddField(
            model_name='job',
            name='playbook',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='ops.playbook',
                                    verbose_name='Playbook'),
        ),
    ]
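
To make the reshaped schema concrete, a hypothetical sketch of creating a periodic adhoc Job under the new model; the import path, field values, and the `asset` object are illustrative assumptions, not part of this commit:

    # Hypothetical: a periodic shell job under the schema introduced above.
    from ops.models import Job

    job = Job.objects.create(
        name='disk-usage',
        type='adhoc', module='shell', args='df -h',
        runas='root', runas_policy='skip',
        is_periodic=True, interval=24,  # hours, per the 'Cycle perform' field
    )
    job.assets.add(asset)  # 'asset' assumed to be an existing assets.Asset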
apps/ops/mixin.py
@@ -1,10 +1,8 @@
# -*- coding: utf-8 -*-
#
import abc
import uuid
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django import forms
from rest_framework import serializers

from .celery.utils import (
@@ -14,12 +12,10 @@ from .celery.utils import (

__all__ = [
    'PeriodTaskModelMixin', 'PeriodTaskSerializerMixin',
    'PeriodTaskFormMixin',
]


class PeriodTaskModelMixin(models.Model):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(
        max_length=128, unique=False, verbose_name=_("Name")
    )
@@ -73,7 +69,7 @@ class PeriodTaskModelMixin(models.Model):
        }
        create_or_update_celery_periodic_tasks(tasks)

    def save(self, **kwargs):
    def save(self, *args, **kwargs):
        instance = super().save(**kwargs)
        self.set_period_schedule()
        return instance
@@ -140,42 +136,3 @@ class PeriodTaskSerializerMixin(serializers.Serializer):
            msg = _("Require periodic or regularly perform setting")
            raise serializers.ValidationError(msg)
        return ok


class PeriodTaskFormMixin(forms.Form):
    is_periodic = forms.BooleanField(
        initial=True, required=False, label=_('Periodic perform')
    )
    crontab = forms.CharField(
        max_length=128, required=False, label=_('Regularly perform'),
        help_text=_("eg: Every Sunday 03:05 run <5 3 * * 0> <br> "
                    "Tips: "
                    "Using 5 digits linux crontab expressions "
                    "<min hour day month week> "
                    "(<a href='https://tool.lu/crontab/' target='_blank'>Online tools</a>) <br>"
                    "Note: "
                    "If both Regularly perform and Cycle perform are set, "
                    "give priority to Regularly perform"),
    )
    interval = forms.IntegerField(
        required=False, initial=24,
        help_text=_('Unit: hour'), label=_("Cycle perform"),
    )

    def get_initial_for_field(self, field, field_name):
        """
        Return initial data for field on form. Use initial data from the form
        or the field, in that order. Evaluate callable values.
        """
        if field_name not in ['is_periodic', 'crontab', 'interval']:
            return super().get_initial_for_field(field, field_name)
        instance = getattr(self, 'instance', None)
        if instance is None:
            return super().get_initial_for_field(field, field_name)
        init_attr_name = field_name + '_initial'
        value = getattr(self, init_attr_name, None)
        if value is None:
            return super().get_initial_for_field(field, field_name)
        return value
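
The `get_initial_for_field` override above looks for `<field_name>_initial` attributes when the form carries an `instance`; a minimal hypothetical subclass wiring that convention up (class and model fields are illustrative):

    # Hypothetical subclass: the mixin reads "<field>_initial" attributes
    # whenever self.instance is set.
    class BackupTaskForm(PeriodTaskFormMixin):
        def __init__(self, *args, instance=None, **kwargs):
            super().__init__(*args, **kwargs)
            self.instance = instance
            if instance is not None:
                self.is_periodic_initial = instance.is_periodic
                self.crontab_initial = instance.crontab
                self.interval_initial = instance.interval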


apps/ops/models/__init__.py
@@ -3,4 +3,5 @@

from .adhoc import *
from .celery import *
from .command import *
from .playbook import *
from .job import *

apps/ops/models/adhoc.py
@@ -1,339 +1,42 @@
# ~*~ coding: utf-8 ~*~

import uuid
import os
import time
import datetime

from celery import current_task
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

from common.utils import get_logger, lazyproperty, make_dirs
from common.utils.translate import translate_value
from common.db.fields import (
    JsonListTextField, JsonDictCharField, EncryptJsonDictCharField,
    JsonDictTextField,
)
from orgs.mixins.models import OrgModelMixin
from ..ansible import AdHocRunner, AnsibleError
from ..inventory import JMSInventory
from ..mixin import PeriodTaskModelMixin

__all__ = ["Task", "AdHoc", "AdHocExecution"]
from common.utils import get_logger
from orgs.mixins.models import JMSOrgBaseModel

__all__ = ["AdHoc"]

logger = get_logger(__file__)


class Task(PeriodTaskModelMixin, OrgModelMixin):
    """
    This Task is different from an Ansible task; a Task is something like
    'push system user' or 'get asset info'. One task can have several versions
    of adhoc; running a task only runs the latest adhoc version.
    """
    callback = models.CharField(max_length=128, blank=True, null=True, verbose_name=_("Callback"))  # Callback must be a registered celery task
    is_deleted = models.BooleanField(default=False)
    comment = models.TextField(blank=True, verbose_name=_("Comment"))
    date_created = models.DateTimeField(auto_now_add=True, db_index=True, verbose_name=_("Date created"))
    date_updated = models.DateTimeField(auto_now=True, verbose_name=_("Date updated"))
    latest_adhoc = models.ForeignKey('ops.AdHoc', on_delete=models.SET_NULL,
                                     null=True, related_name='task_latest')
    latest_execution = models.ForeignKey('ops.AdHocExecution', on_delete=models.SET_NULL, null=True, related_name='task_latest')
    total_run_amount = models.IntegerField(default=0)
    success_run_amount = models.IntegerField(default=0)
    _ignore_auto_created_by = True
class AdHoc(JMSOrgBaseModel):
    class Modules(models.TextChoices):
        shell = 'shell', _('Shell')
        winshell = 'win_shell', _('Powershell')

    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, verbose_name=_('Name'))
    pattern = models.CharField(max_length=1024, verbose_name=_("Pattern"), default='all')
    module = models.CharField(max_length=128, choices=Modules.choices, default=Modules.shell,
                              verbose_name=_('Module'))
    args = models.CharField(max_length=1024, default='', verbose_name=_('Args'))
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)

    @property
    def short_id(self):
        return str(self.id).split('-')[-1]

    @lazyproperty
    def versions(self):
        return self.adhoc.all().count()

    @property
    def is_success(self):
        if self.latest_execution:
            return self.latest_execution.is_success
        else:
            return False

    @lazyproperty
    def display_name(self):
        value = translate_value(self.name)
        return value

    @property
    def timedelta(self):
        if self.latest_execution:
            return self.latest_execution.timedelta
        else:
    def row_count(self):
        if len(self.args) == 0:
            return 0
        count = str(self.args).count('\n')
        return count + 1

    @property
    def date_start(self):
        if self.latest_execution:
            return self.latest_execution.date_start
        else:
            return None

    @property
    def assets_amount(self):
        if self.latest_execution:
            return self.latest_execution.hosts_amount
        return 0

    def get_latest_adhoc(self):
        if self.latest_adhoc:
            return self.latest_adhoc
        try:
            adhoc = self.adhoc.all().latest()
            self.latest_adhoc = adhoc
            self.save()
            return adhoc
        except AdHoc.DoesNotExist:
            return None

    @property
    def history_summary(self):
        total = self.total_run_amount
        success = self.success_run_amount
        failed = total - success
        return {'total': total, 'success': success, 'failed': failed}

    def get_run_execution(self):
        return self.execution.all()

    def run(self):
        latest_adhoc = self.get_latest_adhoc()
        if latest_adhoc:
            return latest_adhoc.run()
        else:
            return {'error': 'No adhoc'}

    @property
    def period_key(self):
        return self.__str__()

    def get_register_task(self):
        from ..tasks import run_ansible_task
        name = self.__str__()
        task = run_ansible_task.name
        args = (str(self.id),)
        kwargs = {"callback": self.callback}
        return name, task, args, kwargs
    def size(self):
        return len(self.args)

    def __str__(self):
        return self.name + '@' + str(self.org_id)

    class Meta:
        db_table = 'ops_task'
        unique_together = ('name', 'org_id')
        ordering = ('-date_updated',)
        verbose_name = _("Task")
        get_latest_by = 'date_created'
        permissions = [
            ('view_taskmonitor', _('Can view task monitor'))
        ]


class AdHoc(OrgModelMixin):
    """
    task: a Task reference
    _tasks: [{'name': 'task_name', 'action': {'module': '', 'args': ''}, 'other..': ''}, ]
    _options: ansible options, see ops.ansible.runner.Options for more
    run_as_admin: if true, each host runs with its own admin user, because
        hosts may have different admin users, so we choose the host level
    run_as: username (add the uniform AssetUserManager and change it to username)
    _become: may use become [sudo, su] options, e.g. {method: "sudo", user: "user", pass: "pass"}
    pattern: even though we set _hosts, which is only used to build the inventory,
        we can also set `pattern` to run the task on matching hosts
    """
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    task = models.ForeignKey(Task, related_name='adhoc', on_delete=models.CASCADE)
    tasks = JsonListTextField(verbose_name=_('Tasks'))
    pattern = models.CharField(max_length=64, default='{}', verbose_name=_('Pattern'))
    options = JsonDictCharField(max_length=1024, default='', verbose_name=_('Options'))
    hosts = models.ManyToManyField('assets.Asset', verbose_name=_("Host"))
    run_as_admin = models.BooleanField(default=False, verbose_name=_('Run as admin'))
    run_as = models.CharField(max_length=64, default='', blank=True, null=True, verbose_name=_('Username'))
    run_system_user = models.ForeignKey('assets.SystemUser', null=True, on_delete=models.CASCADE)
    become = EncryptJsonDictCharField(max_length=1024, default='', blank=True, null=True, verbose_name=_("Become"))
    created_by = models.CharField(max_length=64, default='', blank=True, null=True, verbose_name=_('Create by'))
    date_created = models.DateTimeField(auto_now_add=True, db_index=True)

    @lazyproperty
    def run_times(self):
        return self.execution.count()

    @property
    def inventory(self):
        if self.become:
            become_info = {
                'become': {
                    self.become
                }
            }
        else:
            become_info = None

        inventory = JMSInventory(
            self.hosts.all(), run_as_admin=self.run_as_admin,
            run_as=self.run_as, become_info=become_info, system_user=self.run_system_user
        )
        return inventory

    @property
    def become_display(self):
        if self.become:
            return self.become.get("user", "")
        return ""

    def run(self):
        try:
            celery_task_id = current_task.request.id
        except AttributeError:
            celery_task_id = None

        execution = AdHocExecution(
            celery_task_id=celery_task_id,
            adhoc=self, task=self.task,
            task_display=str(self.task)[:128],
            date_start=timezone.now(),
            hosts_amount=self.hosts.count(),
        )
        execution.save()
        return execution.start()

    @property
    def short_id(self):
        return str(self.id).split('-')[-1]

    @property
    def latest_execution(self):
        try:
            return self.execution.all().latest()
        except AdHocExecution.DoesNotExist:
            return None

    def save(self, **kwargs):
        instance = super().save(**kwargs)
        self.task.latest_adhoc = instance
        self.task.save()
        return instance

    def __str__(self):
        return "{} of {}".format(self.task.name, self.short_id)

    def same_with(self, other):
        if not isinstance(other, self.__class__):
            return False
        fields_check = []
        for field in self.__class__._meta.fields:
            if field.name not in ['id', 'date_created']:
                fields_check.append(field)
        for field in fields_check:
            if getattr(self, field.name) != getattr(other, field.name):
                return False
        return True

    class Meta:
        db_table = "ops_adhoc"
        get_latest_by = 'date_created'
        verbose_name = _('AdHoc')


class AdHocExecution(OrgModelMixin):
    """
    AdHoc running history.
    """
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    task = models.ForeignKey(Task, related_name='execution', on_delete=models.SET_NULL, null=True)
    task_display = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Task display"))
    celery_task_id = models.UUIDField(default=None, null=True)
    hosts_amount = models.IntegerField(default=0, verbose_name=_("Host amount"))
    adhoc = models.ForeignKey(AdHoc, related_name='execution', on_delete=models.SET_NULL, null=True)
    date_start = models.DateTimeField(auto_now_add=True, verbose_name=_('Start time'))
    date_finished = models.DateTimeField(blank=True, null=True, verbose_name=_('End time'))
    timedelta = models.FloatField(default=0.0, verbose_name=_('Time'), null=True)
    is_finished = models.BooleanField(default=False, verbose_name=_('Is finished'))
    is_success = models.BooleanField(default=False, verbose_name=_('Is success'))
    result = JsonDictTextField(blank=True, null=True, verbose_name=_('Adhoc raw result'))
    summary = JsonDictTextField(blank=True, null=True, verbose_name=_('Adhoc result summary'))

    @property
    def short_id(self):
        return str(self.id).split('-')[-1]

    @property
    def adhoc_short_id(self):
        return str(self.adhoc_id).split('-')[-1]

    @property
    def log_path(self):
        dt = datetime.datetime.now().strftime('%Y-%m-%d')
        log_dir = os.path.join(settings.PROJECT_DIR, 'data', 'ansible', dt)
        if not os.path.exists(log_dir):
            make_dirs(log_dir)
        return os.path.join(log_dir, str(self.id) + '.log')

    def start_runner(self):
        runner = AdHocRunner(self.adhoc.inventory, options=self.adhoc.options)
        try:
            result = runner.run(
                self.adhoc.tasks,
                self.adhoc.pattern,
                self.task.name,
                execution_id=self.id
            )
            return result.results_raw, result.results_summary
        except AnsibleError as e:
            logger.warn("Failed run adhoc {}, {}".format(self.task.name, e))
            return {}, {}

    def start(self):
        self.task.latest_execution = self
        self.task.save()
        time_start = time.time()
        summary = {}
        raw = ''

        try:
            raw, summary = self.start_runner()
        except Exception as e:
            logger.error(e, exc_info=True)
            raw = {"dark": {"all": str(e)}, "contacted": []}
        finally:
            self.clean_up(summary, time_start)
            return raw, summary

    def clean_up(self, summary, time_start):
        is_success = summary.get('success', False)
        task = Task.objects.get(id=self.task_id)
        task.total_run_amount = models.F('total_run_amount') + 1
        if is_success:
            task.success_run_amount = models.F('success_run_amount') + 1
        task.save()
        AdHocExecution.objects.filter(id=self.id).update(
            is_finished=True,
            is_success=is_success,
            date_finished=timezone.now(),
            timedelta=time.time() - time_start,
            summary=summary
        )

    @property
    def success_hosts(self):
        return self.summary.get('contacted', [])

    @property
    def failed_hosts(self):
        return self.summary.get('dark', {})

    def __str__(self):
        return self.short_id

    class Meta:
        db_table = "ops_adhoc_execution"
        get_latest_by = 'date_start'
        verbose_name = _("AdHoc execution")
        return "{}: {}".format(self.module, self.args)

apps/ops/models/base.py (new file, +136)
@@ -0,0 +1,136 @@
import os.path
import uuid
import logging

from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from django.conf import settings

from orgs.mixins.models import JMSOrgBaseModel
from ..ansible.inventory import JMSInventory
from ..mixin import PeriodTaskModelMixin


class BaseAnsibleJob(PeriodTaskModelMixin, JMSOrgBaseModel):
    owner = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    assets = models.ManyToManyField('assets.Asset', verbose_name=_("Assets"))
    account = models.CharField(max_length=128, default='root', verbose_name=_('Account'))
    account_policy = models.CharField(max_length=128, default='root', verbose_name=_('Account policy'))
    last_execution = models.ForeignKey('BaseAnsibleExecution', verbose_name=_("Last execution"),
                                       on_delete=models.SET_NULL, null=True)
    date_last_run = models.DateTimeField(null=True, verbose_name=_('Date last run'))

    class Meta:
        abstract = True

    @property
    def inventory(self):
        inv = JMSInventory(self.assets.all(), self.account, self.account_policy)
        return inv

    def get_register_task(self):
        raise NotImplementedError

    def to_json(self):
        raise NotImplementedError

    def create_execution(self):
        execution = self.executions.create()
        return execution

    def run(self, *args, **kwargs):
        execution = self.create_execution()
        return execution.start()


class BaseAnsibleExecution(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    status = models.CharField(max_length=16, verbose_name=_('Status'), default='running')
    task = models.ForeignKey(BaseAnsibleJob, on_delete=models.CASCADE, related_name='executions', null=True)
    result = models.JSONField(blank=True, null=True, verbose_name=_('Result'))
    summary = models.JSONField(default=dict, verbose_name=_('Summary'))
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'), db_index=True)
    date_finished = models.DateTimeField(null=True, verbose_name=_("Date finished"))

    class Meta:
        abstract = True
        ordering = ["-date_start"]

    def __str__(self):
        return str(self.id)

    @property
    def private_dir(self):
        uniq = self.date_created.strftime('%Y%m%d_%H%M%S') + '_' + self.short_id
        return os.path.join(settings.ANSIBLE_DIR, self.task.name, uniq)

    @property
    def inventory_path(self):
        return os.path.join(self.private_dir, 'inventory', 'hosts')

    def get_runner(self):
        raise NotImplementedError

    def finish_task(self):
        self.date_finished = timezone.now()
        self.save(update_fields=['result', 'status', 'summary', 'date_finished'])
        self.update_task()

    def set_error(self, error):
        this = self.__class__.objects.get(id=self.id)  # Re-fetch from the DB to avoid a timed-out connection
        this.status = 'failed'
        this.summary['error'] = str(error)
        this.finish_task()

    def set_result(self, cb):
        status_mapper = {
            'successful': 'success',
        }
        this = self.__class__.objects.get(id=self.id)
        this.status = status_mapper.get(cb.status, cb.status)
        this.summary = cb.summary
        this.result = cb.result
        this.finish_task()
        print("Finished")

    def update_task(self):
        self.task.last_execution = self
        self.task.date_last_run = timezone.now()
        self.task.save(update_fields=['last_execution', 'date_last_run'])

    def start(self, **kwargs):
        runner = self.get_runner()
        try:
            cb = runner.run(**kwargs)
            self.set_result(cb)
            return cb
        except Exception as e:
            logging.error(e, exc_info=True)
            self.set_error(e)

    @property
    def is_finished(self):
        return self.status in ['success', 'failed']

    @property
    def is_success(self):
        return self.status == 'success'

    @property
    def short_id(self):
        return str(self.id).split('-')[-1]

    @property
    def timedelta(self):
        if self.date_start and self.date_finished:
            return self.date_finished - self.date_start
        return None

    @property
    def time_cost(self):
        if self.date_finished and self.date_start:
            return (self.date_finished - self.date_start).total_seconds()
        return None
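
These two abstract bases form a template-method pair: a concrete job supplies `get_register_task`, and its execution supplies `get_runner`, whose `.run()` must yield a callback carrying `.status`, `.summary`, and `.result`. A hypothetical minimal pair building on the classes above (names are illustrative, not from this commit):

    # Hypothetical concrete subclasses of the abstract bases above.
    class PingJob(BaseAnsibleJob):
        def get_register_task(self):
            # (name, task, args, kwargs) -- the shape the period-task registry expects.
            name = 'run_ping_job_{}'.format(str(self.id)[:8])
            return name, 'ops.tasks.run_ping_job', (str(self.id),), {}

    class PingExecution(BaseAnsibleExecution):
        task = models.ForeignKey(PingJob, on_delete=models.CASCADE, related_name='executions')

        def get_runner(self):
            # Would return an object whose .run() yields a callback
            # exposing .status / .summary / .result, as set_result expects.
            raise NotImplementedError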

apps/ops/models/celery.py
@@ -7,34 +7,73 @@ from django.utils.translation import gettext_lazy as _
from django.conf import settings
from django.db import models

from ops.celery import app


class CeleryTask(models.Model):
    WAITING = "waiting"
    RUNNING = "running"
    FINISHED = "finished"
    LOG_DIR = os.path.join(settings.PROJECT_DIR, 'data', 'celery')
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    name = models.CharField(max_length=1024, verbose_name=_('Name'))
    last_published_time = models.DateTimeField(null=True)

    STATUS_CHOICES = (
        (WAITING, WAITING),
        (RUNNING, RUNNING),
        (FINISHED, FINISHED),
    )
    @property
    def meta(self):
        task = app.tasks.get(self.name, None)
        return {
            "comment": getattr(task, 'verbose_name', None),
            "queue": getattr(task, 'queue', 'default')
        }

    @property
    def summary(self):
        executions = CeleryTaskExecution.objects.filter(name=self.name)
        total = executions.count()
        success = executions.filter(state='SUCCESS').count()
        return {'total': total, 'success': success}

    @property
    def state(self):
        last_five_executions = CeleryTaskExecution.objects.filter(name=self.name).order_by('-date_published')[:5]

        if len(last_five_executions) > 0:
            if last_five_executions[0].state == 'FAILURE':
                return "red"

            for execution in last_five_executions:
                if execution.state == 'FAILURE':
                    return "yellow"
        return "green"

    class Meta:
        ordering = ('name',)


class CeleryTaskExecution(models.Model):
    LOG_DIR = os.path.join(settings.PROJECT_DIR, 'data', 'celery')
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    name = models.CharField(max_length=1024)
    status = models.CharField(max_length=128, choices=STATUS_CHOICES, db_index=True)
    log_path = models.CharField(max_length=256, blank=True, null=True)
    date_published = models.DateTimeField(auto_now_add=True)
    date_start = models.DateTimeField(null=True)
    date_finished = models.DateTimeField(null=True)
    args = models.JSONField(verbose_name=_("Args"))
    kwargs = models.JSONField(verbose_name=_("Kwargs"))
    state = models.CharField(max_length=16, verbose_name=_("State"))
    is_finished = models.BooleanField(default=False, verbose_name=_("Finished"))
    date_published = models.DateTimeField(auto_now_add=True, verbose_name=_('Date published'))
    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'))
    date_finished = models.DateTimeField(null=True, verbose_name=_('Date finished'))

    @property
    def time_cost(self):
        if self.date_finished and self.date_start:
            return (self.date_finished - self.date_start).total_seconds()
        return None

    @property
    def timedelta(self):
        if self.date_start and self.date_finished:
            return self.date_finished - self.date_start
        return None

    @property
    def is_success(self):
        return self.state == 'SUCCESS'

    def __str__(self):
        return "{}: {}".format(self.name, self.id)

    def is_finished(self):
        return self.status == self.FINISHED

    @property
    def full_log_path(self):
        if not self.log_path:
            return None
        return os.path.join(self.LOG_DIR, self.log_path)

apps/ops/models/command.py (deleted)
@@ -1,164 +0,0 @@
# -*- coding: utf-8 -*-
#
import uuid
import json

from celery.exceptions import SoftTimeLimitExceeded
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from django.db import models

from terminal.notifications import CommandExecutionAlert
from assets.models import Asset
from common.utils import lazyproperty
from orgs.models import Organization
from orgs.mixins.models import OrgModelMixin
from orgs.utils import tmp_to_org
from ..ansible.runner import CommandRunner
from ..inventory import JMSInventory


class CommandExecution(OrgModelMixin):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    hosts = models.ManyToManyField('assets.Asset')
    run_as = models.ForeignKey('assets.SystemUser', on_delete=models.CASCADE)
    command = models.TextField(verbose_name=_("Command"))
    _result = models.TextField(blank=True, null=True, verbose_name=_('Result'))
    user = models.ForeignKey('users.User', on_delete=models.CASCADE, null=True)
    is_finished = models.BooleanField(default=False, verbose_name=_('Is finished'))
    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'))
    date_finished = models.DateTimeField(null=True, verbose_name=_('Date finished'))

    def __str__(self):
        return self.command[:10]

    def save(self, *args, **kwargs):
        with tmp_to_org(self.run_as.org_id):
            super().save(*args, **kwargs)

    @property
    def inventory(self):
        if self.run_as.username_same_with_user:
            username = self.user.username
        else:
            username = self.run_as.username
        inv = JMSInventory(self.allow_assets, run_as=username, system_user=self.run_as)
        return inv

    @lazyproperty
    def run_as_display(self):
        return str(self.run_as)

    @lazyproperty
    def user_display(self):
        return str(self.user)

    @lazyproperty
    def hosts_display(self):
        return ','.join(self.hosts.all().values_list('hostname', flat=True))

    @property
    def result(self):
        if self._result:
            return json.loads(self._result)
        else:
            return {}

    @result.setter
    def result(self, item):
        self._result = json.dumps(item)

    @property
    def is_success(self):
        if 'error' in self.result:
            return False
        return True

    def get_hosts_names(self):
        return ','.join(self.hosts.all().values_list('hostname', flat=True))

    def cmd_filter_rules(self, asset_id=None):
        from assets.models import CommandFilterRule
        user_id = self.user.id
        system_user_id = self.run_as.id
        rules = CommandFilterRule.get_queryset(
            user_id=user_id,
            system_user_id=system_user_id,
            asset_id=asset_id,
        )
        return rules

    def is_command_can_run(self, command, asset_id=None):
        for rule in self.cmd_filter_rules(asset_id=asset_id):
            action, matched_cmd = rule.match(command)
            if action == rule.ActionChoices.allow:
                return True, None
            elif action == rule.ActionChoices.deny:
                return False, matched_cmd
        return True, None

    @property
    def allow_assets(self):
        allow_asset_ids = []
        for asset in self.hosts.all():
            ok, __ = self.is_command_can_run(self.command, asset_id=asset.id)
            if ok:
                allow_asset_ids.append(asset.id)
        allow_assets = Asset.objects.filter(id__in=allow_asset_ids)
        return allow_assets

    def run(self):
        print('-' * 10 + ' ' + ugettext('Task start') + ' ' + '-' * 10)
        org = Organization.get_instance(self.run_as.org_id)
        org.change_to()
        self.date_start = timezone.now()
        ok, msg = self.is_command_can_run(self.command)
        if ok:
            allow_assets = self.allow_assets
            deny_assets = set(list(self.hosts.all())) - set(list(allow_assets))
            for asset in deny_assets:
                print(f'Asset {asset}: command {self.command} is not allowed to run')
            if not allow_assets:
                self.result = {
                    "error": 'There are currently no assets that can be executed'
                }
                self.save()
                return self.result
            runner = CommandRunner(self.inventory)
            try:
                host = allow_assets.first()
                if host and host.is_windows():
                    shell = 'win_shell'
                elif host and host.is_unixlike():
                    shell = 'shell'
                else:
                    shell = 'raw'
                result = runner.execute(self.command, 'all', module=shell)
                self.result = result.results_command
            except SoftTimeLimitExceeded as e:
                print("Run timed out after more than 60s")
                self.result = {"error": str(e)}
            except Exception as e:
                print("Error occur: {}".format(e))
                self.result = {"error": str(e)}
        else:
            msg = _("Command `{}` is forbidden ........").format(self.command)
            print('\033[31m' + msg + '\033[0m')
            CommandExecutionAlert({
                'input': self.command,
                'assets': self.hosts.all(),
                'user': str(self.user),
                'risk_level': 5,
            }).publish_async()
            self.result = {"error": msg}
        self.org_id = self.run_as.org_id
        self.is_finished = True
        self.date_finished = timezone.now()
        self.save()
        print('-' * 10 + ' ' + ugettext('Task end') + ' ' + '-' * 10)
        return self.result

    class Meta:
        verbose_name = _("Command execution")

apps/ops/models/common.py (new file, +4)
@@ -0,0 +1,4 @@
# Built-in environment variables
BUILTIN_VARIABLES = {

}

apps/ops/models/job.py (new file, +219)
@@ -0,0 +1,219 @@
import json
import os
import uuid
import logging

from django.conf import settings
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from celery import current_task

__all__ = ["Job", "JobExecution"]

from ops.ansible import JMSInventory, AdHocRunner, PlaybookRunner
from ops.mixin import PeriodTaskModelMixin
from orgs.mixins.models import JMSOrgBaseModel


class Job(JMSOrgBaseModel, PeriodTaskModelMixin):
    class Types(models.TextChoices):
        adhoc = 'adhoc', _('Adhoc')
        playbook = 'playbook', _('Playbook')

    class RunasPolicies(models.TextChoices):
        privileged_only = 'privileged_only', _('Privileged Only')
        privileged_first = 'privileged_first', _('Privileged First')
        skip = 'skip', _('Skip')

    class Modules(models.TextChoices):
        shell = 'shell', _('Shell')
        winshell = 'win_shell', _('Powershell')

    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, null=True, verbose_name=_('Name'))
    instant = models.BooleanField(default=False)
    args = models.CharField(max_length=1024, default='', verbose_name=_('Args'), null=True, blank=True)
    module = models.CharField(max_length=128, choices=Modules.choices, default=Modules.shell,
                              verbose_name=_('Module'), null=True)
    chdir = models.CharField(default="", max_length=1024, verbose_name=_('Chdir'), null=True, blank=True)
    timeout = models.IntegerField(default=60, verbose_name=_('Timeout (Seconds)'))
    playbook = models.ForeignKey('ops.Playbook', verbose_name=_("Playbook"), null=True, on_delete=models.SET_NULL)
    type = models.CharField(max_length=128, choices=Types.choices, default=Types.adhoc, verbose_name=_("Type"))
    owner = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    assets = models.ManyToManyField('assets.Asset', verbose_name=_("Assets"))
    runas = models.CharField(max_length=128, default='root', verbose_name=_('Runas'))
    runas_policy = models.CharField(max_length=128, choices=RunasPolicies.choices, default=RunasPolicies.skip,
                                    verbose_name=_('Runas policy'))
    use_parameter_define = models.BooleanField(default=False, verbose_name=(_('Use Parameter Define')))
    parameters_define = models.JSONField(default=dict, verbose_name=_('Parameters define'))
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)

    @property
    def last_execution(self):
        return self.executions.last()

    @property
    def date_last_run(self):
        return self.last_execution.date_created if self.last_execution else None

    @property
    def summary(self):
        summary = {
            "total": 0,
            "success": 0,
        }
        for execution in self.executions.all():
            summary["total"] += 1
            if execution.is_success:
                summary["success"] += 1
        return summary

    @property
    def average_time_cost(self):
        total_cost = 0
        finished_count = self.executions.filter(status__in=['success', 'failed']).count()
        for execution in self.executions.filter(status__in=['success', 'failed']).all():
            total_cost += execution.time_cost
        return total_cost / finished_count if finished_count else 0

    def get_register_task(self):
        from ..tasks import run_ops_job_execution
        name = "run_ops_job_period_{}".format(str(self.id)[:8])
        task = run_ops_job_execution.name
        args = (str(self.id),)
        kwargs = {}
        return name, task, args, kwargs

    @property
    def inventory(self):
        return JMSInventory(self.assets.all(), self.runas_policy, self.runas)

    def create_execution(self):
        return self.executions.create()

    class Meta:
        ordering = ['date_created']


class JobExecution(JMSOrgBaseModel):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    task_id = models.UUIDField(null=True)
    status = models.CharField(max_length=16, verbose_name=_('Status'), default='running')
    job = models.ForeignKey(Job, on_delete=models.CASCADE, related_name='executions', null=True)
    parameters = models.JSONField(default=dict, verbose_name=_('Parameters'))
    result = models.JSONField(blank=True, null=True, verbose_name=_('Result'))
    summary = models.JSONField(default=dict, verbose_name=_('Summary'))
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'), db_index=True)
    date_finished = models.DateTimeField(null=True, verbose_name=_("Date finished"))

    @property
    def job_type(self):
        return self.job.type

    def compile_shell(self):
        if self.job.type != 'adhoc':
            return
        result = "{}{}{} ".format('\'', self.job.args, '\'')
        result += "chdir={}".format(self.job.chdir)
        return result

    def get_runner(self):
        inv = self.job.inventory
        inv.write_to_file(self.inventory_path)
        if isinstance(self.parameters, str):
            extra_vars = json.loads(self.parameters)
        else:
            extra_vars = {}

        if self.job.type == 'adhoc':
            args = self.compile_shell()
            runner = AdHocRunner(
                self.inventory_path, self.job.module, module_args=args,
                pattern="all", project_dir=self.private_dir, extra_vars=extra_vars,
            )
        elif self.job.type == 'playbook':
            runner = PlaybookRunner(
                self.inventory_path, self.job.playbook.entry
            )
        else:
            raise Exception("unsupported job type")
        return runner

    @property
    def short_id(self):
        return str(self.id).split('-')[-1]

    @property
    def time_cost(self):
        if self.date_finished and self.date_start:
            return (self.date_finished - self.date_start).total_seconds()
        return None

    @property
    def timedelta(self):
        if self.date_start and self.date_finished:
            return self.date_finished - self.date_start
        return None

    @property
    def is_finished(self):
        return self.status in ['success', 'failed']

    @property
    def is_success(self):
        return self.status == 'success'

    @property
    def inventory_path(self):
        return os.path.join(self.private_dir, 'inventory', 'hosts')

    @property
    def private_dir(self):
        uniq = self.date_created.strftime('%Y%m%d_%H%M%S') + '_' + self.short_id
        job_name = self.job.name if self.job.name else 'instant'
        return os.path.join(settings.ANSIBLE_DIR, job_name, uniq)

    def set_error(self, error):
        this = self.__class__.objects.get(id=self.id)  # Re-fetch from the DB to avoid a timed-out connection
        this.status = 'failed'
        this.summary['error'] = str(error)
        this.finish_task()

    def set_result(self, cb):
        status_mapper = {
            'successful': 'success',
        }
        this = self.__class__.objects.get(id=self.id)
        this.status = status_mapper.get(cb.status, cb.status)
        this.summary = cb.summary
        this.result = cb.result
        this.finish_task()

    def finish_task(self):
        self.date_finished = timezone.now()
        self.save(update_fields=['result', 'status', 'summary', 'date_finished'])

    def set_celery_id(self):
        if not current_task:
            return
        task_id = current_task.request.root_id
        self.task_id = task_id

    def start(self, **kwargs):
        self.date_start = timezone.now()
        self.set_celery_id()
        self.save()
        runner = self.get_runner()
        try:
            cb = runner.run(**kwargs)
            self.set_result(cb)
            return cb
        except Exception as e:
            logging.error(e, exc_info=True)
            self.set_error(e)

    class Meta:
        ordering = ['-date_created']
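
`compile_shell` above single-quotes the job's `args` and appends a `chdir=` key/value for the Ansible shell module. A free-function mirror of that exact formatting, extracted purely for illustration:

    def compile_shell(args, chdir):
        # Mirrors JobExecution.compile_shell above (free-function form for illustration).
        result = "{}{}{} ".format('\'', args, '\'')
        result += "chdir={}".format(chdir)
        return result

    assert compile_shell('df -h', '/tmp') == "'df -h' chdir=/tmp"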

apps/ops/models/playbook.py (new file, +26)
@@ -0,0 +1,26 @@
import os.path
import uuid

from django.conf import settings
from django.db import models
from django.utils.translation import gettext_lazy as _

from ops.exception import PlaybookNoValidEntry
from orgs.mixins.models import JMSOrgBaseModel


class Playbook(JMSOrgBaseModel):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, verbose_name=_('Name'), null=True)
    path = models.FileField(upload_to='playbooks/')
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)

    @property
    def entry(self):
        work_dir = os.path.join(settings.DATA_DIR, "ops", "playbook", self.id.__str__())
        valid_entry = ('main.yml', 'main.yaml', 'main')
        for f in os.listdir(work_dir):
            if f in valid_entry:
                return os.path.join(work_dir, f)
        raise PlaybookNoValidEntry
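
`entry` resolves a playbook's entry file by scanning its unpacked working directory for one of the conventional names. A hypothetical caller (the playbook name is illustrative):

    # Hypothetical: resolving a playbook entry point before running it.
    playbook = Playbook.objects.get(name='deploy')
    try:
        entry = playbook.entry  # e.g. <DATA_DIR>/ops/playbook/<uuid>/main.yml
    except PlaybookNoValidEntry:
        entry = None  # no main.yml / main.yaml / main found in the work dir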
|
||||
@@ -1,153 +1,19 @@
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals

from rest_framework import serializers
from django.shortcuts import reverse

from common.drf.fields import ReadableHiddenField
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ..models import Task, AdHoc, AdHocExecution, CommandExecution
from ..models import AdHoc


class AdHocExecutionSerializer(serializers.ModelSerializer):
    stat = serializers.SerializerMethodField()
    last_success = serializers.ListField(source='success_hosts')
    last_failure = serializers.DictField(source='failed_hosts')

    class Meta:
        model = AdHocExecution
        fields_mini = ['id']
        fields_small = fields_mini + [
            'hosts_amount', 'timedelta', 'result', 'summary', 'short_id',
            'is_finished', 'is_success',
            'date_start', 'date_finished',
        ]
        fields_fk = ['task', 'task_display', 'adhoc', 'adhoc_short_id']
        fields_custom = ['stat', 'last_success', 'last_failure']
        fields = fields_small + fields_fk + fields_custom

    @staticmethod
    def get_task(obj):
        return obj.task.id

    @staticmethod
    def get_stat(obj):
        count_failed_hosts = len(obj.failed_hosts)
        count_success_hosts = len(obj.success_hosts)
        count_total = count_success_hosts + count_failed_hosts
        return {
            "total": count_total,
            "success": count_success_hosts,
            "failed": count_failed_hosts
        }


class AdHocExecutionExcludeResultSerializer(AdHocExecutionSerializer):
    class Meta:
        model = AdHocExecution
        fields = [
            'id', 'task', 'task_display', 'hosts_amount', 'adhoc', 'date_start', 'stat',
            'date_finished', 'timedelta', 'is_finished', 'is_success',
            'short_id', 'adhoc_short_id', 'last_success', 'last_failure'
        ]


class TaskSerializer(BulkOrgResourceModelSerializer):
    summary = serializers.ReadOnlyField(source='history_summary')
    latest_execution = AdHocExecutionExcludeResultSerializer(read_only=True)

    class Meta:
        model = Task
        fields_mini = ['id', 'name', 'display_name']
        fields_small = fields_mini + [
            'interval', 'crontab',
            'is_periodic', 'is_deleted',
            'date_created', 'date_updated',
            'comment',
        ]
        fields_fk = ['latest_execution']
        fields_custom = ['summary']
        fields = fields_small + fields_fk + fields_custom
        read_only_fields = [
            'is_deleted', 'date_created', 'date_updated',
            'latest_adhoc', 'latest_execution', 'total_run_amount',
            'success_run_amount', 'summary',
        ]


class TaskDetailSerializer(TaskSerializer):
    contents = serializers.ListField(source='latest_adhoc.tasks')

    class Meta(TaskSerializer.Meta):
        fields = TaskSerializer.Meta.fields + ['contents']


class AdHocSerializer(serializers.ModelSerializer):
    become_display = serializers.ReadOnlyField()
    tasks = serializers.ListField()
class AdHocSerializer(BulkOrgResourceModelSerializer, serializers.ModelSerializer):
    creator = ReadableHiddenField(default=serializers.CurrentUserDefault())
    row_count = serializers.IntegerField(read_only=True)
    size = serializers.IntegerField(read_only=True)

    class Meta:
        model = AdHoc
        fields_mini = ['id']
        fields_small = fields_mini + [
            'tasks', "pattern", "options", "run_as",
            "become", "become_display", "short_id",
            "run_as_admin",
            "date_created",
        ]
        fields_fk = ["task"]
        fields_m2m = ["hosts"]
        fields = fields_small + fields_fk + fields_m2m
        read_only_fields = [
            'date_created'
        ]
        extra_kwargs = {
            "become": {'write_only': True}
        }


class AdHocExecutionNestSerializer(serializers.ModelSerializer):
    last_success = serializers.ListField(source='success_hosts')
    last_failure = serializers.DictField(source='failed_hosts')
    last_run = serializers.CharField(source='short_id')

    class Meta:
        model = AdHocExecution
        fields = (
            'last_success', 'last_failure', 'last_run', 'timedelta',
            'is_finished', 'is_success'
        )


class AdHocDetailSerializer(AdHocSerializer):
    latest_execution = AdHocExecutionNestSerializer(allow_null=True)
    task_name = serializers.CharField(source='task.name')

    class Meta(AdHocSerializer.Meta):
        fields = AdHocSerializer.Meta.fields + [
            'latest_execution', 'created_by', 'run_times', 'task_name'
        ]


class CommandExecutionSerializer(serializers.ModelSerializer):
    result = serializers.JSONField(read_only=True)
    log_url = serializers.SerializerMethodField()

    class Meta:
        model = CommandExecution
        fields_mini = ['id']
        fields_small = fields_mini + [
            'command', 'result', 'log_url',
            'is_finished', 'date_created', 'date_finished'
        ]
        fields_fk = ['run_as']
        fields_m2m = ['hosts']
        fields = fields_small + fields_fk + fields_m2m
        read_only_fields = [
            'result', 'is_finished', 'log_url', 'date_created',
            'date_finished'
        ]
        ref_name = 'OpsCommandExecution'

    @staticmethod
    def get_log_url(obj):
        return reverse('api-ops:celery-task-log', kwargs={'pk': obj.id})

        read_only_field = ["id", "row_count", "size", "creator", "date_created", "date_updated"]
        fields = read_only_field + ["id", "name", "module", "args", "comment"]

@@ -1,14 +1,17 @@
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
from rest_framework import serializers
from django.utils.translation import gettext_lazy as _

from django_celery_beat.models import PeriodicTask

__all__ = [
    'CeleryResultSerializer', 'CeleryTaskSerializer',
    'CeleryPeriodTaskSerializer'
    'CeleryResultSerializer', 'CeleryTaskExecutionSerializer',
    'CeleryPeriodTaskSerializer', 'CeleryTaskSerializer'
]

from ops.models import CeleryTask, CeleryTaskExecution


class CeleryResultSerializer(serializers.Serializer):
    id = serializers.UUIDField()
@@ -16,10 +19,6 @@ class CeleryResultSerializer(serializers.Serializer):
    state = serializers.CharField(max_length=16)


class CeleryTaskSerializer(serializers.Serializer):
    pass


class CeleryPeriodTaskSerializer(serializers.ModelSerializer):
    class Meta:
        model = PeriodicTask
@@ -27,3 +26,22 @@ class CeleryPeriodTaskSerializer(serializers.ModelSerializer):
            'name', 'task', 'enabled', 'description',
            'last_run_at', 'total_run_count'
        ]


class CeleryTaskSerializer(serializers.ModelSerializer):
    class Meta:
        model = CeleryTask
        read_only_fields = ['id', 'name', 'meta', 'summary', 'state', 'last_published_time']
        fields = read_only_fields


class CeleryTaskExecutionSerializer(serializers.ModelSerializer):
    is_success = serializers.BooleanField(required=False, read_only=True, label=_('Success'))

    class Meta:
        model = CeleryTaskExecution
        fields = [
            "id", "name", "args", "kwargs", "time_cost", "timedelta", "is_success", "is_finished", "date_published",
            "date_start",
            "date_finished"
        ]

apps/ops/serializers/job.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from django.utils.translation import ugettext as _
from rest_framework import serializers
from common.drf.fields import ReadableHiddenField
from ops.mixin import PeriodTaskSerializerMixin
from ops.models import Job, JobExecution
from orgs.mixins.serializers import BulkOrgResourceModelSerializer


class JobSerializer(BulkOrgResourceModelSerializer, PeriodTaskSerializerMixin):
    owner = ReadableHiddenField(default=serializers.CurrentUserDefault())
    run_after_save = serializers.BooleanField(label=_("Run after save"), read_only=True, default=False, required=False)

    class Meta:
        model = Job
        read_only_fields = ["id", "date_last_run", "date_created", "date_updated", "average_time_cost",
                            "run_after_save"]
        fields = read_only_fields + [
            "name", "instant", "type", "module", "args", "playbook", "assets", "runas_policy", "runas", "owner",
            "use_parameter_define",
            "parameters_define",
            "timeout",
            "chdir",
            "comment",
            "summary",
            "is_periodic", "interval", "crontab"
        ]


class JobExecutionSerializer(serializers.ModelSerializer):
    class Meta:
        model = JobExecution
        read_only_fields = ["id", "task_id", "timedelta", "time_cost", 'is_finished', 'date_start', 'date_created',
                            'is_success', 'task_id', 'short_id', 'job_type']
        fields = read_only_fields + [
            "job", "parameters"
        ]
apps/ops/serializers/playbook.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import os

from rest_framework import serializers

from common.drf.fields import ReadableHiddenField
from ops.models import Playbook
from orgs.mixins.serializers import BulkOrgResourceModelSerializer


def parse_playbook_name(path):
    file_name = os.path.split(path)[-1]
    return file_name.split(".")[-2]


class PlaybookSerializer(BulkOrgResourceModelSerializer, serializers.ModelSerializer):
    creator = ReadableHiddenField(default=serializers.CurrentUserDefault())
    path = serializers.FileField(required=False)

    def create(self, validated_data):
        name = validated_data.get('name')
        if not name:
            path = validated_data.get('path').name
            validated_data['name'] = parse_playbook_name(path)
        return super().create(validated_data)

    class Meta:
        model = Playbook
        read_only_fields = ["id", "date_created", "date_updated"]
        fields = read_only_fields + [
            "id", 'path', "name", "comment", "creator",
        ]
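
# Editor's note: parse_playbook_name() above takes the second-to-last
# dot-separated component of the uploaded file name, e.g.:
#
#   parse_playbook_name('playbooks/deploy.yml')        -> 'deploy'
#   parse_playbook_name('playbooks/site.backup.yaml')  -> 'backup'
#
# A name without a dot (e.g. 'Makefile') would raise IndexError, which the
# serializer does not currently guard against.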
@@ -1,15 +1,42 @@
from django.utils import translation
import ast
from celery import signals

from django.db import transaction
from django.core.cache import cache
from celery.signals import task_prerun, task_postrun, before_task_publish
from django.dispatch import receiver
from django.db.utils import ProgrammingError
from django.utils import translation, timezone
from django.utils.translation import gettext as _

from common.db.utils import close_old_connections
from common.signals import django_ready
from common.db.utils import close_old_connections, get_logger

from .celery import app
from .models import CeleryTaskExecution, CeleryTask

logger = get_logger(__name__)

TASK_LANG_CACHE_KEY = 'TASK_LANG_{}'
TASK_LANG_CACHE_TTL = 1800


@before_task_publish.connect()
@receiver(django_ready)
def sync_registered_tasks(*args, **kwargs):
    with transaction.atomic():
        try:
            db_tasks = CeleryTask.objects.all()
            celery_task_names = [key for key in app.tasks]
            db_task_names = db_tasks.values_list('name', flat=True)

            db_tasks.exclude(name__in=celery_task_names).delete()
            not_in_db_tasks = set(celery_task_names) - set(db_task_names)
            tasks_to_create = [CeleryTask(name=name) for name in not_in_db_tasks]
            CeleryTask.objects.bulk_create(tasks_to_create)
        except ProgrammingError:
            pass


@signals.before_task_publish.connect
def before_task_publish(headers=None, **kwargs):
    task_id = headers.get('id')
    current_lang = translation.get_language()
@@ -17,8 +44,11 @@ def before_task_publish(headers=None, **kwargs):
    cache.set(key, current_lang, 1800)


@task_prerun.connect()
@signals.task_prerun.connect
def on_celery_task_pre_run(task_id='', **kwargs):
    # Update the execution state
    CeleryTaskExecution.objects.filter(id=task_id) \
        .update(state='RUNNING', date_start=timezone.now())
    # Close stale database connections left over from before the task
    close_old_connections()

@@ -29,6 +59,41 @@ def on_celery_task_pre_run(task_id='', **kwargs):
    translation.activate(task_lang)


@task_postrun.connect()
def on_celery_task_post_run(**kwargs):
@signals.task_postrun.connect
def on_celery_task_post_run(task_id='', state='', **kwargs):
    close_old_connections()
    print(_("Task") + ": {} {}".format(task_id, state))

    CeleryTaskExecution.objects.filter(id=task_id).update(
        state=state, date_finished=timezone.now(), is_finished=True
    )


@signals.after_task_publish.connect
def task_sent_handler(headers=None, body=None, **kwargs):
    info = headers if 'task' in headers else body
    task = info.get('task')
    i = info.get('id')
    if not i or not task:
        logger.error("Not found task id or name: {}".format(info))
        return

    args = info.get('argsrepr', '()')
    kwargs = info.get('kwargsrepr', '{}')
    try:
        args = list(ast.literal_eval(args))
        kwargs = ast.literal_eval(kwargs)
    except (ValueError, SyntaxError):
        args = []
        kwargs = {}

    data = {
        'id': i,
        'name': task,
        'state': 'PENDING',
        'is_finished': False,
        'args': args,
        'kwargs': kwargs
    }
    CeleryTaskExecution.objects.create(**data)
    CeleryTask.objects.filter(name=task).update(last_published_time=timezone.now())
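
# Editor's note: a small illustration (not from the diff) of the argsrepr /
# kwargsrepr parsing in task_sent_handler above. Celery publishes reprs of the
# task arguments, which ast.literal_eval converts back when they are literals:
#
#   ast.literal_eval("('a', 1)")        -> ('a', 1);  list(...) gives ['a', 1]
#   ast.literal_eval("{'force': True}") -> {'force': True}
#
# Non-literal reprs (e.g. "<User: admin>") raise ValueError or SyntaxError and
# fall back to [] / {} as handled above.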
@@ -1,17 +1,16 @@
# coding: utf-8
import os
import subprocess
import time

from django.conf import settings
from celery import shared_task, subtask
from celery import shared_task

from celery.exceptions import SoftTimeLimitExceeded
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _, gettext
from django.utils.translation import ugettext_lazy as _

from common.utils import get_logger, get_object_or_none, get_log_keep_day
from orgs.utils import tmp_to_root_org, tmp_to_org
from orgs.utils import tmp_to_org
from .celery.decorator import (
    register_as_period_task, after_app_shutdown_clean_periodic,
    after_app_ready_start
@@ -20,76 +19,44 @@ from .celery.utils import (
    create_or_update_celery_periodic_tasks, get_celery_periodic_task,
    disable_celery_periodic_task, delete_celery_periodic_task
)
from .models import Task, CommandExecution, CeleryTask
from .models import CeleryTaskExecution, Job, JobExecution
from .notifications import ServerPerformanceCheckUtil

logger = get_logger(__file__)


def rerun_task():
    pass
@shared_task(soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task"))
def run_ops_job(job_id):
    job = get_object_or_none(Job, id=job_id)
    with tmp_to_org(job.org):
        execution = job.create_execution()
        run_ops_job_execution(execution)


@shared_task(queue="ansible")
def run_ansible_task(tid, callback=None, **kwargs):
    """
    :param tid: is the tasks serialized data
    :param callback: callback function name
    :return:
    """
    with tmp_to_root_org():
        task = get_object_or_none(Task, id=tid)
        if not task:
            logger.error("No task found")
            return
        with tmp_to_org(task.org):
            result = task.run()
            if callback is not None:
                subtask(callback).delay(result, task_name=task.name)
            return result


@shared_task(soft_time_limit=60, queue="ansible")
def run_command_execution(cid, **kwargs):
    with tmp_to_root_org():
        execution = get_object_or_none(CommandExecution, id=cid)
        if not execution:
            logger.error("Not found the execution id: {}".format(cid))
            return
        with tmp_to_org(execution.run_as.org):
@shared_task(soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task execution"))
def run_ops_job_execution(execution_id, **kwargs):
    execution = get_object_or_none(JobExecution, id=execution_id)
    with tmp_to_org(execution.org):
        try:
            os.environ.update({
                "TERM_ROWS": kwargs.get("rows", ""),
                "TERM_COLS": kwargs.get("cols", ""),
            })
            execution.run()
            execution.start()
        except SoftTimeLimitExceeded:
            logger.error("Run time out")
            execution.set_error('Run timeout')
            logger.error("Run adhoc timeout")
        except Exception as e:
            execution.set_error(e)
            logger.error("Start adhoc execution error: {}".format(e))


@shared_task
@shared_task(verbose_name=_('Periodic clear celery tasks'))
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24, description=_("Clean task history period"))
def clean_tasks_adhoc_period():
    logger.debug("Start clean task adhoc and run history")
    tasks = Task.objects.all()
    for task in tasks:
        adhoc = task.adhoc.all().order_by('-date_created')[5:]
        for ad in adhoc:
            ad.execution.all().delete()
            ad.delete()


@shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24, description=_("Clean celery log period"))
@register_as_period_task(interval=3600 * 24, description=_("Clean celery log period"))
def clean_celery_tasks_period():
    logger.debug("Start clean celery task history")
    expire_days = get_log_keep_day('TASK_LOG_KEEP_DAYS')
    days_ago = timezone.now() - timezone.timedelta(days=expire_days)
    tasks = CeleryTask.objects.filter(date_start__lt=days_ago)
    tasks = CeleryTaskExecution.objects.filter(date_start__lt=days_ago)
    tasks.delete()
    tasks = CeleryTask.objects.filter(date_start__isnull=True)
    tasks = CeleryTaskExecution.objects.filter(date_start__isnull=True)
    tasks.delete()
    command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;" % (
        settings.CELERY_LOG_DIR, expire_days
@@ -99,7 +66,7 @@ def clean_celery_tasks_period():
    subprocess.call(command, shell=True)


@shared_task
@shared_task(verbose_name=_('Clear celery periodic tasks'))
@after_app_ready_start
def clean_celery_periodic_tasks():
    """Clean up celery periodic tasks"""
@@ -122,7 +89,7 @@ def clean_celery_periodic_tasks():
    logger.info('Clean task failure: {}'.format(task))


@shared_task
@shared_task(verbose_name=_('Create or update periodic tasks'))
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
    from .celery.decorator import get_register_period_tasks
@@ -130,52 +97,7 @@ def create_or_update_registered_periodic_tasks():
    create_or_update_celery_periodic_tasks(task)


@shared_task
@shared_task(verbose_name=_("Periodic check service performance"))
@register_as_period_task(interval=3600)
def check_server_performance_period():
    ServerPerformanceCheckUtil().check_and_publish()


@shared_task(queue="ansible")
def hello(name, callback=None):
    from users.models import User
    import time

    count = User.objects.count()
    print(gettext("Hello") + ': ' + name)
    print("Count: ", count)
    time.sleep(1)
    return gettext("Hello")


@shared_task
# @after_app_shutdown_clean_periodic
# @register_as_period_task(interval=30)
def hello123():
    return None


@shared_task
def hello_callback(result):
    print(result)
    print("Hello callback")


@shared_task
def add(a, b):
    time.sleep(5)
    return a + b


@shared_task
def add_m(x):
    from celery import chain
    a = range(x)
    b = [a[i:i + 10] for i in range(0, len(a), 10)]
    s = list()
    s.append(add.s(b[0], b[1]))
    for i in b[1:]:
        s.append(add.s(i))
    res = chain(*tuple(s))()
    return res

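
# Editor's note: a minimal, hedged sketch of dispatching the new job tasks
# above to the "ansible" queue (assuming a worker consumes that queue):
#
#   run_ops_job.delay(str(job.id))                  # create and run an execution
#   run_ops_job_execution.delay(str(execution.id),  # run an existing execution,
#                               rows='24', cols='80')  # sizing the log terminal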
@@ -4,27 +4,33 @@ from __future__ import unicode_literals
from django.urls import path
from rest_framework.routers import DefaultRouter
from rest_framework_bulk.routes import BulkRouter
from .. import api

from .. import api

app_name = "ops"

router = DefaultRouter()
bulk_router = BulkRouter()

bulk_router.register(r'tasks', api.TaskViewSet, 'task')
router.register(r'adhoc', api.AdHocViewSet, 'adhoc')
router.register(r'adhoc-executions', api.AdHocRunHistoryViewSet, 'execution')
router.register(r'command-executions', api.CommandExecutionViewSet, 'command-execution')
router.register(r'adhocs', api.AdHocViewSet, 'adhoc')
router.register(r'playbooks', api.PlaybookViewSet, 'playbook')
router.register(r'jobs', api.JobViewSet, 'job')
router.register(r'job-executions', api.JobExecutionViewSet, 'job-execution')

router.register(r'celery/period-tasks', api.CeleryPeriodTaskViewSet, 'celery-period-task')

urlpatterns = [
    path('tasks/<uuid:pk>/run/', api.TaskRun.as_view(), name='task-run'),
    path('celery/task/<uuid:pk>/log/', api.CeleryTaskLogApi.as_view(), name='celery-task-log'),
    path('celery/task/<uuid:pk>/result/', api.CeleryResultApi.as_view(), name='celery-result'),
router.register(r'tasks', api.CeleryTaskViewSet, 'task')
router.register(r'task-executions', api.CeleryTaskExecutionViewSet, 'task-executions')

urlpatterns = [

    path('ansible/job-execution/<uuid:pk>/log/', api.AnsibleTaskLogApi.as_view(), name='job-execution-log'),

    path('celery/task/<uuid:name>/task-execution/<uuid:pk>/log/', api.CeleryTaskExecutionLogApi.as_view(),
         name='celery-task-execution-log'),
    path('celery/task/<uuid:name>/task-execution/<uuid:pk>/result/', api.CeleryResultApi.as_view(),
         name='celery-task-execution-result'),

    path('ansible/task/<uuid:pk>/log/', api.AnsibleTaskLogApi.as_view(), name='ansible-task-log'),
]

urlpatterns += router.urls
urlpatterns += bulk_router.urls
urlpatterns += (router.urls + bulk_router.urls)

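
# Editor's note: a hedged example (not from the diff) of reversing the new log
# endpoint registered above, using the 'api-ops' namespace seen elsewhere in
# this commit:
#
#   from django.urls import reverse
#   url = reverse('api-ops:celery-task-execution-log',
#                 kwargs={'name': task_uuid, 'pk': execution_uuid})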
@@ -4,12 +4,12 @@ import uuid

from django.utils.translation import ugettext_lazy as _

from common.utils import get_logger, get_object_or_none, make_dirs
from common.tasks import send_mail_async
from common.utils import get_logger, get_object_or_none
from orgs.utils import org_aware_func
from jumpserver.const import PROJECT_DIR

from .models import Task, AdHoc
from .models import AdHoc, CeleryTask
from .const import DEFAULT_PASSWORD_RULES

logger = get_logger(__file__)

@@ -29,7 +29,7 @@ def update_or_create_ansible_task(
        interval=None, crontab=None, is_periodic=False,
        callback=None, pattern='all', options=None,
        run_as_admin=False, run_as=None, system_user=None, become_info=None,
):
):
    if not hosts or not tasks or not task_name:
        return None, None
    if options is None:
@@ -80,3 +80,15 @@ def get_task_log_path(base_path, task_id, level=2):
    path = os.path.join(base_path, rel_path)
    make_dirs(os.path.dirname(path), exist_ok=True)
    return path


def generate_random_password(**kwargs):
    import random
    import string
    length = int(kwargs.get('length', DEFAULT_PASSWORD_RULES['length']))
    symbol_set = kwargs.get('symbol_set')
    if symbol_set is None:
        symbol_set = DEFAULT_PASSWORD_RULES['symbol_set']
    chars = string.ascii_letters + string.digits + symbol_set
    password = ''.join([random.choice(chars) for _ in range(length)])
    return password

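
# Editor's note: random.choice() above draws from a non-cryptographic PRNG.
# For real credentials the stdlib secrets module is the safer choice; a
# minimal sketch under the same rules (names here are illustrative only):
#
#   import secrets, string
#
#   def generate_random_password_secure(length=24, symbol_set='!@$%^&*'):
#       chars = string.ascii_letters + string.digits + symbol_set
#       return ''.join(secrets.choice(chars) for _ in range(length))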
apps/ops/ws.py (107 lines)
@@ -1,18 +1,18 @@
import time
import asyncio
import os
import threading
import json
from channels.generic.websocket import JsonWebsocketConsumer

from common.utils import get_logger
import aiofiles
from channels.generic.websocket import AsyncJsonWebsocketConsumer

from common.db.utils import close_old_connections
from .celery.utils import get_celery_task_log_path
from common.utils import get_logger
from .ansible.utils import get_ansible_task_log_path
from .celery.utils import get_celery_task_log_path

logger = get_logger(__name__)


class TaskLogWebsocket(JsonWebsocketConsumer):
class TaskLogWebsocket(AsyncJsonWebsocketConsumer):
    disconnected = False

    log_types = {
@@ -20,70 +20,59 @@ class TaskLogWebsocket(JsonWebsocketConsumer):
        'ansible': get_ansible_task_log_path
    }

    def connect(self):
    async def connect(self):
        user = self.scope["user"]
        if user.is_authenticated:
            self.accept()
            await self.accept()
        else:
            self.close()
            await self.close()

    def get_log_path(self, task_id):
        func = self.log_types.get(self.log_type)
    def get_log_path(self, task_id, log_type):
        func = self.log_types.get(log_type)
        if func:
            return func(task_id)

    def receive(self, text_data=None, bytes_data=None, **kwargs):
        data = json.loads(text_data)
        task_id = data.get('task')
        self.log_type = data.get('type', 'celery')
        if task_id:
            self.handle_task(task_id)
    async def receive_json(self, content, **kwargs):
        task_id = content.get('task')
        task_typ = content.get('type', 'celery')
        log_path = self.get_log_path(task_id, task_typ)
        await self.async_handle_task(task_id, log_path)

    def wait_util_log_path_exist(self, task_id):
        log_path = self.get_log_path(task_id)
    async def async_handle_task(self, task_id, log_path):
        logger.info("Task id: {}".format(task_id))
        while not self.disconnected:
            if not os.path.exists(log_path):
                self.send_json({'message': '.', 'task': task_id})
                time.sleep(0.5)
                continue
            self.send_json({'message': '\r\n'})
            try:
                logger.debug('Task log path: {}'.format(log_path))
                task_log_f = open(log_path, 'rb')
                return task_log_f
            except OSError:
                return None

    def read_log_file(self, task_id):
        task_log_f = self.wait_util_log_path_exist(task_id)
        if not task_log_f:
            logger.debug('Task log file is None: {}'.format(task_id))
            return

        task_end_mark = []
        while not self.disconnected:
            data = task_log_f.read(4096)
            if data:
                data = data.replace(b'\n', b'\r\n')
                self.send_json(
                    {'message': data.decode(errors='ignore'), 'task': task_id}
                )
                if data.find(b'succeeded in') != -1:
                    task_end_mark.append(1)
                if data.find(bytes(task_id, 'utf8')) != -1:
                    task_end_mark.append(1)
            elif len(task_end_mark) == 2:
                logger.debug('Task log end: {}'.format(task_id))
                await self.send_json({'message': '.', 'task': task_id})
                await asyncio.sleep(0.5)
            else:
                await self.send_task_log(task_id, log_path)
                break
            time.sleep(0.2)
        task_log_f.close()

    def handle_task(self, task_id):
        logger.info("Task id: {}".format(task_id))
        thread = threading.Thread(target=self.read_log_file, args=(task_id,))
        thread.start()
    async def send_task_log(self, task_id, log_path):
        await self.send_json({'message': '\r\n'})
        try:
            logger.debug('Task log path: {}'.format(log_path))
            task_end_mark = []
            async with aiofiles.open(log_path, 'rb') as task_log_f:
                while not self.disconnected:
                    data = await task_log_f.read(4096)
                    if data:
                        data = data.replace(b'\n', b'\r\n')
                        await self.send_json(
                            {'message': data.decode(errors='ignore'), 'task': task_id}
                        )
                        if data.find(b'succeeded in') != -1:
                            task_end_mark.append(1)
                        if data.find(bytes(task_id, 'utf8')) != -1:
                            task_end_mark.append(1)
                    elif len(task_end_mark) == 2:
                        logger.debug('Task log end: {}'.format(task_id))
                        break
                    await asyncio.sleep(0.2)
        except OSError as e:
            logger.warn('Task log path open failed: {}'.format(e))
            await self.close()

    def disconnect(self, close_code):
    async def disconnect(self, close_code):
        self.disconnected = True
        self.close()
        close_old_connections()
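
# Editor's note: the end-of-log heuristic above waits for two marks -- the
# literal b'succeeded in' that the runner prints on completion, plus the task
# id echoed in the same output -- before it stops tailing. A hedged sketch of
# a client for this consumer (URL and library are assumptions, not from the
# diff), using the `websockets` package:
#
#   ws = await websockets.connect('wss://jumpserver.example/ws/ops/tasks/log/')
#   await ws.send(json.dumps({'task': task_id, 'type': 'celery'}))
#   async for frame in ws:
#       print(json.loads(frame)['message'], end='')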