mirror of
https://github.com/haiwen/seahub.git
synced 2025-07-07 20:19:34 +00:00
[api2] stat_file (#1690)
* [api2] stat_file * [send message] update last login info * datetime->data, return number to int * fill data * modify func args * modify algorithm
This commit is contained in:
parent
2eb27af57f
commit
60c78adc44
166
seahub/api2/endpoints/admin/statistics.py
Normal file
166
seahub/api2/endpoints/admin/statistics.py
Normal file
@ -0,0 +1,166 @@
|
|||||||
|
# Copyright (c) 2012-2016 Seafile Ltd.
|
||||||
|
import datetime
|
||||||
|
import pytz
|
||||||
|
|
||||||
|
from rest_framework.authentication import SessionAuthentication
|
||||||
|
from rest_framework.permissions import IsAdminUser
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
from rest_framework import status
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from seahub.utils import get_file_ops_stats, get_file_ops_stats_by_day, \
|
||||||
|
get_total_storage_stats, get_total_storage_stats_by_day, \
|
||||||
|
get_user_activity_stats, get_user_activity_stats_by_day, \
|
||||||
|
is_pro_version, EVENTS_ENABLED
|
||||||
|
from seahub.utils.timeutils import datetime_to_isoformat_timestr
|
||||||
|
from seahub.settings import TIME_ZONE
|
||||||
|
|
||||||
|
from seahub.api2.authentication import TokenAuthentication
|
||||||
|
from seahub.api2.throttling import UserRateThrottle
|
||||||
|
from seahub.api2.utils import api_error
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def check_parameter(func):
    """Decorator validating the common query parameters of statistics views.

    Requires pro edition with events enabled.  Parses the mandatory
    ``start``/``end`` GET parameters (format ``%Y-%m-%d %H:%M:%S``) and the
    optional ``group_by`` parameter (``hour`` or ``day``, default ``hour``),
    then invokes the wrapped view as
    ``func(view, request, start_time, end_time, group_by)``.

    Returns a 404 API error when events are unavailable and a 400 API error
    for missing or malformed parameters.
    """
    def _decorated(view, request, *args, **kwargs):
        if not is_pro_version() or not EVENTS_ENABLED:
            return api_error(status.HTTP_404_NOT_FOUND, 'Events not enabled.')
        start_time = request.GET.get("start", "")
        end_time = request.GET.get("end", "")
        group_by = request.GET.get("group_by", "hour")
        if not start_time:
            error_msg = "Start time can not be empty"
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        if not end_time:
            error_msg = "End time can not be empty"
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        if group_by.lower() not in ["hour", "day"]:
            error_msg = "group_by can only be day or hour."
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        try:
            # Catch only ValueError: a bare `except:` would also swallow
            # KeyboardInterrupt/SystemExit and mask unrelated bugs.
            start_time = datetime.datetime.strptime(start_time,
                                                    "%Y-%m-%d %H:%M:%S")
        except ValueError:
            error_msg = "Start time %s invalid" % start_time
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        try:
            end_time = datetime.datetime.strptime(end_time,
                                                  "%Y-%m-%d %H:%M:%S")
        except ValueError:
            error_msg = "End time %s invalid" % end_time
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        return func(view, request, start_time, end_time, group_by)
    return _decorated
|
||||||
|
|
||||||
|
|
||||||
|
class FileOperationsView(APIView):
    """Admin-only endpoint: file operation statistics over a time range.

    Permission checking:
    1. only admin can perform this action.
    """
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    throttle_classes = (UserRateThrottle,)
    permission_classes = (IsAdminUser,)

    @check_parameter
    def get(self, request, start_time, end_time, group_by):
        """Return file operation records for the requested range.

        param:
          start: the start time of the query.
          end: the end time of the query.
          group_by: group records by day or by hour, default group by hour.
        return:
          the list of file operations record.
        """
        records = get_data_by_hour_or_day(group_by, start_time, end_time,
                                          get_file_ops_stats,
                                          get_file_ops_stats_by_day)

        # One zero-filled timeline per operation type, keyed by datetime;
        # all three share the same key set.
        counters = {op: get_init_data(start_time, end_time, group_by)
                    for op in ('Added', 'Visited', 'Deleted')}
        for rec in records:
            # rec is (datetime, operation_type, count); unknown operation
            # types are ignored, matching the original elif chain.
            if rec[1] in counters:
                counters[rec[1]][rec[0]] = rec[2]

        res_data = [{'datetime': datetime_to_isoformat_timestr(dt),
                     'added': counters['Added'][dt],
                     'visited': counters['Visited'][dt],
                     'deleted': counters['Deleted'][dt]}
                    for dt in counters['Added']]
        return Response(sorted(res_data, key=lambda x: x['datetime']))
|
||||||
|
|
||||||
|
|
||||||
|
class TotalStorageView(APIView):
    """Admin-only endpoint: total storage statistics over a time range."""

    authentication_classes = (TokenAuthentication, SessionAuthentication)
    throttle_classes = (UserRateThrottle,)
    permission_classes = (IsAdminUser,)

    @check_parameter
    def get(self, request, start_time, end_time, group_by):
        # Zero-filled timeline first, then overwrite the slots that have data.
        series = get_init_data(start_time, end_time, group_by)
        stats = get_data_by_hour_or_day(group_by, start_time, end_time,
                                        get_total_storage_stats,
                                        get_total_storage_stats_by_day)
        for row in stats:
            # row is (datetime, total_storage)
            series[row[0]] = row[1]

        res_data = [{'datetime': datetime_to_isoformat_timestr(dt),
                     'total_storage': total}
                    for dt, total in series.items()]
        return Response(sorted(res_data, key=lambda x: x['datetime']))
|
||||||
|
|
||||||
|
|
||||||
|
class ActiveUsersView(APIView):
    """Admin-only endpoint: active-user counts per hour or per day."""

    authentication_classes = (TokenAuthentication, SessionAuthentication)
    throttle_classes = (UserRateThrottle,)
    permission_classes = (IsAdminUser,)

    @check_parameter
    def get(self, request, start_time, end_time, group_by):
        # Zero-filled timeline first, then overwrite the slots that have data.
        series = get_init_data(start_time, end_time, group_by)
        stats = get_data_by_hour_or_day(group_by, start_time, end_time,
                                        get_user_activity_stats,
                                        get_user_activity_stats_by_day)
        for row in stats:
            # row is (datetime, active_user_count)
            series[row[0]] = row[1]

        res_data = [{'datetime': datetime_to_isoformat_timestr(dt),
                     'count': count}
                    for dt, count in series.items()]
        return Response(sorted(res_data, key=lambda x: x['datetime']))
|
||||||
|
|
||||||
|
|
||||||
|
def get_init_data(start_time, end_time, group_by):
    """Build a zero-filled timeline dict for the requested range.

    Returns a dict mapping each hour (``group_by == 'hour'``) or each day
    (any other value) between ``start_time`` and ``end_time`` (inclusive)
    to 0.  Views overwrite individual slots with real counts afterwards,
    so gaps in the backend data still appear as zeros.
    """
    res = {}
    if group_by == 'hour':
        # Truncate to the hour.  Microseconds are also dropped so the keys
        # line up with the hour-truncated timestamps the stats queries
        # return (request-parsed datetimes never carry sub-second parts).
        start_time = start_time.replace(minute=0, second=0, microsecond=0)
        end_time = end_time.replace(minute=0, second=0, microsecond=0)
        delta = end_time - start_time
        # `//` keeps the step count an int under Python 3 as well; plain
        # `/` would make it a float and break range() below.
        steps = delta.days * 24 + delta.seconds // 3600 + 1
        hours_per_step = 1
    else:
        # Truncate to midnight and count whole days.
        start_time = start_time.replace(hour=0, minute=0, second=0,
                                        microsecond=0)
        end_time = end_time.replace(hour=0, minute=0, second=0,
                                    microsecond=0)
        steps = (end_time - start_time).days + 1
        hours_per_step = 24
    for i in range(steps):
        res[start_time + datetime.timedelta(hours=i * hours_per_step)] = 0
    return res
|
||||||
|
|
||||||
|
|
||||||
|
def get_data_by_hour_or_day(parameter, start_time, end_time, func, func_by_day):
    """Dispatch to the hourly or daily stats helper with the server's UTC offset.

    ``parameter`` is "hour" or "day" (guaranteed by ``check_parameter``);
    ``func``/``func_by_day`` are the corresponding seafevents query helpers.
    """
    # Current server UTC offset, e.g. '+0800' -> '+08:00', which is the
    # form the seafevents backend expects.
    tz_name = timezone.get_current_timezone_name()
    raw = pytz.timezone(tz_name).localize(datetime.datetime.now()).strftime('%z')
    utc_offset = '%s:%s' % (raw[:3], raw[3:])
    if parameter == "hour":
        data = func(start_time, end_time, utc_offset)
    elif parameter == "day":
        data = func_by_day(start_time, end_time, utc_offset)
    return data
|
@ -82,11 +82,11 @@ class SysInfo(APIView):
|
|||||||
logger.error(e)
|
logger.error(e)
|
||||||
inactive_ldap_users = 0
|
inactive_ldap_users = 0
|
||||||
|
|
||||||
active_users = active_db_users + active_ldap_users if active_ldap_users > 0 \
|
active_users = active_db_users + active_ldap_users if \
|
||||||
else active_db_users
|
active_ldap_users > 0 else active_db_users
|
||||||
|
|
||||||
inactive_users = inactive_db_users + inactive_ldap_users if inactive_ldap_users > 0 \
|
inactive_users = inactive_db_users + inactive_ldap_users if \
|
||||||
else inactive_db_users
|
inactive_ldap_users > 0 else inactive_db_users
|
||||||
|
|
||||||
# get license info
|
# get license info
|
||||||
is_pro = is_pro_version()
|
is_pro = is_pro_version()
|
||||||
@ -129,10 +129,11 @@ class SysInfo(APIView):
|
|||||||
|
|
||||||
# count current connected devices
|
# count current connected devices
|
||||||
try:
|
try:
|
||||||
current_connected_devices_count = TokenV2.objects.get_current_connected_devices_count()
|
current_connected_devices_count = TokenV2.objects.\
|
||||||
|
get_current_connected_devices_count()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(e)
|
logger.error(e)
|
||||||
current_connected_devices_count= 0
|
current_connected_devices_count = 0
|
||||||
|
|
||||||
info = {
|
info = {
|
||||||
'users_count': active_users + inactive_users,
|
'users_count': active_users + inactive_users,
|
||||||
|
@ -120,7 +120,7 @@ from seaserv import seafserv_threaded_rpc, \
|
|||||||
is_group_user, remove_share, get_group, \
|
is_group_user, remove_share, get_group, \
|
||||||
get_commit, get_file_id_by_path, MAX_DOWNLOAD_DIR_SIZE, edit_repo, \
|
get_commit, get_file_id_by_path, MAX_DOWNLOAD_DIR_SIZE, edit_repo, \
|
||||||
ccnet_threaded_rpc, get_personal_groups, seafile_api, \
|
ccnet_threaded_rpc, get_personal_groups, seafile_api, \
|
||||||
create_org, ccnet_api
|
create_org, ccnet_api, send_message
|
||||||
|
|
||||||
from constance import config
|
from constance import config
|
||||||
|
|
||||||
@ -578,6 +578,12 @@ class Repos(APIView):
|
|||||||
}
|
}
|
||||||
repos_json.append(repo)
|
repos_json.append(repo)
|
||||||
|
|
||||||
|
utc_dt = datetime.datetime.utcnow()
|
||||||
|
timestamp = utc_dt.strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
try:
|
||||||
|
send_message('seahub.stats', 'user-login\t%s\t%s' % (email, timestamp))
|
||||||
|
except Exception as e:
|
||||||
|
logger.error('Error when sending user-login message: %s' % str(e))
|
||||||
response = HttpResponse(json.dumps(repos_json), status=200,
|
response = HttpResponse(json.dumps(repos_json), status=200,
|
||||||
content_type=json_content_type)
|
content_type=json_content_type)
|
||||||
response["enable_encrypted_library"] = config.ENABLE_ENCRYPTED_LIBRARY
|
response["enable_encrypted_library"] = config.ENABLE_ENCRYPTED_LIBRARY
|
||||||
|
@ -56,6 +56,7 @@ from seahub.api2.endpoints.admin.file_audit import FileAudit
|
|||||||
from seahub.api2.endpoints.admin.file_update import FileUpdate
|
from seahub.api2.endpoints.admin.file_update import FileUpdate
|
||||||
from seahub.api2.endpoints.admin.perm_audit import PermAudit
|
from seahub.api2.endpoints.admin.perm_audit import PermAudit
|
||||||
from seahub.api2.endpoints.admin.sysinfo import SysInfo
|
from seahub.api2.endpoints.admin.sysinfo import SysInfo
|
||||||
|
from seahub.api2.endpoints.admin.statistics import FileOperationsView, TotalStorageView, ActiveUsersView
|
||||||
from seahub.api2.endpoints.admin.devices import AdminDevices
|
from seahub.api2.endpoints.admin.devices import AdminDevices
|
||||||
from seahub.api2.endpoints.admin.device_errors import AdminDeviceErrors
|
from seahub.api2.endpoints.admin.device_errors import AdminDeviceErrors
|
||||||
from seahub.api2.endpoints.admin.libraries import AdminLibraries, AdminLibrary
|
from seahub.api2.endpoints.admin.libraries import AdminLibraries, AdminLibrary
|
||||||
@ -255,6 +256,11 @@ urlpatterns = patterns(
|
|||||||
## admin::revision-tags
|
## admin::revision-tags
|
||||||
url(r'^api/v2.1/admin/revision-tags/tagged-items/$', AdminTaggedItemsView.as_view(), name='api-v2.1-admin-revision-tags-tagged-items'),
|
url(r'^api/v2.1/admin/revision-tags/tagged-items/$', AdminTaggedItemsView.as_view(), name='api-v2.1-admin-revision-tags-tagged-items'),
|
||||||
|
|
||||||
|
## admin::statistics
|
||||||
|
url(r'^api/v2.1/admin/statistics/file-operations/$', FileOperationsView.as_view(), name='api-v2.1-admin-statistics-file-operations'),
|
||||||
|
url(r'^api/v2.1/admin/statistics/total-storage/$', TotalStorageView.as_view(), name='api-v2.1-admin-statistics-total-storage'),
|
||||||
|
url(r'^api/v2.1/admin/statistics/active-users/$', ActiveUsersView.as_view(), name='api-v2.1-admin-statistics-active-users'),
|
||||||
|
|
||||||
## admin::devices
|
## admin::devices
|
||||||
url(r'^api/v2.1/admin/devices/$', AdminDevices.as_view(), name='api-v2.1-admin-devices'),
|
url(r'^api/v2.1/admin/devices/$', AdminDevices.as_view(), name='api-v2.1-admin-devices'),
|
||||||
url(r'^api/v2.1/admin/device-errors/$', AdminDeviceErrors.as_view(), name='api-v2.1-admin-device-errors'),
|
url(r'^api/v2.1/admin/device-errors/$', AdminDeviceErrors.as_view(), name='api-v2.1-admin-device-errors'),
|
||||||
|
@ -628,6 +628,20 @@ if EVENTS_CONFIG_FILE:
|
|||||||
"""
|
"""
|
||||||
return _get_events(username, start, count)
|
return _get_events(username, start, count)
|
||||||
|
|
||||||
|
def get_user_activity_stats(start, end, offset):
    """ Return user activity record of the specified time
    """
    # `offset` is a UTC-offset string such as '+08:00'; the seafevents
    # backend uses it to bucket timestamps in the server's local timezone.
    with _get_seafevents_session() as session:
        res = seafevents.get_user_activity_stats(session, start, end, offset)
    return res
|
||||||
|
|
||||||
|
def get_user_activity_stats_by_day(start, end, offset):
    """Return user activity records of the specified time range, grouped
    by day (used when the statistics API is queried with group_by=day).
    """
    with _get_seafevents_session() as session:
        res = seafevents.get_user_activity_stats_by_day(session, start, end, offset)
    return res
|
||||||
|
|
||||||
def get_org_user_events(org_id, username, start, count):
|
def get_org_user_events(org_id, username, start, count):
|
||||||
return _get_events(username, start, count, org_id=org_id)
|
return _get_events(username, start, count, org_id=org_id)
|
||||||
|
|
||||||
@ -677,6 +691,34 @@ if EVENTS_CONFIG_FILE:
|
|||||||
|
|
||||||
return events if events else None
|
return events if events else None
|
||||||
|
|
||||||
|
def get_file_ops_stats(start, end, offset):
    """ Return file audit records of the specified time range.
    """
    # `offset` is a UTC-offset string such as '+08:00' for timezone-aware
    # bucketing in the seafevents backend.
    with _get_seafevents_session() as session:
        res = seafevents.get_file_ops_stats(session, start, end, offset)
    return res
|
||||||
|
|
||||||
|
def get_file_ops_stats_by_day(start, end, offset):
    """ Return file audit records of the specified time range, grouped by day.
    """
    with _get_seafevents_session() as session:
        res = seafevents.get_file_ops_stats_by_day(session, start, end, offset)
    return res
|
||||||
|
|
||||||
|
def get_total_storage_stats(start, end, offset):
    """ Return total storage record of specified time.
    """
    # `offset` is a UTC-offset string such as '+08:00' for timezone-aware
    # bucketing in the seafevents backend.
    with _get_seafevents_session() as session:
        res = seafevents.get_total_storage_stats(session, start, end, offset)
    return res
|
||||||
|
|
||||||
|
def get_total_storage_stats_by_day(start, end, offset):
    """Return total storage records of the specified time range, grouped
    by day (used when the statistics API is queried with group_by=day).
    """
    with _get_seafevents_session() as session:
        res = seafevents.get_total_storage_stats_by_day(session, start, end, offset)
    return res
|
||||||
|
|
||||||
def get_file_update_events(email, org_id, repo_id, start, limit):
|
def get_file_update_events(email, org_id, repo_id, start, limit):
|
||||||
"""Return file update events list. (If no file update, return 'None')
|
"""Return file update events list. (If no file update, return 'None')
|
||||||
|
|
||||||
@ -688,7 +730,6 @@ if EVENTS_CONFIG_FILE:
|
|||||||
"""
|
"""
|
||||||
with _get_seafevents_session() as session:
|
with _get_seafevents_session() as session:
|
||||||
events = seafevents.get_file_update_events(session, email, org_id, repo_id, start, limit)
|
events = seafevents.get_file_update_events(session, email, org_id, repo_id, start, limit)
|
||||||
|
|
||||||
return events if events else None
|
return events if events else None
|
||||||
|
|
||||||
def get_perm_audit_events(email, org_id, repo_id, start, limit):
|
def get_perm_audit_events(email, org_id, repo_id, start, limit):
|
||||||
@ -721,6 +762,10 @@ else:
|
|||||||
EVENTS_ENABLED = False
|
EVENTS_ENABLED = False
|
||||||
def get_user_events():
|
def get_user_events():
|
||||||
pass
|
pass
|
||||||
|
# Fallback stubs used when the events backend is not configured
# (EVENTS_ENABLED is False): the statistics feature is unavailable.
# NOTE(review): signatures differ from the real versions (which take
# start/end/offset) -- presumably callers check EVENTS_ENABLED first;
# confirm before relying on these being callable.
def get_user_activity_stats():
    pass
def get_user_activity_stats_by_day():
    pass
|
||||||
def get_log_events_by_time():
|
def get_log_events_by_time():
|
||||||
pass
|
pass
|
||||||
def get_org_user_events():
|
def get_org_user_events():
|
||||||
@ -731,6 +776,14 @@ else:
|
|||||||
pass
|
pass
|
||||||
def get_file_audit_events():
|
def get_file_audit_events():
|
||||||
pass
|
pass
|
||||||
|
# Fallback stubs used when the events backend is not configured
# (EVENTS_ENABLED is False): the statistics feature is unavailable.
# NOTE(review): signatures differ from the real versions (which take
# start/end/offset) -- presumably callers check EVENTS_ENABLED first.
def get_file_ops_stats():
    pass
def get_file_ops_stats_by_day():
    pass
def get_total_storage_stats():
    pass
def get_total_storage_stats_by_day():
    pass
|
||||||
def get_file_update_events():
|
def get_file_update_events():
|
||||||
pass
|
pass
|
||||||
def get_perm_audit_events():
|
def get_perm_audit_events():
|
||||||
|
94
tests/api/endpoints/admin/test_statistics.py
Normal file
94
tests/api/endpoints/admin/test_statistics.py
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
|
from mock import patch
|
||||||
|
|
||||||
|
from django.core.urlresolvers import reverse
|
||||||
|
from seahub.test_utils import BaseTestCase
|
||||||
|
from seahub.utils.timeutils import datetime_to_isoformat_timestr
|
||||||
|
|
||||||
|
|
||||||
|
class FileOperationsInfoText(BaseTestCase):
    """Tests for the admin statistics API endpoints (file operations,
    active users, total storage).

    The seafevents query helpers are mocked, so these tests exercise only
    the view-layer behavior: zero-filling, grouping and serialization.
    """
    # NOTE(review): class name looks like a typo for FileOperationsInfoTest;
    # left unchanged since renaming could affect external test selection.

    def setUp(self):
        # All statistics endpoints are admin-only.
        self.login_as(self.admin)

    @patch("seahub.api2.endpoints.admin.statistics.get_file_ops_stats")
    @patch("seahub.api2.endpoints.admin.statistics.get_file_ops_stats_by_day")
    def test_can_get_file_audit_stats(self, mock_get_file_audit_stats_by_day, mock_get_file_audit_stats):
        """group_by=hour and group_by=day responses contain the mocked rows."""
        # Backend rows are (datetime, operation_type, count) triples.
        mock_get_file_audit_stats.return_value = [
            (datetime.datetime(2017, 6, 2, 7, 0), u'Added', 2L),
            (datetime.datetime(2017, 6, 2, 7, 0), u'Deleted', 2L),
            (datetime.datetime(2017, 6, 2, 7, 0), u'Visited', 2L),
            (datetime.datetime(2017, 6, 2, 8, 0), u'Added', 3L),
            (datetime.datetime(2017, 6, 2, 8, 0), u'Deleted', 4L),
            (datetime.datetime(2017, 6, 2, 8, 0), u'Visited', 5L)]
        mock_get_file_audit_stats_by_day.return_value = [
            (datetime.datetime(2017, 6, 2, 4, 2), u'Added', 2L),
            (datetime.datetime(2017, 6, 2, 4, 2), u'Deleted', 2L),
            (datetime.datetime(2017, 6, 2, 4, 2), u'Visited', 2L),
            ]
        url = reverse('api-v2.1-admin-statistics-file-operations')
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=hour"
        resp = self.client.get(url)
        json_resp = json.loads(resp.content)
        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 7, 0)),
                'added': 2L, 'deleted': 2L, 'visited': 2L}
        assert data in json_resp
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 8, 0)),
                'added': 3L, 'deleted': 4L, 'visited': 5L}
        assert data in json_resp
        # NOTE(review): `url` already carries a query string, so this append
        # produces "?...?..." -- confirm the day-grouped request is parsed
        # as intended rather than reusing the hour parameters.
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=day"
        resp = self.client.get(url)
        json_resp = json.loads(resp.content)
        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 4, 2)),
                'added': 2, 'deleted': 2, 'visited': 2}
        assert data in json_resp

    @patch("seahub.api2.endpoints.admin.statistics.get_user_activity_stats")
    @patch("seahub.api2.endpoints.admin.statistics.get_user_activity_stats_by_day")
    def test_can_user_activity_stats(self, mock_stats_by_day, mock_stats):
        """Active-users endpoint serializes (datetime, count) rows."""
        mock_stats.return_value = [(datetime.datetime(2017, 6, 2, 7, 0), 2L),
                                   (datetime.datetime(2017, 6, 2, 8, 0), 5L)]
        mock_stats_by_day.return_value = [(datetime.datetime(2017, 6, 2, 4, 0), 3L)]
        url = reverse('api-v2.1-admin-statistics-active-users')
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=hour"
        resp = self.client.get(url)
        json_resp = json.loads(resp.content)

        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 7, 0)), 'count': 2}
        assert data in json_resp
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 8, 0)), 'count': 5}
        assert data in json_resp
        # NOTE(review): same double-'?' concern as in the file-ops test above.
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=day"
        resp = self.client.get(url)
        json_resp = json.loads(resp.content)
        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 4, 0)), 'count': 3}
        assert data in json_resp

    @patch("seahub.api2.endpoints.admin.statistics.get_total_storage_stats")
    @patch("seahub.api2.endpoints.admin.statistics.get_total_storage_stats_by_day")
    def test_can_get_total_storage_stats(self, mock_stats_by_day, mock_stats):
        """Total-storage endpoint serializes (datetime, total) rows."""
        mock_stats.return_value = [(datetime.datetime(2017, 6, 2, 7, 0), 2L),
                                   (datetime.datetime(2017, 6, 2, 8, 0), 5L)]
        mock_stats_by_day.return_value = [(datetime.datetime(2017, 6, 2, 3, 0), 13L)]
        url = reverse('api-v2.1-admin-statistics-total-storage')
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=hour"
        resp = self.client.get(url)

        json_resp = json.loads(resp.content)
        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 7, 0)), 'total_storage': 2}
        assert data in json_resp
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 8, 0)), 'total_storage': 5}
        assert data in json_resp
        # NOTE(review): same double-'?' concern as in the file-ops test above.
        url += "?start=2017-06-01 07:00:00&end=2017-06-03 07:00:00&group_by=day"
        resp = self.client.get(url)
        json_resp = json.loads(resp.content)
        self.assertEqual(200, resp.status_code)
        data = {'datetime': datetime_to_isoformat_timestr(datetime.datetime(2017, 6, 2, 3, 0)), 'total_storage': 13}
        assert data in json_resp
|
Loading…
Reference in New Issue
Block a user