mirror of https://github.com/zulip/zulip.git
export: Notify all realm admins on realm export.
parent 7afe6800f7
commit ce0df00e44
@@ -2,17 +2,16 @@ from django.utils.timezone import now as timezone_now
 from zerver.lib.export import get_realm_exports_serialized
 from zerver.lib.upload import delete_export_tarball
-from zerver.models import RealmAuditLog, UserProfile
+from zerver.models import Realm, RealmAuditLog
 from zerver.tornado.django_api import send_event_on_commit
 
 
-def notify_realm_export(user_profile: UserProfile) -> None:
-    # In the future, we may want to send this event to all realm admins.
-    event = dict(type="realm_export", exports=get_realm_exports_serialized(user_profile))
-    send_event_on_commit(user_profile.realm, event, [user_profile.id])
+def notify_realm_export(realm: Realm) -> None:
+    event = dict(type="realm_export", exports=get_realm_exports_serialized(realm))
+    send_event_on_commit(realm, event, realm.get_human_admin_users().values_list("id", flat=True))
 
 
-def do_delete_realm_export(user_profile: UserProfile, export: RealmAuditLog) -> None:
+def do_delete_realm_export(export: RealmAuditLog) -> None:
     export_data = export.extra_data
     export_path = export_data.get("export_path")
 
@@ -23,4 +22,4 @@ def do_delete_realm_export(user_profile: UserProfile, export: RealmAuditLog) ->
     export_data.update(deleted_timestamp=timezone_now().timestamp())
     export.extra_data = export_data
     export.save(update_fields=["extra_data"])
-    notify_realm_export(user_profile)
+    notify_realm_export(export.realm)
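Note on the new recipient set: realm.get_human_admin_users() is an existing Realm helper in Zulip, and the event is now fanned out to the ids it returns rather than to the single acting user. Conceptually it resolves to roughly the query below; this is an approximation for readers of the diff, not the helper's actual implementation.

    # Approximation only: active, human (non-bot) users holding an owner or
    # administrator role in the realm; the event fan-out uses their ids.
    from zerver.models import UserProfile

    admin_profiles = UserProfile.objects.filter(
        realm=realm,
        is_bot=False,
        is_active=True,
        role__in=[UserProfile.ROLE_REALM_OWNER, UserProfile.ROLE_REALM_ADMINISTRATOR],
    )
    admin_user_ids = admin_profiles.values_list("id", flat=True)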
@@ -2479,7 +2479,7 @@ def export_realm_wrapper(
     return public_url
 
 
-def get_realm_exports_serialized(user: UserProfile) -> list[dict[str, Any]]:
+def get_realm_exports_serialized(realm: Realm) -> list[dict[str, Any]]:
     # Exclude exports made via shell. 'acting_user=None', since they
     # aren't supported in the current API format.
     #
@@ -2487,7 +2487,7 @@ def get_realm_exports_serialized(user: UserProfile) -> list[dict[str, Any]]:
     # appropriate way to express for who issued them; this requires an
     # API change.
     all_exports = RealmAuditLog.objects.filter(
-        realm=user.realm, event_type=AuditLogEventType.REALM_EXPORTED
+        realm=realm, event_type=AuditLogEventType.REALM_EXPORTED
     ).exclude(acting_user=None)
     exports_dict = {}
     for export in all_exports:
@@ -2505,9 +2505,7 @@ def get_realm_exports_serialized(user: UserProfile) -> list[dict[str, Any]]:
         pending = deleted_timestamp is None and failed_timestamp is None and export_path is None
 
         if export_path is not None and not deleted_timestamp:
-            export_url = zerver.lib.upload.upload_backend.get_export_tarball_url(
-                user.realm, export_path
-            )
+            export_url = zerver.lib.upload.upload_backend.get_export_tarball_url(realm, export_path)
 
         assert acting_user is not None
         exports_dict[export.id] = dict(
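The serialization hunks only show the pending computation; for orientation, the state implied by the extra_data fields used across this commit can be restated as a standalone helper. This is an illustrative summary, not code from the commit.

    # Illustrative restatement of how extra_data fields map to an export's state:
    # deleted_timestamp is set by do_delete_realm_export, failed_timestamp by the
    # worker's failure path, and export_path once the tarball has been uploaded.
    def export_status(extra_data: dict) -> str:
        if extra_data.get("deleted_timestamp") is not None:
            return "deleted"
        if extra_data.get("failed_timestamp") is not None:
            return "failed"
        if extra_data.get("export_path") is not None:
            return "completed"
        return "pending"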
@@ -73,7 +73,7 @@ def export_realm(request: HttpRequest, user: UserProfile) -> HttpResponse:
     )
 
     # Allow for UI updates on a pending export
-    notify_realm_export(user)
+    notify_realm_export(realm)
 
     # Using the deferred_work queue processor to avoid
     # killing the process after 60s
@@ -90,7 +90,7 @@ def export_realm(request: HttpRequest, user: UserProfile) -> HttpResponse:
 
 @require_realm_admin
 def get_realm_exports(request: HttpRequest, user: UserProfile) -> HttpResponse:
-    realm_exports = get_realm_exports_serialized(user)
+    realm_exports = get_realm_exports_serialized(user.realm)
     return json_success(request, data={"exports": realm_exports})
 
 
@@ -110,7 +110,7 @@ def delete_realm_export(request: HttpRequest, user: UserProfile, export_id: int)
         if export_data.get("failed_timestamp") is not None:
             raise JsonableError(_("Export failed, nothing to delete"))
         raise JsonableError(_("Export still in progress"))
-    do_delete_realm_export(user, audit_log_entry)
+    do_delete_realm_export(audit_log_entry)
     return json_success(request)
 
 
@@ -138,7 +138,7 @@ class DeferredWorker(QueueProcessingWorker):
                 extra_data["failed_timestamp"] = timezone_now().timestamp()
                 export_event.extra_data = extra_data
                 export_event.save(update_fields=["extra_data"])
-                notify_realm_export(user_profile)
+                notify_realm_export(realm)
                 return
 
             extra_data["started_timestamp"] = timezone_now().timestamp()
@@ -170,7 +170,7 @@ class DeferredWorker(QueueProcessingWorker):
                     time.time() - start,
                     stack_info=True,
                 )
-                notify_realm_export(user_profile)
+                notify_realm_export(realm)
                 return
 
             assert public_url is not None
@@ -194,7 +194,7 @@ class DeferredWorker(QueueProcessingWorker):
 
             # For future frontend use, also notify administrator
             # clients that the export happened.
-            notify_realm_export(user_profile)
+            notify_realm_export(realm)
             logging.info(
                 "Completed data export for %s in %s",
                 user_profile.realm.string_id,
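Net effect: on every export state change (requested, failed, completed, deleted), all current human realm administrators now receive a realm_export event carrying the full serialized export list, not just the admin who started the export. A rough illustration of the payload an admin client sees; the "type" and "exports" keys come from the event dict in the first hunk, the per-export field names are inferred from the serialization code, and every value is invented.

    # Illustrative event only; per-export field names are assumptions based on
    # the get_realm_exports_serialized hunks, and all values are made up.
    realm_export_event = {
        "type": "realm_export",
        "exports": [
            {
                "id": 42,              # RealmAuditLog row id
                "acting_user_id": 7,   # admin who requested the export
                "export_url": None,    # set once the tarball has been uploaded
                "deleted_timestamp": None,
                "failed_timestamp": None,
                "pending": True,
            }
        ],
    }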