migration: Rename extra_data_json to extra_data in audit log models.

This migration is applied under the assumption that extra_data_json has
been populated for all existing and future audit log entries.

- This removes the manual conversions back and forth for extra_data
throughout the codebase, including the orjson.loads(), orjson.dumps(),
and str() calls.

- The custom handler used for converting Decimal is removed, since
DjangoJSONEncoder handles Decimal values for extra_data (a short sketch
follows this list).

- None-checks for extra_data are removed, because the field is no longer
nullable.

- Meanwhile, the bouncer needs to support processing RealmAuditLog entries
from remote servers both before and after the JSONField migration on extra_data.

- Since extra_data should always be a dict on a newer remote server that
has been migrated, the test cases are updated to create RealmAuditLog
objects with a dict for extra_data before sending the analytics data
over. Note that while JSONField allows non-dict values, a properly
behaving remote server always passes a dict for extra_data.

- We still test the legacy extra_data format, because not all remote
servers have migrated to JSONField extra_data.
This verifies that support for extra_data being a string or None has not
been dropped (see the compatibility sketch below).
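
A minimal sketch of that compatibility handling, mirroring the conversion
logic the old AbstractRealmAuditLog.save() used (see the models.py hunk
below); the helper name normalize_extra_data is hypothetical and not part
of this commit:

    import ast
    from typing import Any, Dict, Union

    import orjson

    def normalize_extra_data(extra_data: Union[None, str, Dict[str, Any]]) -> Dict[str, Any]:
        # Newer (migrated) remote servers send a dict via JSONField; older ones
        # may send None, a str()'d Python dict, or a JSON-encoded string.
        if extra_data is None:
            return {}
        if isinstance(extra_data, dict):
            return extra_data
        if extra_data.startswith("{'"):
            # Legacy str(dict) format, e.g. "{'sponsorship_pending': True}".
            return ast.literal_eval(extra_data)
        return orjson.loads(extra_data)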

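For the Decimal point above, a small illustration (not taken from this
commit) of why the custom decimal_to_float() handler is no longer needed:
DjangoJSONEncoder already knows how to encode Decimal, serializing it as a
string, which is why the test expectations below compare against
str(Decimal(...)):

    import json
    from decimal import Decimal

    from django.core.serializers.json import DjangoJSONEncoder

    # DjangoJSONEncoder handles Decimal (and datetime, UUID, ...) out of the box.
    encoded = json.dumps({"old_discount": None, "new_discount": Decimal("85")}, cls=DjangoJSONEncoder)
    print(encoded)  # {"old_discount": null, "new_discount": "85"}
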
Co-authored-by: Siddharth Asthana <siddharthasthana31@gmail.com>
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
Author: Zixuan James Li, 2023-07-13 13:46:06 -04:00, committed by Tim Abbott
Parent: 4e7bf3c4fb
Commit: 30495cec58
33 changed files with 472 additions and 663 deletions


@@ -7,7 +7,6 @@ from decimal import Decimal
 from functools import wraps
 from typing import Any, Callable, Dict, Generator, Optional, Tuple, TypeVar, Union
-import orjson
 import stripe
 from django.conf import settings
 from django.core.signing import Signer
@@ -487,12 +486,10 @@ def make_end_of_cycle_updates_if_needed(
             realm=realm,
             event_time=event_time,
             event_type=RealmAuditLog.CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN,
-            extra_data=orjson.dumps(
-                {
-                    "monthly_plan_id": plan.id,
-                    "annual_plan_id": new_plan.id,
-                }
-            ).decode(),
+            extra_data={
+                "monthly_plan_id": plan.id,
+                "annual_plan_id": new_plan.id,
+            },
         )
         return new_plan, new_plan_ledger_entry
@@ -625,12 +622,6 @@ def compute_plan_parameters(
     return billing_cycle_anchor, next_invoice_date, period_end, price_per_license
-def decimal_to_float(obj: object) -> object:
-    if isinstance(obj, Decimal):
-        return float(obj)
-    raise TypeError  # nocoverage
 def is_free_trial_offer_enabled() -> bool:
     return settings.FREE_TRIAL_DAYS not in (None, 0)
@@ -656,8 +647,7 @@ def do_change_remote_server_plan_type(remote_server: RemoteZulipServer, plan_typ
         event_type=RealmAuditLog.REMOTE_SERVER_PLAN_TYPE_CHANGED,
         server=remote_server,
         event_time=timezone_now(),
-        extra_data=str({"old_value": old_value, "new_value": plan_type}),
-        extra_data_json={"old_value": old_value, "new_value": plan_type},
+        extra_data={"old_value": old_value, "new_value": plan_type},
     )
@@ -741,9 +731,7 @@ def process_initial_upgrade(
         acting_user=user,
         event_time=billing_cycle_anchor,
         event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED,
-        extra_data=orjson.dumps(plan_params, default=decimal_to_float).decode(),
-        # Note that DjangoJSONEncoder has builtin support for parsing Decimal
-        extra_data_json=plan_params,
+        extra_data=plan_params,
     )
     if not free_trial:
@@ -979,8 +967,7 @@ def attach_discount_to_realm(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_DISCOUNT_CHANGED,
         event_time=timezone_now(),
-        extra_data=str({"old_discount": old_discount, "new_discount": discount}),
-        extra_data_json={"old_discount": old_discount, "new_discount": discount},
+        extra_data={"old_discount": old_discount, "new_discount": discount},
     )
@@ -995,8 +982,7 @@ def update_sponsorship_status(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_SPONSORSHIP_PENDING_STATUS_CHANGED,
         event_time=timezone_now(),
-        extra_data=str({"sponsorship_pending": sponsorship_pending}),
-        extra_data_json={"sponsorship_pending": sponsorship_pending},
+        extra_data={"sponsorship_pending": sponsorship_pending},
     )
@@ -1250,6 +1236,5 @@ def update_billing_method_of_current_plan(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_BILLING_METHOD_CHANGED,
         event_time=timezone_now(),
-        extra_data=str({"charge_automatically": charge_automatically}),
-        extra_data_json={"charge_automatically": charge_automatically},
+        extra_data={"charge_automatically": charge_automatically},
     )


@@ -830,16 +830,13 @@ class StripeTest(StripeTestCase):
             ],
         )
         self.assertEqual(audit_log_entries[3][0], RealmAuditLog.REALM_PLAN_TYPE_CHANGED)
-        self.assertEqual(
-            orjson.loads(
-                assert_is_not_none(
-                    RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
-                    .values_list("extra_data", flat=True)
-                    .first()
-                )
-            )["automanage_licenses"],
-            True,
-        )
+        first_audit_log_entry = (
+            RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
+            .values_list("extra_data", flat=True)
+            .first()
+        )
+        assert first_audit_log_entry is not None
+        self.assertTrue(first_audit_log_entry["automanage_licenses"])
         # Check that we correctly updated Realm
         realm = get_realm("zulip")
         self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD)
@@ -971,16 +968,13 @@ class StripeTest(StripeTestCase):
             ],
         )
         self.assertEqual(audit_log_entries[2][0], RealmAuditLog.REALM_PLAN_TYPE_CHANGED)
-        self.assertEqual(
-            orjson.loads(
-                assert_is_not_none(
-                    RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
-                    .values_list("extra_data", flat=True)
-                    .first()
-                )
-            )["automanage_licenses"],
-            False,
-        )
+        first_audit_log_entry = (
+            RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
+            .values_list("extra_data", flat=True)
+            .first()
+        )
+        assert first_audit_log_entry is not None
+        self.assertFalse(first_audit_log_entry["automanage_licenses"])
         # Check that we correctly updated Realm
         realm = get_realm("zulip")
         self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD)
@@ -1099,16 +1093,13 @@ class StripeTest(StripeTestCase):
             ],
         )
         self.assertEqual(audit_log_entries[3][0], RealmAuditLog.REALM_PLAN_TYPE_CHANGED)
-        self.assertEqual(
-            orjson.loads(
-                assert_is_not_none(
-                    RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
-                    .values_list("extra_data", flat=True)
-                    .first()
-                )
-            )["automanage_licenses"],
-            True,
-        )
+        first_audit_log_entry = (
+            RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
+            .values_list("extra_data", flat=True)
+            .first()
+        )
+        assert first_audit_log_entry is not None
+        self.assertTrue(first_audit_log_entry["automanage_licenses"])
         realm = get_realm("zulip")
         self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD)
@@ -1365,16 +1356,13 @@ class StripeTest(StripeTestCase):
             ],
         )
         self.assertEqual(audit_log_entries[2][0], RealmAuditLog.REALM_PLAN_TYPE_CHANGED)
-        self.assertEqual(
-            orjson.loads(
-                assert_is_not_none(
-                    RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
-                    .values_list("extra_data", flat=True)
-                    .first()
-                )
-            )["automanage_licenses"],
-            False,
-        )
+        first_audit_log_entry = (
+            RealmAuditLog.objects.filter(event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED)
+            .values_list("extra_data", flat=True)
+            .first()
+        )
+        assert first_audit_log_entry is not None
+        self.assertFalse(first_audit_log_entry["automanage_licenses"])
         realm = get_realm("zulip")
         self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD)
@@ -2490,7 +2478,7 @@ class StripeTest(StripeTestCase):
             event_type=RealmAuditLog.REALM_DISCOUNT_CHANGED
         ).last()
         assert realm_audit_log is not None
-        expected_extra_data = str({"old_discount": None, "new_discount": Decimal("85")})
+        expected_extra_data = {"old_discount": None, "new_discount": str(Decimal("85"))}
         self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
         self.login_user(user)
         # Check that the discount appears in page_params
@@ -2543,9 +2531,10 @@
             event_type=RealmAuditLog.REALM_DISCOUNT_CHANGED
         ).last()
         assert realm_audit_log is not None
-        expected_extra_data = str(
-            {"old_discount": Decimal("25.0000"), "new_discount": Decimal("50")}
-        )
+        expected_extra_data = {
+            "old_discount": str(Decimal("25.0000")),
+            "new_discount": str(Decimal("50")),
+        }
         self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
         self.assertEqual(realm_audit_log.acting_user, user)
@@ -2579,7 +2568,7 @@
         ).last()
         assert realm_audit_log is not None
         expected_extra_data = {"sponsorship_pending": True}
-        self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data))
+        self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
         self.assertEqual(realm_audit_log.acting_user, iago)
     def test_get_discount_for_realm(self) -> None:
@@ -2896,10 +2885,9 @@ class StripeTest(StripeTestCase):
         audit_log = RealmAuditLog.objects.get(
             event_type=RealmAuditLog.CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN
         )
-        extra_data: str = assert_is_not_none(audit_log.extra_data)
         self.assertEqual(audit_log.realm, user.realm)
-        self.assertEqual(orjson.loads(extra_data)["monthly_plan_id"], monthly_plan.id)
-        self.assertEqual(orjson.loads(extra_data)["annual_plan_id"], annual_plan.id)
+        self.assertEqual(audit_log.extra_data["monthly_plan_id"], monthly_plan.id)
+        self.assertEqual(audit_log.extra_data["annual_plan_id"], annual_plan.id)
         invoice_plans_as_needed(self.next_month)
@@ -3916,7 +3904,7 @@ class StripeTest(StripeTestCase):
         assert realm_audit_log is not None
         expected_extra_data = {"charge_automatically": plan.charge_automatically}
         self.assertEqual(realm_audit_log.acting_user, iago)
-        self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data))
+        self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
         update_billing_method_of_current_plan(realm, False, acting_user=iago)
         plan.refresh_from_db()
@@ -3927,7 +3915,7 @@
         assert realm_audit_log is not None
         expected_extra_data = {"charge_automatically": plan.charge_automatically}
         self.assertEqual(realm_audit_log.acting_user, iago)
-        self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data))
+        self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
     @mock_stripe()
     def test_customer_has_credit_card_as_default_payment_method(self, *mocks: Mock) -> None:
@@ -4552,7 +4540,7 @@ class BillingHelpersTest(ZulipTestCase):
             "old_value": RemoteZulipServer.PLAN_TYPE_SELF_HOSTED,
             "new_value": RemoteZulipServer.PLAN_TYPE_STANDARD,
         }
-        self.assertEqual(remote_realm_audit_log.extra_data, str(expected_extra_data))
+        self.assertEqual(remote_realm_audit_log.extra_data, expected_extra_data)
         self.assertEqual(remote_server.plan_type, RemoteZulipServer.PLAN_TYPE_STANDARD)
     def test_deactivate_remote_server(self) -> None:


@@ -1,6 +1,5 @@
 from typing import Optional, Union
-import orjson
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
@@ -136,12 +135,10 @@ def do_change_default_sending_stream(
         event_time=event_time,
         modified_user=user_profile,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
+        },
     )
     if user_profile.is_bot:
@@ -179,12 +176,10 @@ def do_change_default_events_register_stream(
         event_time=event_time,
         modified_user=user_profile,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
+        },
     )
     if user_profile.is_bot:
@@ -223,12 +218,10 @@ def do_change_default_all_public_streams(
         event_time=event_time,
         modified_user=user_profile,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: value,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: value,
+        },
     )
     if user_profile.is_bot:


@@ -64,7 +64,7 @@ def do_change_realm_subdomain(
         event_type=RealmAuditLog.REALM_SUBDOMAIN_CHANGED,
         event_time=timezone_now(),
         acting_user=acting_user,
-        extra_data=str({"old_subdomain": old_subdomain, "new_subdomain": new_subdomain}),
+        extra_data={"old_subdomain": old_subdomain, "new_subdomain": new_subdomain},
     )
     # If a realm if being renamed multiple times, we should find all the placeholder


@@ -2,7 +2,6 @@ import datetime
 from collections import defaultdict
 from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
@@ -445,11 +444,9 @@ def do_create_user(
         modified_user=user_profile,
         event_type=RealmAuditLog.USER_CREATED,
         event_time=event_time,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
+        },
     )
     if realm_creation:
@@ -570,11 +567,9 @@ def do_activate_mirror_dummy_user(
         acting_user=acting_user,
         event_type=RealmAuditLog.USER_ACTIVATED,
         event_time=event_time,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
+        },
     )
     do_increment_logging_stat(
         user_profile.realm,
@@ -600,11 +595,9 @@ def do_reactivate_user(user_profile: UserProfile, *, acting_user: Optional[UserP
         acting_user=acting_user,
         event_type=RealmAuditLog.USER_REACTIVATED,
         event_time=event_time,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
+        },
     )
     bot_owner_changed = False


@@ -1,7 +1,6 @@
 import datetime
 from typing import Optional
-import orjson
 from django.utils.timezone import now as timezone_now
 from zerver.actions.message_flags import do_mark_muted_user_messages_as_read
@@ -28,7 +27,7 @@ def do_mute_user(
         modified_user=user_profile,
         event_type=RealmAuditLog.USER_MUTED,
         event_time=date_muted,
-        extra_data=orjson.dumps({"muted_user_id": muted_user.id}).decode(),
+        extra_data={"muted_user_id": muted_user.id},
     )
@@ -45,5 +44,5 @@ def do_unmute_user(mute_object: MutedUser) -> None:
         modified_user=user_profile,
         event_type=RealmAuditLog.USER_UNMUTED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps({"unmuted_user_id": muted_user.id}).decode(),
+        extra_data={"unmuted_user_id": muted_user.id},
     )


@@ -1,6 +1,5 @@
 from typing import Optional
-import orjson
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
@@ -31,12 +30,10 @@ def do_add_realm_domain(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_DOMAIN_ADDED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_domains": get_realm_domains(realm),
-                "added_domain": added_domain,
-            }
-        ).decode(),
+        extra_data={
+            "realm_domains": get_realm_domains(realm),
+            "added_domain": added_domain,
+        },
     )
     event = dict(
@@ -67,12 +64,10 @@ def do_change_realm_domain(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_DOMAIN_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_domains": get_realm_domains(realm_domain.realm),
-                "changed_domain": changed_domain,
-            }
-        ).decode(),
+        extra_data={
+            "realm_domains": get_realm_domains(realm_domain.realm),
+            "changed_domain": changed_domain,
+        },
     )
     event = dict(
@@ -102,12 +97,10 @@ def do_remove_realm_domain(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_DOMAIN_REMOVED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_domains": get_realm_domains(realm),
-                "removed_domain": removed_domain,
-            }
-        ).decode(),
+        extra_data={
+            "realm_domains": get_realm_domains(realm),
+            "removed_domain": removed_domain,
+        },
     )
     if RealmDomain.objects.filter(realm=realm).count() == 0 and realm.emails_restricted_to_domains:


@@ -1,6 +1,5 @@
 from typing import IO, Dict, Optional
-import orjson
 from django.core.exceptions import ValidationError
 from django.db import transaction
 from django.db.utils import IntegrityError
@@ -63,12 +62,10 @@ def check_add_realm_emoji(
         acting_user=author,
         event_type=RealmAuditLog.REALM_EMOJI_ADDED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_emoji": dict(sorted(realm_emoji_dict.items())),
-                "added_emoji": realm_emoji_dict[str(realm_emoji.id)],
-            }
-        ).decode(),
+        extra_data={
+            "realm_emoji": dict(sorted(realm_emoji_dict.items())),
+            "added_emoji": realm_emoji_dict[str(realm_emoji.id)],
+        },
     )
     notify_realm_emoji(realm_emoji.realm, realm_emoji_dict)
     return realm_emoji
@@ -86,12 +83,10 @@ def do_remove_realm_emoji(realm: Realm, name: str, *, acting_user: Optional[User
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_EMOJI_REMOVED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_emoji": dict(sorted(realm_emoji_dict.items())),
-                "deactivated_emoji": realm_emoji_dict[str(emoji.id)],
-            }
-        ).decode(),
+        extra_data={
+            "realm_emoji": dict(sorted(realm_emoji_dict.items())),
+            "deactivated_emoji": realm_emoji_dict[str(emoji.id)],
+        },
     )
     notify_realm_emoji(realm, realm_emoji_dict)


@@ -1,4 +1,3 @@
-import orjson
 from django.utils.timezone import now as timezone_now
 from zerver.lib.export import get_realm_exports_serialized
@@ -14,11 +13,7 @@ def notify_realm_export(user_profile: UserProfile) -> None:
 def do_delete_realm_export(user_profile: UserProfile, export: RealmAuditLog) -> None:
-    # Give mypy a hint so it knows `orjson.loads`
-    # isn't being passed an `Optional[str]`.
-    export_extra_data = export.extra_data
-    assert export_extra_data is not None
-    export_data = orjson.loads(export_extra_data)
+    export_data = export.extra_data
     export_path = export_data.get("export_path")
     if export_path:
@@ -26,6 +21,6 @@ def do_delete_realm_export(user_profile: UserProfile, export: RealmAuditLog) ->
         delete_export_tarball(export_path)
     export_data.update(deleted_timestamp=timezone_now().timestamp())
-    export.extra_data = orjson.dumps(export_data).decode()
+    export.extra_data = export_data
     export.save(update_fields=["extra_data"])
     notify_realm_export(user_profile)


@@ -20,7 +20,7 @@ def do_change_icon_source(
     RealmAuditLog.objects.create(
         realm=realm,
         event_type=RealmAuditLog.REALM_ICON_SOURCE_CHANGED,
-        extra_data=str({"icon_source": icon_source, "icon_version": realm.icon_version}),
+        extra_data={"icon_source": icon_source, "icon_version": realm.icon_version},
         event_time=event_time,
         acting_user=acting_user,
     )


@@ -1,6 +1,5 @@
 from typing import Dict, List, Optional
-import orjson
 from django.db import transaction
 from django.db.models import Max
 from django.utils.timezone import now as timezone_now
@@ -57,16 +56,14 @@ def do_add_linkifier(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_LINKIFIER_ADDED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_linkifiers": realm_linkifiers,
-                "added_linkifier": LinkifierDict(
-                    pattern=pattern,
-                    url_template=url_template,
-                    id=linkifier.id,
-                ),
-            }
-        ).decode(),
+        extra_data={
+            "realm_linkifiers": realm_linkifiers,
+            "added_linkifier": LinkifierDict(
+                pattern=pattern,
+                url_template=url_template,
+                id=linkifier.id,
+            ),
+        },
     )
     notify_linkifiers(realm, realm_linkifiers)
@@ -97,12 +94,13 @@ def do_remove_linkifier(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_LINKIFIER_REMOVED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_linkifiers": realm_linkifiers,
-                "removed_linkifier": {"pattern": pattern, "url_template": url_template},
-            }
-        ).decode(),
+        extra_data={
+            "realm_linkifiers": realm_linkifiers,
+            "removed_linkifier": {
+                "pattern": pattern,
+                "url_template": url_template,
+            },
+        },
     )
     notify_linkifiers(realm, realm_linkifiers)
@@ -130,16 +128,14 @@ def do_update_linkifier(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_LINKIFIER_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_linkifiers": realm_linkifiers,
-                "changed_linkifier": LinkifierDict(
-                    pattern=pattern,
-                    url_template=url_template,
-                    id=linkifier.id,
-                ),
-            }
-        ).decode(),
+        extra_data={
+            "realm_linkifiers": realm_linkifiers,
+            "changed_linkifier": LinkifierDict(
+                pattern=pattern,
+                url_template=url_template,
+                id=linkifier.id,
+            ),
+        },
     )
     notify_linkifiers(realm, realm_linkifiers)
@@ -187,10 +183,8 @@ def check_reorder_linkifiers(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_LINKIFIERS_REORDERED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_linkifiers": realm_linkifiers,
-            }
-        ).decode(),
+        extra_data={
+            "realm_linkifiers": realm_linkifiers,
+        },
     )
     notify_linkifiers(realm, realm_linkifiers)


@@ -1,6 +1,5 @@
 from typing import List, Optional
-import orjson
 from django.core.exceptions import ValidationError
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
@@ -53,17 +52,15 @@ def check_add_realm_playground(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_PLAYGROUND_ADDED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_playgrounds": realm_playgrounds,
-                "added_playground": RealmPlaygroundDict(
-                    id=realm_playground.id,
-                    name=realm_playground.name,
-                    pygments_language=realm_playground.pygments_language,
-                    url_template=realm_playground.url_template,
-                ),
-            }
-        ).decode(),
+        extra_data={
+            "realm_playgrounds": realm_playgrounds,
+            "added_playground": RealmPlaygroundDict(
+                id=realm_playground.id,
+                name=realm_playground.name,
+                pygments_language=realm_playground.pygments_language,
+                url_template=realm_playground.url_template,
+            ),
+        },
     )
     notify_realm_playgrounds(realm, realm_playgrounds)
     return realm_playground.id
@@ -87,12 +84,10 @@ def do_remove_realm_playground(
         acting_user=acting_user,
         event_type=RealmAuditLog.REALM_PLAYGROUND_REMOVED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                "realm_playgrounds": realm_playgrounds,
-                "removed_playground": removed_playground,
-            }
-        ).decode(),
+        extra_data={
+            "realm_playgrounds": realm_playgrounds,
+            "removed_playground": removed_playground,
+        },
     )
     notify_realm_playgrounds(realm, realm_playgrounds)


@@ -2,7 +2,6 @@ import logging
 from email.headerregistry import Address
 from typing import Any, Dict, Literal, Optional, Tuple, Union
-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.db.models import QuerySet
@@ -91,13 +90,11 @@ def do_set_realm_property(
         event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
         event_time=event_time,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: value,
-                "property": name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: value,
+            "property": name,
+        },
    )
     if name == "waiting_period_threshold":
@@ -149,13 +146,11 @@ def do_set_realm_authentication_methods(
         event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
         event_time=timezone_now(),
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: updated_value,
-                "property": "authentication_methods",
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: updated_value,
+            "property": "authentication_methods",
+        },
     )
     event = dict(
@@ -197,13 +192,11 @@ def do_set_realm_stream(
         event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
         event_time=event_time,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: stream_id,
-                "property": field,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: stream_id,
+            "property": field,
+        },
     )
     event = dict(
@@ -249,13 +242,11 @@ def do_set_realm_user_default_setting(
         event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED,
         event_time=event_time,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: value,
-                "property": name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: value,
+            "property": name,
+        },
     )
     event = dict(
@@ -289,11 +280,9 @@ def do_deactivate_realm(realm: Realm, *, acting_user: Optional[UserProfile]) ->
         event_type=RealmAuditLog.REALM_DEACTIVATED,
         event_time=event_time,
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
+        },
     )
     ScheduledEmail.objects.filter(realm=realm).delete()
@@ -331,11 +320,9 @@ def do_reactivate_realm(realm: Realm) -> None:
         realm=realm,
         event_type=RealmAuditLog.REALM_REACTIVATED,
         event_time=event_time,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
+        },
     )
@@ -431,7 +418,7 @@ def do_change_realm_org_type(
         realm=realm,
         event_time=timezone_now(),
         acting_user=acting_user,
-        extra_data=str({"old_value": old_value, "new_value": org_type}),
+        extra_data={"old_value": old_value, "new_value": org_type},
     )
     event = dict(type="realm", op="update", property="org_type", value=org_type)
@@ -455,7 +442,7 @@ def do_change_realm_plan_type(
         realm=realm,
         event_time=timezone_now(),
         acting_user=acting_user,
-        extra_data=str({"old_value": old_value, "new_value": plan_type}),
+        extra_data={"old_value": old_value, "new_value": plan_type},
     )
     if plan_type == Realm.PLAN_TYPE_PLUS:


@@ -3,7 +3,6 @@ from collections import defaultdict
 from dataclasses import dataclass
 from typing import Any, Collection, Dict, Iterable, List, Mapping, Optional, Set, Tuple
-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.db.models import Q, QuerySet
@@ -880,13 +879,11 @@ def do_change_subscription_property(
         modified_user=user_profile,
         acting_user=acting_user,
         modified_stream=stream,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: database_value,
-                "property": database_property_name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: database_value,
+            "property": database_property_name,
+        },
     )
     # This first in_home_view event is deprecated and will be removed
@@ -980,13 +977,11 @@ def do_change_stream_permission(
             modified_stream=stream,
             event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
             event_time=event_time,
-            extra_data=orjson.dumps(
-                {
-                    RealmAuditLog.OLD_VALUE: old_invite_only_value,
-                    RealmAuditLog.NEW_VALUE: stream.invite_only,
-                    "property": "invite_only",
-                }
-            ).decode(),
+            extra_data={
+                RealmAuditLog.OLD_VALUE: old_invite_only_value,
+                RealmAuditLog.NEW_VALUE: stream.invite_only,
+                "property": "invite_only",
+            },
         )
     if old_history_public_to_subscribers_value != stream.history_public_to_subscribers:
@@ -996,13 +991,11 @@ def do_change_stream_permission(
             modified_stream=stream,
             event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
             event_time=event_time,
-            extra_data=orjson.dumps(
-                {
-                    RealmAuditLog.OLD_VALUE: old_history_public_to_subscribers_value,
-                    RealmAuditLog.NEW_VALUE: stream.history_public_to_subscribers,
-                    "property": "history_public_to_subscribers",
-                }
-            ).decode(),
+            extra_data={
+                RealmAuditLog.OLD_VALUE: old_history_public_to_subscribers_value,
+                RealmAuditLog.NEW_VALUE: stream.history_public_to_subscribers,
+                "property": "history_public_to_subscribers",
+            },
         )
     if old_is_web_public_value != stream.is_web_public:
@@ -1024,13 +1017,11 @@ def do_change_stream_permission(
             modified_stream=stream,
             event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
             event_time=event_time,
-            extra_data=orjson.dumps(
-                {
-                    RealmAuditLog.OLD_VALUE: old_is_web_public_value,
-                    RealmAuditLog.NEW_VALUE: stream.is_web_public,
-                    "property": "is_web_public",
-                }
-            ).decode(),
+            extra_data={
+                RealmAuditLog.OLD_VALUE: old_is_web_public_value,
+                RealmAuditLog.NEW_VALUE: stream.is_web_public,
+                "property": "is_web_public",
+            },
         )
     notify_stream_creation_ids = set()
@@ -1145,13 +1136,11 @@ def do_change_stream_post_policy(
         modified_stream=stream,
         event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_post_policy,
-                RealmAuditLog.NEW_VALUE: stream_post_policy,
-                "property": "stream_post_policy",
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_post_policy,
+            RealmAuditLog.NEW_VALUE: stream_post_policy,
+            "property": "stream_post_policy",
+        },
     )
     event = dict(
@@ -1197,12 +1186,10 @@ def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -
         modified_stream=stream,
         event_type=RealmAuditLog.STREAM_NAME_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_name,
-                RealmAuditLog.NEW_VALUE: new_name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_name,
+            RealmAuditLog.NEW_VALUE: new_name,
+        },
     )
     assert stream.recipient_id is not None
@@ -1300,13 +1287,11 @@ def do_change_stream_description(
         modified_stream=stream,
         event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_description,
-                RealmAuditLog.NEW_VALUE: new_description,
-                "property": "description",
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_description,
+            RealmAuditLog.NEW_VALUE: new_description,
+            "property": "description",
+        },
     )
     event = dict(
@@ -1385,12 +1370,10 @@ def do_change_stream_message_retention_days(
         modified_stream=stream,
         event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_message_retention_days_value,
-                RealmAuditLog.NEW_VALUE: message_retention_days,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_message_retention_days_value,
+            RealmAuditLog.NEW_VALUE: message_retention_days,
+        },
     )
     event = dict(
@@ -1431,13 +1414,11 @@ def do_change_stream_group_based_setting(
         modified_stream=stream,
         event_type=RealmAuditLog.STREAM_GROUP_BASED_SETTING_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_user_group_id,
-                RealmAuditLog.NEW_VALUE: user_group.id,
-                "property": setting_name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_user_group_id,
+            RealmAuditLog.NEW_VALUE: user_group.id,
+            "property": setting_name,
+        },
     )
     event = dict(
         op="update",


@@ -2,7 +2,6 @@ import datetime
 from typing import Dict, List, Mapping, Optional, Sequence, TypedDict, Union
 import django.db.utils
-import orjson
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
 from django.utils.translation import gettext as _
@@ -74,13 +73,11 @@ def create_user_group_in_database(
             event_type=RealmAuditLog.USER_GROUP_GROUP_BASED_SETTING_CHANGED,
             event_time=creation_time,
             modified_user_group=user_group,
-            extra_data=orjson.dumps(
-                {
-                    RealmAuditLog.OLD_VALUE: None,
-                    RealmAuditLog.NEW_VALUE: user_group.can_mention_group.id,
-                    "property": "can_mention_group",
-                }
-            ).decode(),
+            extra_data={
+                RealmAuditLog.OLD_VALUE: None,
+                RealmAuditLog.NEW_VALUE: user_group.can_mention_group.id,
+                "property": "can_mention_group",
+            },
         ),
     ] + [
         RealmAuditLog(
@@ -234,12 +231,10 @@ def do_update_user_group_name(
             event_type=RealmAuditLog.USER_GROUP_NAME_CHANGED,
             event_time=timezone_now(),
             acting_user=acting_user,
-            extra_data=orjson.dumps(
-                {
-                    RealmAuditLog.OLD_VALUE: old_value,
-                    RealmAuditLog.NEW_VALUE: name,
-                }
-            ).decode(),
+            extra_data={
+                RealmAuditLog.OLD_VALUE: old_value,
+                RealmAuditLog.NEW_VALUE: name,
+            },
         )
     except django.db.utils.IntegrityError:
         raise JsonableError(_("User group '{group_name}' already exists.").format(group_name=name))
@@ -259,12 +254,10 @@ def do_update_user_group_description(
         event_type=RealmAuditLog.USER_GROUP_DESCRIPTION_CHANGED,
         event_time=timezone_now(),
         acting_user=acting_user,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: description,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: description,
+        },
     )
     do_send_user_group_update_event(user_group, dict(description=description))
@@ -351,7 +344,7 @@ def add_subgroups_to_user_group(
             event_type=RealmAuditLog.USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_ADDED,
             event_time=now,
             acting_user=acting_user,
-            extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
+            extra_data={"subgroup_ids": subgroup_ids},
         ),
         *(
             RealmAuditLog(
@@ -360,7 +353,7 @@ def add_subgroups_to_user_group(
                 event_type=RealmAuditLog.USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_ADDED,
                 event_time=now,
                 acting_user=acting_user,
-                extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
+                extra_data={"supergroup_ids": [user_group.id]},
             )
             for subgroup_id in subgroup_ids
         ),
@@ -385,7 +378,7 @@ def remove_subgroups_from_user_group(
             event_type=RealmAuditLog.USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_REMOVED,
             event_time=now,
             acting_user=acting_user,
-            extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
+            extra_data={"subgroup_ids": subgroup_ids},
        ),
         *(
             RealmAuditLog(
@@ -394,7 +387,7 @@ def remove_subgroups_from_user_group(
                 event_type=RealmAuditLog.USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_REMOVED,
                 event_time=now,
                 acting_user=acting_user,
-                extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
+                extra_data={"supergroup_ids": [user_group.id]},
             )
             for subgroup_id in subgroup_ids
         ),
@@ -434,13 +427,11 @@ def do_change_user_group_permission_setting(
         event_type=RealmAuditLog.USER_GROUP_GROUP_BASED_SETTING_CHANGED,
         event_time=timezone_now(),
         modified_user_group=user_group,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value.id,
-                RealmAuditLog.NEW_VALUE: setting_value_group.id,
-                "property": setting_name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value.id,
+            RealmAuditLog.NEW_VALUE: setting_value_group.id,
+            "property": setting_name,
+        },
     )
     event_data_dict: Dict[str, Union[str, int]] = {setting_name: setting_value_group.id}


@@ -1,7 +1,6 @@
 import datetime
 from typing import Iterable, Optional, Union
-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.db.models import F
@@ -195,8 +194,7 @@ def do_change_full_name(
         modified_user=user_profile,
         event_type=RealmAuditLog.USER_FULL_NAME_CHANGED,
         event_time=event_time,
-        extra_data=old_name,
-        extra_data_json={RealmAuditLog.OLD_VALUE: old_name, RealmAuditLog.NEW_VALUE: full_name},
+        extra_data={RealmAuditLog.OLD_VALUE: old_name, RealmAuditLog.NEW_VALUE: full_name},
     )
     payload = dict(user_id=user_profile.id, full_name=user_profile.full_name)
     send_event(
@@ -352,7 +350,7 @@ def do_change_avatar_fields(
         realm=user_profile.realm,
         modified_user=user_profile,
         event_type=RealmAuditLog.USER_AVATAR_SOURCE_CHANGED,
-        extra_data=str({"avatar_source": avatar_source}),
+        extra_data={"avatar_source": avatar_source},
         event_time=event_time,
         acting_user=acting_user,
     )
@@ -410,13 +408,11 @@ def do_change_user_setting(
         event_time=event_time,
         acting_user=acting_user,
         modified_user=user_profile,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: setting_value,
-                "property": setting_name,
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: setting_value,
+            "property": setting_name,
+        },
     )
     # Disabling digest emails should clear a user's email queue


@@ -3,7 +3,6 @@ from collections import defaultdict
 from email.headerregistry import Address
 from typing import Any, Dict, List, Optional
-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.utils.timezone import now as timezone_now
@@ -274,11 +273,9 @@ def do_deactivate_user(
         acting_user=acting_user,
         event_type=RealmAuditLog.USER_DEACTIVATED,
         event_time=event_time,
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
+        },
     )
     do_increment_logging_stat(
         user_profile.realm,
@@ -327,13 +324,11 @@ def do_change_user_role(
         acting_user=acting_user,
         event_type=RealmAuditLog.USER_ROLE_CHANGED,
         event_time=timezone_now(),
-        extra_data=orjson.dumps(
-            {
-                RealmAuditLog.OLD_VALUE: old_value,
-                RealmAuditLog.NEW_VALUE: value,
-                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
-            }
-        ).decode(),
+        extra_data={
+            RealmAuditLog.OLD_VALUE: old_value,
+            RealmAuditLog.NEW_VALUE: value,
+            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
+        },
     )
     event = dict(
         type="realm_user", op="update", person=dict(user_id=user_profile.id, role=user_profile.role)


@@ -2412,9 +2412,7 @@ def get_realm_exports_serialized(user: UserProfile) -> List[Dict[str, Any]]:
         failed_timestamp = None
         acting_user = export.acting_user
-        export_data = {}
-        if export.extra_data is not None:
-            export_data = orjson.loads(export.extra_data)
+        export_data = export.extra_data
         deleted_timestamp = export_data.get("deleted_timestamp")
         failed_timestamp = export_data.get("failed_timestamp")


@@ -1,6 +1,5 @@
 from typing import Dict, Iterable, List, Mapping, Sequence, TypedDict
-import orjson
 from django.db import transaction
 from django.db.models import F, QuerySet
 from django.utils.timezone import now as timezone_now
@@ -379,13 +378,11 @@ def create_system_user_groups_for_realm(realm: Realm) -> Dict[int, UserGroup]:
                 event_type=RealmAuditLog.USER_GROUP_GROUP_BASED_SETTING_CHANGED,
                 event_time=creation_time,
                 modified_user_group=user_group,
-                extra_data=orjson.dumps(
-                    {
-                        RealmAuditLog.OLD_VALUE: None,
-                        RealmAuditLog.NEW_VALUE: user_group.can_mention_group.id,
-                        "property": "can_mention_group",
-                    }
-                ).decode(),
+                extra_data={
+                    RealmAuditLog.OLD_VALUE: None,
+                    RealmAuditLog.NEW_VALUE: user_group.can_mention_group.id,
+                    "property": "can_mention_group",
+                },
             )
         )
     UserGroup.objects.bulk_update(groups_with_updated_settings, ["can_mention_group"])
@@ -404,7 +401,7 @@ def create_system_user_groups_for_realm(realm: Realm) -> Dict[int, UserGroup]:
             event_type=RealmAuditLog.USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_ADDED,
             event_time=now,
             acting_user=None,
-            extra_data=orjson.dumps({"subgroup_ids": [subgroup.id]}).decode(),
+            extra_data={"subgroup_ids": [subgroup.id]},
         ),
         RealmAuditLog(
             realm=realm,
@@ -412,7 +409,7 @@ def create_system_user_groups_for_realm(realm: Realm) -> Dict[int, UserGroup]:
             event_type=RealmAuditLog.USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_ADDED,
             event_time=now,
             acting_user=None,
-            extra_data=orjson.dumps({"supergroup_ids": [supergroup.id]}).decode(),
+            extra_data={"supergroup_ids": [supergroup.id]},
         ),
     ]
 )


@@ -0,0 +1,21 @@
+# Generated by Django 4.0.7 on 2022-10-02 17:02
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("zerver", "0466_realmfilter_order"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="realmauditlog",
+            name="extra_data",
+        ),
+        migrations.RenameField(
+            model_name="realmauditlog",
+            old_name="extra_data_json",
+            new_name="extra_data",
+        ),
+    ]


@@ -1,4 +1,3 @@
-import ast
 import datetime
 import hashlib
 import secrets
@@ -11,7 +10,6 @@ from typing import (
     Any,
     Callable,
     Dict,
-    Iterable,
     List,
     Optional,
     Pattern,
@@ -4448,8 +4446,7 @@ class AbstractRealmAuditLog(models.Model):
     ROLE_COUNT_HUMANS = "11"
     ROLE_COUNT_BOTS = "12"
-    extra_data = models.TextField(null=True)
-    extra_data_json = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
+    extra_data = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
     # Event types
     USER_CREATED = 101
@@ -4568,29 +4565,6 @@ class AbstractRealmAuditLog(models.Model):
     class Meta:
         abstract = True
-    def save(
-        self,
-        force_insert: bool = False,
-        force_update: bool = False,
-        using: Optional[str] = None,
-        update_fields: Optional[Iterable[str]] = None,
-    ) -> None:
-        # Unless extra_data is set and extra_data_json is unset (defaults to {})
-        # we do not attempt to auto convert extra_data to extra_data_json
-        if self.extra_data is not None and not self.extra_data_json:
-            try:
-                if self.extra_data.startswith("{'"):
-                    self.extra_data_json = ast.literal_eval(self.extra_data)
-                else:
-                    self.extra_data_json = orjson.loads(self.extra_data)
-            except (
-                Exception
-            ):  # nocoverage # This is not intended to happen at all for correctly written code
-                raise Exception(
-                    "extra_data_json must be explicitly set if extra_data is not str()'d from a dict or JSON-encoded."
-                )
-        super().save(force_insert, force_update, using, update_fields)
 class RealmAuditLog(AbstractRealmAuditLog):
     """


@ -1,7 +1,6 @@
from datetime import timedelta from datetime import timedelta
from typing import Any, Dict, Union from typing import Any, Dict, Union
import orjson
from django.contrib.auth.password_validation import validate_password from django.contrib.auth.password_validation import validate_password
from django.utils.timezone import now as timezone_now from django.utils.timezone import now as timezone_now
@ -124,7 +123,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__gte=now, event_time__gte=now,
event_time__lte=now + timedelta(minutes=60), event_time__lte=now + timedelta(minutes=60),
) )
.order_by("event_time") .order_by("event_time", "event_type")
.values_list("event_type", flat=True) .values_list("event_type", flat=True)
) )
self.assertEqual( self.assertEqual(
@ -149,10 +148,10 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__lte=now + timedelta(minutes=60), event_time__lte=now + timedelta(minutes=60),
): ):
if event.event_type == RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED: if event.event_type == RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED:
self.assertIsNone(event.extra_data) self.assertDictEqual(event.extra_data, {})
modified_user_group_names.append(assert_is_not_none(event.modified_user_group).name) modified_user_group_names.append(assert_is_not_none(event.modified_user_group).name)
continue continue
extra_data = orjson.loads(assert_is_not_none(event.extra_data)) extra_data = event.extra_data
self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT]) self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
self.assertNotIn(RealmAuditLog.OLD_VALUE, extra_data) self.assertNotIn(RealmAuditLog.OLD_VALUE, extra_data)
@ -189,7 +188,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__gte=now, event_time__gte=now,
event_time__lte=now + timedelta(minutes=60), event_time__lte=now + timedelta(minutes=60),
): ):
extra_data = orjson.loads(assert_is_not_none(event.extra_data)) extra_data = event.extra_data
self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT]) self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
self.assertIn(RealmAuditLog.OLD_VALUE, extra_data) self.assertIn(RealmAuditLog.OLD_VALUE, extra_data)
self.assertIn(RealmAuditLog.NEW_VALUE, extra_data) self.assertIn(RealmAuditLog.NEW_VALUE, extra_data)
@ -426,14 +425,14 @@ class TestRealmAuditLog(ZulipTestCase):
log_entry = RealmAuditLog.objects.get( log_entry = RealmAuditLog.objects.get(
realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, acting_user=user realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, acting_user=user
) )
extra_data = orjson.loads(assert_is_not_none(log_entry.extra_data)) extra_data = log_entry.extra_data
self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT]) self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
do_reactivate_realm(realm) do_reactivate_realm(realm)
log_entry = RealmAuditLog.objects.get( log_entry = RealmAuditLog.objects.get(
realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED
) )
extra_data = orjson.loads(assert_is_not_none(log_entry.extra_data)) extra_data = log_entry.extra_data
self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT]) self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
def test_create_stream_if_needed(self) -> None: def test_create_stream_if_needed(self) -> None:
@ -501,7 +500,7 @@ class TestRealmAuditLog(ZulipTestCase):
acting_user=user, acting_user=user,
) )
self.assertEqual(realm_audit_logs.count(), 1) self.assertEqual(realm_audit_logs.count(), 1)
extra_data = orjson.loads(assert_is_not_none(realm_audit_logs[0].extra_data)) extra_data = realm_audit_logs[0].extra_data
expected_new_value = auth_method_dict expected_new_value = auth_method_dict
self.assertEqual(extra_data[RealmAuditLog.OLD_VALUE], expected_old_value) self.assertEqual(extra_data[RealmAuditLog.OLD_VALUE], expected_old_value)
self.assertEqual(extra_data[RealmAuditLog.NEW_VALUE], expected_new_value) self.assertEqual(extra_data[RealmAuditLog.NEW_VALUE], expected_new_value)
@ -534,7 +533,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED, event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(value_expected).decode(), extra_data=value_expected,
).count(), ).count(),
1, 1,
) )
@ -554,7 +553,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED, event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(value_expected).decode(), extra_data=value_expected,
).count(), ).count(),
1, 1,
) )
@ -574,13 +573,11 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED, event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: stream.id,
RealmAuditLog.NEW_VALUE: stream.id, "property": "notifications_stream",
"property": "notifications_stream", },
}
).decode(),
).count(), ).count(),
1, 1,
) )
@ -600,13 +597,11 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED, event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: stream.id,
RealmAuditLog.NEW_VALUE: stream.id, "property": "signup_notifications_stream",
"property": "signup_notifications_stream", },
}
).decode(),
).count(), ).count(),
1, 1,
) )
@@ -627,7 +622,7 @@ class TestRealmAuditLog(ZulipTestCase):
         assert audit_log is not None
         self.assert_length(audit_entries, 1)
         self.assertEqual(icon_source, realm.icon_source)
-        self.assertEqual(audit_log.extra_data, "{'icon_source': 'G', 'icon_version': 2}")
+        self.assertEqual(audit_log.extra_data, {"icon_source": "G", "icon_version": 2})

     def test_change_subscription_property(self) -> None:
         user = self.example_user("hamlet")
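The assertion change above is the storage-format change in miniature: the legacy TextField held the str() of a dict, while the JSONField round-trips the dict itself. Illustratively, with the values from this test:

```python
legacy_stored = str({"icon_source": "G", "icon_version": 2})
assert legacy_stored == "{'icon_source': 'G', 'icon_version': 2}"  # what the TextField held
assert {"icon_source": "G", "icon_version": 2} != legacy_stored  # what the JSONField holds now
```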
@ -665,7 +660,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
modified_user=user, modified_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -684,12 +679,10 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.USER_DEFAULT_SENDING_STREAM_CHANGED, event_type=RealmAuditLog.USER_DEFAULT_SENDING_STREAM_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: stream.id,
RealmAuditLog.NEW_VALUE: stream.id, },
}
).decode(),
).count(), ).count(),
1, 1,
) )
@ -703,12 +696,10 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.USER_DEFAULT_REGISTER_STREAM_CHANGED, event_type=RealmAuditLog.USER_DEFAULT_REGISTER_STREAM_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: stream.id,
RealmAuditLog.NEW_VALUE: stream.id, },
}
).decode(),
).count(), ).count(),
1, 1,
) )
@ -722,9 +713,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.USER_DEFAULT_ALL_PUBLIC_STREAMS_CHANGED, event_type=RealmAuditLog.USER_DEFAULT_ALL_PUBLIC_STREAMS_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps( extra_data={RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: False},
{RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: False}
).decode(),
).count(), ).count(),
1, 1,
) )
@ -743,9 +732,10 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
modified_stream=stream, modified_stream=stream,
extra_data=orjson.dumps( extra_data={
{RealmAuditLog.OLD_VALUE: old_name, RealmAuditLog.NEW_VALUE: "updated name"} RealmAuditLog.OLD_VALUE: old_name,
).decode(), RealmAuditLog.NEW_VALUE: "updated name",
},
).count(), ).count(),
1, 1,
) )
@ -784,7 +774,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
modified_user=user, modified_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -810,7 +800,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_DOMAIN_ADDED, event_type=RealmAuditLog.REALM_DOMAIN_ADDED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -831,7 +821,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_DOMAIN_CHANGED, event_type=RealmAuditLog.REALM_DOMAIN_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -852,7 +842,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_DOMAIN_REMOVED, event_type=RealmAuditLog.REALM_DOMAIN_REMOVED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -884,7 +874,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PLAYGROUND_ADDED, event_type=RealmAuditLog.REALM_PLAYGROUND_ADDED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -911,7 +901,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_PLAYGROUND_REMOVED, event_type=RealmAuditLog.REALM_PLAYGROUND_REMOVED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -942,7 +932,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_LINKIFIER_ADDED, event_type=RealmAuditLog.REALM_LINKIFIER_ADDED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -970,7 +960,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_LINKIFIER_CHANGED, event_type=RealmAuditLog.REALM_LINKIFIER_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -995,7 +985,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_LINKIFIER_REMOVED, event_type=RealmAuditLog.REALM_LINKIFIER_REMOVED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -1033,7 +1023,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_EMOJI_ADDED, event_type=RealmAuditLog.REALM_EMOJI_ADDED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@ -1062,7 +1052,7 @@ class TestRealmAuditLog(ZulipTestCase):
event_type=RealmAuditLog.REALM_EMOJI_REMOVED, event_type=RealmAuditLog.REALM_EMOJI_REMOVED,
event_time__gte=now, event_time__gte=now,
acting_user=user, acting_user=user,
extra_data=orjson.dumps(expected_extra_data).decode(), extra_data=expected_extra_data,
).count(), ).count(),
1, 1,
) )
@@ -1126,14 +1116,8 @@ class TestRealmAuditLog(ZulipTestCase):
             supergroup_id, subgroup_extra_data = logged_subgroup_entries[i]
             subgroup_id, supergroup_extra_data = logged_supergroup_entries[i]
-            assert subgroup_extra_data is not None
-            assert supergroup_extra_data is not None
-            self.assertEqual(
-                orjson.loads(subgroup_extra_data)["subgroup_ids"][0], expected_subgroup_id
-            )
-            self.assertEqual(
-                orjson.loads(supergroup_extra_data)["supergroup_ids"][0], expected_supergroup_id
-            )
+            self.assertEqual(subgroup_extra_data["subgroup_ids"][0], expected_subgroup_id)
+            self.assertEqual(supergroup_extra_data["supergroup_ids"][0], expected_supergroup_id)
             self.assertEqual(supergroup_id, expected_supergroup_id)
             self.assertEqual(subgroup_id, expected_subgroup_id)
@ -1151,7 +1135,7 @@ class TestRealmAuditLog(ZulipTestCase):
): ):
self.assertEqual(user_group_id, expected_user_group_id) self.assertEqual(user_group_id, expected_user_group_id)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(extra_data)), extra_data,
{ {
RealmAuditLog.OLD_VALUE: None, RealmAuditLog.OLD_VALUE: None,
RealmAuditLog.NEW_VALUE: nobody_group.id, RealmAuditLog.NEW_VALUE: nobody_group.id,
@ -1203,10 +1187,7 @@ class TestRealmAuditLog(ZulipTestCase):
) )
self.assert_length(audit_log_entries, len(UserGroup.GROUP_PERMISSION_SETTINGS)) self.assert_length(audit_log_entries, len(UserGroup.GROUP_PERMISSION_SETTINGS))
self.assertListEqual( self.assertListEqual(
[ [audit_log.extra_data for audit_log in audit_log_entries],
orjson.loads(assert_is_not_none(audit_log.extra_data))
for audit_log in audit_log_entries
],
[ [
{ {
RealmAuditLog.OLD_VALUE: None, RealmAuditLog.OLD_VALUE: None,
@ -1264,7 +1245,7 @@ class TestRealmAuditLog(ZulipTestCase):
self.assertEqual(audit_log_entry.modified_user_group, user_group) self.assertEqual(audit_log_entry.modified_user_group, user_group)
self.assertEqual(audit_log_entry.acting_user, hamlet) self.assertEqual(audit_log_entry.acting_user, hamlet)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entry.extra_data)), audit_log_entry.extra_data,
{"subgroup_ids": [subgroup.id for subgroup in subgroups]}, {"subgroup_ids": [subgroup.id for subgroup in subgroups]},
) )
audit_log_entries = RealmAuditLog.objects.filter( audit_log_entries = RealmAuditLog.objects.filter(
@ -1277,7 +1258,7 @@ class TestRealmAuditLog(ZulipTestCase):
self.assertEqual(audit_log_entries[i].modified_user_group, subgroups[i]) self.assertEqual(audit_log_entries[i].modified_user_group, subgroups[i])
self.assertEqual(audit_log_entries[i].acting_user, hamlet) self.assertEqual(audit_log_entries[i].acting_user, hamlet)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entries[i].extra_data)), audit_log_entries[i].extra_data,
{"supergroup_ids": [user_group.id]}, {"supergroup_ids": [user_group.id]},
) )
@ -1290,7 +1271,7 @@ class TestRealmAuditLog(ZulipTestCase):
self.assertEqual(audit_log_entry.modified_user_group, user_group) self.assertEqual(audit_log_entry.modified_user_group, user_group)
self.assertEqual(audit_log_entry.acting_user, hamlet) self.assertEqual(audit_log_entry.acting_user, hamlet)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entry.extra_data)), audit_log_entry.extra_data,
{"subgroup_ids": [subgroup.id for subgroup in subgroups[:2]]}, {"subgroup_ids": [subgroup.id for subgroup in subgroups[:2]]},
) )
audit_log_entries = RealmAuditLog.objects.filter( audit_log_entries = RealmAuditLog.objects.filter(
@ -1303,7 +1284,7 @@ class TestRealmAuditLog(ZulipTestCase):
self.assertEqual(audit_log_entries[i].modified_user_group, subgroups[i]) self.assertEqual(audit_log_entries[i].modified_user_group, subgroups[i])
self.assertEqual(audit_log_entries[i].acting_user, hamlet) self.assertEqual(audit_log_entries[i].acting_user, hamlet)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entries[i].extra_data)), audit_log_entries[i].extra_data,
{"supergroup_ids": [user_group.id]}, {"supergroup_ids": [user_group.id]},
) )
@ -1326,7 +1307,7 @@ class TestRealmAuditLog(ZulipTestCase):
) )
self.assert_length(audit_log_entries, 1) self.assert_length(audit_log_entries, 1)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entries[0].extra_data)), audit_log_entries[0].extra_data,
{ {
RealmAuditLog.OLD_VALUE: "demo", RealmAuditLog.OLD_VALUE: "demo",
RealmAuditLog.NEW_VALUE: "bar", RealmAuditLog.NEW_VALUE: "bar",
@ -1341,7 +1322,7 @@ class TestRealmAuditLog(ZulipTestCase):
) )
self.assert_length(audit_log_entries, 1) self.assert_length(audit_log_entries, 1)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entries[0].extra_data)), audit_log_entries[0].extra_data,
{ {
RealmAuditLog.OLD_VALUE: "No description", RealmAuditLog.OLD_VALUE: "No description",
RealmAuditLog.NEW_VALUE: "Foo", RealmAuditLog.NEW_VALUE: "Foo",
@ -1363,7 +1344,7 @@ class TestRealmAuditLog(ZulipTestCase):
self.assert_length(audit_log_entries, 1) self.assert_length(audit_log_entries, 1)
self.assertIsNone(audit_log_entries[0].acting_user) self.assertIsNone(audit_log_entries[0].acting_user)
self.assertDictEqual( self.assertDictEqual(
orjson.loads(assert_is_not_none(audit_log_entries[0].extra_data)), audit_log_entries[0].extra_data,
{ {
RealmAuditLog.OLD_VALUE: old_group.id, RealmAuditLog.OLD_VALUE: old_group.id,
RealmAuditLog.NEW_VALUE: new_group.id, RealmAuditLog.NEW_VALUE: new_group.id,

View File

@ -2918,13 +2918,11 @@ class RealmPropertyActionTest(BaseAction):
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED, event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=self.user_profile, acting_user=self.user_profile,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: val,
RealmAuditLog.NEW_VALUE: val, "property": name,
"property": name, },
}
).decode(),
).count(), ).count(),
1, 1,
) )
@ -2999,13 +2997,11 @@ class RealmPropertyActionTest(BaseAction):
event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED, event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED,
event_time__gte=now, event_time__gte=now,
acting_user=self.user_profile, acting_user=self.user_profile,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.OLD_VALUE: old_value, RealmAuditLog.NEW_VALUE: val,
RealmAuditLog.NEW_VALUE: val, "property": name,
"property": name, },
}
).decode(),
).count(), ).count(),
1, 1,
) )

View File

@@ -1,8 +1,6 @@
 from datetime import datetime, timezone
 from unittest import mock

-import orjson
 from zerver.actions.users import do_deactivate_user
 from zerver.lib.cache import cache_get, get_muting_users_cache_key
 from zerver.lib.muted_users import get_mute_object, get_muting_users, get_user_mutes
@ -113,7 +111,7 @@ class MutedUsersTests(ZulipTestCase):
( (
RealmAuditLog.USER_MUTED, RealmAuditLog.USER_MUTED,
mute_time, mute_time,
orjson.dumps({"muted_user_id": cordelia.id}).decode(), {"muted_user_id": cordelia.id},
), ),
) )
@ -173,7 +171,7 @@ class MutedUsersTests(ZulipTestCase):
( (
RealmAuditLog.USER_UNMUTED, RealmAuditLog.USER_UNMUTED,
mute_time, mute_time,
orjson.dumps({"unmuted_user_id": cordelia.id}).decode(), {"unmuted_user_id": cordelia.id},
), ),
) )

View File

@ -712,7 +712,7 @@ class AnalyticsBouncerTest(BouncerTestCase):
modified_user=user, modified_user=user,
event_type=RealmAuditLog.REALM_LOGO_CHANGED, event_type=RealmAuditLog.REALM_LOGO_CHANGED,
event_time=end_time, event_time=end_time,
extra_data=orjson.dumps({"foo": "bar"}).decode(), extra_data={"data": "foo"},
) )
send_analytics_to_remote_server() send_analytics_to_remote_server()
check_counts(6, 4, 3, 2, 1) check_counts(6, 4, 3, 2, 1)
@ -722,11 +722,9 @@ class AnalyticsBouncerTest(BouncerTestCase):
modified_user=user, modified_user=user,
event_type=RealmAuditLog.USER_REACTIVATED, event_type=RealmAuditLog.USER_REACTIVATED,
event_time=end_time, event_time=end_time,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm), },
}
).decode(),
) )
send_analytics_to_remote_server() send_analytics_to_remote_server()
check_counts(7, 5, 3, 2, 2) check_counts(7, 5, 3, 2, 2)
@ -829,11 +827,9 @@ class AnalyticsBouncerTest(BouncerTestCase):
modified_user=user, modified_user=user,
event_type=RealmAuditLog.USER_REACTIVATED, event_type=RealmAuditLog.USER_REACTIVATED,
event_time=self.TIME_ZERO, event_time=self.TIME_ZERO,
extra_data=orjson.dumps( extra_data={
{ RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm), },
}
).decode(),
) )
# Event type not in SYNCED_BILLING_EVENTS -- should not be included # Event type not in SYNCED_BILLING_EVENTS -- should not be included
RealmAuditLog.objects.create( RealmAuditLog.objects.create(
@@ -888,12 +884,10 @@ class AnalyticsBouncerTest(BouncerTestCase):
         self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
         self.assertEqual(remote_log_entry.backfilled, True)
-        assert remote_log_entry.extra_data is not None
-        self.assertEqual(
-            orjson.loads(remote_log_entry.extra_data), {RealmAuditLog.ROLE_COUNT: user_count}
-        )
+        self.assertEqual(remote_log_entry.extra_data, {RealmAuditLog.ROLE_COUNT: user_count})
         self.assertEqual(remote_log_entry.event_type, RealmAuditLog.USER_REACTIVATED)

-    # This verify that the bouncer is forward-compatible with remote servers using
+    # This verifies that the bouncer is backwards-compatible with remote servers using
     # TextField to store extra_data.
     @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
     @responses.activate
@@ -904,7 +898,6 @@ class AnalyticsBouncerTest(BouncerTestCase):
             request_extra_data: object,
             *,
             expected_extra_data: object = None,
-            expected_extra_data_json: object = None,
             skip_audit_log_check: bool = False,
         ) -> None:
             user = self.example_user("hamlet")
@@ -949,27 +942,21 @@ class AnalyticsBouncerTest(BouncerTestCase):
             self.assertEqual(remote_log_entry.remote_id, log_entry.id)
             self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
             self.assertEqual(remote_log_entry.extra_data, expected_extra_data)
-            self.assertEqual(remote_log_entry.extra_data_json, expected_extra_data_json)

         # Pre-migration extra_data
         verify_request_with_overridden_extra_data(
             request_extra_data=orjson.dumps({"fake_data": 42}).decode(),
-            expected_extra_data=orjson.dumps({"fake_data": 42}).decode(),
-            expected_extra_data_json={"fake_data": 42},
+            expected_extra_data={"fake_data": 42},
         )
-        verify_request_with_overridden_extra_data(
-            request_extra_data=None, expected_extra_data=None, expected_extra_data_json={}
-        )
+        verify_request_with_overridden_extra_data(request_extra_data=None, expected_extra_data={})

         # Post-migration extra_data
         verify_request_with_overridden_extra_data(
             request_extra_data={"fake_data": 42},
-            expected_extra_data=orjson.dumps({"fake_data": 42}).decode(),
-            expected_extra_data_json={"fake_data": 42},
+            expected_extra_data={"fake_data": 42},
         )
         verify_request_with_overridden_extra_data(
             request_extra_data={},
-            expected_extra_data=orjson.dumps({}).decode(),
-            expected_extra_data_json={},
+            expected_extra_data={},
         )
         # Invalid extra_data
         with self.assertLogs(level="WARNING") as m:
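The two phases exercised above differ only in how extra_data travels in the analytics payload. Roughly, and with the surrounding row fields elided (the event_type value below is purely illustrative):

```python
import json

# Pre-migration (TextField) remote server: extra_data is a JSON string or None.
legacy_row = {"event_type": 108, "extra_data": json.dumps({"fake_data": 42})}

# Post-migration (JSONField) remote server: extra_data is already a dict.
migrated_row = {"event_type": 108, "extra_data": {"fake_data": 42}}

assert json.loads(legacy_row["extra_data"]) == migrated_row["extra_data"]
```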

View File

@ -317,7 +317,7 @@ class RealmTest(ZulipTestCase):
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = {"old_subdomain": "zulip", "new_subdomain": "newzulip"} expected_extra_data = {"old_subdomain": "zulip", "new_subdomain": "newzulip"}
self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data)) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
self.assertEqual(realm_audit_log.acting_user, iago) self.assertEqual(realm_audit_log.acting_user, iago)
def test_do_deactivate_realm_clears_scheduled_jobs(self) -> None: def test_do_deactivate_realm_clears_scheduled_jobs(self) -> None:
@ -760,7 +760,7 @@ class RealmTest(ZulipTestCase):
"old_value": Realm.ORG_TYPES["business"]["id"], "old_value": Realm.ORG_TYPES["business"]["id"],
"new_value": Realm.ORG_TYPES["government"]["id"], "new_value": Realm.ORG_TYPES["government"]["id"],
} }
self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data)) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
self.assertEqual(realm_audit_log.acting_user, iago) self.assertEqual(realm_audit_log.acting_user, iago)
self.assertEqual(realm.org_type, Realm.ORG_TYPES["government"]["id"]) self.assertEqual(realm.org_type, Realm.ORG_TYPES["government"]["id"])
@ -782,7 +782,7 @@ class RealmTest(ZulipTestCase):
"old_value": Realm.PLAN_TYPE_SELF_HOSTED, "old_value": Realm.PLAN_TYPE_SELF_HOSTED,
"new_value": Realm.PLAN_TYPE_STANDARD, "new_value": Realm.PLAN_TYPE_STANDARD,
} }
self.assertEqual(realm_audit_log.extra_data, str(expected_extra_data)) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
self.assertEqual(realm_audit_log.acting_user, iago) self.assertEqual(realm_audit_log.acting_user, iago)
self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD) self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX) self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)

View File

@@ -3,7 +3,6 @@ from typing import Optional, Set
 from unittest.mock import patch

 import botocore.exceptions
-import orjson
 from django.conf import settings
 from django.utils.timezone import now as timezone_now
@@ -69,9 +68,7 @@ class RealmExportTest(ZulipTestCase):
         self.assertEqual(audit_log_entry.acting_user_id, admin.id)

         # Test that the file is hosted, and the contents are as expected.
-        extra_data = audit_log_entry.extra_data
-        assert extra_data is not None
-        export_path = orjson.loads(extra_data)["export_path"]
+        export_path = audit_log_entry.extra_data["export_path"]
         assert export_path.startswith("/")
         path_id = export_path[1:]
         self.assertEqual(bucket.Object(path_id).get()["Body"].read(), b"zulip!")
@ -102,9 +99,7 @@ class RealmExportTest(ZulipTestCase):
# Try to delete an export with a `deleted_timestamp` key. # Try to delete an export with a `deleted_timestamp` key.
audit_log_entry.refresh_from_db() audit_log_entry.refresh_from_db()
extra_data = audit_log_entry.extra_data export_data = audit_log_entry.extra_data
assert extra_data is not None
export_data = orjson.loads(extra_data)
self.assertIn("deleted_timestamp", export_data) self.assertIn("deleted_timestamp", export_data)
result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}") result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}")
self.assert_json_error(result, "Export already deleted") self.assert_json_error(result, "Export already deleted")
@ -171,9 +166,7 @@ class RealmExportTest(ZulipTestCase):
self.assertEqual(audit_log_entry.acting_user_id, admin.id) self.assertEqual(audit_log_entry.acting_user_id, admin.id)
# Test that the file is hosted, and the contents are as expected. # Test that the file is hosted, and the contents are as expected.
extra_data = audit_log_entry.extra_data export_path = audit_log_entry.extra_data.get("export_path")
assert extra_data is not None
export_path = orjson.loads(extra_data).get("export_path")
response = self.client_get(export_path) response = self.client_get(export_path)
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertEqual(response.getvalue(), b"zulip!") self.assertEqual(response.getvalue(), b"zulip!")
@ -201,9 +194,7 @@ class RealmExportTest(ZulipTestCase):
# Try to delete an export with a `deleted_timestamp` key. # Try to delete an export with a `deleted_timestamp` key.
audit_log_entry.refresh_from_db() audit_log_entry.refresh_from_db()
extra_data = audit_log_entry.extra_data export_data = audit_log_entry.extra_data
assert extra_data is not None
export_data = orjson.loads(extra_data)
self.assertIn("deleted_timestamp", export_data) self.assertIn("deleted_timestamp", export_data)
result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}") result = self.client_delete(f"/json/export/realm/{audit_log_entry.id}")
self.assert_json_error(result, "Export already deleted") self.assert_json_error(result, "Export already deleted")

View File

@@ -5,7 +5,6 @@ import orjson
 from django.core.exceptions import ValidationError

 from zerver.lib.test_classes import ZulipTestCase
-from zerver.lib.utils import assert_is_not_none
 from zerver.models import RealmAuditLog, RealmFilter, url_template_validator
@@ -275,15 +274,12 @@ class RealmFilterTest(ZulipTestCase):
         def assert_linkifier_audit_logs(expected_id_order: List[int]) -> None:
             """Check if the audit log created orders the linkifiers correctly"""
-            extra_data = orjson.loads(
-                assert_is_not_none(
-                    RealmAuditLog.objects.filter(
-                        acting_user=iago,
-                        event_type=RealmAuditLog.REALM_LINKIFIERS_REORDERED,
-                    )
-                    .latest("event_time")
-                    .extra_data
-                )
-            )
+            extra_data = (
+                RealmAuditLog.objects.filter(
+                    acting_user=iago, event_type=RealmAuditLog.REALM_LINKIFIERS_REORDERED
+                )
+                .latest("event_time")
+                .extra_data
+            )
             audit_logged_ids = [
                 linkifier_dict["id"] for linkifier_dict in extra_data["realm_linkifiers"]

View File

@ -659,13 +659,11 @@ class StreamAdminTest(ZulipTestCase):
).last() ).last()
assert history_public_to_subscribers_log is not None assert history_public_to_subscribers_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "history_public_to_subscribers",
"property": "history_public_to_subscribers", }
}
).decode()
self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data) self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data)
invite_only_log = RealmAuditLog.objects.filter( invite_only_log = RealmAuditLog.objects.filter(
@ -674,13 +672,11 @@ class StreamAdminTest(ZulipTestCase):
).order_by("-id")[1] ).order_by("-id")[1]
assert invite_only_log is not None assert invite_only_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.OLD_VALUE: True, RealmAuditLog.NEW_VALUE: False,
RealmAuditLog.NEW_VALUE: False, "property": "invite_only",
"property": "invite_only", }
}
).decode()
self.assertEqual(invite_only_log.extra_data, expected_extra_data) self.assertEqual(invite_only_log.extra_data, expected_extra_data)
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None) do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
@ -724,13 +720,11 @@ class StreamAdminTest(ZulipTestCase):
).last() ).last()
assert history_public_to_subscribers_log is not None assert history_public_to_subscribers_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.OLD_VALUE: True, RealmAuditLog.NEW_VALUE: False,
RealmAuditLog.NEW_VALUE: False, "property": "history_public_to_subscribers",
"property": "history_public_to_subscribers", }
}
).decode()
self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data) self.assertEqual(history_public_to_subscribers_log.extra_data, expected_extra_data)
invite_only_log = RealmAuditLog.objects.filter( invite_only_log = RealmAuditLog.objects.filter(
@ -739,13 +733,11 @@ class StreamAdminTest(ZulipTestCase):
).order_by("-id")[1] ).order_by("-id")[1]
assert invite_only_log is not None assert invite_only_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "invite_only",
"property": "invite_only", }
}
).decode()
self.assertEqual(invite_only_log.extra_data, expected_extra_data) self.assertEqual(invite_only_log.extra_data, expected_extra_data)
default_stream = self.make_stream("default_stream", realm=realm) default_stream = self.make_stream("default_stream", realm=realm)
@ -844,13 +836,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.OLD_VALUE: True, RealmAuditLog.NEW_VALUE: False,
RealmAuditLog.NEW_VALUE: False, "property": "invite_only",
"property": "invite_only", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_make_stream_private_with_public_history(self) -> None: def test_make_stream_private_with_public_history(self) -> None:
@ -885,13 +875,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "invite_only",
"property": "invite_only", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Convert a private stream with protected history to a private stream # Convert a private stream with protected history to a private stream
@ -926,13 +914,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "history_public_to_subscribers",
"property": "history_public_to_subscribers", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_make_stream_web_public(self) -> None: def test_make_stream_web_public(self) -> None:
@ -1011,13 +997,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "is_web_public",
"property": "is_web_public", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_history_access_for_private_streams(self) -> None: def test_change_history_access_for_private_streams(self) -> None:
@ -1050,13 +1034,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: False,
RealmAuditLog.OLD_VALUE: False, RealmAuditLog.NEW_VALUE: True,
RealmAuditLog.NEW_VALUE: True, "property": "history_public_to_subscribers",
"property": "history_public_to_subscribers", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
params = { params = {
@ -1082,13 +1064,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: True,
RealmAuditLog.OLD_VALUE: True, RealmAuditLog.NEW_VALUE: False,
RealmAuditLog.NEW_VALUE: False, "property": "history_public_to_subscribers",
"property": "history_public_to_subscribers", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_add_and_remove_stream_as_default(self) -> None: def test_add_and_remove_stream_as_default(self) -> None:
@ -1928,13 +1908,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: "Test description",
RealmAuditLog.OLD_VALUE: "Test description", RealmAuditLog.NEW_VALUE: "a multi line description",
RealmAuditLog.NEW_VALUE: "a multi line description", "property": "description",
"property": "description", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Verify that we don't render inline URL previews in this code path. # Verify that we don't render inline URL previews in this code path.
@ -1992,13 +1970,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: Stream.STREAM_POST_POLICY_EVERYONE,
RealmAuditLog.OLD_VALUE: Stream.STREAM_POST_POLICY_EVERYONE, RealmAuditLog.NEW_VALUE: Stream.STREAM_POST_POLICY_ADMINS,
RealmAuditLog.NEW_VALUE: Stream.STREAM_POST_POLICY_ADMINS, "property": "stream_post_policy",
"property": "stream_post_policy", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_post_policy_requires_admin(self) -> None: def test_change_stream_post_policy_requires_admin(self) -> None:
@ -2058,13 +2034,11 @@ class StreamAdminTest(ZulipTestCase):
modified_stream=stream, modified_stream=stream,
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: old_post_policy,
RealmAuditLog.OLD_VALUE: old_post_policy, RealmAuditLog.NEW_VALUE: policy,
RealmAuditLog.NEW_VALUE: policy, "property": "stream_post_policy",
"property": "stream_post_policy", }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_message_retention_days_notifications(self) -> None: def test_change_stream_message_retention_days_notifications(self) -> None:
@ -2092,9 +2066,7 @@ class StreamAdminTest(ZulipTestCase):
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {RealmAuditLog.OLD_VALUE: None, RealmAuditLog.NEW_VALUE: 2}
{RealmAuditLog.OLD_VALUE: None, RealmAuditLog.NEW_VALUE: 2}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Go from 2 days to 8 days # Go from 2 days to 8 days
@ -2115,9 +2087,7 @@ class StreamAdminTest(ZulipTestCase):
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {RealmAuditLog.OLD_VALUE: 2, RealmAuditLog.NEW_VALUE: 8}
{RealmAuditLog.OLD_VALUE: 2, RealmAuditLog.NEW_VALUE: 8}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
# Go from 8 days to realm default (None on stream, forever/-1 on realm) # Go from 8 days to realm default (None on stream, forever/-1 on realm)
@ -2139,12 +2109,10 @@ class StreamAdminTest(ZulipTestCase):
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED
).last() ).last()
assert realm_audit_log is not None assert realm_audit_log is not None
expected_extra_data = orjson.dumps( expected_extra_data = {
{ RealmAuditLog.OLD_VALUE: 8,
RealmAuditLog.OLD_VALUE: 8, RealmAuditLog.NEW_VALUE: None,
RealmAuditLog.NEW_VALUE: None, }
}
).decode()
self.assertEqual(realm_audit_log.extra_data, expected_extra_data) self.assertEqual(realm_audit_log.extra_data, expected_extra_data)
def test_change_stream_message_retention_days(self) -> None: def test_change_stream_message_retention_days(self) -> None:

View File

@@ -1,6 +1,5 @@
 from datetime import timedelta

-import orjson
 from django.conf import settings
 from django.db import transaction
 from django.http import HttpRequest, HttpResponse
@@ -103,9 +102,7 @@ def delete_realm_export(request: HttpRequest, user: UserProfile, export_id: int)
     except RealmAuditLog.DoesNotExist:
         raise JsonableError(_("Invalid data export ID"))

-    export_data = {}
-    if audit_log_entry.extra_data is not None:
-        export_data = orjson.loads(audit_log_entry.extra_data)
+    export_data = audit_log_entry.extra_data
     if export_data.get("deleted_timestamp") is not None:
         raise JsonableError(_("Export already deleted"))
     if export_data.get("export_path") is None:

View File

@@ -1054,22 +1054,20 @@ class DeferredWorker(QueueProcessingWorker):
         output_dir = tempfile.mkdtemp(prefix="zulip-export-")
         export_event = RealmAuditLog.objects.get(id=event["id"])
         user_profile = get_user_profile_by_id(event["user_profile_id"])
-        extra_data = {}
-        if export_event.extra_data is not None:
-            extra_data = orjson.loads(export_event.extra_data)
+        extra_data = export_event.extra_data
         if extra_data.get("started_timestamp") is not None:
             logger.error(
                 "Marking export for realm %s as failed due to retry -- possible OOM during export?",
                 realm.string_id,
             )
             extra_data["failed_timestamp"] = timezone_now().timestamp()
-            export_event.extra_data = orjson.dumps(extra_data).decode()
+            export_event.extra_data = extra_data
             export_event.save(update_fields=["extra_data"])
             notify_realm_export(user_profile)
             return

         extra_data["started_timestamp"] = timezone_now().timestamp()
-        export_event.extra_data = orjson.dumps(extra_data).decode()
+        export_event.extra_data = extra_data
         export_event.save(update_fields=["extra_data"])

         logger.info(
@@ -1089,7 +1087,7 @@ class DeferredWorker(QueueProcessingWorker):
             )
         except Exception:
             extra_data["failed_timestamp"] = timezone_now().timestamp()
-            export_event.extra_data = orjson.dumps(extra_data).decode()
+            export_event.extra_data = extra_data
             export_event.save(update_fields=["extra_data"])
             logging.exception(
                 "Data export for %s failed after %s",
@@ -1104,7 +1102,7 @@ class DeferredWorker(QueueProcessingWorker):

         # Update the extra_data field now that the export is complete.
         extra_data["export_path"] = urllib.parse.urlparse(public_url).path
-        export_event.extra_data = orjson.dumps(extra_data).decode()
+        export_event.extra_data = extra_data
         export_event.save(update_fields=["extra_data"])

         # Send a direct message notification letting the user who
View File

@ -0,0 +1,30 @@
# Generated by Django 4.0.7 on 2022-10-02 17:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("zilencer", "0027_backfill_remote_realmauditlog_extradata_to_json_field"),
]
operations = [
migrations.RemoveField(
model_name="remoterealmauditlog",
name="extra_data",
),
migrations.RemoveField(
model_name="remotezulipserverauditlog",
name="extra_data",
),
migrations.RenameField(
model_name="remoterealmauditlog",
old_name="extra_data_json",
new_name="extra_data",
),
migrations.RenameField(
model_name="remotezulipserverauditlog",
old_name="extra_data_json",
new_name="extra_data",
),
]
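The operation order in this migration matters: RemoveField drops the legacy TextField column first so that RenameField can then move the JSONField into the freed-up extra_data name. The same drop-then-rename pattern for a hypothetical model would look like:

```python
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [("example_app", "0001_initial")]  # hypothetical app and migration

    operations = [
        # Drop the legacy text column first so its name becomes available...
        migrations.RemoveField(model_name="auditlog", name="extra_data"),
        # ...then rename the JSONField into place.
        migrations.RenameField(
            model_name="auditlog", old_name="extra_data_json", new_name="extra_data"
        ),
    ]
```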

View File

@@ -482,7 +482,6 @@ def remote_server_post_analytics(
     remote_realm_audit_logs = []
     for row in realmauditlog_rows:
         extra_data = {}
-        extra_data_str = None
         # Remote servers that do support JSONField will pass extra_data
         # as a dict. Otherwise, extra_data will be either a string or None.
         if isinstance(row["extra_data"], str):
@@ -495,13 +494,12 @@ def remote_server_post_analytics(
                 extra_data = orjson.loads(row["extra_data"])
             except orjson.JSONDecodeError:
                 raise JsonableError(_("Malformed audit log data"))
-            extra_data_str = row["extra_data"]
         elif row["extra_data"] is not None:
+            # This is guaranteed to succeed because row["extra_data"] would be parsed
+            # from JSON with our json validator and validated with check_dict if it
+            # is not a str or None.
             assert isinstance(row["extra_data"], dict)
             extra_data = row["extra_data"]
-            # This is guaranteed to succeed because row["extra_data"] would be parsed
-            # from JSON with our json validator if it is a dict.
-            extra_data_str = orjson.dumps(row["extra_data"]).decode()

         remote_realm_audit_logs.append(
             RemoteRealmAuditLog(
                 realm_id=row["realm"],
@@ -511,8 +509,7 @@ def remote_server_post_analytics(
                     row["event_time"], tz=datetime.timezone.utc
                 ),
                 backfilled=row["backfilled"],
-                extra_data=extra_data_str,
-                extra_data_json=extra_data,
+                extra_data=extra_data,
                 event_type=row["event_type"],
             )
         )
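Taken together, this loop accepts extra_data in whichever form the remote server sends and stores a dict on RemoteRealmAuditLog. A standalone sketch of that normalization, using stdlib json in place of orjson (note that the gap between the first two hunks above contains additional string-handling lines not shown here):

```python
import json
from typing import Any, Dict, Union


def normalize_extra_data(value: Union[str, Dict[str, Any], None]) -> Dict[str, Any]:
    # Pre-migration servers send a JSON string or None; migrated servers send a dict.
    if value is None:
        return {}
    if isinstance(value, str):
        try:
            return json.loads(value)
        except json.JSONDecodeError:
            raise ValueError("Malformed audit log data")
    assert isinstance(value, dict)
    return value


assert normalize_extra_data(None) == {}
assert normalize_extra_data('{"fake_data": 42}') == {"fake_data": 42}
assert normalize_extra_data({"fake_data": 42}) == {"fake_data": 42}
```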