mirror of https://github.com/zulip/zulip.git
realm_playgrounds: Populate url_template from legacy url_prefix.

We populate url_template by escaping "{" and "}" in the legacy url_prefix and appending "{code}" to the end of it.

Signed-off-by: Zixuan James Li <p359101898@gmail.com>
This commit is contained in:
parent 641f60305d
commit 9bc13bc93d
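Before the diff itself, here is a minimal sketch (not part of the commit) of the conversion rule the new 0463_backfill_realmplayground_url_template migration applies: literal braces in the legacy prefix are percent-escaped and a single "{code}" variable is appended. The helper name convert is purely illustrative; the sample inputs and outputs mirror cases exercised by the migration test further below.

escape_table = str.maketrans({"{": "%7B", "}": "%7D"})


def convert(url_prefix: str) -> str:
    # Escape literal braces so they are not read as RFC 6570 template
    # syntax, then append the single {code} variable at the end.
    return url_prefix.translate(escape_table) + "{code}"


assert convert("http://example.com/") == "http://example.com/{code}"
assert convert("https://example.com/{val}") == "https://example.com/%7Bval%7D{code}"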
@@ -0,0 +1,48 @@
import uri_template
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps


def transform_to_url_template_syntax(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    realm_playground_model = apps.get_model("zerver", "RealmPlayground")

    # Only the null entries are realm playgrounds that exist prior to the
    # addition of url_template
    realm_playgrounds = realm_playground_model.objects.filter(url_template__isnull=True)
    escape_table = str.maketrans(
        {
            "{": "%7B",
            "}": "%7D",
        }
    )
    for realm_playground in realm_playgrounds:
        # It is originally expected to have the code appended to the end of
        # url_prefix. Therefore, making a template having only one variable that
        # expands at the end of it should suffice to replicate the old behavior
        # for the legacy prefix strings. Note that we use simple expansion
        # instead of reserved expansion here (i.e.: {code} instead of {+code})
        # because the code segment is supposed to be URL encoded.
        converted_template = realm_playground.url_prefix.translate(escape_table) + "{code}"
        if not uri_template.validate(converted_template):
            raise RuntimeError(
                f'Failed to convert url prefix "{realm_playground.url_prefix}". The converted template "{converted_template}" is invalid.'
            )
        realm_playground.url_template = converted_template
    realm_playground_model.objects.bulk_update(realm_playgrounds, fields=["url_template"])


class Migration(migrations.Migration):
    dependencies = [
        ("zerver", "0462_realmplayground_url_template"),
    ]

    operations = [
        migrations.RunPython(
            transform_to_url_template_syntax,
            reverse_code=migrations.RunPython.noop,
            elidable=True,
        ),
    ]
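The comment in the migration above motivates two details worth spelling out: raw braces must be escaped because an unescaped "{" would be read as RFC 6570 template syntax rather than as a literal character, and simple expansion ({code}) is chosen over reserved expansion ({+code}) so that the expanded code snippet stays percent-encoded. A small check of the first point, a sketch using only the uri_template.validate call the migration already relies on, with template strings taken from the test below:

import uri_template

# Templates produced by the conversion are valid RFC 6570 templates; the
# migration raises RuntimeError if validate() rejects a converted string.
assert uri_template.validate("https://example.com/%7Bval%7D{code}")
assert uri_template.validate("https://example.com/{code}")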
@@ -4,12 +4,8 @@
# You can also read
# https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/
# to get a tutorial on the framework that inspired this feature.
from decimal import Decimal
from typing import Optional

import orjson
from django.db.migrations.state import StateApps
from django.utils.timezone import now as timezone_now

from zerver.lib.test_classes import MigrationsTestCase
from zerver.lib.test_helpers import use_db_models
@@ -29,239 +25,57 @@ from zerver.lib.test_helpers import use_db_models
# As a result, we generally mark these tests as skipped once they have
# been tested for a migration being merged.

USER_ACTIVATED = 102
USER_FULL_NAME_CHANGED = 124
REALM_DISCOUNT_CHANGED = 209
OLD_VALUE = "1"
NEW_VALUE = "2"


class RealmAuditLogExtraData(MigrationsTestCase):
    migrate_from = "0459_remove_invalid_characters_from_user_group_name"
    migrate_to = "0460_backfill_realmauditlog_extradata_to_json_field"

    full_name_change_log_id: Optional[int] = None
    valid_json_log_id: Optional[int] = None
    str_json_log_id: Optional[int] = None
    # The BATCH_SIZE is defined as 5000 in
    # backfill_realmauditlog_extradata_to_json_field, this later is used to test
    # if batching works properly.
    DATA_SIZE = 10005
    expected_console_output = """Audit log entry 50003 with event type REALM_DISCOUNT_CHANGED is skipped.
The data consistency needs to be manually checked.
Discount data to remove after the upcoming JSONField migration:
{'old_discount': Decimal('25.0000'), 'new_discount': Decimal('50')}
Discount data to keep after the upcoming JSONField migration:
{}

Audit log entry 50004 with event type REALM_DISCOUNT_CHANGED is skipped.
The data consistency needs to be manually checked.
Discount data to remove after the upcoming JSONField migration:
{'old_discount': Decimal('25.0000'), 'new_discount': Decimal('50')}
Discount data to keep after the upcoming JSONField migration:
{'new_discount': '50', 'old_discount': '25.0000'}

Audit log entry with id 50001 has extra_data_json been inconsistently overwritten.
The old value is:
{"corrupted":"foo"}
The new value is:
{"key":"value"}

Audit log entry with id 50002 has extra_data_json been inconsistently overwritten.
The old value is:
{"corrupted":"bar"}
The new value is:
{"key":"value"}

"""
class RealmPlaygroundURLPrefix(MigrationsTestCase):
    migrate_from = "0462_realmplayground_url_template"
    migrate_to = "0463_backfill_realmplayground_url_template"

    @use_db_models
    def setUpBeforeMigration(self, apps: StateApps) -> None:
        Realm = apps.get_model("zerver", "Realm")
        RealmAuditLog = apps.get_model("zerver", "RealmAuditLog")
        event_time = timezone_now()
        realm = Realm.objects.get(string_id="zulip")
        iago = self.example_user("iago")
        RealmPlayground = apps.get_model("zerver", "RealmPlayground")

        full_name_change_log = RealmAuditLog(
            realm=realm,
            event_type=USER_FULL_NAME_CHANGED,
            event_time=event_time,
            extra_data="foo",
        )

        new_full_name_change_log = RealmAuditLog(
            realm=realm,
            event_type=USER_FULL_NAME_CHANGED,
            event_time=event_time,
            extra_data="foo",
            extra_data_json={OLD_VALUE: "foo", NEW_VALUE: "bar"},
        )

        valid_json_log = RealmAuditLog(
            realm=realm,
            event_type=USER_ACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({"key": "value"}).decode(),
        )

        str_json_log = RealmAuditLog(
            realm=realm,
            event_type=USER_ACTIVATED,
            event_time=event_time,
            extra_data=str({"key": "value"}),
        )

        self.backfilled_inconsistent_log_id = RealmAuditLog.objects.create(
            realm=realm,
            event_type=USER_ACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({"key": "baz"}).decode(),
            extra_data_json={
                "key": "baz",
                "inconsistent_old_extra_data": orjson.dumps({"key": "baz"}).decode(),
                "inconsistent_old_extra_data_json": {"key": "value corrupted"},
            },
        ).id

        # The following audit log entries have preset ids because we use
        # them to assert the generated log output that is defined before
        # the test case is run.
        inconsistent_json_log = RealmAuditLog(
            id=50001,
            realm=realm,
            event_type=USER_ACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({"key": "value"}).decode(),
            extra_data_json={"corrupted": "foo"},
        )

        inconsistent_str_json_log = RealmAuditLog(
            id=50002,
            realm=realm,
            event_type=USER_ACTIVATED,
            event_time=event_time,
            extra_data=str({"key": "value"}),
            extra_data_json={"corrupted": "bar"},
        )

        self.old_decimal_log_id = RealmAuditLog.objects.create(
            id=50003,
            realm=realm,
            event_type=REALM_DISCOUNT_CHANGED,
            event_time=event_time,
            extra_data=str({"old_discount": Decimal("25.0000"), "new_discount": Decimal("50")}),
        ).id

        self.new_decimal_log_id = RealmAuditLog.objects.create(
            id=50004,
            realm=realm,
            event_type=REALM_DISCOUNT_CHANGED,
            event_time=event_time,
            extra_data=str({"old_discount": Decimal("25.0000"), "new_discount": Decimal("50")}),
            extra_data_json={"old_discount": Decimal("25.0000"), "new_discount": Decimal("50")},
        ).id

        RealmAuditLog.objects.bulk_create(
            [
                full_name_change_log,
                new_full_name_change_log,
                valid_json_log,
                str_json_log,
                inconsistent_json_log,
                inconsistent_str_json_log,
        urls = [
            "http://example.com/",
            "https://example.com/{",
            "https://example.com/{}",
            "https://example.com/{val}",
            "https://example.com/{code}",
        ]
        )
        self.full_name_change_log_id = full_name_change_log.id
        self.new_full_name_change_log_id = new_full_name_change_log.id
        self.valid_json_log_id = valid_json_log.id
        self.str_json_log_id = str_json_log.id
        self.realm_playground_ids = []

        other_logs = []
        for i in range(self.DATA_SIZE):
            other_logs.append(
                RealmAuditLog(
                    realm=realm,
                    event_type=USER_ACTIVATED,
                    event_time=event_time,
                    extra_data=orjson.dumps({"data": i}).decode(),
        for index, url in enumerate(urls):
            self.realm_playground_ids.append(
                RealmPlayground.objects.create(
                    realm=iago.realm,
                    name=f"Playground {index}",
                    pygments_language="Python",
                    url_prefix=url,
                    url_template=None,
                ).id
            )
        self.realm_playground_ids.append(
            RealmPlayground.objects.create(
                realm=iago.realm,
                name="Existing Playground",
                pygments_language="Python",
                url_prefix="https://example.com",
                url_template="https://example.com/{code}",
            ).id
        )
        self.other_logs_id = [
            audit_log.id for audit_log in RealmAuditLog.objects.bulk_create(other_logs)

    def test_converted_url_templates(self) -> None:
        RealmPlayground = self.apps.get_model("zerver", "RealmPlayground")

        expected_urls = [
            "http://example.com/{code}",
            "https://example.com/%7B{code}",
            "https://example.com/%7B%7D{code}",
            "https://example.com/%7Bval%7D{code}",
            "https://example.com/%7Bcode%7D{code}",
            "https://example.com/{code}",
        ]

        # No new audit log entry should have extra_data_json populated as of
        # now except for the entries created with non-default values.
        self.assert_length(
            RealmAuditLog.objects.filter(
                event_time__gte=event_time,
            ).exclude(
                extra_data_json={},
            ),
            5,
        )

    def test_realmaudit_log_extra_data_to_json(self) -> None:
        RealmAuditLog = self.apps.get_model("zerver", "RealmAuditLog")

        self.assertIsNotNone(self.full_name_change_log_id)
        self.assertIsNotNone(self.valid_json_log_id)
        self.assertIsNotNone(self.str_json_log_id)

        full_name_change_log = RealmAuditLog.objects.filter(id=self.full_name_change_log_id).first()
        new_full_name_change_log = RealmAuditLog.objects.filter(
            id=self.new_full_name_change_log_id
        ).first()
        valid_json_log = RealmAuditLog.objects.filter(id=self.valid_json_log_id).first()
        str_json_log = RealmAuditLog.objects.filter(id=self.str_json_log_id).first()

        self.assertIsNotNone(full_name_change_log)
        self.assertEqual(full_name_change_log.extra_data_json, {"1": "foo", "2": None})

        self.assertIsNotNone(new_full_name_change_log)
        self.assertEqual(new_full_name_change_log.extra_data_json, {"1": "foo", "2": "bar"})

        self.assertIsNotNone(valid_json_log)
        self.assertEqual(valid_json_log.extra_data_json, {"key": "value"})

        self.assertIsNotNone(str_json_log)
        self.assertEqual(str_json_log.extra_data_json, {"key": "value"})

        other_logs = RealmAuditLog.objects.filter(id__in=self.other_logs_id).order_by("id")
        self.assertIsNotNone(other_logs)
        self.assert_length(other_logs, self.DATA_SIZE)
        for index, audit_log in enumerate(other_logs):
            self.assertEqual(audit_log.extra_data_json, {"data": index})

        inconsistent_json_log = RealmAuditLog.objects.get(
            extra_data_json__inconsistent_old_extra_data=orjson.dumps({"key": "value"}).decode()
        )
        self.assertIsNotNone(inconsistent_json_log)
        self.assertEqual(inconsistent_json_log.id, 50001)
        self.assertEqual(
            inconsistent_json_log.extra_data_json["inconsistent_old_extra_data_json"],
            {"corrupted": "foo"},
        )

        inconsistent_str_json_log = RealmAuditLog.objects.get(
            extra_data_json__inconsistent_old_extra_data=str({"key": "value"})
        )
        self.assertIsNotNone(inconsistent_str_json_log)
        self.assertEqual(inconsistent_str_json_log.id, 50002)
        self.assertEqual(
            inconsistent_str_json_log.extra_data_json["inconsistent_old_extra_data_json"],
            {"corrupted": "bar"},
        )

        backfilled_inconsistent_log = RealmAuditLog.objects.get(
            id=self.backfilled_inconsistent_log_id
        )
        self.assertIsNotNone(backfilled_inconsistent_log)
        self.assertEqual(
            backfilled_inconsistent_log.extra_data_json,
            {
                "key": "baz",
                "inconsistent_old_extra_data": orjson.dumps({"key": "baz"}).decode(),
                "inconsistent_old_extra_data_json": {"key": "value corrupted"},
            },
        )
        for realm_playground_id, expected_url in zip(self.realm_playground_ids, expected_urls):
            realm_playground = RealmPlayground.objects.get(id=realm_playground_id)
            self.assertEqual(realm_playground.url_template, expected_url)