diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 4382669ec3..a541423270 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,10 +1,10 @@
-**Testing Plan:**
+**Testing plan:**
-**GIFs or Screenshots:**
diff --git a/.github/workflows/cancel-previous-runs.yml b/.github/workflows/cancel-previous-runs.yml
index 6d05dfbff9..c374fb573a 100644
--- a/.github/workflows/cancel-previous-runs.yml
+++ b/.github/workflows/cancel-previous-runs.yml
@@ -1,4 +1,4 @@
-name: Cancel Previous Runs
+name: Cancel previous runs
on: [push, pull_request]
defaults:
@@ -7,7 +7,7 @@ defaults:
jobs:
cancel:
- name: Cancel Previous Runs
+ name: Cancel previous runs
runs-on: ubuntu-latest
timeout-minutes: 3
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index c650610997..9096d87853 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -1,4 +1,4 @@
-name: "Code Scanning"
+name: "Code scanning"
on: [push, pull_request]
diff --git a/.github/workflows/production-suite.yml b/.github/workflows/production-suite.yml
index 1cf14c8140..f97ded2aca 100644
--- a/.github/workflows/production-suite.yml
+++ b/.github/workflows/production-suite.yml
@@ -1,4 +1,4 @@
-name: Zulip Production Suite
+name: Zulip production suite
on:
push:
@@ -30,7 +30,7 @@ defaults:
jobs:
production_build:
- name: Bionic Production Build
+ name: Bionic production build
runs-on: ubuntu-latest
# This docker image was created by a generated Dockerfile at:
@@ -104,12 +104,12 @@ jobs:
matrix:
include:
- docker_image: mepriyank/actions:bionic
- name: Bionic Production Install
+ name: Bionic production install
is_bionic: true
os: bionic
- docker_image: mepriyank/actions:focal
- name: Focal Production Install
+ name: Focal production install
is_focal: true
os: focal
diff --git a/.github/workflows/zulip-ci.yml b/.github/workflows/zulip-ci.yml
index c3e1236e4b..1f63c69d25 100644
--- a/.github/workflows/zulip-ci.yml
+++ b/.github/workflows/zulip-ci.yml
@@ -37,7 +37,7 @@ jobs:
# GitHub Actions sets HOME to /github/home which causes
# problem later in provison and frontend test that runs
# tools/setup/postgres-init-dev-db because of the .pgpass
- # location. Postgresql (psql) expects .pgpass to be at
+ # location. PostgreSQL (psql) expects .pgpass to be at
# /home/github/.pgpass and setting home to `/home/github/`
# ensures it written there because we write it to ~/.pgpass.
HOME: /home/github/
@@ -142,7 +142,7 @@ jobs:
# see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io."
- - name: Store puppeteer artifacts
+ - name: Store Puppeteer artifacts
if: ${{ matrix.include_frontend_tests }}
uses: actions/upload-artifact@v2
with:
@@ -153,6 +153,6 @@ jobs:
if: ${{ matrix.is_focal }}
run: mispipe "tools/ci/setup-backend" ts
# TODO: We need to port the notify_failure step from CircleCI
- # config, however, it might be the case that GitHub Notifications
- # make this unnesscary. More details on settings to configure it:
+ # config, however, it might be the case that GitHub notifications
+ # make this unnecessary. More details on settings to configure it:
# https://help.github.com/en/github/managing-subscriptions-and-notifications-on-github/configuring-notifications#github-actions-notification-options
diff --git a/Dockerfile-postgresql b/Dockerfile-postgresql
index 6cd6c7abd0..653166fac7 100644
--- a/Dockerfile-postgresql
+++ b/Dockerfile-postgresql
@@ -5,7 +5,7 @@
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.
-# Install hunspell, zulip stop words, and run zulip database
+# Install hunspell, Zulip stop words, and run Zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en
diff --git a/README.md b/README.md
index f2a040d68f..1f3703a403 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ over 500 contributors merging over 500 commits a month, Zulip is also the
largest and fastest growing open source group chat project.
[![CircleCI branch](https://img.shields.io/circleci/project/github/zulip/zulip/master.svg)](https://circleci.com/gh/zulip/zulip/tree/master)
-[![Coverage Status](https://img.shields.io/codecov/c/github/zulip/zulip/master.svg)](https://codecov.io/gh/zulip/zulip/branch/master)
+[![coverage status](https://img.shields.io/codecov/c/github/zulip/zulip/master.svg)](https://codecov.io/gh/zulip/zulip/branch/master)
[![Mypy coverage](https://img.shields.io/badge/mypy-100%25-green.svg)][mypy-coverage]
[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![GitHub release](https://img.shields.io/github/release/zulip/zulip.svg)](https://github.com/zulip/zulip/releases/latest)
diff --git a/SECURITY.md b/SECURITY.md
index cc83b959ac..4bda8220ae 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -1,9 +1,9 @@
-# Security Policy
+# Security policy
Security announcements are sent to zulip-announce@googlegroups.com,
so you should subscribe if you are running Zulip in production.
-## Reporting a Vulnerability
+## Reporting a vulnerability
We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.
@@ -18,7 +18,7 @@ Our [security
model](https://zulip.readthedocs.io/en/latest/production/security-model.html)
document may be a helpful resource.
-## Supported Versions
+## Supported versions
Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.
diff --git a/analytics/lib/counts.py b/analytics/lib/counts.py
index 7b85558ab1..c885e3b57e 100644
--- a/analytics/lib/counts.py
+++ b/analytics/lib/counts.py
@@ -598,7 +598,7 @@ def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [
- # Messages Sent stats
+ # Messages sent stats
# Stats that count the number of messages sent in various ways.
# These are also the set of stats that read from the Message table.
@@ -617,7 +617,7 @@ def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
sql_data_collector(StreamCount, count_message_by_stream_query(realm),
(UserProfile, 'is_bot')), CountStat.DAY),
- # Number of Users stats
+ # Number of users stats
# Stats that count the number of active users in the UserProfile.is_active sense.
# 'active_users_audit:is_bot:day' is the canonical record of which users were
@@ -658,7 +658,7 @@ def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
LoggingCountStat('messages_read::hour', UserCount, CountStat.HOUR),
LoggingCountStat('messages_read_interactions::hour', UserCount, CountStat.HOUR),
- # User Activity stats
+ # User activity stats
# Stats that measure user activity in the UserActivityInterval sense.
CountStat('1day_actives::day',
diff --git a/analytics/tests/test_views.py b/analytics/tests/test_views.py
index 8445a4a5b6..ca12eb8381 100644
--- a/analytics/tests/test_views.py
+++ b/analytics/tests/test_views.py
@@ -437,7 +437,7 @@ class TestSupportEndpoint(ZulipTestCase):
zulip_realm = get_realm("zulip")
self.assert_in_success_response([f'',
- '',
+ '',
'',
'input type="number" name="discount" value="None"',
'',
@@ -449,7 +449,7 @@ class TestSupportEndpoint(ZulipTestCase):
lear_realm = get_realm("lear")
self.assert_in_success_response([f'',
- '',
+ '',
'',
'input type="number" name="discount" value="None"',
'',
diff --git a/analytics/views.py b/analytics/views.py
index 4f31e051c6..2174474549 100644
--- a/analytics/views.py
+++ b/analytics/views.py
@@ -172,14 +172,14 @@ def get_chart_data_for_remote_realm(
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
- return render_stats(request, '/installation', 'Installation', True)
+ return render_stats(request, '/installation', 'installation', True)
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
assert settings.ZILENCER_ENABLED
server = RemoteZulipServer.objects.get(id=remote_server_id)
return render_stats(request, f'/remote/{server.id}/installation',
- f'remote Installation {server.hostname}', True, True)
+ f'remote installation {server.hostname}', True, True)
@require_server_admin_api
@has_request_variables
@@ -787,9 +787,9 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
realm_minutes[string_id] = realm_duration.total_seconds() / 60
- output += f"\nTotal Duration: {total_duration}\n"
- output += f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}\n"
- output += f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
+ output += f"\nTotal duration: {total_duration}\n"
+ output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
+ output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = mark_safe('<pre>' + output + '</pre>')
return content, realm_minutes
@@ -1328,7 +1328,7 @@ def raw_user_activity_table(records: List[QuerySet]) -> str:
]
rows = list(map(row, records))
- title = 'Raw Data'
+ title = 'Raw data'
return make_table(title, cols, rows)
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, Any]]:
@@ -1473,7 +1473,7 @@ def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
'count',
]
- title = 'User Activity'
+ title = 'User activity'
return make_table(title, cols, rows)
def realm_user_summary_table(all_records: List[QuerySet],
diff --git a/docs/development/overview.md b/docs/development/overview.md
index 3555219a1b..64716ea5a6 100644
--- a/docs/development/overview.md
+++ b/docs/development/overview.md
@@ -39,7 +39,7 @@ method][install-vagrant].
If you have a very slow network connection, however, you may want to
avoid using Vagrant (which involves downloading an Ubuntu virtual
-machine or Linux Container) and either
+machine or Linux container) and either
[install directly][install-direct] (recommended), or use
[the manual install process][install-generic] instead. These options
only support Linux.
@@ -71,7 +71,7 @@ need to.
Once you've installed the Zulip development environment, you'll want
to read these documents to learn how to use it:
-* [Using the Development Environment][using-dev-env]
+* [Using the development environment][using-dev-env]
* [Testing][testing] (and [Configuring CI][ci])
And if you've set up the Zulip development environment on a remote
diff --git a/docs/development/remote.md b/docs/development/remote.md
index 622d17d0fe..ee5acbba0e 100644
--- a/docs/development/remote.md
+++ b/docs/development/remote.md
@@ -254,7 +254,7 @@ Now your workspace should look similar this:
Next, read the following to learn more about developing for Zulip:
* [Git & GitHub Guide][rtd-git-guide]
-* [Using the Development Environment][rtd-using-dev-env]
+* [Using the development environment][rtd-using-dev-env]
* [Testing][rtd-testing]
[install-direct]: ../development/setup-advanced.html#installing-directly-on-ubuntu-debian-centos-or-fedora
diff --git a/docs/development/request-remote.md b/docs/development/request-remote.md
index 3f6750cdf4..1e3cf0e8b5 100644
--- a/docs/development/request-remote.md
+++ b/docs/development/request-remote.md
@@ -73,7 +73,7 @@ Once you've confirmed you can connect to your remote server, take a look at:
Next, read the following to learn more about developing for Zulip:
-* [Using the Development Environment](../development/using.md)
+* [Using the development environment](../development/using.md)
* [Testing](../testing/testing.md)
[github-join]: https://github.com/join
diff --git a/docs/development/setup-advanced.md b/docs/development/setup-advanced.md
index 76239e48c6..2cb9eb1190 100644
--- a/docs/development/setup-advanced.md
+++ b/docs/development/setup-advanced.md
@@ -2,19 +2,19 @@
Contents:
-* [Installing directly on Ubuntu, Debian, Centos, or Fedora](#installing-directly-on-ubuntu-debian-centos-or-fedora)
+* [Installing directly on Ubuntu, Debian, CentOS, or Fedora](#installing-directly-on-ubuntu-debian-centos-or-fedora)
* [Installing directly on Windows 10](#installing-directly-on-windows-10-experimental)
* [Installing manually on other Linux/UNIX](#installing-manually-on-unix)
* [Installing directly on cloud9](#installing-on-cloud9)
-## Installing directly on Ubuntu, Debian, Centos, or Fedora
+## Installing directly on Ubuntu, Debian, CentOS, or Fedora
If you'd like to install a Zulip development environment on a computer
that's running one of:
* Ubuntu 20.04 Focal, 18.04 Bionic
* Debian 10 Buster
-* Centos 7 (beta)
+* CentOS 7 (beta)
* Fedora 29 (beta)
* RHEL 7 (beta)
@@ -279,14 +279,14 @@ proxy in the environment as follows:
yarn config set https-proxy http://proxy_host:port
```
-## Installing on cloud9
+## Installing on Cloud9
AWS Cloud9 is a cloud-based integrated development environment (IDE)
that lets you write, run, and debug your code with just a browser. It
includes a code editor, debugger, and terminal.
This section documents how to set up the Zulip development environment
-in a cloud9 workspace. If you don't have an existing cloud9 account,
+in a Cloud9 workspace. If you don't have an existing Cloud9 account,
you can sign up [here](https://aws.amazon.com/cloud9/).
* Create a Workspace, and select the blank template.
@@ -295,10 +295,10 @@ you can sign up [here](https://aws.amazon.com/cloud9/).
Free Tier).
* Clone the zulip repo: `git clone --config pull.rebase
https://github.com//zulip.git`
-* Restart rabbitmq-server since its broken on cloud9: `sudo service
+* Restart rabbitmq-server since it's broken on Cloud9: `sudo service
rabbitmq-server restart`.
* And run provision `cd zulip && ./tools/provision`, once this is done.
-* Activate the zulip virtual environment by `source
+* Activate the Zulip virtual environment by `source
/srv/zulip-py3-venv/bin/activate` or by opening a new terminal.
#### Install zulip-cloud9
@@ -321,7 +321,7 @@ you need to start a new terminal.
Your development server would be running at
`https://-.c9users.io` on port 8080. You
-dont need to add `:8080` to your url, since the cloud9 proxy should
+don't need to add `:8080` to your URL, since the Cloud9 proxy should
automatically forward the connection. You might want to visit
[zulip-cloud9 repo](https://github.com/cPhost/zulip-cloud9) and it's
[wiki](https://github.com/cPhost/zulip-cloud9/wiki) for more info on
diff --git a/docs/development/setup-vagrant.md b/docs/development/setup-vagrant.md
index a65ebaf053..483a701440 100644
--- a/docs/development/setup-vagrant.md
+++ b/docs/development/setup-vagrant.md
@@ -65,7 +65,7 @@ to GitHub working on your machine.
Follow our [Git guide][set-up-git] in order to install Git, set up a
GitHub account, create an SSH key to access code on GitHub
-efficiently, etc. Be sure to create an ssh key and add it to your
+efficiently, etc. Be sure to create an SSH key and add it to your
GitHub account using
[these instructions](https://help.github.com/en/articles/generating-an-ssh-key).
@@ -83,8 +83,8 @@ Jump to:
1. Install [Vagrant][vagrant-dl] (latest).
2. Install [VirtualBox][vbox-dl] (latest).
-(For a non-free option, but better performance, you can also use [VMWare
-Fusion][vmware-fusion-dl] with the [VMWare Fusion Vagrant
+(For a non-free option, but better performance, you can also use [VMware
+Fusion][vmware-fusion-dl] with the [VMware Fusion Vagrant
plugin][vagrant-vmware-fusion-dl] or [Parallels Desktop][parallels-desktop-dl] as
a provider for Vagrant.)
@@ -252,7 +252,7 @@ git remote add -f upstream https://github.com/zulip/zulip.git
This will create a 'zulip' directory and download the Zulip code into it.
-Don't forget to replace YOURUSERNAME with your git username. You will see
+Don't forget to replace YOURUSERNAME with your Git username. You will see
something like:
```
@@ -534,7 +534,7 @@ $ ./tools/run-dev.py
Next, read the following to learn more about developing for Zulip:
* [Git & GitHub Guide][rtd-git-guide]
-* [Using the Development Environment][rtd-using-dev-env]
+* [Using the development environment][rtd-using-dev-env]
* [Testing][rtd-testing] (and [Configuring CI][ci] to
run the full test suite against any branches you push to your fork,
which can help you optimize your development workflow).
@@ -803,7 +803,7 @@ that failed. Once you've resolved the problem, you can rerun
`tools/provision` to proceed; the provisioning system is designed
to recover well from failures.
-The zulip provisioning system is generally highly reliable; the most common
+The Zulip provisioning system is generally highly reliable; the most common
cause of issues here is a poor network connection (or one where you need a
proxy to access the Internet and haven't [configured the development
environment to use it](#specifying-a-proxy).
@@ -985,7 +985,7 @@ connect to your development server.
### Customizing CPU and RAM allocation
When running Vagrant using a VM-based provider such as VirtualBox or
-VMWare Fusion, CPU and RAM resources must be explicitly allocated to
+VMware Fusion, CPU and RAM resources must be explicitly allocated to
the guest system (with Docker and other container-based Vagrant
providers, explicit allocation is unnecessary and the settings
described here are ignored).
diff --git a/docs/documentation/overview.md b/docs/documentation/overview.md
index 2eab8d8bb9..828e1051a7 100644
--- a/docs/documentation/overview.md
+++ b/docs/documentation/overview.md
@@ -34,7 +34,7 @@ Zulip servers. These docs are written in
[Commonmark Markdown](https://commonmark.org/) with a small bit of rST.
We've chosen Markdown because it is
[easy to write](https://commonmark.org/help/). The source for Zulip's
-developer documentation is at `docs/` in the Zulip git repository, and
+developer documentation is at `docs/` in the Zulip Git repository, and
they are served in production at
[zulip.readthedocs.io](https://zulip.readthedocs.io/en/latest/).
@@ -129,7 +129,7 @@ payload verification. Note that this test does not check for broken
links (those are checked by `test-help-documentation`).
* `tools/test-help-documentation` checks `/help/`, `/api/`,
- `/integrations/`, and the Core website ("portico") documentation for
+ `/integrations/`, and the core website ("portico") documentation for
broken links. Note that the "portico" documentation check has a
manually maintained whitelist of pages, so if you add a new page to
this site, you will need to edit `PorticoDocumentationSpider` to add it.
diff --git a/docs/documentation/user.md b/docs/documentation/user.md
index 0ddc26a928..d9a7beefcd 100644
--- a/docs/documentation/user.md
+++ b/docs/documentation/user.md
@@ -11,7 +11,7 @@ The feature articles serve a few different purposes:
* Feature discovery, for someone browsing the `/help` page, and looking at
the set of titles.
* Public documentation of our featureset, for someone googling "can zulip do .."
-* Canned responses to support questions; if someone emails a zulip admin
+* Canned responses to support questions; if someone emails a Zulip admin
asking "how do I change my name", they can reply with a link to the doc.
* Feature explanations for new Zulip users and admins, especially for
organization settings.
@@ -239,9 +239,9 @@ languages in API docs, etc. To create a tab switcher, write:
{start_tabs}
{tab|desktop-web}
- # First Tab's content
+ # First tab's content
{tab|ios}
- # Second Tab's content
+ # Second tab's content
{tab|android}
# Third tab's content
{end_tabs}
diff --git a/docs/git/overview.md b/docs/git/overview.md
index c06d255436..b7a935149f 100644
--- a/docs/git/overview.md
+++ b/docs/git/overview.md
@@ -51,8 +51,8 @@ Finally, install the [Zulip developer environment][zulip-rtd-dev-overview], and
***
The following sections will help you be awesome with Zulip and Git/GitHub in a
-rebased-based workflow. Read through it if you're new to git, to a rebase-based
-git workflow, or if you'd like a git refresher.
+rebase-based workflow. Read through it if you're new to Git, to a rebase-based
+Git workflow, or if you'd like a Git refresher.
[gitbook-rebase]: https://git-scm.com/book/en/v2/Git-Branching-Rebasing
[github-rebase-pr]: https://github.com/edx/edx-platform/wiki/How-to-Rebase-a-Pull-Request
diff --git a/docs/git/setup.md b/docs/git/setup.md
index 632e7c77f9..8fd57e572b 100644
--- a/docs/git/setup.md
+++ b/docs/git/setup.md
@@ -14,7 +14,7 @@ administrator][git-bash-admin] at all times.**
You'll also need a GitHub account, which you can sign up for
[here][github-join].
-We highly recommend you create an ssh key if you don't already have
+We highly recommend you create an SSH key if you don't already have
one and [add it to your GitHub account][github-help-add-ssh-key]. If
you don't, you'll have to type your GitHub username and password every
time you interact with GitHub, which is usually several times a day.
@@ -44,7 +44,7 @@ If you don't already have one installed, here are some suggestions:
- Windows: [SourceTree][gitgui-sourcetree]
If you like working on the command line, but want better visualization and
-navigation of your git repo, try [Tig][tig], a cross-platform ncurses-based
+navigation of your Git repo, try [Tig][tig], a cross-platform ncurses-based
text-mode interface to Git.
And, if none of the above are to your liking, try [one of these][gitbook-guis].
diff --git a/docs/git/terminology.md b/docs/git/terminology.md
index d2a468e692..a0090532f4 100644
--- a/docs/git/terminology.md
+++ b/docs/git/terminology.md
@@ -1,7 +1,7 @@
# Important Git terms
When you install Git, it adds a manual entry for `gitglossary`. You can view
-this glossary by running `man gitglossary`. Below we've included the git terms
+this glossary by running `man gitglossary`. Below we've included the Git terms
you'll encounter most often along with their definitions from *gitglossary*.
## branch
diff --git a/docs/git/working-copies.md b/docs/git/working-copies.md
index a8cf00cb53..7b4e4003f2 100644
--- a/docs/git/working-copies.md
+++ b/docs/git/working-copies.md
@@ -1,7 +1,7 @@
# Working copies
When you work on Zulip code, there are three working copies
-of the Zulip git repo that you are generally concerned with:
+of the Zulip Git repo that you are generally concerned with:
- local copy: This lives on your laptop or your remote dev instance.
- forked copy: This lives on GitHub, and it's tied to your account.
diff --git a/docs/overview/architecture-overview.md b/docs/overview/architecture-overview.md
index ce22b6a907..8c8f2cb76e 100644
--- a/docs/overview/architecture-overview.md
+++ b/docs/overview/architecture-overview.md
@@ -135,7 +135,7 @@ from outside.
and the production build process (`tools/build-release-tarball`)
compiles, minifies, and installs the static assets into the
`prod-static/` tree form. In development, files are served directly
- from `/static/` in the git repository.
+ from `/static/` in the Git repository.
- Requests to `/json/events` and `/api/v1/events`, i.e. the
real-time push system, are sent to the Tornado server.
- Requests to all other paths are sent to the Django app running via
@@ -184,25 +184,25 @@ persistence:
# Zulip-specific configuration: disable saving to disk.
save ""
-People often wonder if we could replace memcached with redis (or
-replace RabbitMQ with redis, with some loss of functionality).
+People often wonder if we could replace memcached with Redis (or
+replace RabbitMQ with Redis, with some loss of functionality).
The answer is likely yes, but it wouldn't improve Zulip.
Operationally, our current setup is likely easier to develop and run
-in production than a pure redis system would be. Meanwhile, the
-perceived benefit for using redis is usually to reduce memory
+in production than a pure Redis system would be. Meanwhile, the
+perceived benefit for using Redis is usually to reduce memory
consumption by running fewer services, and no such benefit would
materialize:
* Our cache uses significant memory, but that memory usage would be
- essentially the same with redis as it is with memcached.
+ essentially the same with Redis as it is with memcached.
* All of these services have low minimum memory requirements, and in
- fact our applications for redis and RabbitMQ do not use significant
+ fact our applications for Redis and RabbitMQ do not use significant
memory even at scale.
-* We would likely need to run multiple redis services (with different
+* We would likely need to run multiple Redis services (with different
configurations) in order to ensure the pure LRU use case (memcached)
doesn't push out data that we want to persist until expiry
- (redis-based rate limiting) or until consumed (RabbitMQ-based
+ (Redis-based rate limiting) or until consumed (RabbitMQ-based
queuing of deferred work).
### RabbitMQ
@@ -237,12 +237,12 @@ with the operating system.
In production, Postgres is installed with a default configuration. The
directory that would contain configuration files
(`puppet/zulip/files/postgresql`) has only a utility script and a custom
-list of stopwords used by a Postgresql extension.
+list of stopwords used by a PostgreSQL extension.
-In a development environment, configuration of that postgresql
+In a development environment, configuration of that PostgreSQL
extension is handled by `tools/postgres-init-dev-db` (invoked by
`tools/provision`). That file also manages setting up the
-development postgresql user.
+development PostgreSQL user.
`tools/provision` also invokes `tools/rebuild-dev-database`
to create the actual database with its schema.
@@ -287,11 +287,11 @@ self-explanatory names.
topic]", or "Link to this conversation". To avoid visual clutter,
the chevron only appears in the web UI upon hover.
-* **ellipsis**: A small vertical three dot icon(technically called
+* **ellipsis**: A small vertical three dot icon (technically called
as ellipsis-v), present in sidebars as a menu icon.
- It offers contextual options for Global Filters(All messages
- and Starred messages), Stream Filters and Topics in left
- sidebar and User in right sidebar. To avoid visual clutter
+ It offers contextual options for global filters (All messages
+ and Starred messages), stream filters and topics in left
+ sidebar and users in right sidebar. To avoid visual clutter
ellipsis only appears in the web UI upon hover.
* **huddle**: What the codebase calls a "group private message".
diff --git a/docs/overview/changelog.md b/docs/overview/changelog.md
index 86ac1f411f..2ab9d0ddb5 100644
--- a/docs/overview/changelog.md
+++ b/docs/overview/changelog.md
@@ -31,7 +31,7 @@ in bursts.
data will fascilitate future features showing a log of activity by
a given user or changes to an organization's settings.
- Added support for using Sentry for processing backend exceptions.
-- Added documentation for using `wal-g` for continuous postgres backups.
+- Added documentation for using `wal-g` for continuous Postgres backups.
- Added loading spinners for message editing widgets.
- Added live update of compose placeholder text when recipients change.
- The Zoom integration is now stable (no longer beta).
@@ -50,7 +50,7 @@ in bursts.
- Fixed screenreader accessibility of many components, including
the compose box, message editing, popovers, and many more.
- Improved formatting of GitLab integration.
-- Improved positioning logic for inline Youtube previews.
+- Improved positioning logic for inline YouTube previews.
- Upgraded our ancient forked version of bootstrap, on a path towards
removing the last forked dependencies from the codebase.
- Updated webapp codebase to use many modern ES6 patterns.
@@ -124,7 +124,7 @@ in bursts.
- The Zulip server now sets badge counts for the iOS mobile app.
- Quote-and-reply now generates a handy link to the quoted message.
- Upgraded Django from 1.11.x to the latest LTS series, 2.2.x.
-- Added integrations for ErrBit, Grafana, Thinkst Canary, and AlertManager.
+- Added integrations for ErrBit, Grafana, Thinkst Canary, and Alertmanager.
- Extended API documentation to have detailed data on most responses,
validated against the API's actual implementation and against all
tests in our extensive automated test suite.
@@ -132,7 +132,7 @@ in bursts.
global/default policy and policies for specific streams.
- Added a new incoming webhook API that accepts messages in the format
used by Slack's incoming webhooks API.
-- Introduced the Zulip API Feature Level, a concept that will greatly
+- Introduced the Zulip API feature level, a concept that will greatly
simplify the implementation of mobile, terminal, and desktop clients
that need to talk to a wide range of supported Zulip server
versions, as well as the [Zulip API
@@ -162,22 +162,22 @@ in bursts.
accounts affected by this bug, so we expect the vast majority of
installations will have none.
- This release switches Zulip to install Postgres 12 from the upstream
- postgres repository by default, rather than using the default
+ Postgres repository by default, rather than using the default
Postgres version included with the operating system. Existing Zulip
installations will continue to work with Postgres 10; this detail is
configured in `/etc/zulip/zulip.conf`. We have no concrete plans to
start requiring Postgres 12, though we do expect it to improve
performance. Installations that would like to upgrade can follow
- [our new postgres upgrade guide][postgres-upgrade].
+ [our new Postgres upgrade guide][postgres-upgrade].
- The format of the `JWT_AUTH_KEYS` setting has changed to include an
[algorithms](https://pyjwt.readthedocs.io/en/latest/algorithms.html)
list: `{"subdomain": "key"}` becomes `{"subdomain": {"key": "key",
"algorithms": ["HS256"]}}`.
-- Added a new Organization Owner permission above the previous
- Organization Administrator. All existing organization
+- Added a new organization owner permission above the previous
+ organization administrator. All existing organization
administrators are automatically converted into organization owners.
Certain sensitive administrative settings are now only
- editable by Organization Owners.
+ editable by organization owners.
- The changelog now has a section that makes it easy to find the
Upgrade notes for all releases one is upgrading across.
@@ -322,7 +322,7 @@ in bursts.
### 2.1.7 -- 2020-06-25
- CVE-2020-15070: Fix privilege escalation vulnerability with custom
- profile fields and direct write access to Zulip's postgres database.
+ profile fields and direct write access to Zulip's Postgres database.
- Changed default memcached authentication username to zulip@localhost,
fixing authentication problems when servers change their hostname.
@@ -366,7 +366,7 @@ details.
- Fixed a regression in 2.1.3 that impacted creating the very first
organization via our data import tools.
-- Remove the old `tsearch_extras` postgres extension, which was causing
+- Remove the old `tsearch_extras` Postgres extension, which was causing
an exception restoring backups on fresh Zulip servers that had been
generated on systems that had been upgraded from older Zulip releases.
- Removed fetching GitHub contributor data from static asset build
@@ -398,7 +398,7 @@ details.
- Fixed copy-to-clipboard button for outgoing webhook bots.
- Fixed logging spam from soft_deactivation cron job.
- Fixed email integration handling of emails with nested MIME structure.
-- Fixed unicode bugs in incoming email integration.
+- Fixed Unicode bugs in incoming email integration.
- Fixed error handling for Slack data import.
- Fixed incoming webhook support for AWX 9.x.y.
- Fixed a couple missing translation tags.
@@ -413,8 +413,8 @@ details.
- Corrected fix for CVE-2019-19775 (the original fix was affected by
an unfixed security bug in Python's urllib, CVE-2015-2104).
- Migrated data for handling replies to missed-message emails from
- semi-persistent redis to the fully persistent database.
-- Added authentication for redis and memcached even in configurations
+ semi-persistent Redis to the fully persistent database.
+- Added authentication for Redis and memcached even in configurations
where these are running on localhost, for add hardening against
attacks from malicious processes running on the Zulip server.
- Improved logging for misconfigurations of LDAP authentication.
@@ -444,7 +444,7 @@ details.
- Added support for Debian buster. Removed support for EOL Ubuntu Trusty.
- Added support for SAML authentication.
- Removed our dependency on `tsearch_extras`, making it possible to
- run a production Zulip server against any postgres database
+ run a production Zulip server against any Postgres database
(including those where one cannot install extensions, like Amazon RDS).
- Significantly improved the email->Zulip gateway, and added [nice
setup documentation](../production/email-gateway.md). It now
@@ -488,7 +488,7 @@ configure this feature entirely in the UI. However, servers that had
previously [enabled previews of linked
websites](https://zulip.com/help/allow-image-link-previews) will
lose the setting and need to re-enable it.
-- We rewrote the Google Authentication backend to use the
+- We rewrote the Google authentication backend to use the
`python-social-auth` system we use for other third-party
authentication systems. For this release, the old variable names
still work, but users should update the following setting names in
@@ -644,7 +644,7 @@ lose the setting and need to re-enable it.
joined that stream.
- Fixed several subtle real-time sync issues with "stream settings".
- Fixed a few subtle Markdown processor bugs involving emoji.
-- Fixed several issues where Linkifiers validation was overly restrictive.
+- Fixed several issues where linkifiers validation was overly restrictive.
- Fixed several rare/minor UI consistency issues in the left sidebar.
- Fixed issues involving saving a message edit before file upload completes.
- Fixed issues with pasting images into the compose box from Safari.
@@ -700,7 +700,7 @@ lose the setting and need to re-enable it.
- Fixed a table layout bug in "deactivated users" settings.
- Fixed an exception when administrators edited bot users when custom
profile fields were configured in the organization.
-- Fixed a bug enabling the PGRoonga search backend with older postgres.
+- Fixed a bug enabling the PGRoonga search backend with older Postgres.
- Fixed getting personal API key when passwords are disabled.
### 2.0.3 -- 2019-04-23
@@ -714,11 +714,11 @@ lose the setting and need to re-enable it.
such a version by default, but one can install it manually).
- Fixed `manage.py query_ldap` test tool (broken in 2.0.2).
- Fixed several bugs in new backup and restore tools.
-- Fixed minor bugs with Youtube previews.
+- Fixed minor bugs with YouTube previews.
### 2.0.2 -- 2019-03-15
-- Fixed a regression in the puppet configuration for S3 upload backend
+- Fixed a regression in the Puppet configuration for S3 upload backend
introduced in 2.0.1.
- Fixed a too-fast fade for "Saved" in organization settings.
- Fixed a white flash when loading a browser in night mode.
@@ -821,7 +821,7 @@ and is enabled by default in that case. To disable it, set
- Improved accessibility of emoji rendering in messages bodies.
- Eliminated UI lag when using "Quote and reply".
- Expanded production documentation for more unusual deployment options.
-- Expanded set of characters allowed in custom Linkifiers.
+- Expanded set of characters allowed in custom linkifiers.
- Optimized development provisioning; now takes 2s in the no-op case.
- Zulip's Help Center now has nicely generated open graph tags.
- Fixed missing API authentication headers for mobile file access.
@@ -842,11 +842,11 @@ and is enabled by default in that case. To disable it, set
### 1.9.2 -- 2019-01-29
This release migrates Zulip off a deprecated Google+ API (necessary
-for Google Authentication to continue working past March 7), and
+for Google authentication to continue working past March 7), and
contains a few bug fixes for the installer and Slack import. It has
minimal changes for existing servers not using Google authentication.
-- Updated the Google Auth integration to stop using a deprecated and
+- Updated the Google auth integration to stop using a deprecated and
soon-to-be-removed Google+ authentication API.
- Improved installer error messages for common configuration problems.
- Fixed several bugs in Slack, Gitter, and HipChat import tools.
@@ -976,7 +976,7 @@ Zulip installations; it has minimal changes for existing servers.
- Improved Zulip's layout for windows with a width around 1024px.
- Improved Zulip's generic error handling behavior for webhooks.
- Improved keyboard navigation of settings and popovers.
-- Renamed "Realm Filters" to "Linkifiers", at least in the UI.
+- Renamed "realm filters" to "linkifiers", at least in the UI.
- Converted several layered-checkbox settings to clearer dropdowns.
- Cleaned up some legacy APIs still using email addresses.
- Made arrow-key navigation work within right and left sidebar search.
@@ -1079,7 +1079,7 @@ Zulip installations; it has minimal changes for existing servers.
public streams, even though from before a user subscribed.
- Added a menu item to mark all messages as read.
- Fixed image upload file pickers offering non-image files.
-- Fixed some subtle bugs with full-text search and unicode.
+- Fixed some subtle bugs with full-text search and Unicode.
- Fixed bugs in the "edit history" HTML rendering process.
- Fixed popovers being closed when new messages come in.
- Fixed unexpected code blocks when using the email mirror.
@@ -1413,7 +1413,7 @@ running a version from before 1.7 should upgrade directly to 1.7.1.
- Simplified the settings for configuring senders for our emails.
- Emoji can now be typed with spaces, e.g. entering "robot face" in
the typeahead as well as "robot_face".
-- Improved title and alt text for unicode emoji.
+- Improved title and alt text for Unicode emoji.
- Added development tools to make iterating on emails and error pages easy.
- Added backend support for multi-use invite links (no UI for creating yet).
- Added a central debugging log for attempts to send outgoing emails.
@@ -1486,7 +1486,7 @@ Zulip apps.
* Added an icon to distinguish bot users as message senders.
* Added a command-line Slack importer tool using the API.
* Added new announcement notifications on stream creation.
-* Added support for some newer unicode emoji code points.
+* Added support for some newer Unicode emoji code points.
* Added support for users deleting realm emoji they themselves uploaded.
* Added support for organization administrators deleting messages.
* Extended data available to mobile apps to cover the entire API.
@@ -1523,7 +1523,7 @@ Zulip apps.
* Fixed numerous bugs with the message editing widget.
* Fixed missing logging / rate limiting on browser endpoints.
* Fixed regressions in Zulip's browser state preservation on reload logic.
-* Fixed support for unicode characters in the email mirror system.
+* Fixed support for Unicode characters in the email mirror system.
* Fixed load spikes when email mirror is receiving a lot of traffic.
* Fixed the ugly grey flicker when scrolling fast on Macs.
* Fixed previews of GitHub image URLs.
@@ -1597,7 +1597,7 @@ Zulip apps.
- Added a webhook integration for GitHub, replacing the deprecated
github-services hook.
- Normalized the message formatting for all the Zulip Git integrations.
-- Added support for VMWare Fusion Vagrant provider for faster OSX
+- Added support for VMware Fusion Vagrant provider for faster OSX
development.
- Added a shields.io style badge for joining a Zulip server.
- Added admin setting for which email domains can join a realm.
@@ -1630,7 +1630,7 @@ Zulip apps.
- Added several new linters (eslint, pep8) and cleaned the codebase.
- Optimized the speed of the Zulip upgrade process, especially with Git.
- Have peer_add events send user_id, not email.
-- Fixed problems with rabbitmq when installing Zulip.
+- Fixed problems with RabbitMQ when installing Zulip.
- Fixed JavaScript not being gzip-compressed properly.
- Fixed a major performance bug in the Tornado service.
- Fixed a frontend performance bug creating streams in very large realms.
@@ -1702,8 +1702,8 @@ Zulip apps.
- Added management command for creating realms through web UI.
- Added management command to send password reset emails.
- Added endpoint for mobile apps to query available auth backends.
-- Added LetsEncrypt documentation for getting SSL certificates.
-- Added nice rendering of unicode emoji.
+- Added Let's Encrypt documentation for getting SSL certificates.
+- Added nice rendering of Unicode emoji.
- Added support for pinning streams to the top of the left sidebar.
- Added search box for filtering user list when creating a new stream.
- Added realm setting to disable message editing.
@@ -1714,10 +1714,10 @@ Zulip apps.
easy to add additional social authentication methods).
- Added TERMS_OF_SERVICE setting using Markdown formatting to configure
the terms of service for a Zulip server.
-- Added numerous hooks to puppet modules to enable more configurations.
-- Moved several useful puppet components into the main puppet
- manifests (setting a redis password, etc.).
-- Added automatic configuration of postgres/memcached settings based
+- Added numerous hooks to Puppet modules to enable more configurations.
+- Moved several useful Puppet components into the main Puppet
+ manifests (setting a Redis password, etc.).
+- Added automatic configuration of Postgres/memcached settings based
on the server's available RAM.
- Added scripts/upgrade-zulip-from-git for upgrading Zulip from a Git repo.
- Added preliminary support for Python 3. All of Zulip's test suites now
@@ -1732,7 +1732,7 @@ Zulip apps.
- Improved missed message emails to better support directly replying.
- Increased backend test coverage of Python code to 85.5%.
- Increased mypy static type coverage of Python code to 95%. Also
- fixed many string annotations to properly handle unicode.
+ fixed many string annotations to properly handle Unicode.
- Fixed major i18n-related frontend performance regression on
/#subscriptions page. Saves several seconds of load time with 1k
streams.
@@ -1749,7 +1749,7 @@ Zulip apps.
- Fixed EPMD restart being attempted on every puppet apply.
- Fixed message cache filling; should improve perf after server restart.
- Fixed caching race condition when changing user objects.
-- Fixed buggy puppet configuration for supervisord restarts.
+- Fixed buggy Puppet configuration for supervisord restarts.
- Fixed some error handling race conditions when editing messages.
- Fixed fastcgi_params to protect against the httpoxy attack.
- Fixed bug preventing users with mit.edu emails from registering accounts.
@@ -1757,7 +1757,7 @@ Zulip apps.
- Fixed APNS push notification support (had been broken by Apple changing
the APNS API).
- Fixed some logic bugs in how attachments are tracked.
-- Fixed unnecessarily resource-intensive rabbitmq cron checks.
+- Fixed unnecessarily resource-intensive RabbitMQ cron checks.
- Fixed old deployment directories leaking indefinitely.
- Fixed need to manually add localhost in ALLOWED_HOSTS.
- Fixed display positioning for the color picker on subscriptions page.
@@ -1817,7 +1817,7 @@ Zulip apps.
- Fixed erroneous WWW-Authenticate headers with expired sessions.
- Changed "coworkers" to "users" in the Zulip UI.
- Changed add_default_stream REST API to correctly use PUT rather than PATCH.
-- Updated the Zulip emoji set (the Android Emoji) to a modern version.
+- Updated the Zulip emoji set (the Android emoji) to a modern version.
- Made numerous small improvements to the Zulip development experience.
- Migrated backend templates to the faster Jinja2 templating system.
- Migrated development environment setup scripts to tools/setup/.
@@ -1844,7 +1844,7 @@ Zulip apps.
- Added ability for realm administrators to manage custom emoji.
- Added guide to writing new integrations.
- Enabled camo image proxy to fix mixed-content warnings for http images.
-- Refactored the Zulip puppet modules to be more modular.
+- Refactored the Zulip Puppet modules to be more modular.
- Refactored the Tornado event system, fixing old memory leaks.
- Removed many old-style /json API endpoints
- Implemented running queue processors multithreaded in development,
@@ -1864,7 +1864,7 @@ Zulip apps.
- Added new integration for Travis CI.
- Added settings option to control maximum file upload size.
- Added support for running Zulip development environment in Docker.
-- Added easy configuration support for a remote postgres database.
+- Added easy configuration support for a remote Postgres database.
- Added extensive documentation on scalability, backups, and security.
- Recent private message threads are now displayed expanded similar to
the pre-existing recent topics feature.
@@ -1886,7 +1886,7 @@ Zulip apps.
- Fixed buggy #! lines in upgrade scripts.
### 1.3.8 - 2015-11-15
-- Added options to the Python api for working with untrusted server certificates.
+- Added options to the Python API for working with untrusted server certificates.
- Added a lot of documentation on the development environment and testing.
- Added partial support for translating the Zulip UI.
- Migrated installing Node dependencies to use npm.
@@ -1913,7 +1913,7 @@ Zulip apps.
This section links to the upgrade notes from past releases, so you can
easily read them all when upgrading across multiple releases.
-* [Draft Upgrade notes for 4.0](#upgrade-notes-for-4-0)
+* [Draft upgrade notes for 4.0](#upgrade-notes-for-4-0)
* [Upgrade notes for 3.0](#upgrade-notes-for-3-0)
* [Upgrade notes for 2.1.5](#upgrade-notes-for-2-1-5)
* [Upgrade notes for 2.1.0](#upgrade-notes-for-2-1-0)
diff --git a/docs/overview/directory-structure.md b/docs/overview/directory-structure.md
index 37753cec9b..b47e01d6e4 100644
--- a/docs/overview/directory-structure.md
+++ b/docs/overview/directory-structure.md
@@ -138,7 +138,7 @@ Django context (i.e. with database access).
-------------------------------------------------------------------------
-### Production puppet configuration
+### Production Puppet configuration
This is used to deploy essentially all configuration in production.
diff --git a/docs/overview/gsoc-ideas.md b/docs/overview/gsoc-ideas.md
index cfac426d41..36828425d8 100644
--- a/docs/overview/gsoc-ideas.md
+++ b/docs/overview/gsoc-ideas.md
@@ -242,7 +242,7 @@ the areas mentioned above are not your main strength.
As a data point, in Summer 2017, we had 4 students working on the
React Native mobile app (1 focused primarily on visual design), 1 on
-the Electron Desktop app, 2 on bots/integrations, 1 on webapp visual
+the Electron desktop app, 2 on bots/integrations, 1 on webapp visual
design, 2 on our development tooling and automated testing
infrastructure, and the remaining 4 on various other parts of the
backend and core webapp.
@@ -348,7 +348,7 @@ CSS](https://github.com/zulip/zulip/).
(likely part 1) is
[here](../subsystems/custom-apps.md).
**Skills recommended**: Python and JavaScript/CSS, plus devops
- skills (Linux deployment, Docker, puppet etc.) are all useful here.
+ skills (Linux deployment, Docker, Puppet etc.) are all useful here.
Experience writing tools using various popular APIs is helpful for
being able to make good choices. Experts: Steve Howell.
@@ -454,7 +454,7 @@ Expert: Tommy Ip, Tim Abbott.
adding [mypy](../testing/mypy.md) stubs
for Django in mypy to make our type checking more powerful. Read
[our mypy blog post](https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/)
- for details on how mypy works and is integrated into zulip. This
+ for details on how mypy works and is integrated into Zulip. This
specific project is ideal for a strong contributor interested in
type systems.
diff --git a/docs/production/deployment.md b/docs/production/deployment.md
index 685509a5bf..9d820704b7 100644
--- a/docs/production/deployment.md
+++ b/docs/production/deployment.md
@@ -56,8 +56,8 @@ file and you'll find inline documentation in comments for how to
configure it.
Since some of these services require some configuration on the node
-itself (e.g. installing our `postgres` extensions), we have designed
-the puppet configuration that Zulip uses for installing and upgrading
+itself (e.g. installing our Postgres extensions), we have designed
+the Puppet configuration that Zulip uses for installing and upgrading
configuration to be completely modular.
For example, you can install a Zulip rabbitmq server on a machine, you
@@ -74,7 +74,7 @@ of includes in
though it's also possible to subclass some of the lower-level
manifests defined in that directory if you want to customize. A good
example of doing this is in the
-[zulip_ops puppet configuration][zulipchat-puppet] that we use as part
+[zulip_ops Puppet configuration][zulipchat-puppet] that we use as part
of managing chat.zulip.org and zulip.com.
### Using Zulip with Amazon RDS as the database
@@ -117,7 +117,7 @@ This complication will be removed in a future version.
#### Step 2: Create the Postgres database
-Access an administrative `psql` shell on your postgres database, and
+Access an administrative `psql` shell on your Postgres database, and
run the commands in `scripts/setup/create-db.sql` to:
* Create a database called `zulip`.
@@ -126,7 +126,7 @@ run the commands in `scripts/setup/create-db.sql` to:
`zulip` in the `zulip` database. You might have to grant `create`
privileges first for the `zulip` user to do this.
-Depending on how authentication works for your postgres installation,
+Depending on how authentication works for your Postgres installation,
you may also need to set a password for the Zulip user, generate a
client certificate, or similar; consult the documentation for your
database provider for the available options.
@@ -134,11 +134,11 @@ database provider for the available options.
#### Step 3: Configure Zulip to use the Postgres database
In `/etc/zulip/settings.py` on your Zulip server, configure the
-following settings with details for how to connect to your postgres
+following settings with details for how to connect to your Postgres
server. Your database provider should provide these details.
-* `REMOTE_POSTGRES_HOST`: Name or IP address of the postgres server.
-* `REMOTE_POSTGRES_PORT`: Port on the postgres server.
+* `REMOTE_POSTGRES_HOST`: Name or IP address of the Postgres server.
+* `REMOTE_POSTGRES_PORT`: Port on the Postgres server.
* `REMOTE_POSTGRES_SSLMODE`: SSL Mode used to connect to the server.
If you're using password authentication, you should specify the
@@ -152,7 +152,7 @@ postgres_password = abcd1234
Now complete the installation by running the following commands.
```
-# Ask Zulip installer to initialize the postgres database.
+# Ask Zulip installer to initialize the Postgres database.
su zulip -c '/home/zulip/deployments/current/scripts/setup/initialize-database'
# And then generate a realm creation link:
@@ -233,7 +233,7 @@ For `nginx` configuration, there's two things you need to set up:
`/etc/nginx/sites-available`) for the Zulip app. You can look at
our [nginx reverse proxy configuration][nginx-loadbalancer] to see
an example of how to do this properly (the various include files are
- available via the `zulip::nginx` puppet module). Or modify this
+ available via the `zulip::nginx` Puppet module). Or modify this
example:
```
diff --git a/docs/production/email-gateway.md b/docs/production/email-gateway.md
index ba540fdb5e..73fb1820d9 100644
--- a/docs/production/email-gateway.md
+++ b/docs/production/email-gateway.md
@@ -31,7 +31,7 @@ records in DNS.
## Local delivery setup
-Zulip's puppet configuration provides everything needed to run this
+Zulip's Puppet configuration provides everything needed to run this
integration; you just need to enable and configure it as follows.
The main decision you need to make is what email domain you want to
diff --git a/docs/production/expensive-migrations.md b/docs/production/expensive-migrations.md
index 9d232eafe2..e893a523aa 100644
--- a/docs/production/expensive-migrations.md
+++ b/docs/production/expensive-migrations.md
@@ -17,7 +17,7 @@ can run them manually before starting the upgrade:
/home/zulip/deployments/current`
2. Run `./manage.py dbshell`. This will open a shell connected to the
Postgres database.
-3. In the postgres shell, run the following commands:
+3. In the Postgres shell, run the following commands:
CREATE INDEX CONCURRENTLY
zerver_usermessage_is_private_message_id
diff --git a/docs/production/export-and-import.md b/docs/production/export-and-import.md
index 7c785262f9..4e7ab4d7a4 100644
--- a/docs/production/export-and-import.md
+++ b/docs/production/export-and-import.md
@@ -12,7 +12,7 @@ service (or back):
* Backups must be restored on a server running the same Zulip
version (most precisely, one where `manage.py showmigrations` has
the same output).
- * Backups must be restored on a server running the same `postgres`
+ * Backups must be restored on a server running the same Postgres
version.
* Backups aren't useful for migrating organizations between
self-hosting and Zulip Cloud (which may require renumbering all
@@ -20,7 +20,7 @@ service (or back):
We highly recommend this tool in situations where it is applicable,
because it is highly optimized and highly stable, since the hard
- work is done by the built-in backup feature of `postgres`. We also
+ work is done by the built-in backup feature of Postgres. We also
document [backup details](#backup-details) for users managing
backups manually.
@@ -36,7 +36,7 @@ service (or back):
Like the backup tool, logical data exports must be imported on a
Zulip server running the same version. However, logical data
exports can be imported on Zulip servers running a different
- `postgres` version or hosting a different set of Zulip
+ Postgres version or hosting a different set of Zulip
organizations. We recommend this tool in cases where the backup
tool isn't applicable, including situations where an easily
machine-parsable export format is desired.
@@ -47,7 +47,7 @@ service (or back):
inexpensively preserve public stream conversations when
decommissioning a Zulip organization.
-* It's possible to set up [postgres streaming
+* It's possible to set up [Postgres streaming
replication](#postgres-streaming-replication) and the [S3 file
upload
backend](../production/upload-backends.html#s3-backend-configuration)
@@ -69,7 +69,7 @@ The backup tool provides the following options:
to (default: write to a file in `/tmp`). On success, the
console output will show the path to the output tarball.
- `--skip-db`: Skip backup of the database. Useful if you're using a
- remote postgres host with its own backup system and just need to
+ remote Postgres host with its own backup system and just need to
backup non-database state.
- `--skip-uploads`: If `LOCAL_UPLOADS_DIR` is set, user-uploaded files
in that directory will be ignored.
@@ -154,19 +154,19 @@ emails to send). You can check whether these queues are empty using
#### Backup details
This section is primarily for users managing backups themselves
-(E.g. if they're using a remote postgres database with an existing
+(E.g. if they're using a remote Postgres database with an existing
backup strategy), and also serves as documentation for what is
included in the backups generated by Zulip's standard tools. The
data includes:
-* The postgres database. You can back it up like any postgres
+* The Postgres database. You can back it up like any Postgres
database. We have some example tooling for doing that incrementally
into S3 using [wal-g](https://github.com/wal-g/wal-g) in
`puppet/zulip/manifests/postgres_backups.pp`.
In short, this requires:
- Zulip 1.4 or newer release.
- An Amazon S3 bucket for storing the backups.
- - `/etc/zulip/zulip-secrets.conf` on the postgres server like this:
+ - `/etc/zulip/zulip-secrets.conf` on the Postgres server like this:
```
[secrets]
s3_backups_key = # aws public key
diff --git a/docs/production/install-existing-server.md b/docs/production/install-existing-server.md
index 39586c7729..0e7fc15fe0 100644
--- a/docs/production/install-existing-server.md
+++ b/docs/production/install-existing-server.md
@@ -33,7 +33,7 @@ sudo wget -O /etc/nginx/nginx.conf.zulip \
sudo meld /etc/nginx/nginx.conf /etc/nginx/nginx.conf.zulip # be sure to merge to the right
```
-After the zulip installation completes, then you can overwrite (or
+After the Zulip installation completes, you can overwrite (or
merge) your new nginx.conf with the installed one:
```shell
@@ -41,13 +41,13 @@ $ sudo meld /etc/nginx/nginx.conf.zulip /etc/nginx/nginx.conf # be sure to merg
$ sudo service nginx restart
```
-Zulip's puppet configuration will change the ownership of
+Zulip's Puppet configuration will change the ownership of
`/var/log/nginx` so that the `zulip` user can access it. Depending on
your configuration, this may or may not cause problems.
### Puppet
-If you have a puppet server running on your server, you will get an
+If you have a Puppet server running on your server, you will get an
error message about not being able to connect to the client during the
install process:
@@ -55,7 +55,7 @@ install process:
puppet-agent[29873]: Could not request certificate: Failed to open TCP connection to puppet:8140
```
-So you'll need to shutdown any puppet servers.
+So you'll need to shut down any Puppet servers.
```shell
$ sudo service puppet-agent stop
@@ -75,7 +75,7 @@ If you have an existing PostgreSQL database, note that Zulip will use
the default `main` as its database name; make sure you're not using
that.
-### Memcached, redis, and rabbitmq
+### Memcached, Redis, and RabbitMQ
Zulip will, by default, configure these services for its use. The
configuration we use is pretty basic, but if you're using them for
diff --git a/docs/production/multiple-organizations.md b/docs/production/multiple-organizations.md
index c985bdb0b8..9bf58328e7 100644
--- a/docs/production/multiple-organizations.md
+++ b/docs/production/multiple-organizations.md
@@ -9,7 +9,7 @@ The vast majority of Zulip servers host just a single organization (or
documents what's involved in hosting multiple Zulip organizations on a
single server.
-Throughout this article, we'll assume you're working on a zulip server
+Throughout this article, we'll assume you're working on a Zulip server
with hostname `zulip.example.com`. You may also find the more
[technically focused article on realms](../subsystems/realms.md) to be useful
reading.
@@ -33,7 +33,7 @@ things:
file. That setting is the default in 1.7 and later.
* Make sure you have SSL certificates for all of the subdomains you're
going to use. If you're using
- [our LetsEncrypt instructions](ssl-certificates.md), it's easy to
+ [our Let's Encrypt instructions](ssl-certificates.md), it's easy to
just specify multiple subdomains in your certificate request.
* If necessary, modify your `nginx` configuration to use your new
certificates.
diff --git a/docs/production/postgres.md b/docs/production/postgres.md
index 72139a8f19..d129a53a78 100644
--- a/docs/production/postgres.md
+++ b/docs/production/postgres.md
@@ -8,7 +8,7 @@ Previous versions of Zulip used whatever version of Postgres was
included with the base operating system (E.g. Postgres 12 on Ubuntu
Focal, 10 on Ubuntu Bionic, and 9.6 on Ubuntu Xenial). We recommend
that installations currently using older Postgres releases [upgrade to
-Postgres 12][upgrade-postgres], as may drop support for older postgres
+Postgres 12][upgrade-postgres], as we may drop support for older Postgres
in a future release.
[upgrade-postgres]: ../production/upgrade-or-modify.html#upgrading-postgresql
@@ -16,7 +16,7 @@ in a future release.
#### Remote Postgres database
This is a bit annoying to set up, but you can configure Zulip to use a
-dedicated postgres server by setting the `REMOTE_POSTGRES_HOST`
+dedicated Postgres server by setting the `REMOTE_POSTGRES_HOST`
variable in /etc/zulip/settings.py, and configuring Postgres
certificate authentication (see
http://www.postgresql.org/docs/9.1/static/ssl-tcp.html and
@@ -25,11 +25,11 @@ documentation on how to set this up and deploy the certificates) to
make the DATABASES configuration in `zproject/computed_settings.py`
work (or override that configuration).
-If you want to use a remote Postgresql database, you should configure
+If you want to use a remote PostgreSQL database, you should configure
the information about the connection with the server. You need a user
called "zulip" in your database server. You can configure these
options in `/etc/zulip/settings.py` (the below descriptions are from the
-Postgresql documentation):
+PostgreSQL documentation):
* `REMOTE_POSTGRES_HOST`: Name or IP address of the remote host
* `REMOTE_POSTGRES_SSLMODE`: SSL Mode used to connect to the server,
@@ -64,15 +64,15 @@ sudo update-rc.d postgresql disable
```
In future versions of this feature, we'd like to implement and
-document how to the remote postgres database server itself
+document how to set up the remote Postgres database server itself
automatically by using the Zulip install script with a different set
-of puppet manifests than the all-in-one feature; if you're interested
+of Puppet manifests than the all-in-one feature; if you're interested
in working on this, post to the Zulip development mailing list and we
can give you some tips.
-#### Debugging postgres database issues
+#### Debugging Postgres database issues
-When debugging postgres issues, in addition to the standard `pg_top`
+When debugging Postgres issues, in addition to the standard `pg_top`
tool, often it can be useful to use this query:
```
@@ -92,9 +92,9 @@ sending a Postgres process SIGKILL. Doing so will cause the database
to kill all current connections, roll back any pending transactions,
and enter recovery mode.
-#### Stopping the Zulip postgres database
+#### Stopping the Zulip Postgres database
-To start or stop postgres manually, use the pg_ctlcluster command:
+To start or stop Postgres manually, use the pg_ctlcluster command:
```
pg_ctlcluster 9.1 [--force] main {start|stop|restart|reload}
@@ -120,7 +120,7 @@ Many database parameters can be adjusted while the database is
running. Just modify /etc/postgresql/9.1/main/postgresql.conf and
issue a reload. The logs will note the change.
-#### Debugging issues starting postgres
+#### Debugging issues starting Postgres
pg_ctlcluster often doesn't give you any information on why the
database failed to start. It may tell you to check the logs, but you
@@ -141,7 +141,7 @@ pg_ctlcluster does.
#### Postgres vacuuming alerts
-The `autovac_freeze` postgres alert from `check_postgres` is
+The `autovac_freeze` Postgres alert from `check_postgres` is
particularly important. This alert indicates that the age (in terms
of number of transactions) of the oldest transaction id (XID) is
getting close to the `autovacuum_freeze_max_age` setting. When the
@@ -154,4 +154,4 @@ database as a database superuser (`postgres`).
See
http://www.postgresql.org/docs/9.1/static/routine-vacuuming.html#VACUUM-FOR-WRAPAROUND
-for more details on postgres vacuuming.
+for more details on Postgres vacuuming.
diff --git a/docs/production/requirements.md b/docs/production/requirements.md
index 19d1d5cbe7..028276dcf2 100644
--- a/docs/production/requirements.md
+++ b/docs/production/requirements.md
@@ -18,8 +18,8 @@ For details on each of these requirements, see below.
#### General
The installer expects Zulip to be the **only thing** running on the
-system; it will install system packages with `apt` (like nginx,
-postgresql, and redis) and configure them for its own use. We
+system; it will install system packages with `apt` (like Nginx,
+PostgreSQL, and Redis) and configure them for its own use. We
strongly recommend using either a fresh machine instance in a cloud
provider, a fresh VM, or a dedicated machine. If you decide to
disregard our advice and use a server that hosts other services, we
@@ -50,7 +50,7 @@ https://help.ubuntu.com/community/Repositories/Ubuntu
#### Hardware specifications
-* CPU and Memory: For installations with 100+ users you'll need a
+* CPU and memory: For installations with 100+ users you'll need a
minimum of **2 CPUs** and **4GB RAM**. For installations with fewer
users, 1 CPU and 2GB RAM is sufficient. We strongly recommend against
installing with less than 2GB of RAM, as you will likely experience
@@ -149,7 +149,7 @@ most use cases, there's little scalability benefit to doing so. See
installing Zulip with a dedicated database server.
* **Dedicated database**. For installations with hundreds of daily
- active users, we recommend using a [remote postgres
+ active users, we recommend using a [remote Postgres
database](postgres.md), but it's not required.
* **RAM:** We recommended more RAM for larger installations:
diff --git a/docs/production/security-model.md b/docs/production/security-model.md
index 71a628930e..e031558939 100644
--- a/docs/production/security-model.md
+++ b/docs/production/security-model.md
@@ -36,7 +36,7 @@ announcement).
prevent CSRF attacks.
* The preferred way to log in to Zulip is using an SSO solution like
- Google Auth, LDAP, or similar, but Zulip also supports password
+ Google auth, LDAP, or similar, but Zulip also supports password
authentication. See
[the authentication methods documentation](../production/authentication-methods.md)
for details on Zulip's available authentication methods.
@@ -134,11 +134,11 @@ strength allowed is controlled by two settings in
## Users and bots
-* There are several types of users in a Zulip organization: Organization
- Owners, Organization Administrators, Members (normal users), Guests,
- and Bots.
+* There are several types of users in a Zulip organization: organization
+ owners, organization administrators, members (normal users), guests,
+ and bots.
-* Owners and Administrators have the ability to deactivate and
+* Owners and administrators have the ability to deactivate and
reactivate other human and bot users, delete streams, add/remove
administrator privileges, as well as change configuration for the
organization.
diff --git a/docs/production/settings.md b/docs/production/settings.md
index 6c5e54bbba..ddd0b9ec25 100644
--- a/docs/production/settings.md
+++ b/docs/production/settings.md
@@ -38,13 +38,13 @@ prefilled with that value.
`AUTHENTICATION_BACKENDS`: Zulip supports a wide range of popular
options for authenticating users to your server, including Google
-Auth, GitHub Auth, LDAP, SAML, REMOTE_USER, and more.
+auth, GitHub auth, LDAP, SAML, REMOTE_USER, and more.
If you want an additional or different authentication backend, you
will need to uncomment one or more and then do any additional
configuration required for that backend as documented in the
`settings.py` file. See the
-[section on Authentication](../production/authentication-methods.md) for more
+[section on authentication](../production/authentication-methods.md) for more
detail on the available authentication backends and how to configure
them.
diff --git a/docs/production/troubleshooting.md b/docs/production/troubleshooting.md
index a0e1cbd9c2..d5bcce54a7 100644
--- a/docs/production/troubleshooting.md
+++ b/docs/production/troubleshooting.md
@@ -36,7 +36,7 @@ and restart various services.
### Checking status with `supervisorctl status`
-You can check if the zulip application is running using:
+You can check if the Zulip application is running using:
```
supervisorctl status
```
@@ -93,10 +93,10 @@ The Zulip application uses several major open source services to store
and cache data, queue messages, and otherwise support the Zulip
application:
-* postgresql
-* rabbitmq-server
-* nginx
-* redis
+* PostgreSQL
+* RabbitMQ
+* Nginx
+* Redis
* memcached
If one of these services is not installed or functioning correctly,
@@ -140,11 +140,11 @@ problems and how to resolve them:
`_
and instead install apt upgrades manually. With unattended upgrades
enabled, the moment a new Postgres release is published, your Zulip
- server will have its postgres server upgraded (and thus restarted).
+ server will have its Postgres server upgraded (and thus restarted).
```
-Restarting one of the system services that Zulip uses (`postgres`,
-`memcached`, `redis`, or `rabbitmq`) will drop the connections that
+Restarting one of the system services that Zulip uses (Postgres,
+memcached, Redis, or RabbitMQ) will drop the connections that
Zulip processes have to the service, resulting in future operations on
those connections throwing errors.
@@ -165,8 +165,8 @@ workers are commonly idle for periods of hours or days at a time.
You can prevent this trickle when doing a planned upgrade by
restarting the Zulip server with
`/home/zulip/deployments/current/scripts/restart-server` after
-installing system package updates to `postgres`, `memcached`,
-`rabbitmq`, or `redis`.
+installing system package updates to Postgres, memcached,
+RabbitMQ, or Redis.
Few system administrators enjoy outages at random times (even if only
brief) or the resulting distribution of error emails, which is why we
@@ -189,8 +189,8 @@ standard stuff:
especially for the database and where uploads are stored.
* Service uptime and standard monitoring for the [services Zulip
depends on](#troubleshooting-services). Most monitoring software
- has standard plugins for `nginx`, `postgres`, `redis`, `rabbitmq`,
- and `memcached`, and those will work well with Zulip.
+ has standard plugins for Nginx, Postgres, Redis, RabbitMQ,
+ and memcached, and those will work well with Zulip.
* `supervisorctl status` showing all services `RUNNING`.
* Checking for processes being OOM killed.
@@ -230,8 +230,8 @@ Database monitoring:
* `check_fts_update_log`: Checks whether full-text search updates are
being processed properly or getting backlogged.
* `check_postgres`: General checks for database health.
-* `check_postgres_backup`: Checks status of postgres backups.
-* `check_postgres_replication_lag`: Checks whether postgres streaming
+* `check_postgres_backup`: Checks status of Postgres backups.
+* `check_postgres_replication_lag`: Checks whether Postgres streaming
replication is up to date.
Standard server monitoring:
diff --git a/docs/production/upgrade-or-modify.md b/docs/production/upgrade-or-modify.md
index 81a87ff4da..bd26503f79 100644
--- a/docs/production/upgrade-or-modify.md
+++ b/docs/production/upgrade-or-modify.md
@@ -3,7 +3,7 @@
This page explains how to upgrade, patch, or modify Zulip, including:
- [Upgrading to a release](#upgrading-to-a-release)
-- [Upgrading from a git repository](#upgrading-from-a-git-repository)
+- [Upgrading from a Git repository](#upgrading-from-a-git-repository)
- [Troubleshooting and rollback](#troubleshooting-and-rollback)
- [Preserving local changes to configuration files](#preserving-local-changes-to-configuration-files)
- [Upgrading the operating system](#upgrading-the-operating-system)
@@ -166,7 +166,7 @@ the version corresponding to the `restart-server` path you call.
You can test whether this will happen assuming no upstream changes to
the configuration using `scripts/zulip-puppet-apply` (without the
-`-f` option), which will do a test puppet run and output and changes
+`-f` option), which will do a test Puppet run and output and changes
it would make. Using this list, you can save a copy of any files
that you've modified, do the upgrade, and then restore your
configuration.
@@ -177,7 +177,7 @@ system to Zulip Cloud. Before making local changes to a configuration
file, first check whether there's an option supported by
`/etc/zulip/zulip.conf` for the customization you need. And if you
need to make local modifications, please report the issue so that we
-can make the Zulip puppet configuration flexible enough to handle your
+can make the Zulip Puppet configuration flexible enough to handle your
setup.
### nginx configuration changes
@@ -195,7 +195,7 @@ some additional steps to update your Zulip installation, documented
below.
The steps are largely the same for the various OS upgrades aside from
-the versions of postgres, so you should be able to adapt these
+the versions of Postgres, so you should be able to adapt these
instructions for other supported platforms.
### Upgrading from Ubuntu 18.04 Bionic to 20.04 Focal
@@ -226,8 +226,8 @@ instructions for other supported platforms.
release update of Ubuntu 20.04 LTS is released.
When `do-release-upgrade` asks you how to upgrade configuration
- files for services that Zulip manages like `redis`, `postgres`,
- `nginx`, and `memcached`, the best choice is `N` to keep the
+ files for services that Zulip manages like Redis, Postgres,
+ Nginx, and memcached, the best choice is `N` to keep the
currently installed version. But it's not important; the next
step will re-install Zulip's configuration in any case.
@@ -357,8 +357,8 @@ instructions for other supported platforms.
[debian-upgrade-os]: https://www.debian.org/releases/buster/amd64/release-notes/ch-upgrading.html
When prompted for you how to upgrade configuration
- files for services that Zulip manages like `redis`, `postgres`,
- `nginx`, and `memcached`, the best choice is `N` to keep the
+ files for services that Zulip manages like Redis, Postgres,
+ Nginx, and memcached, the best choice is `N` to keep the
currently installed version. But it's not important; the next
step will re-install Zulip's configuration in any case.
diff --git a/docs/subsystems/analytics.md b/docs/subsystems/analytics.md
index 42b05e6394..32676dbb28 100644
--- a/docs/subsystems/analytics.md
+++ b/docs/subsystems/analytics.md
@@ -10,7 +10,7 @@ designed around the following goals:
- Efficient to query so that we can display data in-app (e.g. on the streams
page) with minimum impact on the overall performance of those pages.
- Storage size smaller than the size of the main Message/UserMessage
- database tables, so that we can store the data in the main postgres
+ database tables, so that we can store the data in the main Postgres
database rather than using a specialized database platform.
There are a few important things you need to understand in order to
@@ -64,7 +64,7 @@ summed to rows in InstallationCount with totals for pairs of (end_time,
client).
Note: In most cases, we do not store rows with value 0. See
-[Performance Strategy](#performance-strategy) below.
+[Performance strategy](#performance-strategy) below.
## CountStats
diff --git a/docs/subsystems/api-release-checklist.md b/docs/subsystems/api-release-checklist.md
index 5e3c8666a7..5dd5b8094c 100644
--- a/docs/subsystems/api-release-checklist.md
+++ b/docs/subsystems/api-release-checklist.md
@@ -16,7 +16,7 @@ presented [here](https://packaging.python.org/tutorials/installing-packages/).
2. Create a [source distribution][3].
-3. Create a [pure Python Wheel][4].
+3. Create a [pure Python wheel][4].
4. [Upload][5] the distribution file(s) to [zulip-beta][6].
diff --git a/docs/subsystems/conversion.md b/docs/subsystems/conversion.md
index 2bed74bda7..153ef15153 100644
--- a/docs/subsystems/conversion.md
+++ b/docs/subsystems/conversion.md
@@ -104,12 +104,12 @@ files from S3. Finally, `Attachment`'s `m2m` relationship ties to
Here are the same classes of data, listed in roughly
decreasing order of riskiness:
-- Message Data (sheer volume/lack of time/security)
-- File-Related Data (S3/security/lots of moving parts)
-- Recipient Data (complexity/security/cross-realm considerations)
-- Cross Realm Data (duplicate ids)
-- Disjoint User Data
-- Public Realm Data
+- Message data (sheer volume/lack of time/security)
+- File-related data (S3/security/lots of moving parts)
+- Recipient data (complexity/security/cross-realm considerations)
+- Cross-realm data (duplicate ids)
+- Disjoint user data
+- Public realm data
(Note the above list is essentially in reverse order of how we
process the data, which isn't surprising for a top-down approach.)
diff --git a/docs/subsystems/custom-apps.md b/docs/subsystems/custom-apps.md
index b7e60f0f95..7d6e7772d3 100644
--- a/docs/subsystems/custom-apps.md
+++ b/docs/subsystems/custom-apps.md
@@ -53,17 +53,17 @@ can be the source of the stimulus, or the target of the response,
or both. Along those lines, we divide custom apps into
these three types:
-- A **Zulip Reader** uses activity on Zulip to stimulate an external
+- A **Zulip reader** uses activity on Zulip to stimulate an external
response. An example here would be a follow-up bot that sees
messages with the alert word "@todo" on a stream and then
adds a task to a third party todo-list tool.
-- A **Zulip Writer** reacts to external stimuli and generates
+- A **Zulip writer** reacts to external stimuli and generates
Zulip responses. An example here might be a build bot that
gets triggered by an automated code build finishing and then
writes "build finished" to a Zulip stream.
-- A **Zulip Read/Writer** reacts to a stimulus from Zulip by
+- A **Zulip reader/writer** reacts to a stimulus from Zulip by
responding to Zulip. An example here would be a math bot
that sees a message saying "compute 2+2" and responds with
"2+2=4" on the same stream or back to the user in a PM.
@@ -72,16 +72,16 @@ The above three classifications represent kind of a Zulip-centric
view of the universe, but we should put ourselves in the shoes
of somebody "out in the world."
-- A **World Reader** is an app that gets some stimulus from
+- A **world reader** is an app that gets some stimulus from
the outside world and produces a response in Zulip. (So, a world
reader is a Zulip writer.)
-- A **World Writer** is an app that gets some stimulus from
+- A **world writer** is an app that gets some stimulus from
Zulip and produces a response in the outside world. (So, a world
writer is a Zulip reader.)
Some things are a little outside of the scope of this document.
-We could plausibly extend Zulip some day to host **World Reader/Writer**
+We could plausibly extend Zulip some day to host **world reader/writer**
apps that don't even write Zulip messages but simply use
Zulip as a kind of middleware platform.
@@ -93,14 +93,14 @@ the scope of this document, we won't spend a lot of time talking
about how to build these types of apps, but we are aware that
any solution needs to accommodate multiple sources and targets.
-### World Reader/Zulip Reader
+### World reader/Zulip reader
Finally, we set the stage for how we talk about custom apps in
terms of these two broad categories:
-- A **World Reader** responds to stimuli from the outside world (and
+- A **world reader** responds to stimuli from the outside world (and
typically produces a response in Zulip).
-- A **Zulip Reader** responds to stimuli from Zulip conversations (and
+- A **Zulip reader** responds to stimuli from Zulip conversations (and
typically produces a response in the outside world).
Again, we recognize that there can be overlap between those two
@@ -152,9 +152,9 @@ party corporate system based on Zulip events, I may want to deploy code
to a public webserver or try to get my code to be part of the
Zulip project itself.
-## World Reader
+## World reader
-A **World Reader** custom app is an app that responds to stimuli
+A **world reader** custom app is an app that responds to stimuli
from the world outside of Zulip. It typically functions as a **Zulip
Writer** and posts some kind of message to a Zulip stream or user to
alert people of world events. Here are some example stimuli:
@@ -175,11 +175,11 @@ you basically have to solve these problems:
### Zulip integrations
Zulip actually supports a bunch of integrations out-of-the-box that
-perform as **World Readers**.
+perform as **world readers**.
The [three different integration models](https://zulip.com/api/integrations-overview#sending-content-into-zulip)
basically differ in where they perform the main functions of a
-**World Reader**.
+**world reader**.
#### Incoming webhook integrations
@@ -240,21 +240,21 @@ Ruby. There are probably still some scenarios, however, where a lot of the
logic for translation could be moved to a Zulip-side integration, and then we
supply very thin client code for the plugin.
-## Zulip Reader
+## Zulip reader
-A **Zulip Reader** custom app gets stimuli from Zerver itself. Most
-**Zulip Reader** apps are packaged/advertised more as what people commonly call
+A **Zulip reader** custom app gets stimuli from Zerver itself. Most
+**Zulip reader** apps are packaged/advertised more as what people commonly call
"bots" than as "integrations." (But sometimes what is currently a "bot" should really
be deployed more like an "integration" in an ideal Zulip universe.)
-Example custom **Zulip Reader** apps can be serious or whimsical.
+Example custom **Zulip reader** apps can be serious or whimsical.
**Serious**
- A user tags a message with an alert word like `@followup` or `@ticket`.
- A user needs help computing something, like a simple math expression
or a timezone conversion.
-- A **World Reader** custom app posts something to a Zulip stream that we
+- A **world reader** custom app posts something to a Zulip stream that we
want to cross-post to another external system.
- A user wants the custom app to query the outside world, like look up the
weather or search Wikipedia.
@@ -267,7 +267,7 @@ weather or search Wikipedia.
- A user wants to tell the office telepresence robot to "turn left."
Setting aside whether a custom app is performing a serious or whimsical
-function, there are a few different types of **Zulip Readers**:
+function, there are a few different types of **Zulip readers**:
- Some readers will do simple local computations and post right back to Zulip.
- Some readers will do more expensive/web-related computations like searching
@@ -303,7 +303,7 @@ code to your friends and have them be able to deploy it.
- If you've written a general-use bot, it may be difficult to persuade your
admin to give you a superuser account.
-We want to make it easier to deploy **Zulip Readers** on
+We want to make it easier to deploy **Zulip readers** on
Zulip hardware. The following document talks about how we want to enable this
from a code structuring standpoint:
@@ -326,7 +326,7 @@ apps, where an app author might use the following development process:
To give a concrete example, let's say that I work for a company that is
building an issue tracker, and we want to offer Zulip support. I would
-start by writing a **Zulip Reader** that scans for the alert word `@ticket`
+start by writing a **Zulip reader** that scans for the alert word `@ticket`
on certain public Zulip streams, and part of that app would have logic
to post to my company's issue-tracking API.
@@ -360,7 +360,7 @@ the "world" as it sees fit.
### Zulip-side support for reader apps
Even for app authors that have access to dedicated hardware,
-there would be several advantages to running **Zulip Readers** under
+there would be several advantages to running **Zulip readers** under
the same umbrella as the core Zulip system.
- Your app will automatically inherit the uptime of the Zulip server itself (in
@@ -382,7 +382,7 @@ the problems below. (One assumption is that we don't run apps truly
in-process.)
- **Contributions**: We need a process for users to contribute code.
-- **Configuration/Discovery**: We need Zulip to be able to find which
+- **Configuration/discovery**: We need Zulip to be able to find which
apps are allowed to run for a particular
deployment. (The admin may choose to run only a subset of contributed
apps.)
diff --git a/docs/subsystems/dependencies.md b/docs/subsystems/dependencies.md
index 09dfcfaf85..e904bf18d8 100644
--- a/docs/subsystems/dependencies.md
+++ b/docs/subsystems/dependencies.md
@@ -103,7 +103,7 @@ on specific versions of these packages wherever possible.
The exact lists of `apt` packages needed by Zulip are maintained in a
few places:
-* For production, in our puppet configuration, `puppet/zulip/`, using
+* For production, in our Puppet configuration, `puppet/zulip/`, using
the `Package` and `SafePackage` directives.
* For development, in `SYSTEM_DEPENDENCIES` in `tools/lib/provision.py`.
* The packages needed to build a Zulip virtualenv, in
@@ -113,7 +113,7 @@ few places:
install other dependencies, and (2) because that list is shared
between development and production.
-We also rely on the `pgroonga` PPA for the `pgroonga` postgres
+We also rely on the PGroonga PPA for the PGroonga Postgres
extension, used by our [full-text search](full-text-search.md).
## Python packages
diff --git a/docs/subsystems/emoji.md b/docs/subsystems/emoji.md
index 45a644827d..d62b0a7585 100644
--- a/docs/subsystems/emoji.md
+++ b/docs/subsystems/emoji.md
@@ -14,7 +14,7 @@ Currently, Zulip supports these four display formats for emoji:
## Emoji codes
The Unicode standard has various ranges of characters set aside for
-emoji. So you can put emoji in your terminal using actual unicode
+emoji. So you can put emoji in your terminal using actual Unicode
characters like 😀 and 👍. If you paste those into Zulip, Zulip will
render them as the corresponding emoji image.
@@ -48,7 +48,7 @@ etc. However, we can't use the sprite sheets in some contexts, such
as missed-message and digest emails, that need to have self-contained
assets. For those, we use individual emoji files under
`static/generated/emoji`. The structure of that repository contains
-both files named after the unicode representation of emoji (as actual
+both files named after the Unicode representation of emoji (as actual
image files) as well as symlinks pointing to those emoji.
We need to maintain those both for the names used in the iamcal emoji
@@ -69,7 +69,7 @@ for more details on this strategy.
The emoji tree generated by this process contains several import elements:
* `emoji_codes.json`: A set of mappings used by the Zulip frontend to
- understand what unicode emoji exist and what their shortnames are,
+ understand what Unicode emoji exist and what their shortnames are,
used for autocomplete, emoji pickers, etc. This has been
deduplicated using the logic in
`tools/setup/emoji/emoji_setup_utils.py` to generally only have
@@ -80,7 +80,7 @@ The emoji tree generated by this process contains several import elements:
`images/emoji/unicode/` tree. This is used to serve individual emoji
images, as well as for the
[backend Markdown processor](../subsystems/markdown.md) to know which emoji
- names exist and what unicode emoji / images they map to. In this
+ names exist and what Unicode emoji / images they map to. In this
tree, we currently include all of the emoji in `emoji-map.json`;
this means that if you send `:angry_face:`, it won't autocomplete,
but will still work (but not in previews).
@@ -131,7 +131,7 @@ principles that were applied to the current set of names. We use (strong),
be familiar to a large subset of users. This largely applies to certain
faces. (medium)
-* The set of names should be compatible with the iamcal, gemoji, and unicode
+* The set of names should be compatible with the iamcal, gemoji, and Unicode
names. Compatible here means that if there is an emoji name a user knows
from one of those sets, and the user searches for the key word of that
name, they will get an emoji in our set. It is okay if this emoji has a
diff --git a/docs/subsystems/markdown.md b/docs/subsystems/markdown.md
index 41aac159a7..371f3c3790 100644
--- a/docs/subsystems/markdown.md
+++ b/docs/subsystems/markdown.md
@@ -137,8 +137,8 @@ Important considerations for any changes are:
Zulip's Markdown processor's rendering supports a number of features
that depend on realm-specific or user-specific data. For example, the
realm could have
-[Linkifiers](https://zulip.com/help/add-a-custom-linkification-filter)
-or [Custom emoji](https://zulip.com/help/add-custom-emoji)
+[linkifiers](https://zulip.com/help/add-a-custom-linkification-filter)
+or [custom emoji](https://zulip.com/help/add-custom-emoji)
configured, and Zulip supports mentions for streams, users, and user
groups (which depend on data like users' names, IDs, etc.).
@@ -248,7 +248,7 @@ accurate.
`http://google.com`, and not `https://zulip.com/google.com` which
is the default behavior.
-* Set `title=`(the url) on every link tag.
+* Set `title=`(the URL) on every link tag.
* Disable link-by-reference syntax,
`[foo][bar]` ... `[bar]: https://google.com`.
diff --git a/docs/subsystems/schema-migrations.md b/docs/subsystems/schema-migrations.md
index bf3248ea8b..148e64e0a2 100644
--- a/docs/subsystems/schema-migrations.md
+++ b/docs/subsystems/schema-migrations.md
@@ -48,7 +48,7 @@ migrations.
to the table, performing data backfills, or building indexes. We
have a `zerver/lib/migrate.py` library to help with adding columns
and backfilling data. For building indexes on these tables, we
- should do this using SQL with postgres's CONCURRENTLY keyword.
+ should do this using SQL with Postgres's CONCURRENTLY keyword.
* **Atomicity**. By default, each Django migration is run atomically
inside a transaction. This can be problematic if one wants to do
diff --git a/docs/subsystems/typing-indicators.md b/docs/subsystems/typing-indicators.md
index d87f750876..800fbf88bd 100644
--- a/docs/subsystems/typing-indicators.md
+++ b/docs/subsystems/typing-indicators.md
@@ -12,7 +12,7 @@ system and possibly improve it. This document assumes that the
client is our web app, but any client can play along with this
protocol.
-Right now typing indicators are only used in "Private Messages"
+Right now typing indicators are only used in "Private messages"
views.
There are two major roles for users in this system:
diff --git a/docs/subsystems/users.md b/docs/subsystems/users.md
index 25168691a5..980548d66a 100644
--- a/docs/subsystems/users.md
+++ b/docs/subsystems/users.md
@@ -35,7 +35,7 @@ All users can...
- show up in your message stream
- be narrowed to by clicking on recipient bars, etc.
- be narrowed to by searches (but not suggested)
-- can show up in your "Private Messages" sidebar
+- can show up in your "Private messages" sidebar
We also have the mirroring world, where we have unknown users
that we can send PMs to, and local-echo is allowed to create
diff --git a/docs/testing/linters.md b/docs/testing/linters.md
index 7f6f81fe66..8c8d8f3407 100644
--- a/docs/testing/linters.md
+++ b/docs/testing/linters.md
@@ -17,10 +17,10 @@ prevent common coding errors.
We borrow some open source tools for much of our linting, and the links
below will direct you to the official documentation for these projects.
-- [eslint](https://eslint.org)
+- [ESLint](https://eslint.org)
- [mypy](http://mypy-lang.org/)
- [Prettier](https://prettier.io/)
-- [puppet](https://puppet.com/) (puppet provides its own mechanism for
+- [Puppet](https://puppet.com/) (Puppet provides its own mechanism for
validating manifests)
- [pyflakes](https://pypi.python.org/pypi/pyflakes)
- [stylelint](https://github.com/stylelint/stylelint)
@@ -100,11 +100,11 @@ Most of our lint checks get performed by `./tools/lint`. These include the
following checks:
- Check Python code with pyflakes.
-- Check JavaScript and TypeScript code with eslint.
+- Check JavaScript and TypeScript code with ESLint.
- Check CSS, JavaScript, TypeScript, and YAML formatting with Prettier.
- Check Python code for custom Zulip rules.
- Check non-Python code for custom Zulip rules.
-- Check puppet manifests with the puppet validator.
+- Check Puppet manifests with the Puppet validator.
- Check HTML templates for matching tags and indentations.
- Check CSS for parsability and formatting.
- Check JavaScript code for addClass calls.
@@ -136,7 +136,7 @@ types of checks mostly lives [here](https://github.com/zulip/zulip/blob/master/t
You can use the `-h` option for `lint` to see its usage. One particular
flag to take note of is the `--modified` flag, which enables you to only run
-lint checks against files that are modified in your git repo. Most of the
+lint checks against files that are modified in your Git repo. Most of the
"sub-linters" respect this flag, but some will continue to process all the files.
Generally, a good workflow is to run with `--modified` when you are iterating on
the code, and then run without that option right before committing new code.
@@ -190,8 +190,8 @@ We check our JavaScript code in a few different ways:
#### Puppet manifests
We use Puppet as our tool to manage configuration files, using
-puppet "manifests." To lint puppet manifests, we use the "parser validate"
-option of puppet.
+Puppet "manifests." To lint Puppet manifests, we use the "parser validate"
+option of Puppet.
#### HTML templates
diff --git a/docs/testing/manual-testing.md b/docs/testing/manual-testing.md
index 64aa20d0c8..9df91a5874 100644
--- a/docs/testing/manual-testing.md
+++ b/docs/testing/manual-testing.md
@@ -181,10 +181,10 @@ For each of the above types of messages, you will want to cycle
through the following views for Cordelia (and have Hamlet send new
messages after each narrow):
-- Go to All Messages view.
-- Go to Private Messages view.
-- Go to Private Messages w/Hamlet.
-- Go to Private Messages w/Hamlet and Othello.
+- Go to All messages view.
+- Go to Private messages view.
+- Go to Private messages w/Hamlet.
+- Go to Private messages w/Hamlet and Othello.
- Go to Verona view.
- Go to Verona/bar view.
- Go to Verona/foo view.
@@ -212,9 +212,9 @@ populated and where the focus is placed.
- Buttons
- Narrow to a stream and click on "New topic"
- - Narrow "Private Messages" and click on "New topic"
+ - Narrow "Private messages" and click on "New topic"
- Narrow to a stream and click on "New private message"
- - Narrow "Private Messages" and click on "New private message"
+ - Narrow "Private messages" and click on "New private message"
- Topics
- Compose/send a message to a stream with no topic.
@@ -468,7 +468,7 @@ Do these tasks as Cordelia.
- Show user list on left sidebar in narrow windows (verify by making window thinner)
- 24-hour time (and then test going back to AM/PM)
- Notifications
- - Stream Message
+ - Stream message
- turn off notifications at user level
- create a new stream
- have Hamlet send a message
@@ -477,7 +477,7 @@ Do these tasks as Cordelia.
- have Hamlet send a message
- then turn off notifications for that stream
- have Hamlet send another message
- - Private Messages and @-mentions
+ - Private messages and @-mentions
- Test Desktop/Audible options
- You can ignore other stuff for now
- Bots/API key
diff --git a/docs/testing/philosophy.md b/docs/testing/philosophy.md
index 04f7b824af..5c8f048c9a 100644
--- a/docs/testing/philosophy.md
+++ b/docs/testing/philosophy.md
@@ -66,9 +66,9 @@ these goals, but a few techniques are worth highlighting:
outgoing HTTP requests are required to test something, we mock the
responses with libraries like `responses`.
* We carefully avoid the potential for contamination of data inside
- services like postgres, redis, and memcached from different tests.
+ services like Postgres, Redis, and memcached from different tests.
* Every test case prepends a unique random prefix to all keys it
- uses when accessing redis and memcached.
+ uses when accessing Redis and memcached.
* Every test case runs inside a database transaction, which is
aborted after the test completes. Each test process interacts
only with a fresh copy of a special template database used for
diff --git a/docs/testing/testing-with-node.md b/docs/testing/testing-with-node.md
index b8df4d4570..31dfb75d61 100644
--- a/docs/testing/testing-with-node.md
+++ b/docs/testing/testing-with-node.md
@@ -203,7 +203,7 @@ These instructions assume you're using the Vagrant development environment.
1. In the `Configure Node.js Remote Interpreter`, window select `Vagrant`
1. Wait for WebStorm to connect to Vagrant. This will be displayed
by the `Vagrant Host URL` section updating to contain the Vagrant
- SSH url, e.g. `ssh://vagrant@127.0.0.1:2222`.
+ SSH URL, e.g. `ssh://vagrant@127.0.0.1:2222`.
1. **Set the `Node.js interpreter path` to `/usr/local/bin/node`**
1. Hit `OK` 2 times to get back to the `Run/Debug Configurations` window.
1. Under `Working Directory` select the root `zulip` directory.
diff --git a/docs/testing/testing-with-puppeteer.md b/docs/testing/testing-with-puppeteer.md
index db154032bc..db38567989 100644
--- a/docs/testing/testing-with-puppeteer.md
+++ b/docs/testing/testing-with-puppeteer.md
@@ -22,7 +22,7 @@ The test files live in `frontend_tests/puppeteer_tests` and make use
of various useful helper functions defined in
`frontend_tests/puppeteer_lib/common.js`.
-## How puppeteer tests work
+## How Puppeteer tests work
The Puppeteer tests use a real Chromium browser (powered by
[puppeteer](https://github.com/puppeteer/puppeteer)), connected to a
@@ -93,7 +93,7 @@ These tools/features are often useful when debugging:
debugging test failures.
* TODO: Mention how to access Puppeteer screenshots in CI.
* TODO: Add an option for using the `headless: false` debugging mode
- of puppeteer so you can watch what's happening, and document how to
+ of Puppeteer so you can watch what's happening, and document how to
make that work with Vagrant.
* TODO: Document `--interactive`.
* TODO: Document how to run 100x in CI to check for nondeterminstic
@@ -108,7 +108,7 @@ These tools/features are often useful when debugging:
includes the console output for the server; any Python exceptions
are likely actual bugs in the changes being tested.
-See also [puppeteer upstream's debugging
+See also [Puppeteer upstream's debugging
tips](https://github.com/puppeteer/puppeteer#debugging-tips); some
tips may require temporary patches to functions like `run_test` or
`ensure_browser` in `frontend_tests/puppeteer_lib/common.js`.
@@ -153,4 +153,3 @@ notes above:
`zilencer/management/commands/populate_db.py`.
[learn-async-await]: https://developer.mozilla.org/en-US/docs/Learn/JavaScript/Asynchronous/Async_await
-
diff --git a/docs/testing/testing.md b/docs/testing/testing.md
index d12aaca8e5..6e61d85ba8 100644
--- a/docs/testing/testing.md
+++ b/docs/testing/testing.md
@@ -136,7 +136,7 @@ code paths (`httplib2.Http().request`, `requests.request`, etc.) to
throw an exception in the backend tests. While this is enforcement is
not complete (there a lot of other ways to use the Internet from
Python), it is easy to do and catches most common cases of new code
-dependning on Internet access.
+depending on Internet access.
This enforcement code results in the following exception:
diff --git a/docs/translating/german.md b/docs/translating/german.md
index 1386dc7b9a..cdab6fc23b 100644
--- a/docs/translating/german.md
+++ b/docs/translating/german.md
@@ -137,7 +137,7 @@ Since we try to avoid concatenating words whenever possible, don't use
"Privatnachricht" . PN is the officially used abbreviation for
"Private Nachricht" and is used in many German chat forums.
-*"Private Nachricht" (Youtube, Transifex)*
+*"Private Nachricht" (YouTube, Transifex)*
* Starred Message - **Markierte Nachricht**
@@ -260,7 +260,7 @@ works as well, but is not that common in German.
This translation is unambiguous.
-*"Deabonnieren" (Youtube, Transifex)*
+*"Deabonnieren" (YouTube, Transifex)*
* Narrow to - **Begrenzen auf**
@@ -293,7 +293,7 @@ preferable due to its brevity.
* Search - **Suchen**
-*"Suchen" (Youtube, Google, Facebook, Transifex)*
+*"Suchen" (YouTube, Google, Facebook, Transifex)*
* Pin/Unpin - **Anpinnen/Loslösen**
diff --git a/docs/translating/translating.md b/docs/translating/translating.md
index d0d0fbee70..3a9b023998 100644
--- a/docs/translating/translating.md
+++ b/docs/translating/translating.md
@@ -99,7 +99,7 @@ There are a few ways to see your translations in the Zulip UI:
out the language the user requests in a browser using the following
prioritization (mostly copied from the Django docs):
- 1. It looks for the language code as a url prefix (e.g. `/de/login/`).
+ 1. It looks for the language code as a URL prefix (e.g. `/de/login/`).
2. It looks for the `LANGUAGE_SESSION_KEY` key in the current user's
session (the Zulip language UI option ends up setting this).
3. It looks for the cookie named 'django_language'. You can set a
diff --git a/docs/tutorials/life-of-a-request.md b/docs/tutorials/life-of-a-request.md
index ab14764e88..eb4d57efac 100644
--- a/docs/tutorials/life-of-a-request.md
+++ b/docs/tutorials/life-of-a-request.md
@@ -16,7 +16,7 @@ application will serve the request (or deciding to serve the request
itself for static content).
In development, `tools/run-dev.py` fills the role of nginx. Static files
-are in your git checkout under `static`, and are served unminified.
+are in your Git checkout under `static`, and are served unminified.
## Static files are [served directly][served-directly] by Nginx
@@ -167,7 +167,7 @@ its url patterns (see
[zerver/lib/rest.py](https://github.com/zulip/zulip/blob/master/zerver/lib/rest.py))
so that the action called is `rest_dispatch`. This method will
authenticate the user, either through a session token from a cookie,
-or from an `email:api-key` string given via HTTP Basic Auth for API
+or from an `email:api-key` string given via HTTP basic auth for API
clients.
It will then look up what HTTP verb was used (GET, POST, etc) to make
diff --git a/docs/tutorials/new-feature-tutorial.md b/docs/tutorials/new-feature-tutorial.md
index dfccefa355..ec05806b76 100644
--- a/docs/tutorials/new-feature-tutorial.md
+++ b/docs/tutorials/new-feature-tutorial.md
@@ -147,7 +147,7 @@ document it and update any existing documentation that might be
relevant to the new feature. For more information on the kinds of
documentation Zulip has, see [Documentation](../documentation/overview.md).
-## Example Feature
+## Example feature
This example describes the process of adding a new setting to Zulip: a
flag that allows an admin to require topics on stream messages (the default
diff --git a/docs/tutorials/writing-views.md b/docs/tutorials/writing-views.md
index 9405107c84..4e0278dc0c 100644
--- a/docs/tutorials/writing-views.md
+++ b/docs/tutorials/writing-views.md
@@ -112,7 +112,7 @@ which is documented in detail at
[zerver/lib/rest.py](https://github.com/zulip/zulip/blob/master/zerver/lib/rest.py).
This method will authenticate the user either through a session token
from a cookie on the browser, or from a base64 encoded `email:api-key`
-string given via HTTP Basic Auth for API clients.
+string given via HTTP basic auth for API clients.
``` py
>>> import requests
diff --git a/frontend_tests/node_tests/buddy_data.js b/frontend_tests/node_tests/buddy_data.js
index fa212a7c9f..d0ab500bb0 100644
--- a/frontend_tests/node_tests/buddy_data.js
+++ b/frontend_tests/node_tests/buddy_data.js
@@ -223,7 +223,7 @@ run_test("title_data", () => {
};
assert.deepEqual(buddy_data.get_title_data(bot.user_id, is_group), expected_group_data);
- // Individual Users.
+ // Individual users.
user_status.set_status_text({
user_id: me.user_id,
status_text: "out to lunch",
diff --git a/frontend_tests/node_tests/general.js b/frontend_tests/node_tests/general.js
index 82ecdbc98c..7d3f6ad367 100644
--- a/frontend_tests/node_tests/general.js
+++ b/frontend_tests/node_tests/general.js
@@ -369,7 +369,7 @@ run_test("update_user_event", (override) => {
that happen during an event. This concept is called "mocking",
and you can find libraries to help do mocking. Here we will
just build our own lightweight mocking system, which is almost
- trivially easy to do in a language like Javascript.
+ trivially easy to do in a language like JavaScript.
*/
diff --git a/frontend_tests/node_tests/markdown.js b/frontend_tests/node_tests/markdown.js
index 0f4d2c60b5..5eb530293b 100644
--- a/frontend_tests/node_tests/markdown.js
+++ b/frontend_tests/node_tests/markdown.js
@@ -223,11 +223,11 @@ run_test("markdown_detection", () => {
"https://zulip.com/image.jpg too",
"Contains a zulip.com/foo.jpeg file",
"Contains a https://zulip.com/image.png file",
- "twitter url https://twitter.com/jacobian/status/407886996565016579",
+ "Twitter URL https://twitter.com/jacobian/status/407886996565016579",
"https://twitter.com/jacobian/status/407886996565016579",
"then https://twitter.com/jacobian/status/407886996565016579",
- "twitter url http://twitter.com/jacobian/status/407886996565016579",
- "youtube url https://www.youtube.com/watch?v=HHZ8iqswiCw&feature=youtu.be&a",
+ "Twitter URL http://twitter.com/jacobian/status/407886996565016579",
+ "YouTube URL https://www.youtube.com/watch?v=HHZ8iqswiCw&feature=youtu.be&a",
];
no_markup.forEach((content) => {
@@ -475,7 +475,7 @@ run_test("marked", () => {
'
:smile:
',
translate_emoticons: true,
},
- // Test HTML Escape in Custom Zulip Rules
+ // Test HTML escaping in custom Zulip rules
{
input: "@**
The Rogue One
**",
expected: "
@**<h1>The Rogue One</h1>**
",
diff --git a/frontend_tests/node_tests/typeahead.js b/frontend_tests/node_tests/typeahead.js
index 0220603b2e..d1f1100573 100644
--- a/frontend_tests/node_tests/typeahead.js
+++ b/frontend_tests/node_tests/typeahead.js
@@ -48,33 +48,33 @@ run_test("get_emoji_matcher", () => {
run_test("triage", () => {
const alice = {name: "alice"};
- const Alicia = {name: "Alicia"};
+ const alicia = {name: "Alicia"};
const steve = {name: "steve"};
- const Stephanie = {name: "Stephanie"};
+ const stephanie = {name: "Stephanie"};
- const names = [alice, Alicia, steve, Stephanie];
+ const names = [alice, alicia, steve, stephanie];
assert.deepEqual(
typeahead.triage("a", names, (r) => r.name),
{
- matches: [alice, Alicia],
- rest: [steve, Stephanie],
+ matches: [alice, alicia],
+ rest: [steve, stephanie],
},
);
assert.deepEqual(
typeahead.triage("A", names, (r) => r.name),
{
- matches: [Alicia, alice],
- rest: [steve, Stephanie],
+ matches: [alicia, alice],
+ rest: [steve, stephanie],
},
);
assert.deepEqual(
typeahead.triage("S", names, (r) => r.name),
{
- matches: [Stephanie, steve],
- rest: [alice, Alicia],
+ matches: [stephanie, steve],
+ rest: [alice, alicia],
},
);
@@ -82,7 +82,7 @@ run_test("triage", () => {
typeahead.triage("fred", names, (r) => r.name),
{
matches: [],
- rest: [alice, Alicia, steve, Stephanie],
+ rest: [alice, alicia, steve, stephanie],
},
);
});
diff --git a/frontend_tests/node_tests/typeahead_helper.js b/frontend_tests/node_tests/typeahead_helper.js
index 1a8a5148dc..43b0dbf7cb 100644
--- a/frontend_tests/node_tests/typeahead_helper.js
+++ b/frontend_tests/node_tests/typeahead_helper.js
@@ -143,7 +143,7 @@ run_test("sort_languages", () => {
const a_bot = {
email: "a_bot@zulip.com",
- full_name: "A zulip test bot",
+ full_name: "A Zulip test bot",
is_admin: false,
is_bot: true,
user_id: 1,
@@ -151,7 +151,7 @@ const a_bot = {
const a_user = {
email: "a_user@zulip.org",
- full_name: "A zulip user",
+ full_name: "A Zulip user",
is_admin: false,
is_bot: false,
user_id: 2,
diff --git a/frontend_tests/puppeteer_tests/04-subscriptions.js b/frontend_tests/puppeteer_tests/04-subscriptions.js
index bdf4ff27dd..ade26b39a9 100644
--- a/frontend_tests/puppeteer_tests/04-subscriptions.js
+++ b/frontend_tests/puppeteer_tests/04-subscriptions.js
@@ -123,7 +123,7 @@ async function create_stream(page) {
await page.waitForXPath('//*[text()="Create stream"]', {visible: true});
await common.fill_form(page, "form#stream_creation_form", {
stream_name: "Puppeteer",
- stream_description: "Everything puppeteer",
+ stream_description: "Everything Puppeteer",
});
await page.click(await stream_span(page, "Scotland")); // Subscribes all users from Scotland
await page.click(await user_span(page, "cordelia")); // Add cordelia.
@@ -142,7 +142,7 @@ async function create_stream(page) {
);
const subscriber_count_selector = "[data-stream-name='Puppeteer'] .subscriber-count";
assert.strictEqual(stream_name, "Puppeteer");
- assert.strictEqual(stream_description, "Everything puppeteer");
+ assert.strictEqual(stream_description, "Everything Puppeteer");
// Assert subscriber count becomes 5(scotland(+4), cordelia(+1), othello(-1), Desdemona(+1)).
await page.waitForFunction(
diff --git a/frontend_tests/puppeteer_tests/07-navigation.js b/frontend_tests/puppeteer_tests/07-navigation.js
index 934ad22edb..339af97146 100644
--- a/frontend_tests/puppeteer_tests/07-navigation.js
+++ b/frontend_tests/puppeteer_tests/07-navigation.js
@@ -78,7 +78,7 @@ async function navigation_tests(page) {
await navigate_to(page, verona_narrow, "message_feed_container");
// Hardcoded this instead of using `navigate_to`
- // as puppeteer cannot click hidden elements.
+ // as Puppeteer cannot click hidden elements.
await page.evaluate(() => $("a[href='#message_feed_container]'").click());
await wait_for_tab(page, "message_feed_container");
diff --git a/frontend_tests/zjsunit/mdiff.js b/frontend_tests/zjsunit/mdiff.js
index 70fcfd34ec..464fe0ee6b 100644
--- a/frontend_tests/zjsunit/mdiff.js
+++ b/frontend_tests/zjsunit/mdiff.js
@@ -10,7 +10,7 @@
* Based on diffing library difflib, a js port of the python library.
*
* The sole exported function diff_strings(string_0, string_1) returns a pretty-printed
- * unicode string containing their diff.
+ * Unicode string containing their diff.
*/
const difflib = require("difflib");
diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers
index b961b3cc3d..f6fb8ffee7 100755
--- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers
+++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""
-Nagios plugin to check that the rabbitmq has the correct number of consumers.
+Nagios plugin to check that the RabbitMQ has the correct number of consumers.
This script just checks the contents of /var/lib/nagios_state/check-rabbitmq-consumers,
which is generated by scripts/nagios/check-rabbitmq-consumers.
diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_queues b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_queues
index 587ad906f1..bb77cb29a7 100755
--- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_queues
+++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_queues
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""
-Nagios plugin to check that the rabbitmq queues are not overflowing as a result
+Nagios plugin to check that the RabbitMQ queues are not overflowing as a result
of a stuck consumer.
This script just checks the contents of /var/lib/nagios_state/check-rabbitmq-results,
diff --git a/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_postgres_replication_lag b/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_postgres_replication_lag
index 41ebdcaf2a..6669a96e39 100755
--- a/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_postgres_replication_lag
+++ b/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_postgres_replication_lag
@@ -2,7 +2,7 @@
"""Nagios plugin to check the difference between the primary and
replica Postgres servers' xlog location. Requires that the user this
-connects to postgres as has been granted the `pg_monitor` role.
+connects to Postgres as has been granted the `pg_monitor` role.
"""
import re
diff --git a/puppet/zulip/files/postgresql/process_fts_updates b/puppet/zulip/files/postgresql/process_fts_updates
index 3ede1606d3..dac753506c 100755
--- a/puppet/zulip/files/postgresql/process_fts_updates
+++ b/puppet/zulip/files/postgresql/process_fts_updates
@@ -1,10 +1,10 @@
#!/usr/bin/env python3
-# Processes updates to postgres Full Text Search for new/edited messages.
+# Processes updates to Postgres full-text search for new/edited messages.
#
-# Zulip manages its postgres full-text search as follows. When the
-# content of a message is modified, a postgres trigger logs the
+# Zulip manages its Postgres full-text search as follows. When the
+# content of a message is modified, a Postgres trigger logs the
# message ID to the `fts_update_log` table. In the background, this
-# program processes `fts_update_log`, updating the postgres full-text
+# program processes `fts_update_log`, updating the Postgres full-text
# search column search_tsvector in the main zerver_message.
import sys
@@ -88,12 +88,12 @@ try:
USING_PGROONGA = settings.USING_PGROONGA
except ImportError:
# process_fts_updates also supports running locally on a remote
- # postgres server; in that case, one can just connect to localhost
+ # Postgres server; in that case, one can just connect to localhost
USING_PGROONGA = False
# Since we don't want a hard dependency on being able to access the
# Zulip settings (as we may not be running on a server that has that
-# data), we determine whether we're using pgroonga using
+# data), we determine whether we're using PGroonga using
# /etc/zulip/zulip.conf.
#
# However, we still also check the `USING_PGROONGA` variable, since
diff --git a/puppet/zulip/manifests/app_frontend_base.pp b/puppet/zulip/manifests/app_frontend_base.pp
index cbe7f10679..5d75d2f306 100644
--- a/puppet/zulip/manifests/app_frontend_base.pp
+++ b/puppet/zulip/manifests/app_frontend_base.pp
@@ -9,7 +9,7 @@ class zulip::app_frontend_base {
if $::osfamily == 'debian' {
# Upgrade and other tooling wants to be able to get a database
- # shell. This is not necessary on CentOS because the postgresql
+ # shell. This is not necessary on CentOS because the PostgreSQL
# package already includes the client. This may get us a more
# recent client than the database server is configured to be,
# ($zulip::postgres_common::version), but they're compatible.
diff --git a/puppet/zulip/manifests/base.pp b/puppet/zulip/manifests/base.pp
index 5a78f93bf5..25a51891ca 100644
--- a/puppet/zulip/manifests/base.pp
+++ b/puppet/zulip/manifests/base.pp
@@ -34,7 +34,7 @@ class zulip::base {
# Used in scripts including install-yarn.sh
'curl',
'wget',
- # Used to read /etc/zulip/zulip.conf for `zulipconf` puppet function
+ # Used to read /etc/zulip/zulip.conf for `zulipconf` Puppet function
'crudini',
# Used for tools like sponge
'moreutils',
@@ -42,7 +42,7 @@ class zulip::base {
$zulip::common::nagios_plugins,
# Required for using HTTPS in apt repositories.
'apt-transport-https',
- # Needed for the cron jobs installed by puppet
+ # Needed for the cron jobs installed by Puppet
'cron',
]
}
diff --git a/puppet/zulip/manifests/dockervoyager.pp b/puppet/zulip/manifests/dockervoyager.pp
index 2511276a94..43f6c2afb0 100644
--- a/puppet/zulip/manifests/dockervoyager.pp
+++ b/puppet/zulip/manifests/dockervoyager.pp
@@ -1,6 +1,6 @@
# This class includes all the modules you need to install/run a Zulip installation
-# in a single container (without the database, memcached, redis services).
-# The database, memcached, redis services need to be run in separate containers.
+# in a single container (without the database, memcached, Redis services).
+# The database, memcached, Redis services need to be run in separate containers.
# Through this split of services, it is easier to scale the services to the needs.
class zulip::dockervoyager {
include zulip::base
diff --git a/puppet/zulip/manifests/postgres_appdb_base.pp b/puppet/zulip/manifests/postgres_appdb_base.pp
index 3fdc85abbb..fd6d4e904f 100644
--- a/puppet/zulip/manifests/postgres_appdb_base.pp
+++ b/puppet/zulip/manifests/postgres_appdb_base.pp
@@ -1,4 +1,4 @@
-# Minimal shared configuration needed to run a Zulip postgres database.
+# Minimal shared configuration needed to run a Zulip Postgres database.
class zulip::postgres_appdb_base {
include zulip::postgres_common
include zulip::process_fts_updates
@@ -33,7 +33,7 @@ class zulip::postgres_appdb_base {
$pgroonga_setup_sql_path = "${postgres_sharedir}/pgroonga_setup.sql"
$setup_system_deps = 'setup_yum_repo'
$postgres_restart = "systemctl restart postgresql-${zulip::postgres_common::version}"
- # TODO Since we can't find the postgres dicts directory on CentOS yet, we
+ # TODO Since we can't find the Postgres dicts directory on CentOS yet, we
# link directly to the hunspell directory.
$postgres_dict_dict = '/usr/share/myspell/en_US.dic'
$postgres_dict_affix = '/usr/share/myspell/en_US.aff'
diff --git a/puppet/zulip/manifests/rabbit.pp b/puppet/zulip/manifests/rabbit.pp
index 8c4c6fe62a..46523e6fa5 100644
--- a/puppet/zulip/manifests/rabbit.pp
+++ b/puppet/zulip/manifests/rabbit.pp
@@ -3,7 +3,7 @@ class zulip::rabbit {
'debian' => 'erlang-base',
'redhat' => 'erlang',
}
- $rabbit_packages = [# Needed to run rabbitmq
+ $rabbit_packages = [# Needed to run RabbitMQ
$erlang,
'rabbitmq-server',
]
diff --git a/puppet/zulip/manifests/tornado_sharding.pp b/puppet/zulip/manifests/tornado_sharding.pp
index ab390687f0..231a97a445 100644
--- a/puppet/zulip/manifests/tornado_sharding.pp
+++ b/puppet/zulip/manifests/tornado_sharding.pp
@@ -6,7 +6,7 @@ class zulip::tornado_sharding {
# The file entries below serve only to initialize the sharding config files
# with the correct default content for the "only one shard" setup. For this
# reason they use "replace => false", because the files are managed by
- # the sharding script afterwards and puppet shouldn't overwrite them.
+ # the sharding script afterwards and Puppet shouldn't overwrite them.
file { '/etc/zulip/nginx_sharding.conf':
ensure => file,
owner => 'root',
diff --git a/puppet/zulip/templates/postfix/main.cf.erb b/puppet/zulip/templates/postfix/main.cf.erb
index d3ed869570..fed7e94b51 100644
--- a/puppet/zulip/templates/postfix/main.cf.erb
+++ b/puppet/zulip/templates/postfix/main.cf.erb
@@ -1,4 +1,4 @@
-# This file is managed by puppet; local changes will be overridden.
+# This file is managed by Puppet; local changes will be overridden.
smtpd_banner = $myhostname ESMTP $mail_name (Zulip)
biff = no
diff --git a/puppet/zulip_ops/files/munin-plugins/tornado_event_queues b/puppet/zulip_ops/files/munin-plugins/tornado_event_queues
index 9e45b91b8e..f59be5a688 100755
--- a/puppet/zulip_ops/files/munin-plugins/tornado_event_queues
+++ b/puppet/zulip_ops/files/munin-plugins/tornado_event_queues
@@ -4,7 +4,7 @@
#
# Usage: Link or copy into /etc/munin/node.d/
#
-# No Parameters
+# No parameters
#
# Magic markers (optional - only used by munin-config and some
# installation scripts):
diff --git a/puppet/zulip_ops/files/nagios3/conf.d/hostgroups.cfg b/puppet/zulip_ops/files/nagios3/conf.d/hostgroups.cfg
index 00a846a2ad..881f2b101a 100644
--- a/puppet/zulip_ops/files/nagios3/conf.d/hostgroups.cfg
+++ b/puppet/zulip_ops/files/nagios3/conf.d/hostgroups.cfg
@@ -1,7 +1,7 @@
# A simple wildcard hostgroup
define hostgroup {
hostgroup_name all
- alias All Servers
+ alias All servers
members *
}
@@ -17,70 +17,70 @@ define hostgroup {
define hostgroup {
hostgroup_name web
- alias Web Servers
+ alias Web servers
}
define hostgroup {
hostgroup_name frontends
- alias Frontend Web Servers
+ alias Frontend web servers
}
define hostgroup {
hostgroup_name staging_frontends
- alias Staging Frontend Web Servers
+ alias Staging frontend web servers
}
define hostgroup {
hostgroup_name prod_frontends
- alias Production Frontend Web Servers
+ alias Production frontend web servers
}
define hostgroup {
hostgroup_name multitornado_frontends
- alias Frontend Web Servers with multiple Tornado processes
+ alias Frontend web servers with multiple Tornado processes
}
define hostgroup {
hostgroup_name singletornado_frontends
- alias Frontend Web Servers with a single Tornado process
+ alias Frontend web servers with a single Tornado process
}
define hostgroup {
hostgroup_name redis
- alias Redis Servers
+ alias Redis servers
hostgroup_members frontends
}
define hostgroup {
hostgroup_name zmirror
- alias Zephyr Mirror Servers
+ alias Zephyr mirror servers
}
define hostgroup {
hostgroup_name zmirrorp
- alias Zephyr Mirror Personals Servers
+ alias Zephyr mirror personals servers
}
define hostgroup {
hostgroup_name zmirror_main
- alias Zephyr Mirror Main Servers
+ alias Zephyr mirror main servers
}
define hostgroup {
hostgroup_name postgres
- alias PostgreSQL Servers
+ alias PostgreSQL servers
hostgroup_members postgres_appdb
}
define hostgroup {
hostgroup_name postgres_appdb
- alias PostgreSQL App Servers
+ alias PostgreSQL app servers
hostgroup_members postgres_appdb_primary
}
define hostgroup {
hostgroup_name postgres_appdb_primary
- alias Primary PostgreSQL App Servers
+ alias Primary PostgreSQL app servers
}
define hostgroup {
diff --git a/puppet/zulip_ops/files/nagios3/conf.d/services.cfg b/puppet/zulip_ops/files/nagios3/conf.d/services.cfg
index 744282a305..59ef289490 100644
--- a/puppet/zulip_ops/files/nagios3/conf.d/services.cfg
+++ b/puppet/zulip_ops/files/nagios3/conf.d/services.cfg
@@ -156,7 +156,7 @@ define service {
define service {
use generic-service
- service_description Check postgres autovac_freeze
+ service_description Check Postgres autovac_freeze
check_command check_postgres!zulip!nagios!autovac_freeze
hostgroup postgres_appdb_primary
contact_groups admins
@@ -164,7 +164,7 @@ define service {
define service {
use generic-service
- service_description Check postgres backends
+ service_description Check Postgres backends
check_command check_postgres!zulip!nagios!backends
hostgroup postgres_appdb
contact_groups admins
@@ -172,7 +172,7 @@ define service {
define service {
use generic-service
- service_description Check postgres connection
+ service_description Check Postgres connection
check_command check_postgres!zulip!nagios!connection
hostgroup postgres_appdb
contact_groups page_admins
@@ -180,7 +180,7 @@ define service {
define service {
use generic-service
- service_description Check postgres disabled triggers
+ service_description Check Postgres disabled triggers
check_command check_postgres!zulip!nagios!disabled_triggers
hostgroup postgres_appdb
contact_groups admins
@@ -188,7 +188,7 @@ define service {
define service {
use generic-service
- service_description Check postgres hitratio
+ service_description Check Postgres hitratio
check_command check_postgres!zulip!nagios!hitratio
hostgroup postgres_appdb
contact_groups admins
@@ -196,7 +196,7 @@ define service {
define service {
use generic-service
- service_description Check postgres locks
+ service_description Check Postgres locks
check_command check_postgres_alert_args!zulip!nagios!locks!400!600
hostgroup postgres_appdb
contact_groups admins
@@ -204,7 +204,7 @@ define service {
define service {
use generic-service
- service_description Check postgres query_time
+ service_description Check Postgres query_time
check_command check_postgres_alert_args!zulip!nagios!query_time!20 seconds!40 seconds
hostgroup postgres_appdb
contact_groups admins
@@ -212,7 +212,7 @@ define service {
define service {
use generic-service
- service_description Check postgres sequence
+ service_description Check Postgres sequence
check_command check_postgres!zulip!nagios!sequence
hostgroup postgres_appdb
contact_groups admins
@@ -220,7 +220,7 @@ define service {
define service {
use generic-service
- service_description Check postgres timesync
+ service_description Check Postgres timesync
check_command check_postgres!zulip!nagios!timesync
hostgroup postgres_appdb
contact_groups admins
@@ -228,7 +228,7 @@ define service {
# define service {
# use generic-service
-# service_description Check postgres txn_idle
+# service_description Check Postgres txn_idle
# check_command check_postgres_alert_args!zulip!nagios!txn_idle!20 seconds!40 seconds
# hostgroup postgres_appdb
# contact_groups admins
@@ -236,7 +236,7 @@ define service {
define service {
use generic-service
- service_description Check postgres txn_time
+ service_description Check Postgres txn_time
check_command check_postgres_alert_args!zulip!nagios!txn_time!20 seconds!40 seconds
hostgroup postgres_appdb
contact_groups admins
@@ -252,7 +252,7 @@ define service {
define service{
use generic-service
- service_description Check postgres replication lag
+ service_description Check Postgres replication lag
check_command check_postgres_replication_lag
hostgroup postgres_appdb
contact_groups admins
@@ -288,7 +288,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq queue sizes
+ service_description Check RabbitMQ queue sizes
check_command check_rabbitmq_queues!22
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -307,7 +307,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq notify_tornado consumers
+ service_description Check RabbitMQ notify_tornado consumers
check_command check_rabbitmq_consumers!notify_tornado
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -318,7 +318,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq user_activity_interval consumers
+ service_description Check RabbitMQ user_activity_interval consumers
check_command check_rabbitmq_consumers!user_activity_interval
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -329,7 +329,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq user_presence consumers
+ service_description Check RabbitMQ user_presence consumers
check_command check_rabbitmq_consumers!user_presence
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -340,7 +340,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq invites consumers
+ service_description Check RabbitMQ invites consumers
check_command check_rabbitmq_consumers!invites
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -351,7 +351,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq signups consumers
+ service_description Check RabbitMQ signups consumers
check_command check_rabbitmq_consumers!signups
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -362,7 +362,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq digest email consumers
+ service_description Check RabbitMQ digest email consumers
check_command check_rabbitmq_consumers!digest_emails
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -373,7 +373,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq email mirror consumers
+ service_description Check RabbitMQ email mirror consumers
check_command check_rabbitmq_consumers!email_mirror
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -384,7 +384,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq missedmessage mobile notifications consumers
+ service_description Check RabbitMQ missedmessage mobile notifications consumers
check_command check_rabbitmq_consumers!missedmessage_mobile_notifications
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -395,7 +395,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq missedmessage email consumers
+ service_description Check RabbitMQ missedmessage email consumers
check_command check_rabbitmq_consumers!missedmessage_email
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
@@ -406,7 +406,7 @@ define service {
define service {
use generic-service
- service_description Check rabbitmq user activity consumers
+ service_description Check RabbitMQ user activity consumers
check_command check_rabbitmq_consumers!user_activity
# Workaround weird checks 40s after first error causing alerts
# from a single failure because cron hasn't run again yet
diff --git a/puppet/zulip_ops/manifests/base.pp b/puppet/zulip_ops/manifests/base.pp
index f2e1a2c000..e5d523d9b1 100644
--- a/puppet/zulip_ops/manifests/base.pp
+++ b/puppet/zulip_ops/manifests/base.pp
@@ -190,8 +190,8 @@ class zulip_ops::base {
ensure => running,
# Because there is no running process for this service, the normal status
- # checks fail. Because puppet then thinks the service has been manually
- # stopped, it won't restart it. This fake status command will trick puppet
+ # checks fail. Because Puppet then thinks the service has been manually
+ # stopped, it won't restart it. This fake status command will trick Puppet
# into thinking the service is *always* running (which in a way it is, as
# iptables is part of the kernel.)
hasstatus => true,
diff --git a/requirements/common.in b/requirements/common.in
index b845bdc585..5357da329a 100644
--- a/requirements/common.in
+++ b/requirements/common.in
@@ -23,7 +23,7 @@ jsx-lexer
# Needed for manage.py
ipython
-# Needed for Image Processing
+# Needed for image processing
Pillow
# Needed for building complex DB queries
@@ -59,7 +59,7 @@ httplib2
# Forked to avoid pulling in scipy: https://github.com/mailgun/talon/issues/130
https://github.com/zulip/talon/archive/7d8bdc4dbcfcc5a73298747293b99fe53da55315.zip#egg=talon==1.2.10.zulip1
-# Needed for hipchat import
+# Needed for HipChat import
hypchat
# Needed for inlining the CSS in emails
@@ -71,7 +71,7 @@ PyJWT
# Needed for including other Markdown files for user docs
markdown-include
-# Needed to access rabbitmq
+# Needed to access RabbitMQ
pika
# Needed to access our database
@@ -89,7 +89,7 @@ python-dateutil
# Needed for timezone work
pytz
-# Needed for redis
+# Needed for Redis
redis
# Needed to parse source maps for error reporting
diff --git a/scripts/lib/clean-unused-caches b/scripts/lib/clean-unused-caches
index 5c548918bc..3d3f4d0202 100755
--- a/scripts/lib/clean-unused-caches
+++ b/scripts/lib/clean-unused-caches
@@ -9,7 +9,7 @@ from scripts.lib.zulip_tools import parse_cache_script_args
def main() -> None:
- args = parse_cache_script_args("This script cleans unused zulip caches.")
+ args = parse_cache_script_args("This script cleans unused Zulip caches.")
os.chdir(ZULIP_PATH)
clean_venv_cache.main(args)
clean_node_cache.main(args)
diff --git a/scripts/lib/clean_emoji_cache.py b/scripts/lib/clean_emoji_cache.py
index 6457949c54..ded3759667 100755
--- a/scripts/lib/clean_emoji_cache.py
+++ b/scripts/lib/clean_emoji_cache.py
@@ -43,5 +43,5 @@ def main(args: argparse.Namespace) -> None:
EMOJI_CACHE_PATH, caches_in_use, "emoji cache", args)
if __name__ == "__main__":
- args = parse_cache_script_args("This script cleans unused zulip emoji caches.")
+ args = parse_cache_script_args("This script cleans unused Zulip emoji caches.")
main(args)
diff --git a/scripts/lib/clean_node_cache.py b/scripts/lib/clean_node_cache.py
index b1fd1db0c3..c3a36c2ea5 100755
--- a/scripts/lib/clean_node_cache.py
+++ b/scripts/lib/clean_node_cache.py
@@ -46,5 +46,5 @@ def main(args: argparse.Namespace) -> None:
NODE_MODULES_CACHE_PATH, caches_in_use, "node modules cache", args)
if __name__ == "__main__":
- args = parse_cache_script_args("This script cleans unused zulip npm caches.")
+ args = parse_cache_script_args("This script cleans unused Zulip npm caches.")
main(args)
diff --git a/scripts/lib/clean_venv_cache.py b/scripts/lib/clean_venv_cache.py
index 513b4d532f..e8090cf10f 100755
--- a/scripts/lib/clean_venv_cache.py
+++ b/scripts/lib/clean_venv_cache.py
@@ -54,5 +54,5 @@ def main(args: argparse.Namespace) -> None:
VENV_CACHE_DIR, caches_in_use, "venv cache", args)
if __name__ == "__main__":
- args = parse_cache_script_args("This script cleans unused zulip venv caches.")
+ args = parse_cache_script_args("This script cleans unused Zulip venv caches.")
main(args)
diff --git a/scripts/lib/email-mirror-postfix b/scripts/lib/email-mirror-postfix
index 3eafc7d9a9..1709ff3164 100755
--- a/scripts/lib/email-mirror-postfix
+++ b/scripts/lib/email-mirror-postfix
@@ -6,13 +6,13 @@ forwarding emails into Zulip.
https://zulip.readthedocs.io/en/latest/production/settings.html#email-gateway
The email gateway supports two major modes of operation: An email
-server (using postfix) where the email address configured in
+server (using Postfix) where the email address configured in
EMAIL_GATEWAY_PATTERN delivers emails directly to Zulip (this) or a
cron job that connects to an IMAP inbox (which receives the emails)
periodically.
-Zulip's puppet configuration takes care of configuring postfix to
-execute this script when emails are received by postfix, piping the
+Zulip's Puppet configuration takes care of configuring Postfix to
+execute this script when emails are received by Postfix, piping the
email content via standard input (and the destination email address in
the ORIGINAL_RECIPIENT environment variable).
@@ -65,7 +65,7 @@ parser.add_argument('-d', '--dst-host', dest="host", default='https://127.0.0.1'
"Address must contain a HTTP protocol.")
parser.add_argument('-u', '--dst-url', dest="url", default='/email_mirror_message',
- help="Destination relative url for uploading email from email mirror.")
+ help="Destination relative URL for uploading email from email mirror.")
parser.add_argument('-n', '--not-verify-ssl', dest="verify_ssl", action='store_false',
help="Disable ssl certificate verifying for self-signed certificates")
diff --git a/scripts/lib/install b/scripts/lib/install
index 4200efa814..5af3c5b2b8 100755
--- a/scripts/lib/install
+++ b/scripts/lib/install
@@ -121,7 +121,7 @@ fi
read -r -a APT_OPTIONS <<<"${APT_OPTIONS:-}"
# Install additional packages.
read -r -a ADDITIONAL_PACKAGES <<<"${ADDITIONAL_PACKAGES:-}"
-# Comma-separated list of puppet manifests to install. default is
+# Comma-separated list of Puppet manifests to install. default is
# zulip::voyager for an all-in-one system or zulip::dockervoyager for
# Docker. Use e.g. zulip::app_frontend for a Zulip frontend server.
PUPPET_CLASSES="${PUPPET_CLASSES:-zulip::voyager}"
@@ -234,8 +234,8 @@ fi
case ",$PUPPET_CLASSES," in
*,zulip::voyager,* | *,zulip::postgres_appdb_tuned,*)
if [ "$package_system" = apt ]; then
- # We're going to install Postgres from the postgres apt
- # repository; this may conflict with the existing postgres.
+ # We're going to install Postgres from the Postgres apt
+ # repository; this may conflict with the existing Postgres.
OTHER_PG="$(dpkg --get-selections \
| grep -E '^postgresql-[0-9]+\s+install$' \
| grep -v "^postgresql-$POSTGRES_VERSION\b" \
@@ -405,7 +405,7 @@ EOF
--classfile=/var/lib/puppet/classes.txt \
>/dev/null
- # We only need the postgres version setting on database hosts; but
+ # We only need the Postgres version setting on database hosts; but
# we don't know if this is a database host until we have the catalog summary.
if ! has_class "zulip::postgres_common" || [ "$package_system" != apt ]; then
crudini --del /etc/zulip/zulip.conf postgresql
@@ -511,7 +511,7 @@ if has_class "zulip::app_frontend_base"; then
chown -R zulip:zulip /home/zulip /var/log/zulip /etc/zulip/settings.py
if ! [ -e "/home/zulip/prod-static/generated" ]; then
- # If we're installing from a git checkout, we need to run
+ # If we're installing from a Git checkout, we need to run
# `tools/update-prod-static` in order to build the static
# assets.
su zulip -c '/home/zulip/deployments/current/tools/update-prod-static'
@@ -525,7 +525,7 @@ if [ -n "$NO_INIT_DB" ]; then
Success!
Stopping because --no-init-db was passed.
- To complete the installation, configure postgres and then run:
+ To complete the installation, configure Postgres and then run:
su zulip -c '/home/zulip/deployments/current/scripts/setup/initialize-database'
su zulip -c '/home/zulip/deployments/current/manage.py generate_realm_creation_link'
diff --git a/scripts/lib/upgrade-zulip-stage-2 b/scripts/lib/upgrade-zulip-stage-2
index 69bc998f18..e0afd57a47 100755
--- a/scripts/lib/upgrade-zulip-stage-2
+++ b/scripts/lib/upgrade-zulip-stage-2
@@ -199,19 +199,19 @@ elif args.from_git:
logging.info("Shutting down server to build static assets on a low-RAM system.")
shutdown_server()
- # Note: The fact that this is before we apply puppet changes means
- # that we don't support adding new puppet dependencies of
- # update-prod-static with the git upgrade process. But it'll fail
+ # Note: The fact that this is before we apply Puppet changes means
+ # that we don't support adding new Puppet dependencies of
+ # update-prod-static with the Git upgrade process. But it'll fail
# safely; this seems like a worthwhile tradeoff to minimize downtime.
logging.info("Building static assets...")
subprocess.check_call(["./tools/update-prod-static", "--prev-deploy=" +
os.path.join(DEPLOYMENTS_DIR, 'current')],
preexec_fn=su_to_zulip)
- logging.info("Caching zulip git version...")
+ logging.info("Caching Zulip Git version...")
subprocess.check_call(["./tools/cache-zulip-git-version"], preexec_fn=su_to_zulip)
else:
# Since this doesn't do any actual work, it's likely safe to have
- # this run before we apply puppet changes (saving a bit of downtime).
+ # this run before we apply Puppet changes (saving a bit of downtime).
logging.info("Installing static assets...")
subprocess.check_call(["cp", "-rT", os.path.join(deploy_path, 'prod-static/serve'),
'/home/zulip/prod-static'], preexec_fn=su_to_zulip)
@@ -252,12 +252,12 @@ if os.path.exists("/etc/supervisor/conf.d/zulip_db.conf"):
if (not args.skip_puppet or migrations_needed) and IS_SERVER_UP:
# By default, we shut down the service to apply migrations and
- # puppet changes, to minimize risk of issues due to inconsistent
+ # Puppet changes, to minimize risk of issues due to inconsistent
# state.
shutdown_server()
if not args.skip_puppet:
- logging.info("Applying puppet changes...")
+ logging.info("Applying Puppet changes...")
subprocess.check_call(["./scripts/zulip-puppet-apply", "--force"])
subprocess.check_call(["apt-get", "-y", "upgrade"])
@@ -283,5 +283,5 @@ else:
logging.info("Skipping purging old deployments.")
if args.skip_puppet:
- logging.info("Showing un-applied puppet changes:")
+ logging.info("Showing un-applied Puppet changes:")
subprocess.check_call(["./scripts/zulip-puppet-apply", "--noop", "--show_diff"])
diff --git a/scripts/setup/generate_secrets.py b/scripts/setup/generate_secrets.py
index bd6a4179c1..6e1e757269 100755
--- a/scripts/setup/generate_secrets.py
+++ b/scripts/setup/generate_secrets.py
@@ -124,7 +124,7 @@ def generate_secrets(development: bool = False) -> None:
if settings.MEMCACHED_LOCATION == "127.0.0.1:11211":
add_secret("memcached_password", random_token())
- # Password for authentication to redis.
+ # Password for authentication to Redis.
if need_secret("redis_password"):
# We defer importing settings unless we need it, because
# importing settings is expensive (mostly because of
diff --git a/scripts/setup/postgres-init-db b/scripts/setup/postgres-init-db
index 71a1303d34..5a06229c1a 100755
--- a/scripts/setup/postgres-init-db
+++ b/scripts/setup/postgres-init-db
@@ -10,7 +10,7 @@ set -x
# What user should we use for connecting to the database
POSTGRES_USER="${POSTGRES_USER:-postgres}"
-# This psql command may fail because the zulip database doesn’t exist,
+# This psql command may fail because the Zulip database doesn’t exist,
# hence the &&.
if records="$(
cd / # Make sure the current working directory is readable by postgres
diff --git a/scripts/setup/restore-backup b/scripts/setup/restore-backup
index 10a076f573..11bf1bfad3 100755
--- a/scripts/setup/restore-backup
+++ b/scripts/setup/restore-backup
@@ -115,10 +115,10 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
"--production",
])
- # If there is a local rabbitmq, we need to reconfigure it
- # to ensure the rabbitmq password matches the value in the
+ # If there is a local RabbitMQ, we need to reconfigure it
+ # to ensure the RabbitMQ password matches the value in the
# restored zulip-secrets.conf. We need to be careful to
- # only do this if rabbitmq is configured to run locally on
+ # only do this if RabbitMQ is configured to run locally on
# the system.
rabbitmq_host = subprocess.check_output(
[os.path.join(settings.DEPLOY_ROOT,
diff --git a/scripts/setup/upgrade-postgres b/scripts/setup/upgrade-postgres
index c1925895ce..34a2e0dd29 100755
--- a/scripts/setup/upgrade-postgres
+++ b/scripts/setup/upgrade-postgres
@@ -24,8 +24,8 @@ if pg_lsclusters -h | grep -qE "^$UPGRADE_TO\s+main\b"; then
fi
(
- # Two-stage application of puppet; we apply the bare-bones
- # postgresql configuration first, so that FTS will be configured
+ # Two-stage application of Puppet; we apply the bare-bones
+ # PostgreSQL configuration first, so that FTS will be configured
# prior to the pg_upgradecluster.
TEMP_CONF_DIR=$(mktemp -d)
cp /etc/zulip/zulip.conf "$TEMP_CONF_DIR"
diff --git a/scripts/zulip-puppet-apply b/scripts/zulip-puppet-apply
index 2d6ce2c659..42bebb38fb 100755
--- a/scripts/zulip-puppet-apply
+++ b/scripts/zulip-puppet-apply
@@ -11,7 +11,7 @@ from lib.zulip_tools import assert_running_as_root, parse_os_release
assert_running_as_root()
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-parser = argparse.ArgumentParser(description="Run puppet")
+parser = argparse.ArgumentParser(description="Run Puppet")
parser.add_argument('--force', '-f', action="store_true",
help="Do not prompt with proposed changes")
parser.add_argument('--noop', action="store_true",
@@ -32,7 +32,7 @@ Exec { path => "/usr/sbin:/usr/bin:/sbin:/bin" }
for pclass in re.split(r'\s*,\s*', config.get('machine', 'puppet_classes')):
puppet_config += f"include {pclass}\n"
-# We use the puppet configuration from the same Zulip checkout as this script
+# We use the Puppet configuration from the same Zulip checkout as this script
scripts_path = os.path.join(BASE_DIR, "scripts")
puppet_module_path = os.path.join(BASE_DIR, "puppet")
puppet_cmd = ["puppet", "apply", f"--modulepath={puppet_module_path}", "-e", puppet_config]
@@ -40,12 +40,12 @@ if args.noop:
puppet_cmd += ["--noop"]
puppet_cmd += extra_args
-# Set the scripts path to be a factor so it can be used by puppet code
+# Set the scripts path to be a factor so it can be used by Puppet code
puppet_env = os.environ.copy()
puppet_env["FACTER_zulip_conf_path"] = args.config
puppet_env["FACTER_zulip_scripts_path"] = scripts_path
-# This is to suppress puppet warnings with ruby 2.7.
+# This is to suppress Puppet warnings with ruby 2.7.
if (distro_info['ID'], distro_info['VERSION_ID']) in [('ubuntu', '20.04')]:
puppet_env["RUBYOPT"] = "-W0"
diff --git a/static/assets/zulip-emoji/README b/static/assets/zulip-emoji/README
index 2208fa6606..51ddf9f520 100644
--- a/static/assets/zulip-emoji/README
+++ b/static/assets/zulip-emoji/README
@@ -3,7 +3,7 @@ about Noto, please refer to:
https://code.google.com/p/noto/
-These images were generated from the git repository at
+These images were generated from the Git repository at
as of
90372d894b5d9c9f2a111315d2eb3b8de1979ee4
diff --git a/static/js/bundles/app.js b/static/js/bundles/app.js
index ad554d3156..80b16a1375 100644
--- a/static/js/bundles/app.js
+++ b/static/js/bundles/app.js
@@ -1,6 +1,6 @@
import "./common";
-// Import Third party libraries
+// Import third party libraries
import "../../third/bootstrap-notify/js/bootstrap-notify";
import "../../third/bootstrap-typeahead/typeahead";
import "../../third/bootstrap-tooltip/tooltip";
@@ -12,7 +12,7 @@ import "jquery-validation";
import "flatpickr";
import "flatpickr/dist/plugins/confirmDate/confirmDate";
-// Import App JS
+// Import app JS
import "../i18n";
import "../feature_flags";
import "../loading";
@@ -195,7 +195,7 @@ import "../search_pill_widget";
import "../stream_ui_updates";
import "../spoilers";
-// Import Styles
+// Import styles
import "../../third/bootstrap-notify/css/bootstrap-notify.css";
import "spectrum-colorpicker/spectrum.css";
diff --git a/static/js/compose_ui.js b/static/js/compose_ui.js
index 2e6ce4bca8..4b85a59df9 100644
--- a/static/js/compose_ui.js
+++ b/static/js/compose_ui.js
@@ -104,7 +104,7 @@ exports.compute_placeholder_text = function (opts) {
}
}
- // For Private Messages
+ // For private messages
if (opts.private_message_recipient) {
const recipient_list = opts.private_message_recipient.split(",");
const recipient_names = recipient_list
diff --git a/static/js/hash_util.js b/static/js/hash_util.js
index 7db2bc8594..be907d0284 100644
--- a/static/js/hash_util.js
+++ b/static/js/hash_util.js
@@ -59,12 +59,12 @@ exports.encode_stream_name = function (operand) {
exports.decodeHashComponent = function (str) {
try {
- // This fails for URLS containing
+ // This fails for URLs containing
// foo.foo or foo%foo due to our fault in special handling
// of such characters when encoding. This can also,
// fail independent of our fault, so just tell the user
- // that the url is invalid.
- // TODO: Show possible valid urls to the user.
+ // that the URL is invalid.
+ // TODO: Show possible valid URLs to the user.
return decodeURIComponent(str.replace(/\./g, "%"));
} catch {
ui_report.error(i18n.t("Invalid URL"), undefined, $("#home-error"), 2000);
diff --git a/static/js/markdown.js b/static/js/markdown.js
index d84d52cae6..495a95d30c 100644
--- a/static/js/markdown.js
+++ b/static/js/markdown.js
@@ -231,7 +231,7 @@ exports.add_topic_links = function (message) {
}
}
- // Also make raw urls navigable
+ // Also make raw URLs navigable
const url_re = /\b(https?:\/\/[^\s<]+[^\s"'),.:;<\]])/g; // Slightly modified from third/marked.js
const match = topic.match(url_re);
if (match) {
@@ -268,11 +268,11 @@ function handleEmoji(emoji_name) {
const alt_text = ":" + emoji_name + ":";
const title = emoji_name.split("_").join(" ");
- // Zulip supports both standard/unicode emoji, served by a
+ // Zulip supports both standard/Unicode emoji, served by a
// spritesheet and custom realm-specific emoji (served by URL).
// We first check if this is a realm emoji, and if so, render it.
//
- // Otherwise we'll look at unicode emoji to render with an emoji
+ // Otherwise we'll look at Unicode emoji to render with an emoji
// span using the spritesheet; and if it isn't one of those
// either, we pass through the plain text syntax unmodified.
const emoji_url = emoji.get_realm_emoji_url(emoji_name);
@@ -381,7 +381,7 @@ function python_to_js_filter(pattern, url) {
const name = match[1];
// Replace named group with regular matching group
pattern = pattern.replace("(?P<" + name + ">", "(");
- // Replace named reference in url to numbered reference
+ // Replace named reference in URL to numbered reference
url = url.replace("%(" + name + ")s", "\\" + current_group);
// Reset the RegExp state
diff --git a/static/js/narrow.js b/static/js/narrow.js
index 21116f68f6..0ac994adf8 100644
--- a/static/js/narrow.js
+++ b/static/js/narrow.js
@@ -859,10 +859,10 @@ exports.deactivate = function () {
};
exports.restore_home_state = function () {
- // If we click on the All Messages link while already at All Messages, unnarrow.
- // If we click on the All Messages link from another nav pane, just go
+ // If we click on the All messages link while already at All messages, unnarrow.
+ // If we click on the All messages link from another nav pane, just go
// back to the state you were in (possibly still narrowed) before
- // you left the All Messages pane.
+ // you left the All messages pane.
if (!overlays.is_active()) {
exports.deactivate();
}
diff --git a/static/js/people.js b/static/js/people.js
index 5a0641af20..5a166c249c 100644
--- a/static/js/people.js
+++ b/static/js/people.js
@@ -648,7 +648,7 @@ export function small_avatar_url(message) {
let person;
if (message.sender_id) {
// We should always have message.sender_id, except for in the
- // tutorial, where it's ok to fall back to the url in the fake
+ // tutorial, where it's ok to fall back to the URL in the fake
// messages.
person = get_by_user_id(message.sender_id);
}
diff --git a/static/js/pm_list.js b/static/js/pm_list.js
index 17d9b9cbe4..8cf61a3e1e 100644
--- a/static/js/pm_list.js
+++ b/static/js/pm_list.js
@@ -6,7 +6,7 @@ const pm_conversations = require("./pm_conversations");
let prior_dom;
let private_messages_open = false;
-// This module manages the "Private Messages" section in the upper
+// This module manages the "Private messages" section in the upper
// left corner of the app. This was split out from stream_list.js.
function get_filter_li() {
diff --git a/static/js/portico/help.js b/static/js/portico/help.js
index 35cd6f761c..7951e9c781 100644
--- a/static/js/portico/help.js
+++ b/static/js/portico/help.js
@@ -37,8 +37,8 @@ function highlight_current_article() {
let article = $('.help .sidebar a[href="' + path + hash + '"]');
if (!article.length) {
// If there isn't an entry in the left sidebar that matches
- // the full url+hash pair, instead highlight an entry in the
- // left sidebar that just matches the url part.
+ // the full URL+hash pair, instead highlight an entry in the
+ // left sidebar that just matches the URL part.
article = $('.help .sidebar a[href="' + path + '"]');
}
// Highlight current article link and the heading of the same
diff --git a/static/js/portico/integrations_dev_panel.js b/static/js/portico/integrations_dev_panel.js
index fc0a078bb4..ef56a5d702 100644
--- a/static/js/portico/integrations_dev_panel.js
+++ b/static/js/portico/integrations_dev_panel.js
@@ -1,6 +1,6 @@
"use strict";
-// Main JavaScript file for the Integrations development panel at
+// Main JavaScript file for the integrations development panel at
// /devtools/integrations.
// Data Segment: We lazy load the requested fixtures from the backend
@@ -219,7 +219,7 @@ function get_fixtures(integration_name) {
}
// We don't have the fixtures for this integration; fetch them
- // from the backend. Relative url pattern:
+ // from the backend. Relative URL pattern:
// /devtools/integrations//fixtures
channel.get({
url: "/devtools/integrations/" + integration_name + "/fixtures",
diff --git a/static/js/portico/team.js b/static/js/portico/team.js
index 8dad2b9859..c158a4dcd6 100644
--- a/static/js/portico/team.js
+++ b/static/js/portico/team.js
@@ -57,7 +57,7 @@ function get_display_name(contributor) {
// TODO (for v2 of /team contributors):
// - Make tab header responsive.
-// - Display full name instead of github username.
+// - Display full name instead of GitHub username.
export default function render_tabs() {
const template = _.template($("#contributors-template").html());
const total_tab_html = _.chain(contributors_list)
diff --git a/static/js/recent_topics.js b/static/js/recent_topics.js
index 4512aa3a6b..df5ddd1549 100644
--- a/static/js/recent_topics.js
+++ b/static/js/recent_topics.js
@@ -124,7 +124,7 @@ exports.process_message = function (msg) {
topic_data.last_msg_id = msg.id;
}
// TODO: Add backend support for participated topics.
- // Currently participated === Recently Participated
+ // Currently participated === recently participated
// i.e. Only those topics are participated for which we have the user's
// message fetched in the topic. Ideally we would want this to be attached
// to topic info fetched from backend, which is currently not a thing.
diff --git a/static/js/scroll_bar.js b/static/js/scroll_bar.js
index e856a64be4..e40bd8c3c8 100644
--- a/static/js/scroll_bar.js
+++ b/static/js/scroll_bar.js
@@ -1,6 +1,6 @@
"use strict";
-// A few of our width properties in zulip depend on the width of the
+// A few of our width properties in Zulip depend on the width of the
// browser scrollbar that is generated at the far right side of the
// page, which unfortunately varies depending on the browser and
// cannot be detected directly using CSS. As a result, we adjust a
diff --git a/static/js/stream_create.js b/static/js/stream_create.js
index ef47d7ed1a..019a8e3888 100644
--- a/static/js/stream_create.js
+++ b/static/js/stream_create.js
@@ -350,7 +350,7 @@ exports.create_handlers_for_users = function (container) {
e.preventDefault();
});
- // Search People or Streams
+ // Search people or streams
container.on("input", ".add-user-list-filter", (e) => {
const user_list = $(".add-user-list-filter");
if (user_list === 0) {
diff --git a/static/js/subs.js b/static/js/subs.js
index acbc673367..5c1f4443d0 100644
--- a/static/js/subs.js
+++ b/static/js/subs.js
@@ -543,7 +543,7 @@ exports.setup_page = function (callback) {
// so it's too risky a change for now.
//
// The history behind setting up the page from scratch every
- // time we go into "Manage Streams" is that we used to have
+ // time we go into "Manage streams" is that we used to have
// some live-update issues, so being able to re-launch the
// streams page is kind of a workaround for those bugs, since
// we will re-populate the widget.
diff --git a/static/shared/js/emoji.js b/static/shared/js/emoji.js
index 45bd85e232..2ef7fbeaba 100644
--- a/static/shared/js/emoji.js
+++ b/static/shared/js/emoji.js
@@ -90,13 +90,13 @@ export function get_emoji_codepoint(emoji_name) {
export function get_realm_emoji_url(emoji_name) {
// If the emoji name is a realm emoji, returns the URL for it.
- // Returns undefined for unicode emoji.
+ // Returns undefined for Unicode emoji.
// get_realm_emoji_url('shrug') === '/user_avatars/2/emoji/images/31.png'
const data = active_realm_emojis.get(emoji_name);
if (!data) {
- // Not all emojis have urls, plus the user
+ // Not all emojis have URLs, plus the user
// may have hand-typed an invalid emoji.
// The caller can check the result for falsiness
// and then try alternate ways of parsing the
diff --git a/static/styles/left_sidebar.css b/static/styles/left_sidebar.css
index 9da03a24b8..291ac695a3 100644
--- a/static/styles/left_sidebar.css
+++ b/static/styles/left_sidebar.css
@@ -382,7 +382,7 @@ li.top_left_recent_topics {
}
/*
- The All Messages and Stream ellipsis-v(vertical 3 dots) are
+ The All messages and stream ellipsis-v (vertical 3 dots) are
pretty similar.
*/
.all-messages-sidebar-menu-icon,
diff --git a/static/templates/admin_emoji_list.hbs b/static/templates/admin_emoji_list.hbs
index bb5d6bb879..d19b74011a 100644
--- a/static/templates/admin_emoji_list.hbs
+++ b/static/templates/admin_emoji_list.hbs
@@ -14,7 +14,7 @@
{{#if author}}
{{author.full_name}}
{{else}}
- Unknown Author
+ Unknown author
{{/if}}
@@ -52,7 +52,7 @@ mirror script instead of using Webathena.
-
This will run the Zephyr Mirroring script inside a screen
+
This will run the Zephyr mirroring script inside a screen
session, and use `/mit/kchen/bin/cont-renew-notify` to continually
renew your Kerberos tickets for up to a week; each week, it will
send you a Zephyr/Zulip letting you know that you need to renew
diff --git a/templates/zerver/api/changelog.md b/templates/zerver/api/changelog.md
index d8eff38c30..8c676362c7 100644
--- a/templates/zerver/api/changelog.md
+++ b/templates/zerver/api/changelog.md
@@ -42,7 +42,7 @@ Note that as of this feature level, stream administrators are a
partially completed feature. In particular, it is impossible for a
user to be a stream administrator at this feature level.
-**Feature Level 30**
+**Feature level 30**
* [`GET users/me/subscriptions`](/api/get-subscriptions), [`GET
/streams`](/api/get-streams): Added `date_created` to Stream
@@ -259,7 +259,7 @@ No changes; feature level used for Zulip 3.0 release.
* [`POST /messages/{message_id}/reactions`](/api/add-reaction):
The `reaction_type` parameter is optional; the server will guess the
`reaction_type` if it is not specified (checking custom emoji, then
- unicode emoji for any with the provided name).
+ Unicode emoji for any with the provided name).
* `reactions` objects returned by the API (both in `GET /messages` and
in `GET /events`) now include the user who reacted in a top-level
`user_id` field. The legacy `user` dictionary (which had
@@ -283,7 +283,7 @@ No changes; feature level used for Zulip 3.0 release.
* [`GET /messages`](/api/get-messages) and [`GET
/events`](/api/get-events): Message objects now use
`topic_links` rather than `subject_links` to indicate links either
- present in the topic or generated by Linkifiers applied to the topic.
+ present in the topic or generated by linkifiers applied to the topic.
* [`POST /users/me/subscriptions`](/api/subscribe): Replaced
`is_announcement_only` boolean with `stream_post_policy` enum for
specifying who can post to a stream.
diff --git a/templates/zerver/api/configuring-python-bindings.md b/templates/zerver/api/configuring-python-bindings.md
index 81a97a73d1..d5b51b43c1 100644
--- a/templates/zerver/api/configuring-python-bindings.md
+++ b/templates/zerver/api/configuring-python-bindings.md
@@ -17,7 +17,7 @@ A `.zuliprc` file is a plain text document that looks like this:
```
[api]
-key=
+key=
email=
site=
...
diff --git a/templates/zerver/api/deploying-bots.md b/templates/zerver/api/deploying-bots.md
index a6c3e6d4d4..bbe4a0ccda 100644
--- a/templates/zerver/api/deploying-bots.md
+++ b/templates/zerver/api/deploying-bots.md
@@ -25,8 +25,8 @@ The Zulip Botserver is for people who want to
* run multiple bots at once.
The Zulip Botserver is a Python (Flask) server that implements Zulip's
-Outgoing Webhooks API. You can of course write your own servers using
-the Outgoing Webhooks API, but the Botserver is designed to make it
+outgoing webhooks API. You can of course write your own servers using
+the outgoing webhooks API, but the Botserver is designed to make it
easy for a novice Python programmer to write a new bot and deploy it
in production.
@@ -55,7 +55,7 @@ Botserver interaction are:
}
```
- This url is configured in the Zulip web-app in your Bot User's settings.
+ This URL is configured in the Zulip web-app in your Bot User's settings.
1. The Botserver searches for a bot to handle the message.
@@ -223,7 +223,7 @@ Botserver with SSL using an `nginx` or `Apache` reverse proxy and
### Troubleshooting
-1. Make sure the API key you're using is for an [Outgoing webhook
+1. Make sure the API key you're using is for an [outgoing webhook
bot](/api/outgoing-webhooks) and you've
correctly configured the URL for your Botserver.
diff --git a/templates/zerver/api/incoming-webhooks-walkthrough.md b/templates/zerver/api/incoming-webhooks-walkthrough.md
index b749dbf1a5..72cfc00ce3 100644
--- a/templates/zerver/api/incoming-webhooks-walkthrough.md
+++ b/templates/zerver/api/incoming-webhooks-walkthrough.md
@@ -132,7 +132,7 @@ letter upper-case).
The `webhook_view` decorator indicates that the 3rd party service will
send the authorization as an API key in the query parameters. If your service uses
-HTTP Basic authentication, you would instead use the `authenticated_rest_api_view`
+HTTP basic authentication, you would instead use the `authenticated_rest_api_view`
decorator.
You should name your webhook function as such
@@ -174,10 +174,10 @@ validate the message and do the following:
Finally, we return a 200 http status with a JSON format success message via
`json_success()`.
-## Step 3: Create an api endpoint for the webhook
+## Step 3: Create an API endpoint for the webhook
In order for an incoming webhook to be externally available, it must be mapped
-to a url. This is done in `zerver/lib/integrations.py`.
+to a URL. This is done in `zerver/lib/integrations.py`.
Look for the lines beginning with:
@@ -191,7 +191,7 @@ And you'll find the entry for Hello World:
WebhookIntegration('helloworld', ['misc'], display_name='Hello World'),
```
-This tells the Zulip api to call the `api_helloworld_webhook` function in
+This tells the Zulip API to call the `api_helloworld_webhook` function in
`zerver/webhooks/helloworld/view.py` when it receives a request at
`/api/v1/external/helloworld`.
@@ -232,7 +232,7 @@ Common validators are available in `zerver/lib/validators.py`.
For either one of the command line tools, first, you'll need to get an API key
from the **Your bots** section of your Zulip user's Settings page. To test the webhook,
you'll need to [create a bot](https://zulip.com/help/add-a-bot-or-integration) with
-the **Incoming Webhook** type. Replace `` with your bot's API key in the examples
+the **Incoming webhook** type. Replace `` with your bot's API key in the examples
presented below! This is how Zulip knows that the request was made by an authorized user.
### Curl
@@ -280,9 +280,9 @@ This is the GUI tool.
1. Run `./tools/run-dev.py` then go to http://localhost:9991/devtools/integrations/.
-2. Set the following mandatory fields:
-**Bot** - Any incoming webhook bot.
-**Integration** - One of the integrations.
+2. Set the following mandatory fields:
+**Bot** - Any incoming webhook bot.
+**Integration** - One of the integrations.
**Fixture** - Though not mandatory, it's recommended that you select one and then tweak it if necessary.
The remaining fields are optional, and the URL will automatically be generated.
@@ -467,7 +467,7 @@ request:
2. Read through [Code styles and conventions](
https://zulip.readthedocs.io/en/latest/contributing/code-style.html) and take a look
through your code to double-check that you've followed Zulip's guidelines.
-3. Take a look at your git history to ensure your commits have been clear and
+3. Take a look at your Git history to ensure your commits have been clear and
logical (see [Version Control](
https://zulip.readthedocs.io/en/latest/contributing/version-control.html) for tips). If not,
consider revising them with `git rebase --interactive`. For most incoming webhooks,
diff --git a/templates/zerver/api/outgoing-webhooks.md b/templates/zerver/api/outgoing-webhooks.md
index eb6e357dee..fca7809378 100644
--- a/templates/zerver/api/outgoing-webhooks.md
+++ b/templates/zerver/api/outgoing-webhooks.md
@@ -1,6 +1,6 @@
-# Outgoing Webhooks
+# Outgoing webhooks
-Outgoing Webhooks allow you to build or set up Zulip integrations
+Outgoing webhooks allow you to build or set up Zulip integrations
which are notified when certain types of messages are sent in
Zulip. When one of those events is triggered, we'll send a HTTP POST
payload to the webhook's configured URL. Webhooks can be used to
diff --git a/templates/zerver/api/rest.md b/templates/zerver/api/rest.md
index fa4fbc4628..8bc9f5ce8e 100644
--- a/templates/zerver/api/rest.md
+++ b/templates/zerver/api/rest.md
@@ -12,7 +12,7 @@ you can do in Zulip, you can do with Zulip's REST API. To use this API:
[other languages](/api/client-libraries), or
just make HTTP requests with your favorite programming language. If
you're making your own HTTP requests, you'll want to send the
- appropriate HTTP Basic Authentication headers; see each endpoint's
+ appropriate HTTP basic authentication headers; see each endpoint's
`curl` option for details on the request format.
* The Zulip API has a standard
[system for reporting errors](/api/rest-error-handling).
diff --git a/templates/zerver/apple-error.md b/templates/zerver/apple-error.md
index afe77f5ab7..f425144728 100644
--- a/templates/zerver/apple-error.md
+++ b/templates/zerver/apple-error.md
@@ -3,7 +3,7 @@ properly configured. Please check the following:
* You have registered `{{ root_domain_uri }}/complete/apple/`
as the callback URL for your Services ID in Apple's developer console. You can
- enable "Sign In with Apple" for an app at
+ enable "Sign in with Apple" for an app at
[Certificates, Identifiers & Profiles](https://developer.apple.com/account/resources/).
* You have set `SOCIAL_AUTH_APPLE_SERVICES_ID`,
diff --git a/templates/zerver/documentation_main.html b/templates/zerver/documentation_main.html
index 24fb3a2641..d6b5af6d1d 100644
--- a/templates/zerver/documentation_main.html
+++ b/templates/zerver/documentation_main.html
@@ -1,7 +1,7 @@
{% extends "zerver/portico-help.html" %}
{% set entrypoint = "help" %}
-{# Zulip User and API Documentation. #}
+{# Zulip user and API documentation. #}
{% block title %}
{{ OPEN_GRAPH_TITLE }}
{% endblock %}
diff --git a/templates/zerver/for/research.md b/templates/zerver/for/research.md
index 1e095ae12e..70829fef29 100644
--- a/templates/zerver/for/research.md
+++ b/templates/zerver/for/research.md
@@ -4,7 +4,7 @@ describes academic research better than any of our other use cases.
Zulip has long been popular with individual research groups, but
during the pandemic has started being used for large distributed
-communities focused around research topics like Category Theory or the
+communities focused around research topics like category theory or the
Lean Theorem Prover. We enthusiastically provide free hosting for
both use cases.
@@ -82,7 +82,7 @@ discussion of Zulip for large open communities.
Below, we’ve collected a list of [Zulip features](/features) that are
particularly useful to academic research organizations (both formal
organizations and online communities focused around research topics
-like Category Theory).
+like category theory).
### Free hosting at zulip.com.
diff --git a/templates/zerver/hello.html b/templates/zerver/hello.html
index 828743b998..b5a5a57bdc 100644
--- a/templates/zerver/hello.html
+++ b/templates/zerver/hello.html
@@ -396,8 +396,8 @@
diff --git a/templates/zerver/help/change-a-users-role.md b/templates/zerver/help/change-a-users-role.md
index 4b7f731c75..0387e6d1fd 100644
--- a/templates/zerver/help/change-a-users-role.md
+++ b/templates/zerver/help/change-a-users-role.md
@@ -11,7 +11,7 @@ organization administrator can change the role of most users, but
cannot create or demote an organization owner.
You can can revoke your own owner or administrative privileges if
-there is at least one other owner in the organization (Consider
+there is at least one other owner in the organization (consider
promoting a new owner or [deactivating the
organization](/help/deactivate-your-organization) instead).
diff --git a/templates/zerver/help/import-from-gitter.md b/templates/zerver/help/import-from-gitter.md
index 63deefc696..a8c8387690 100644
--- a/templates/zerver/help/import-from-gitter.md
+++ b/templates/zerver/help/import-from-gitter.md
@@ -21,9 +21,9 @@ First, export your data from Gitter.
Zulip.
!!! warn ""
- **Note:** You'll need a gitter API token to export data. You can get this
+ **Note:** You'll need a Gitter API token to export data. You can get this
token by following the instructions in the "**Getting Started**" section of the
- [gitter documentation](https://developer.gitter.im/docs/).
+ [Gitter documentation](https://developer.gitter.im/docs/).
{end_tabs}
@@ -90,7 +90,7 @@ to mark the appropriate users as administrators.
## Caveats
- The [Gitter data export tool](https://github.com/minrk/archive-gitter)
- doesn't support exporting private gitter channels.
+ doesn't support exporting private Gitter channels.
- Gitter's export tool doesn't export email addresses; just GitHub
usernames. The import tool will thus use [GitHub's generated
diff --git a/templates/zerver/help/import-from-hipchat.md b/templates/zerver/help/import-from-hipchat.md
index b0a1259b58..bfef4f9992 100644
--- a/templates/zerver/help/import-from-hipchat.md
+++ b/templates/zerver/help/import-from-hipchat.md
@@ -37,7 +37,7 @@ admin console.
1. Upgrade to the latest version to ensure you have the latest updates to
the HipChat export dashboard.
-1. Log in at the domain name configured for your Hipchat Server.
+1. Log in at the domain name configured for your HipChat Server.
1. Click on **Server Admin > Export**.
@@ -51,7 +51,7 @@ Once the export has completed, you will receive an **email** with a link to
download the file.
!!! tip ""
- If you are not on the latest version of Hipchat Server / Data Center,
+ If you are not on the latest version of HipChat Server / Data Center,
you can do a command line export with `hipchat export --export`. See
HipChat's [command line export docs][cli-export] for more information.
diff --git a/templates/zerver/help/import-from-mattermost.md b/templates/zerver/help/import-from-mattermost.md
index 7eba2834ea..67848d41f7 100644
--- a/templates/zerver/help/import-from-mattermost.md
+++ b/templates/zerver/help/import-from-mattermost.md
@@ -109,7 +109,7 @@ Replace `` and `` with the appropriate values below.
exported_emoji/ export.json
```
-3. Exit your shell on the Gitlab Omnibus server.
+3. Exit your shell on the GitLab Omnibus server.
`exit`
diff --git a/templates/zerver/help/import-from-slack.md b/templates/zerver/help/import-from-slack.md
index c261959ea6..2e7a05b2a8 100644
--- a/templates/zerver/help/import-from-slack.md
+++ b/templates/zerver/help/import-from-slack.md
@@ -43,7 +43,7 @@ email addresses.
3. [Install the app](https://api.slack.com/authentication/basics#installing)
to your workspace. You will get an API token that you can now use to fetch
- data from your slack workspace.
+ data from your Slack workspace.
{end_tabs}
@@ -118,7 +118,7 @@ root domain. Replace the last line above with the following, after replacing
- Import of [user roles](/help/roles-and-permissions):
- Slack's `Workspace Primary Owner` and `Workspace Owner` users
- are mapped to Zulip `Organization Owner` users.
+ are mapped to Zulip `Organization owner` users.
- Slack's `Workspace Admin` users are mapped to Zulip's `Organization
administrator` users.
- Slack's `Member` users is mapped to Zulip `Member` users.
diff --git a/templates/zerver/help/restrict-name-and-email-changes.md b/templates/zerver/help/restrict-name-and-email-changes.md
index 042e5caa2b..1490f9bd1c 100644
--- a/templates/zerver/help/restrict-name-and-email-changes.md
+++ b/templates/zerver/help/restrict-name-and-email-changes.md
@@ -26,8 +26,8 @@ source, and synced into Zulip via the [Zulip API](/api) or another method.
By default, any user can
[change their email address](/help/change-your-email-address). However, you
can instead prevent all email changes. This is especially useful for
-organizations that are using LDAP or another a Single
-Sign-On solution to manage user emails.
+organizations that are using LDAP or another single
+sign-on solution to manage user emails.
{start_tabs}
diff --git a/templates/zerver/help/roles-and-permissions.md b/templates/zerver/help/roles-and-permissions.md
index 5ec8dbb71c..c59c2ca70b 100644
--- a/templates/zerver/help/roles-and-permissions.md
+++ b/templates/zerver/help/roles-and-permissions.md
@@ -2,10 +2,10 @@
There are several possible roles in a Zulip organization.
-* **Organization Owner**: Can manage users, public streams,
+* **Organization owner**: Can manage users, public streams,
organization settings, and billing.
-* **Organization Administrator**: Can manage users, public streams,
+* **Organization administrator**: Can manage users, public streams,
and organization settings. Cannot create or demote organization
owners.
diff --git a/templates/zerver/help/saml-authentication.md b/templates/zerver/help/saml-authentication.md
index e1bd303a56..3e40cb4485 100644
--- a/templates/zerver/help/saml-authentication.md
+++ b/templates/zerver/help/saml-authentication.md
@@ -1,6 +1,6 @@
# SAML authentication
-Zulip supports using SAML authentication for Single Sign On, both when
+Zulip supports using SAML authentication for single sign-on, both when
self-hosting or on the Zulip Cloud Plus plan.
This page documents details on how to setup SAML authentication with
@@ -29,11 +29,11 @@ Zulip with various common SAML Identity Providers.
* Optionally you can also send us an icon that should be shown on the button.
1. We will take care of the server-side setup and let you know as soon as it's ready.
-## Configure SAML with Onelogin
+## Configure SAML with OneLogin
1. Make sure you have created your organization. We'll assume its URL is
`https://.zulipchat.com` in the instructions below.
-1. Navigate to the Onelogin Applications page, and click "Add App".
+1. Navigate to the OneLogin Applications page, and click "Add App".
1. Search for the "SAML Test Connector (IdP w/ attr w/ sign response)" app and select it.
1. Set a name and logo according to your preferences and click "Save". This doesn't affect anything in Zulip,
but will be shown on your OneLogin Applications page.
@@ -45,7 +45,7 @@ Zulip with various common SAML Identity Providers.
1. Go to the "Parameters" section. Ignore the pre-configured parameters that are already there
and add custom ones to match the following screenshot:
- ![Onelogin parameters](/static/images/help/onelogin_parameters.png)
+ ![OneLogin parameters](/static/images/help/onelogin_parameters.png)
Make sure to set the "Include in SAML assertion" flag on them.
diff --git a/templates/zerver/help/stream-sending-policy.md b/templates/zerver/help/stream-sending-policy.md
index 0225f1c7c8..e9e4a85d03 100644
--- a/templates/zerver/help/stream-sending-policy.md
+++ b/templates/zerver/help/stream-sending-policy.md
@@ -18,6 +18,6 @@ certain users can send messages.
1. Under "Who can post to the stream?", select the option you prefer.
-1. Click **Save Changes**.
+1. Click **Save changes**.
{end_tabs}
diff --git a/templates/zerver/integrations/development/dev_panel.html b/templates/zerver/integrations/development/dev_panel.html
index cc731f8b5c..005dcbb313 100644
--- a/templates/zerver/integrations/development/dev_panel.html
+++ b/templates/zerver/integrations/development/dev_panel.html
@@ -18,7 +18,7 @@
-
Integrations Developer Panel
+
Integrations developer panel
Go to Zulip
diff --git a/templates/zerver/integrations/discourse.md b/templates/zerver/integrations/discourse.md
index 1123baae8f..972411ee55 100644
--- a/templates/zerver/integrations/discourse.md
+++ b/templates/zerver/integrations/discourse.md
@@ -13,7 +13,7 @@ key to third-party services.
Copy the `USERNAME` and `API KEY` - you'll need it later.
Head over to the
-[Discourse Chat Integration Setup Instructions](https://meta.discourse.org/t/68501)
+[discourse-chat-integration setup instructions](https://meta.discourse.org/t/68501)
and complete them.
{!congrats.md!}
diff --git a/templates/zerver/integrations/git.md b/templates/zerver/integrations/git.md
index 6072fb1678..1ebda3f5c5 100644
--- a/templates/zerver/integrations/git.md
+++ b/templates/zerver/integrations/git.md
@@ -16,10 +16,10 @@ Get Zulip notifications for your Git repositories!
notification.
1. Symlink `/usr/local/share/zulip/integrations/git/zulip_git_config.py`
- to the `.git/hooks` directory of your git repository.
+ to the `.git/hooks` directory of your Git repository.
1. Symlink `/usr/local/share/zulip/integrations/git/post-receive`
- to the `.git/hooks` directory of your git repository.
+ to the `.git/hooks` directory of your Git repository.
!!! tip ""
diff --git a/templates/zerver/integrations/google-calendar.md b/templates/zerver/integrations/google-calendar.md
index 0b356f7ca7..62e0fa489b 100644
--- a/templates/zerver/integrations/google-calendar.md
+++ b/templates/zerver/integrations/google-calendar.md
@@ -14,7 +14,7 @@ to your `~/` directory.
Next, install the latest Google API Client for Python by following the
instructions on the
-[Google Website](https://developers.google.com/api-client-library/python/start/installation).
+[Google website](https://developers.google.com/api-client-library/python/start/installation).
Then go to your **Zulip Settings** by clicking on the cog in the top
right corner, and then clicking on **Settings**.
@@ -46,7 +46,7 @@ There are two optional flags that you can specify when running this
script:
* `--calendar`: This flag specifies the calendar to watch from the
- user’s Google Account. By default, this flag is set to a user’s
+ user’s Google account. By default, this flag is set to a user’s
primary or default calendar. To specify a calendar, you need the
calendar ID which can be obtained by going to Google Calendar and
clicking on the wedge next to the calendar’s name. Click on settings
diff --git a/templates/zerver/integrations/jira-plugin.md b/templates/zerver/integrations/jira-plugin.md
index 784a647d9c..2cadec2303 100644
--- a/templates/zerver/integrations/jira-plugin.md
+++ b/templates/zerver/integrations/jira-plugin.md
@@ -9,7 +9,7 @@ versions of JIRA.*
{!download-python-bindings.md!}
-#### Plugin Installation
+#### Plugin installation
The JIRA integration plugin requires two JIRA plugins. Please install
the following plugins using the **Universal Plugin Manager** in your
@@ -21,7 +21,7 @@ JIRA installation:
[script-runner]: https://marketplace.atlassian.com/plugins/com.onresolve.jira.groovy.groovyrunner
[ssl-plugin]: https://marketplace.atlassian.com/plugins/com.atlassian.jira.plugin.jirasslplugin
-#### SSL Setup
+#### SSL setup
As Zulip is using a StartCOM SSL certificate that is not recognized by
default in the Java installation shipped with JIRA, you will need to
@@ -34,7 +34,7 @@ tell JIRA about the certificate.
instructions and restart JIRA for it to recognize the proper
certificates.
-#### Zulip Integration
+#### Zulip integration
1. Copy the folder `integrations/jira/org/` (from the tarball you
downloaded above) to your JIRA `classes` folder. For self-contained
diff --git a/templates/zerver/portico-header.html b/templates/zerver/portico-header.html
index 4fbc2538c8..4d312db38e 100644
--- a/templates/zerver/portico-header.html
+++ b/templates/zerver/portico-header.html
@@ -9,10 +9,10 @@
{% if page_is_help_center %}
- | User Documentation
+ | User documentation
{% endif %}
{% if page_is_api_center %}
- | API Documentation
+ | API documentation
{% endif %}
{% endif %}
diff --git a/templates/zerver/why-zulip.md b/templates/zerver/why-zulip.md
index c836c177e7..e61b897414 100644
--- a/templates/zerver/why-zulip.md
+++ b/templates/zerver/why-zulip.md
@@ -1,7 +1,7 @@
There are a lot of team chat apps. So why did we build Zulip?
We talk about Slack in the discussion below, but the problems apply equally
-to other apps with Slack’s conversation model, including Hipchat, IRC,
+to other apps with Slack’s conversation model, including HipChat, IRC,
Mattermost, Discord, Spark, and others.
## Reading busy Slack channels is extremely inefficient.
diff --git a/tools/build-release-tarball b/tools/build-release-tarball
index a0594c800f..c7c2240ce6 100755
--- a/tools/build-release-tarball
+++ b/tools/build-release-tarball
@@ -65,7 +65,7 @@ cd "$BASEDIR"
# Check out a temporary full copy of the index to generate static files
git checkout-index -f -a --prefix "$TMPDIR/$prefix/"
-# Add the git version information file
+# Add the Git version information file
./tools/cache-zulip-git-version
mv zulip-git-version "$TMPDIR/$prefix/"
diff --git a/tools/check-issue-labels b/tools/check-issue-labels
index 1ecf5a09bb..fa2b1b6ecf 100755
--- a/tools/check-issue-labels
+++ b/tools/check-issue-labels
@@ -12,10 +12,10 @@ import requests
# GitHub API token is required as GitHub limits unauthenticated
# requests to 60/hour. There is a good chance that this limit is
# bypassed in consecutive attempts.
-# The api token can be generated here
+# The API token can be generated here
# https://github.com/settings/tokens/new?description=Zulip%20Issue%20Label%20Checker
#
-# Copy conf.ini-template to conf.ini and populate with your api token.
+# Copy conf.ini-template to conf.ini and populate with your API token.
#
# usage: python check-issue-labels
# Pass --force as an argument to run without a token.
@@ -52,8 +52,8 @@ def check_issue_labels() -> None:
try:
token = config.get('github', 'api_token')
except configparser.Error:
- print("Error fetching GitHub api token. Copy conf.ini-template to conf.ini and populate with "
- "your api token. If you want to continue without using a token use --force.")
+ print("Error fetching GitHub API token. Copy conf.ini-template to conf.ini and populate with "
+ "your API token. If you want to continue without using a token use --force.")
sys.exit(1)
next_page_url: Optional[str] = 'https://api.github.com/repos/zulip/zulip/issues'
@@ -67,7 +67,7 @@ def check_issue_labels() -> None:
if response.status_code == 401:
sys.exit("Error. Please check the token.")
if response.status_code == 403:
- sys.exit("403 Error. This is generally caused when API limit is exceeded. You use an api "
+ sys.exit("403 Error. This is generally caused when API limit is exceeded. You use an API "
"token to overcome this limit.")
except requests.exceptions.RequestException as e:
print(e)
diff --git a/tools/check-node-fixtures b/tools/check-node-fixtures
index a5d2935b0f..4bc53d2e71 100755
--- a/tools/check-node-fixtures
+++ b/tools/check-node-fixtures
@@ -84,7 +84,7 @@ EXEMPT_OPENAPI_NAMES = [
"realm_filters_event",
# bots, delivery_email, profile_data
"realm_user_add_event",
- # openapi is incomplete
+ # OpenAPI is incomplete
"realm_update_dict_event",
# is_mirror_dummy
"reaction_add_event",
@@ -200,7 +200,7 @@ def validate_openapi_against_event_schema() -> None:
name += "_event"
if not hasattr(event_schema, name):
- print("NEED SCHEMA to match openapi", name)
+ print("NEED SCHEMA to match OpenAPI", name)
continue
openapi_type = from_openapi(sub_node)
diff --git a/tools/check-templates b/tools/check-templates
index 59e6cc840a..0a0733504a 100755
--- a/tools/check-templates
+++ b/tools/check-templates
@@ -108,7 +108,7 @@ def check_html_templates(templates: Iterable[str], all_dups: bool, fix: bool) ->
# indentation errors are often harmless, even stylistically
# harmless, but they tend to be in files that might be old
# and might eventually require more scrutiny for things like
- # localization. See github #1236.
+ # localization. See GitHub #1236.
bad_files = [
# These use various whitespace-dependent formatting that
# prevent cleaning them.
diff --git a/tools/ci/production-install b/tools/ci/production-install
index 581b58aa16..72013e6cb3 100755
--- a/tools/ci/production-install
+++ b/tools/ci/production-install
@@ -26,7 +26,7 @@ if ! apt-get dist-upgrade -y "${APT_OPTIONS[@]}"; then
apt-get dist-upgrade -y "${APT_OPTIONS[@]}"
fi
-# Pin to postgresql 10 on Bionic, so we can test upgrading it
+# Pin to PostgreSQL 10 on Bionic, so we can test upgrading it
if [ "$os_version_codename" = "bionic" ]; then
export POSTGRES_VERSION=10
fi
diff --git a/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py b/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py
index 86dce36313..0d304a6dd5 100755
--- a/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py
+++ b/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py
@@ -6,7 +6,7 @@ from .common.spiders import BaseDocumentationSpider
def get_start_url() -> List[str]:
- # Get index html file as start url and convert it to file uri
+ # Get index.html file as start URL and convert it to file URI
dir_path = os.path.dirname(os.path.realpath(__file__))
start_file = os.path.join(dir_path, os.path.join(*[os.pardir] * 4),
"docs/_build/html/index.html")
diff --git a/tools/droplets/README.md b/tools/droplets/README.md
index 312de11448..80b9ac91bd 100644
--- a/tools/droplets/README.md
+++ b/tools/droplets/README.md
@@ -81,12 +81,12 @@ the user.
In order for the script to work, the GitHub user must have:
- forked the [zulip/zulip][zulip-zulip] repository, and
-- created an ssh key pair and added it to their GitHub account.
+- created an SSH key pair and added it to their GitHub account.
(Share [this link][how-to-request] with students if they need to do these
steps.)
-The script will stop if it can't find the user's fork or ssh keys.
+The script will stop if it can't find the user's fork or SSH keys.
Once the droplet is created, you will see something similar to this message:
@@ -96,7 +96,7 @@ Your remote Zulip dev server has been created!
- Connect to your server by running
`ssh zulipdev@.zulipdev.org` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
-- There is no password; your account is configured to use your ssh keys.
+- There is no password; your account is configured to use your SSH keys.
- Once you log in, you should see `(zulip-py3-venv) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev.py`.
- While the dev server is running, you can see the Zulip server in your browser
@@ -131,7 +131,7 @@ Rough steps:
1. `tools/provision`
1. `git clean -f`, in case things were added/removed from `.gitignore`.
1. `tools/run-dev.py`, let it run to completion, and then Ctrl-C (to clear
- out anything in the Rabbit MQ queue, load messages, etc).
+ out anything in the RabbitMQ queue, load messages, etc).
1. `tools/run-dev.py`, and check that `base.zulipdev.org:9991` is up and running.
1. `> ~/.bash_history && history -c && sudo shutdown -h now` to clear any command
line history (To reduce chance of confusing new contributors in case you made a typo)
@@ -192,7 +192,7 @@ Rough steps:
## Remotely debugging a droplet
To SSH into a droplet, first make sure you have a SSH key associated with your
-github account, then ask the student to run the following in their
+GitHub account, then ask the student to run the following in their
VM:
```
diff --git a/tools/droplets/add_mentor.py b/tools/droplets/add_mentor.py
index 488908bb31..1b408edb9d 100644
--- a/tools/droplets/add_mentor.py
+++ b/tools/droplets/add_mentor.py
@@ -5,7 +5,7 @@
#
# $ python3 add_mentor.py
#
-# Alternatively you can pass in --remove to remove their ssh key from the
+# Alternatively you can pass in --remove to remove their SSH key from the
# machine:
#
# $ python3 add_mentor.py --remove
@@ -19,7 +19,7 @@ from typing import List
import requests
parser = ArgumentParser(description='Give a mentor ssh access to this machine.')
-parser.add_argument('username', help='Github username of the mentor.')
+parser.add_argument('username', help='GitHub username of the mentor.')
parser.add_argument('--remove', help='Remove his/her key from the machine.',
action='store_true')
@@ -35,7 +35,7 @@ def get_mentor_keys(username: str) -> List[str]:
r = requests.get(url)
if r.status_code != 200:
- print('Cannot connect to Github...')
+ print('Cannot connect to GitHub...')
sys.exit(1)
keys = r.json()
diff --git a/tools/droplets/create.py b/tools/droplets/create.py
index 20ce311a45..8863b5e613 100644
--- a/tools/droplets/create.py
+++ b/tools/droplets/create.py
@@ -8,10 +8,10 @@
# Requires python-digitalocean library:
# https://github.com/koalalorenzo/python-digitalocean
#
-# Also requires DigitalOcean team membership for Zulip and api token:
+# Also requires DigitalOcean team membership for Zulip and API token:
# https://cloud.digitalocean.com/settings/api/tokens
#
-# Copy conf.ini-template to conf.ini and populate with your api token.
+# Copy conf.ini-template to conf.ini and populate with your API token.
#
# usage: python3 create.py
import argparse
@@ -28,7 +28,7 @@ import digitalocean
# initiation argument parser
parser = argparse.ArgumentParser(description='Create a Zulip devopment VM DigitalOcean droplet.')
-parser.add_argument("username", help="Github username for whom you want to create a Zulip dev droplet")
+parser.add_argument("username", help="GitHub username for whom you want to create a Zulip dev droplet")
parser.add_argument('--tags', nargs='+', default=[])
parser.add_argument('-f', '--recreate', action="store_true")
@@ -47,7 +47,7 @@ def user_exists(username: str) -> bool:
return True
except urllib.error.HTTPError as err:
print(err)
- print(f"Does the github user {username} exist?")
+ print(f"Does the GitHub user {username} exist?")
sys.exit(1)
def get_keys(username: str) -> List[Dict[str, Any]]:
@@ -57,13 +57,13 @@ def get_keys(username: str) -> List[Dict[str, Any]]:
response = urllib.request.urlopen(apiurl_keys)
userkeys = json.load(response)
if not userkeys:
- print(f"No keys found. Has user {username} added ssh keys to their github account?")
+ print(f"No keys found. Has user {username} added SSH keys to their GitHub account?")
sys.exit(1)
print("...public keys found!")
return userkeys
except urllib.error.HTTPError as err:
print(err)
- print(f"Has user {username} added ssh keys to their github account?")
+ print(f"Has user {username} added SSH keys to their GitHub account?")
sys.exit(1)
def fork_exists(username: str) -> bool:
@@ -199,7 +199,7 @@ Your remote Zulip dev server has been created!
- Connect to your server by running
`ssh zulipdev@{0}.zulipdev.org` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
-- There is no password; your account is configured to use your ssh keys.
+- There is no password; your account is configured to use your SSH keys.
- Once you log in, you should see `(zulip-py3-venv) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev.py`.
- While the dev server is running, you can see the Zulip server in your browser at
diff --git a/tools/fetch-contributor-data b/tools/fetch-contributor-data
index 69fc38716d..520771ac7d 100755
--- a/tools/fetch-contributor-data
+++ b/tools/fetch-contributor-data
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
-Fetch contributors data from Github using their API, convert it to structured
+Fetch contributors data from GitHub using their API, convert it to structured
JSON data for the /team page contributors section.
"""
import os
@@ -37,7 +37,7 @@ duplicate_commits_file = os.path.join(os.path.dirname(__file__), 'duplicate_comm
parser = argparse.ArgumentParser()
parser.add_argument('--max-retries', type=int, default=10,
- help='Number of times to retry fetching data from Github')
+ help='Number of times to retry fetching data from GitHub')
args = parser.parse_args()
class ContributorsJSON(TypedDict):
@@ -78,7 +78,7 @@ def fetch_contributors(repo_name: str, max_retries: int) -> List[Contributor]:
else:
retry_attempts += 1
if retry_attempts > args.max_retries:
- logger.warning("Failed retries fetching contributors data from Github.")
+ logger.warning("Failed retries fetching contributors data from GitHub.")
sys.exit(1)
sleep_time = randrange(0, min(64, 2**retry_attempts))
diff --git a/tools/lib/provision.py b/tools/lib/provision.py
index e9431be385..f4eef30c01 100755
--- a/tools/lib/provision.py
+++ b/tools/lib/provision.py
@@ -40,7 +40,7 @@ VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
CONTINUOUS_INTEGRATION = 'GITHUB_ACTIONS' in os.environ or 'CIRCLECI' in os.environ
if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
- print(FAIL + "Error: No Zulip git repository present!" + ENDC)
+ print(FAIL + "Error: No Zulip Git repository present!" + ENDC)
print("To set up the Zulip development environment, you should clone the code")
print("from GitHub, rather than using a Zulip production release tarball.")
sys.exit(1)
@@ -159,13 +159,13 @@ COMMON_YUM_DEPENDENCIES = [
BUILD_PGROONGA_FROM_SOURCE = False
if vendor == 'debian' and os_version in [] or vendor == 'ubuntu' and os_version in []:
- # For platforms without a pgroonga release, we need to build it
+ # For platforms without a PGroonga release, we need to build it
# from source.
BUILD_PGROONGA_FROM_SOURCE = True
SYSTEM_DEPENDENCIES = [
*UBUNTU_COMMON_APT_DEPENDENCIES,
f"postgresql-{POSTGRES_VERSION}",
- # Dependency for building pgroonga from source
+ # Dependency for building PGroonga from source
f"postgresql-server-dev-{POSTGRES_VERSION}",
"libgroonga-dev",
"libmsgpack-dev",
@@ -195,7 +195,7 @@ elif "fedora" in os_families():
f"postgresql{POSTGRES_VERSION}-server",
f"postgresql{POSTGRES_VERSION}",
f"postgresql{POSTGRES_VERSION}-devel",
- # Needed to build pgroonga from source
+ # Needed to build PGroonga from source
"groonga-devel",
"msgpack-devel",
*VENV_DEPENDENCIES,
@@ -227,7 +227,7 @@ def install_system_deps() -> None:
else:
raise AssertionError("Invalid vendor")
- # For some platforms, there aren't published pgroonga
+ # For some platforms, there aren't published PGroonga
# packages available, so we build them from source.
if BUILD_PGROONGA_FROM_SOURCE:
run_as_root(["./scripts/lib/build-pgroonga"])
@@ -279,13 +279,13 @@ def install_yum_deps(deps_to_install: List[str]) -> None:
run_as_root(["ln", "-nsf", "/usr/bin/python36", "/usr/bin/python3"])
postgres_dir = f'pgsql-{POSTGRES_VERSION}'
for cmd in ['pg_config', 'pg_isready', 'psql']:
- # Our tooling expects these postgres scripts to be at
+ # Our tooling expects these Postgres scripts to be at
# well-known paths. There's an argument for eventually
# making our tooling auto-detect, but this is simpler.
run_as_root(["ln", "-nsf", f"/usr/{postgres_dir}/bin/{cmd}",
f"/usr/bin/{cmd}"])
- # From here, we do the first-time setup/initialization for the postgres database.
+ # From here, we do the first-time setup/initialization for the Postgres database.
pg_datadir = f"/var/lib/pgsql/{POSTGRES_VERSION}/data"
pg_hba_conf = os.path.join(pg_datadir, "pg_hba.conf")
@@ -300,7 +300,7 @@ def install_yum_deps(deps_to_install: List[str]) -> None:
sudo_args = ['-H'])
# Use vendored pg_hba.conf, which enables password authentication.
run_as_root(["cp", "-a", "puppet/zulip/files/postgresql/centos_pg_hba.conf", pg_hba_conf])
- # Later steps will ensure postgres is started
+ # Later steps will ensure Postgres is started
# Link in tsearch data files
overwrite_symlink(
@@ -329,7 +329,7 @@ def main(options: argparse.Namespace) -> "NoReturn":
# hash the content of setup-yum-repo*
sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
- # hash the content of build-pgroonga if pgroonga is built from source
+ # hash the content of build-pgroonga if PGroonga is built from source
if BUILD_PGROONGA_FROM_SOURCE:
sha_sum.update(open('scripts/lib/build-pgroonga', 'rb').read())
diff --git a/tools/run-dev.py b/tools/run-dev.py
index 145b8774a3..a2d5ded111 100755
--- a/tools/run-dev.py
+++ b/tools/run-dev.py
@@ -85,7 +85,7 @@ base_port = 9991
if options.test:
base_port = 9981
settings_module = "zproject.test_settings"
- # Don't auto-reload when running puppeteer tests
+ # Don't auto-reload when running Puppeteer tests
runserver_args = ['--noreload']
else:
settings_module = "zproject.settings"
@@ -159,7 +159,7 @@ def server_processes() -> List[List[str]]:
def do_one_time_webpack_compile() -> None:
# We just need to compile webpack assets once at startup, not run a daemon,
# in test mode. Additionally, webpack-dev-server doesn't support running 2
- # copies on the same system, so this model lets us run the puppeteer tests
+ # copies on the same system, so this model lets us run the Puppeteer tests
# with a running development server.
subprocess.check_call(['./tools/webpack', '--quiet', '--test'])
diff --git a/tools/setup/dev-vagrant-docker/Dockerfile b/tools/setup/dev-vagrant-docker/Dockerfile
index 96dcdfa5fb..488aaacf52 100644
--- a/tools/setup/dev-vagrant-docker/Dockerfile
+++ b/tools/setup/dev-vagrant-docker/Dockerfile
@@ -28,7 +28,7 @@ ARG VAGRANT_UID
RUN \
# We use https://github.com/gdraheim/docker-systemctl-replacement
- # to make services we install like postgres, redis, etc. normally
+ # to make services we install like Postgres, Redis, etc. normally
# managed by systemd start within Docker, which breaks normal
# operation of systemd.
dpkg-divert --add --rename /bin/systemctl \
diff --git a/tools/setup/emoji/build_emoji b/tools/setup/emoji/build_emoji
index 98b185ac0e..33b7ab558d 100755
--- a/tools/setup/emoji/build_emoji
+++ b/tools/setup/emoji/build_emoji
@@ -82,7 +82,7 @@ def main() -> None:
if not os.access(EMOJI_CACHE_PATH, os.W_OK):
# Note: In production, this block will fail, since we don't
# assume sudo access; but it should never run in production
- # anyway, because EMOJI_CACHE_PATH is created by puppet before
+ # anyway, because EMOJI_CACHE_PATH is created by Puppet before
# build_emoji would be run.
run_as_root(["mkdir", "-p", EMOJI_CACHE_PATH])
run_as_root(["chown", f"{os.getuid()}:{os.getgid()}", EMOJI_CACHE_PATH])
@@ -167,7 +167,7 @@ def generate_sprite_css_files(cache_path: str,
# emoji_is_universal? Because we briefly supported all
# Google emoji (not just the universal ones), we need to
# ensure the spritesheet is set up to correctly display
- # those google emoji (in case anyone used them).
+ # those Google emoji (in case anyone used them).
"""
For background-position we need to use percentages.
@@ -280,7 +280,7 @@ def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None
for emojiset in ['google', 'twitter']:
setup_emoji_farm(emojiset, emoji_data)
- # Set up old google "blobs" emojiset.
+ # Set up old Google "blobs" emojiset.
GOOGLE_BLOB_EMOJI_DATA_PATH = os.path.join(NODE_MODULES_PATH,
'emoji-datasource-google-blob',
'emoji.json')
diff --git a/tools/setup/emoji/emoji_names.py b/tools/setup/emoji/emoji_names.py
index 23c007ab75..6b52e10db4 100644
--- a/tools/setup/emoji/emoji_names.py
+++ b/tools/setup/emoji/emoji_names.py
@@ -4,10 +4,10 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# seems like best emoji for happy
'1f600': {'canonical_name': 'grinning', 'aliases': ['happy']},
'1f603': {'canonical_name': 'smiley', 'aliases': []},
- # the google emoji for this is not great, so made People/9 'smile' and
+ # the Google emoji for this is not great, so made People/9 'smile' and
# renamed this one
'1f604': {'canonical_name': 'big_smile', 'aliases': []},
- # from gemoji/unicode
+ # from gemoji/Unicode
'1f601': {'canonical_name': 'grinning_face_with_smiling_eyes', 'aliases': []},
# satisfied doesn't seem like a good description of these images
'1f606': {'canonical_name': 'laughing', 'aliases': ['lol']},
@@ -18,7 +18,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# not sure how the glyphs match relaxed, but both iamcal and gemoji have it
'263a': {'canonical_name': 'smiling_face', 'aliases': ['relaxed']},
'1f60a': {'canonical_name': 'blush', 'aliases': []},
- # halo comes from gemoji/unicode
+ # halo comes from gemoji/Unicode
'1f607': {'canonical_name': 'innocent', 'aliases': ['halo']},
'1f642': {'canonical_name': 'smile', 'aliases': []},
'1f643': {'canonical_name': 'upside_down', 'aliases': ['oops']},
@@ -55,7 +55,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f614': {'canonical_name': 'pensive', 'aliases': ['tired']},
'1f61f': {'canonical_name': 'worried', 'aliases': []},
# these seem to better capture the glyphs. This is also what :/ turns into
- # in google hangouts
+ # in Google Hangouts
'1f615': {'canonical_name': 'oh_no', 'aliases': ['half_frown', 'concerned', 'confused']},
'1f641': {'canonical_name': 'frown', 'aliases': ['slight_frown']},
# sad seemed better than putting another frown as the primary name (see
@@ -88,7 +88,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f632': {'canonical_name': 'astonished', 'aliases': []},
'1f635': {'canonical_name': 'dizzy', 'aliases': []},
# the alternates are from https://emojipedia.org/flushed-face/. shame
- # doesn't work with the google emoji
+ # doesn't work with the Google emoji
'1f633': {'canonical_name': 'flushed', 'aliases': ['embarrassed', 'blushing']},
'1f631': {'canonical_name': 'scream', 'aliases': []},
# scared from https://emojipedia.org/fearful-face/, shock seemed like a
@@ -129,13 +129,13 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# hurt)
'1f915': {'canonical_name': 'hurt', 'aliases': ['head_bandage', 'injured']},
# devil from https://emojipedia.org/smiling-face-with-horns/,
- # smiling_face_with_horns from gemoji/unicode
+ # smiling_face_with_horns from gemoji/Unicode
'1f608': {'canonical_name': 'smiling_devil', 'aliases': ['smiling_imp', 'smiling_face_with_horns']},
# angry_devil from https://beebom.com/emoji-meanings/
'1f47f': {'canonical_name': 'devil', 'aliases': ['imp', 'angry_devil']},
'1f479': {'canonical_name': 'ogre', 'aliases': []},
'1f47a': {'canonical_name': 'goblin', 'aliases': []},
- # pile_of_poo from gemoji/unicode
+ # pile_of_poo from gemoji/Unicode
'1f4a9': {'canonical_name': 'poop', 'aliases': ['pile_of_poo']},
# alternates seemed like reasonable additions
'1f47b': {'canonical_name': 'ghost', 'aliases': ['boo', 'spooky', 'haunted']},
@@ -155,7 +155,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# smug_cat to parallel People/31
'1f63c': {'canonical_name': 'smirk_cat', 'aliases': ['smug_cat']},
'1f63d': {'canonical_name': 'kissing_cat', 'aliases': []},
- # weary_cat from unicode/gemoji
+ # weary_cat from Unicode/gemoji
'1f640': {'canonical_name': 'scream_cat', 'aliases': ['weary_cat']},
'1f63f': {'canonical_name': 'crying_cat', 'aliases': []},
# angry_cat to better parallel People/45
@@ -209,19 +209,19 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# due to 'high_five'
'1f44b': {'canonical_name': 'wave', 'aliases': ['hello', 'hi']},
'1f919': {'canonical_name': 'call_me', 'aliases': []},
- # flexed_biceps from gemoji/unicode, strong seemed like a good addition
+ # flexed_biceps from gemoji/Unicode, strong seemed like a good addition
'1f4aa': {'canonical_name': 'muscle', 'aliases': []},
'1f595': {'canonical_name': 'middle_finger', 'aliases': []},
'270d': {'canonical_name': 'writing', 'aliases': []},
'1f933': {'canonical_name': 'selfie', 'aliases': []},
- # Couldn't figure out why iamcal chose nail_care. unicode uses nail_polish,
+ # Couldn't figure out why iamcal chose nail_care. Unicode uses nail_polish,
# gemoji uses both
'1f485': {'canonical_name': 'nail_polish', 'aliases': ['nail_care']},
'1f48d': {'canonical_name': 'ring', 'aliases': []},
'1f484': {'canonical_name': 'lipstick', 'aliases': []},
# People/18 seems like a better kiss for most circumstances
'1f48b': {'canonical_name': 'lipstick_kiss', 'aliases': []},
- # mouth from gemoji/unicode
+ # mouth from gemoji/Unicode
'1f444': {'canonical_name': 'lips', 'aliases': ['mouth']},
'1f445': {'canonical_name': 'tongue', 'aliases': []},
'1f442': {'canonical_name': 'ear', 'aliases': []},
@@ -244,7 +244,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# It's used on twitter a bunch, either when showing off hair, or in a way
# where People/144 would substitute. It'd be nice if there were another
# emoji one could use for "good hair", but I think not a big loss to not
- # have one for zulip, and not worth the eurocentrism.
+ # have one for Zulip, and not worth the eurocentrism.
# '1f471': {'canonical_name': 'X', 'aliases': ['person_with_blond_hair']},
# Added elderly since I think some people prefer that term
'1f474': {'canonical_name': 'older_man', 'aliases': ['elderly_man']},
@@ -252,7 +252,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f475': {'canonical_name': 'older_woman', 'aliases': ['elderly_woman']},
'1f472': {'canonical_name': 'gua_pi_mao', 'aliases': []},
'1f473': {'canonical_name': 'turban', 'aliases': []},
- # police seems like a more polite term, and matches the unicode
+ # police seems like a more polite term, and matches the Unicode
'1f46e': {'canonical_name': 'police', 'aliases': ['cop']},
'1f477': {'canonical_name': 'construction_worker', 'aliases': []},
'1f482': {'canonical_name': 'guard', 'aliases': []},
@@ -328,7 +328,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# https://mashable.com/2015/10/23/ios-9-1-emoji-guide/. hard_hat and
# rescue_worker seem like good additions
'26d1': {'canonical_name': 'helmet', 'aliases': ['hard_hat', 'rescue_worker', 'safety_first', 'invincible']},
- # backpack from gemoji, dominates satchel on google trends
+ # backpack from gemoji, dominates satchel on Google Trends
'1f392': {'canonical_name': 'backpack', 'aliases': ['satchel']},
'1f45d': {'canonical_name': 'pouch', 'aliases': []},
'1f45b': {'canonical_name': 'purse', 'aliases': []},
@@ -340,7 +340,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f576': {'canonical_name': 'dark_sunglasses', 'aliases': []},
'1f302': {'canonical_name': 'closed_umbrella', 'aliases': []},
'2602': {'canonical_name': 'umbrella', 'aliases': []},
- # Some animals have a unicode codepoint "", some have a codepoint
+ # Some animals have a Unicode codepoint "", some have a codepoint
# " face", and some have both. If an animal has just a single
# codepoint, we call it , regardless of what the codepoint is. If
# an animal has both, we call the "" codepoint , and come
@@ -391,10 +391,10 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f41b': {'canonical_name': 'bug', 'aliases': ['caterpillar']},
'1f98b': {'canonical_name': 'butterfly', 'aliases': []},
'1f40c': {'canonical_name': 'snail', 'aliases': []},
- # spiral_shell from unicode/gemoji, the others seemed like reasonable
+ # spiral_shell from Unicode/gemoji, the others seemed like reasonable
# additions
'1f41a': {'canonical_name': 'shell', 'aliases': ['seashell', 'conch', 'spiral_shell']},
- # unicode/gemoji have lady_beetle; hopefully with ladybug we get both the
+ # Unicode/gemoji have lady_beetle; hopefully with ladybug we get both the
# people that prefer lady_beetle (with beetle) and ladybug. There is also
# ladybird, but seems a bit much for this to complete for bird.
'1f41e': {'canonical_name': 'beetle', 'aliases': ['ladybug']},
@@ -511,11 +511,11 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# dizzy taken by People/54, had to come up with something else
'1f4ab': {'canonical_name': 'seeing_stars', 'aliases': []},
'2b50': {'canonical_name': 'star', 'aliases': []},
- # glowing_star from gemoji/unicode
+ # glowing_star from gemoji/Unicode
'1f31f': {'canonical_name': 'glowing_star', 'aliases': []},
# glamour seems like a reasonable addition
'2728': {'canonical_name': 'sparkles', 'aliases': ['glamour']},
- # high_voltage from gemoji/unicode
+ # high_voltage from gemoji/Unicode
'26a1': {'canonical_name': 'high_voltage', 'aliases': ['zap']},
# https://emojipedia.org/fire/
'1f525': {'canonical_name': 'fire', 'aliases': ['lit', 'hot', 'flame']},
@@ -555,7 +555,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# https://emojipedia.org/wind-blowing-face/
'1f32c': {'canonical_name': 'windy', 'aliases': ['mother_nature']},
'1f4a8': {'canonical_name': 'dash', 'aliases': []},
- # tornado_cloud comes from the unicode, but e.g. gemoji drops the cloud
+ # tornado_cloud comes from the Unicode, but e.g. gemoji drops the cloud
'1f32a': {'canonical_name': 'tornado', 'aliases': []},
# hazy seemed like a good addition
'1f32b': {'canonical_name': 'fog', 'aliases': ['hazy']},
@@ -587,7 +587,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f346': {'canonical_name': 'eggplant', 'aliases': []},
'1f952': {'canonical_name': 'cucumber', 'aliases': []},
'1f955': {'canonical_name': 'carrot', 'aliases': []},
- # maize is from unicode
+ # maize is from Unicode
'1f33d': {'canonical_name': 'corn', 'aliases': ['maize']},
# chili_pepper seems like a reasonable addition
'1f336': {'canonical_name': 'hot_pepper', 'aliases': ['chili_pepper']},
@@ -632,8 +632,8 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# seems like the best noodles? maybe this should be Foods/47? Noodles seem
# like a bigger thing in east asia than in europe, so going with that.
'1f35c': {'canonical_name': 'ramen', 'aliases': ['noodles']},
- # seems like the best :food:. Also a reasonable :soup:, though the google
- # one is indeed more a pot of food (the unicode) than a soup
+ # seems like the best :food:. Also a reasonable :soup:, though the Google
+ # one is indeed more a pot of food (the Unicode) than a soup
'1f372': {'canonical_name': 'food', 'aliases': ['soup', 'stew']},
# naruto is actual name, and I think don't need this to autocomplete for
# "fish"
@@ -687,13 +687,13 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# Added eating_utensils so this would show up in typeahead for eat.
'1f374': {'canonical_name': 'fork_and_knife', 'aliases': ['eating_utensils']},
# Seems like the best emoji for hungry and meal. fork_and_knife_and_plate
- # is from gemoji/unicode, and I think is better than the shorter iamcal
+ # is from gemoji/Unicode, and I think is better than the shorter iamcal
# version in this case. The rest just seemed like good additions.
'1f37d': {'canonical_name': 'hungry', 'aliases': ['meal', 'table_setting', 'fork_and_knife_with_plate', 'lets_eat']},
# most people interested in this sport call it football
'26bd': {'canonical_name': 'football', 'aliases': ['soccer']},
'1f3c0': {'canonical_name': 'basketball', 'aliases': []},
- # to distinguish from Activity/1, but is also the unicode name
+ # to distinguish from Activity/1, but is also the Unicode name
'1f3c8': {'canonical_name': 'american_football', 'aliases': []},
'26be': {'canonical_name': 'baseball', 'aliases': []},
'1f3be': {'canonical_name': 'tennis', 'aliases': []},
@@ -701,9 +701,9 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f3c9': {'canonical_name': 'rugby', 'aliases': []},
# https://emojipedia.org/billiards/ suggests this is actually used for
# billiards, not for "unlucky" or "losing" or some other connotation of
- # 8ball. The unicode name is billiards.
+ # 8ball. The Unicode name is billiards.
'1f3b1': {'canonical_name': 'billiards', 'aliases': ['pool', '8_ball']},
- # ping pong is the unicode name, and seems slightly more popular on
+ # ping pong is the Unicode name, and seems slightly more popular on
# https://trends.google.com/trends/explore?q=table%20tennis,ping%20pong
'1f3d3': {'canonical_name': 'ping_pong', 'aliases': ['table_tennis']},
'1f3f8': {'canonical_name': 'badminton', 'aliases': []},
@@ -732,7 +732,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# generally than weight_lift. The others seemed like good additions.
'1f3cb': {'canonical_name': 'lift', 'aliases': ['work_out', 'weight_lift', 'gym']},
# The decisions on tenses here and in the rest of the sports section are
- # mostly from gut feel. The unicode itself is all over the place.
+ # mostly from gut feel. The Unicode itself is all over the place.
'1f93a': {'canonical_name': 'fencing', 'aliases': []},
'1f93c': {'canonical_name': 'wrestling', 'aliases': []},
# seemed like reasonable additions
@@ -763,7 +763,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# I feel like people call sports medals "medals", and military medals
# "military medals". Also see Activity/56
'1f3c5': {'canonical_name': 'medal', 'aliases': []},
- # See Activity/55. military_medal is the gemoji/unicode
+ # See Activity/55. military_medal is the gemoji/Unicode
'1f396': {'canonical_name': 'military_medal', 'aliases': []},
# gold and number_one seem like good additions
'1f947': {'canonical_name': 'first_place', 'aliases': ['gold', 'number_one']},
@@ -802,7 +802,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f3bb': {'canonical_name': 'violin', 'aliases': []},
# dice seems more useful
'1f3b2': {'canonical_name': 'dice', 'aliases': ['die']},
- # direct_hit from gemoji/unicode, and seems more useful. bulls_eye seemed
+ # direct_hit from gemoji/Unicode, and seems more useful. bulls_eye seemed
# like a reasonable addition
'1f3af': {'canonical_name': 'direct_hit', 'aliases': ['darts', 'bulls_eye']},
# strike seemed more useful than bowling
@@ -810,12 +810,12 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f3ae': {'canonical_name': 'video_game', 'aliases': []},
# gambling seemed more useful than slot_machine
'1f3b0': {'canonical_name': 'slot_machine', 'aliases': []},
- # the google emoji for this is not red
+ # the Google emoji for this is not red
'1f697': {'canonical_name': 'car', 'aliases': []},
# rideshare seems like a reasonable addition
'1f695': {'canonical_name': 'taxi', 'aliases': ['rideshare']},
- # the google emoji for this is not blue. recreational_vehicle is from
- # gemoji/unicode, jeep seemed like a good addition
+ # the Google emoji for this is not blue. recreational_vehicle is from
+ # gemoji/Unicode, jeep seemed like a good addition
'1f699': {'canonical_name': 'recreational_vehicle', 'aliases': ['jeep']},
# school_bus seemed like a reasonable addition, even though the twitter
# glyph for this doesn't really look like a school bus
@@ -860,9 +860,9 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f69e': {'canonical_name': 'mountain_railway', 'aliases': []},
# elevated_train seems like a reasonable addition
'1f69d': {'canonical_name': 'monorail', 'aliases': ['elevated_train']},
- # from gemoji/unicode. Also, don't thin we need two bullettrain's
+ # from gemoji/Unicode. Also, don't think we need two bullettrains
'1f684': {'canonical_name': 'high_speed_train', 'aliases': []},
- # google, wikipedia, etc prefer bullet train to bullettrain
+ # Google, Wikipedia, etc. prefer bullet train to bullettrain
'1f685': {'canonical_name': 'bullet_train', 'aliases': []},
'1f688': {'canonical_name': 'light_rail', 'aliases': []},
'1f682': {'canonical_name': 'train', 'aliases': ['steam_locomotive']},
@@ -899,10 +899,10 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'2693': {'canonical_name': 'anchor', 'aliases': []},
# there already is a construction in Places/82, and work_in_progress seems
# like a useful thing to have. Construction_zone seems better than the
- # unicode construction_sign, and is there partly so this autocompletes for
+ # Unicode construction_sign, and is there partly so this autocompletes for
# construction.
'1f6a7': {'canonical_name': 'work_in_progress', 'aliases': ['construction_zone']},
- # alternates from https://emojipedia.org/fuel-pump/. unicode is fuel_pump,
+ # alternates from https://emojipedia.org/fuel-pump/. Unicode is fuel_pump,
# not fuelpump
'26fd': {'canonical_name': 'fuel_pump', 'aliases': ['gas_pump', 'petrol_pump']},
# not sure why iamcal removed the space
@@ -985,7 +985,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# ocean_sunrise to parallel Places/109
'1f305': {'canonical_name': 'sunrise', 'aliases': ['ocean_sunrise']},
'1f304': {'canonical_name': 'mountain_sunrise', 'aliases': []},
- # shooting_star and wish seem like way better descriptions. gemoji/unicode
+ # shooting_star and wish seem like way better descriptions. gemoji/Unicode
# is shooting_star
'1f320': {'canonical_name': 'shooting_star', 'aliases': ['wish']},
'1f387': {'canonical_name': 'sparkler', 'aliases': []},
@@ -1000,16 +1000,16 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f309': {'canonical_name': 'bridge', 'aliases': []},
'1f301': {'canonical_name': 'foggy', 'aliases': []},
'231a': {'canonical_name': 'watch', 'aliases': []},
- # unicode/gemoji is mobile_phone. The rest seem like good additions
+ # Unicode/gemoji is mobile_phone. The rest seem like good additions
'1f4f1': {'canonical_name': 'mobile_phone', 'aliases': ['smartphone', 'iphone', 'android']},
'1f4f2': {'canonical_name': 'calling', 'aliases': []},
- # gemoji has laptop, even though the google emoji for this does not look
+ # gemoji has laptop, even though the Google emoji for this does not look
# like a laptop
'1f4bb': {'canonical_name': 'computer', 'aliases': ['laptop']},
'2328': {'canonical_name': 'keyboard', 'aliases': []},
'1f5a5': {'canonical_name': 'desktop_computer', 'aliases': []},
'1f5a8': {'canonical_name': 'printer', 'aliases': []},
- # gemoji/unicode is computer_mouse
+ # gemoji/Unicode is computer_mouse
'1f5b1': {'canonical_name': 'computer_mouse', 'aliases': []},
'1f5b2': {'canonical_name': 'trackball', 'aliases': []},
# arcade seems like a reasonable addition
@@ -1022,7 +1022,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f4be': {'canonical_name': 'floppy_disk', 'aliases': []},
'1f4bf': {'canonical_name': 'cd', 'aliases': []},
'1f4c0': {'canonical_name': 'dvd', 'aliases': []},
- # videocassette from gemoji/unicode
+ # videocassette from gemoji/Unicode
'1f4fc': {'canonical_name': 'vhs', 'aliases': ['videocassette']},
'1f4f7': {'canonical_name': 'camera', 'aliases': []},
# both of these seem more useful than camera_with_flash
@@ -1056,7 +1056,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
# seems like a reasonable addition
'1f50b': {'canonical_name': 'battery', 'aliases': ['full_battery']},
'1f50c': {'canonical_name': 'electric_plug', 'aliases': []},
- # light_bulb seems better and from unicode/gemoji. idea seems like a good
+ # light_bulb seems better and from Unicode/gemoji. idea seems like a good
# addition
'1f4a1': {'canonical_name': 'light_bulb', 'aliases': ['bulb', 'idea']},
'1f526': {'canonical_name': 'flashlight', 'aliases': []},
@@ -1143,7 +1143,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f381': {'canonical_name': 'gift', 'aliases': ['present']},
# seemed like the best celebration
'1f388': {'canonical_name': 'balloon', 'aliases': ['celebration']},
- # from gemoji/unicode
+ # from gemoji/Unicode
'1f38f': {'canonical_name': 'carp_streamer', 'aliases': ['flags']},
'1f380': {'canonical_name': 'ribbon', 'aliases': ['decoration']},
'1f38a': {'canonical_name': 'confetti', 'aliases': ['party_ball']},
@@ -1198,7 +1198,7 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f4d4': {'canonical_name': 'decorative_notebook', 'aliases': []},
'1f4d2': {'canonical_name': 'ledger', 'aliases': ['spiral_notebook']},
# the glyphs here are the same as Objects/147-149 (with a different color),
- # for all but google
+ # for all but Google
'1f4d5': {'canonical_name': 'red_book', 'aliases': ['closed_book']},
'1f4d7': {'canonical_name': 'green_book', 'aliases': []},
'1f4d8': {'canonical_name': 'blue_book', 'aliases': []},
@@ -1372,9 +1372,9 @@ EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
'1f521': {'canonical_name': 'abcd', 'aliases': ['alphabet']},
'1f520': {'canonical_name': 'capital_abcd', 'aliases': ['capital_letters']},
'1f196': {'canonical_name': 'ng', 'aliases': []},
- # from unicode/gemoji. Saving ok for People/111
+ # from Unicode/gemoji. Saving ok for People/111
'1f197': {'canonical_name': 'squared_ok', 'aliases': []},
- # from unicode, and to parallel Symbols/135. Saving up for Symbols/171
+ # from Unicode, and to parallel Symbols/135. Saving up for Symbols/171
'1f199': {'canonical_name': 'squared_up', 'aliases': []},
'1f192': {'canonical_name': 'cool', 'aliases': []},
'1f195': {'canonical_name': 'new', 'aliases': []},
diff --git a/tools/setup/emoji/export_emoji_names_to_csv b/tools/setup/emoji/export_emoji_names_to_csv
index 93e20c04ea..0adc447729 100755
--- a/tools/setup/emoji/export_emoji_names_to_csv
+++ b/tools/setup/emoji/export_emoji_names_to_csv
@@ -118,7 +118,7 @@ def main() -> None:
with open(args.output_file_path, 'w') as f:
writer = csv.writer(f, dialect='excel')
writer.writerows(output_data)
- # The CSV file exported by google sheets doesn't have a newline
+ # The CSV file exported by Google Sheets doesn't have a newline
# character in the end. So we also strip the last newline character
# so that round-trip conversion test passes.
line_sep_len = len(os.linesep)
diff --git a/tools/setup/emoji/generate_emoji_names_table b/tools/setup/emoji/generate_emoji_names_table
index fa9cdfb998..2dee0c6c28 100755
--- a/tools/setup/emoji/generate_emoji_names_table
+++ b/tools/setup/emoji/generate_emoji_names_table
@@ -3,7 +3,7 @@
# This is a debugging tool that takes as input a bunch of different
# emoji data sources, and outputs a convenient HTML table that can be
# used to sanity-check the differences between these different data
-# sources' decisions about what names to provide to each unicode
+# sources' decisions about what names to provide to each Unicode
# codepoint.
import os
from typing import Any, Dict, List
diff --git a/tools/setup/install-aws-server b/tools/setup/install-aws-server
index d73bf81e05..c2a80414ab 100755
--- a/tools/setup/install-aws-server
+++ b/tools/setup/install-aws-server
@@ -10,7 +10,7 @@ if [ -z "$SERVER" ] || [ -z "$ROLES" ]; then
echo "Installs an empty Ubuntu server in AWS with a Zulip server role."
echo
echo " * server is the local part of the hostname (e.g. postgres0)"
- echo " * roles is a list of puppet rules to be passed to scripts/lib/install"
+ echo " * roles is a list of Puppet rules to be passed to scripts/lib/install"
echo " E.g. 'zulip::base,zulip::postgres_common'"
echo " * branch is used to override the default branch to install from."
echo
diff --git a/tools/zulip-export/zulip-export b/tools/zulip-export/zulip-export
index fdec9427ca..98dbf16ecf 100755
--- a/tools/zulip-export/zulip-export
+++ b/tools/zulip-export/zulip-export
@@ -26,7 +26,7 @@ import sys
usage = """Export all messages on a given stream to a JSON dump.
-zulip-export --user= --api-key= --stream=
+zulip-export --user= --api-key= --stream=
(You can find your API key on your Settings page.)
diff --git a/tsconfig.json b/tsconfig.json
index 3f8db10557..4d92febf7a 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -6,7 +6,7 @@
},
"types": [],
- /* Typescript 3.4 added the --incremental flag but its API is not
+ /* TypeScript 3.4 added the --incremental flag but its API is not
* currently public so ts-loader cannot use it yet.
* Tracking issue: https://github.com/microsoft/TypeScript/issues/29978
*/
diff --git a/zerver/data_import/gitter.py b/zerver/data_import/gitter.py
index 72cf1a19fe..f89f52ee1f 100644
--- a/zerver/data_import/gitter.py
+++ b/zerver/data_import/gitter.py
@@ -39,10 +39,10 @@ def gitter_workspace_to_realm(domain_name: str, gitter_data: GitterDataT,
Dict[str, int]]:
"""
Returns:
- 1. realm, Converted Realm data
- 2. avatars, which is list to map avatars to zulip avatar records.json
- 3. user_map, which is a dictionary to map from gitter user id to zulip user id
- 4. stream_map, which is a dictionary to map from gitter rooms to zulip stream id
+ 1. realm, converted realm data
+ 2. avatars, which is list to map avatars to Zulip avatar records.json
+ 3. user_map, which is a dictionary to map from Gitter user id to Zulip user id
+ 4. stream_map, which is a dictionary to map from Gitter rooms to Zulip stream id
"""
NOW = float(timezone_now().timestamp())
zerver_realm: List[ZerverFieldsT] = build_zerver_realm(realm_id, realm_subdomain, NOW, 'Gitter')
@@ -68,8 +68,8 @@ def build_userprofile(timestamp: Any, domain_name: str,
"""
Returns:
1. zerver_userprofile, which is a list of user profile
- 2. avatar_list, which is list to map avatars to zulip avatars records.json
- 3. added_users, which is a dictionary to map from gitter user id to zulip id
+ 2. avatar_list, which is list to map avatars to Zulip avatars records.json
+ 3. added_users, which is a dictionary to map from Gitter user id to Zulip id
"""
logging.info('######### IMPORTING USERS STARTED #########\n')
zerver_userprofile = []
@@ -122,7 +122,7 @@ def build_stream_map(timestamp: Any,
Returns:
1. stream, which is the list of streams
2. defaultstreams, which is the list of default streams
- 3. stream_map, which is a dictionary to map from gitter rooms to zulip stream id
+ 3. stream_map, which is a dictionary to map from Gitter rooms to Zulip stream id
"""
logging.info('######### IMPORTING STREAM STARTED #########\n')
stream_id = 0
@@ -130,7 +130,7 @@ def build_stream_map(timestamp: Any,
# Default stream when no "room" field is present
stream.append(build_stream(timestamp, realm_id, 'from gitter',
- "Imported from gitter", stream_id))
+ "Imported from Gitter", stream_id))
defaultstream = build_defaultstream(realm_id=realm_id, stream_id=stream_id,
defaultstream_id=0)
stream_id += 1
@@ -218,7 +218,7 @@ def convert_gitter_workspace_messages(gitter_data: GitterDataT, output_dir: str,
mentioned_user_ids = get_usermentions(message, user_map,
user_short_name_to_full_name)
rendered_content = None
- topic_name = 'imported from gitter' + (f' room {message["room"]}' if 'room' in message else '')
+ topic_name = 'imported from Gitter' + (f' room {message["room"]}' if 'room' in message else '')
user_id = user_map[message['fromUser']['id']]
recipient_id = stream_map[message['room']] if 'room' in message else 0
zulip_message = build_message(topic_name, float(message_time), message_id, message['text'],
@@ -277,7 +277,7 @@ def do_convert_data(gitter_data_file: str, output_dir: str, threads: int=6) -> N
if os.listdir(output_dir):
raise Exception("Output directory should be empty!")
- # Read data from the gitter file
+ # Read data from the Gitter file
with open(gitter_data_file, "rb") as fp:
gitter_data = orjson.loads(fp.read())
diff --git a/zerver/data_import/hipchat.py b/zerver/data_import/hipchat.py
index 4f23007705..15e61f3993 100755
--- a/zerver/data_import/hipchat.py
+++ b/zerver/data_import/hipchat.py
@@ -107,12 +107,12 @@ def convert_user_data(user_handler: UserHandler,
if not email:
if role == UserProfile.ROLE_GUEST:
- # Hipchat guest users don't have emails, so
+ # HipChat guest users don't have emails, so
# we just fake them.
email = f'guest-{id}@example.com'
delivery_email = email
else:
- # Hipchat sometimes doesn't export an email for deactivated users.
+ # HipChat sometimes doesn't export an email for deactivated users.
assert not is_active
email = delivery_email = f"deactivated-{id}@example.com"
@@ -151,7 +151,7 @@ def convert_avatar_data(avatar_folder: str,
user_id_mapper: IdMapper,
realm_id: int) -> List[ZerverFieldsT]:
'''
- This code is pretty specific to how Hipchat sends us data.
+ This code is pretty specific to how HipChat sends us data.
They give us the avatar payloads in base64 in users.json.
We process avatars in our own pass of that data, rather
@@ -507,7 +507,7 @@ def get_hipchat_sender_id(realm_id: int,
if isinstance(message_dict['sender'], str):
if slim_mode:
return None
- # Some Hipchat instances just give us a person's
+ # Some HipChat instances just give us a person's
# name in the sender field for NotificationMessage.
# We turn them into a mirror user.
mirror_user = user_handler.get_mirror_user(
@@ -539,7 +539,7 @@ def get_hipchat_sender_id(realm_id: int,
sender_id = mirror_user['id']
return sender_id
- # HAPPY PATH: Hipchat just gave us an ordinary
+ # HAPPY PATH: HipChat just gave us an ordinary
# sender_id.
sender_id = user_id_mapper.get(raw_sender_id)
return sender_id
@@ -587,7 +587,7 @@ def process_message_file(realm_id: int,
# In Stride, user IDs are strings, but in HipChat,
# they are integers, and fn_id is always a string.
if str(sender_id) != str(fn_id):
- # PMs are in multiple places in the Hipchat export,
+ # PMs are in multiple places in the HipChat export,
# and we only use the copy from the sender
return None
@@ -702,7 +702,7 @@ def process_raw_message_batch(realm_id: int,
if is_pm_data:
topic_name = ''
else:
- topic_name = 'imported from hipchat'
+ topic_name = 'imported from HipChat'
user_id = raw_message['sender_id']
# Another side effect:
diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py
index 760982d768..8b0a2ef8a0 100644
--- a/zerver/data_import/import_util.py
+++ b/zerver/data_import/import_util.py
@@ -103,7 +103,7 @@ def build_user_profile(avatar_source: str,
def build_avatar(zulip_user_id: int, realm_id: int, email: str, avatar_url: str,
timestamp: Any, avatar_list: List[ZerverFieldsT]) -> None:
avatar = dict(
- path=avatar_url, # Save original avatar url here, which is downloaded later
+ path=avatar_url, # Save original avatar URL here, which is downloaded later
realm_id=realm_id,
content_type=None,
user_profile_id=zulip_user_id,
@@ -170,9 +170,9 @@ def build_public_stream_subscriptions(
zerver_recipient: List[ZerverFieldsT],
zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
'''
- This function is only used for Hipchat now, but it may apply to
+ This function is only used for HipChat now, but it may apply to
future conversions. We often don't get full subscriber data in
- the Hipchat export, so this function just autosubscribes all
+ the HipChat export, so this function just autosubscribes all
users to every public stream. This returns a list of Subscription
dicts.
'''
@@ -550,7 +550,7 @@ def write_avatar_png(avatar_folder: str,
user_id: int,
bits: bytes) -> ZerverFieldsT:
'''
- Use this function for conversions like Hipchat where
+ Use this function for conversions like HipChat where
the bits for the .png file come in something like
a users.json file, and where we don't have to
fetch avatar images externally.
diff --git a/zerver/data_import/mattermost.py b/zerver/data_import/mattermost.py
index 8be5e7e1b6..db7d123e76 100644
--- a/zerver/data_import/mattermost.py
+++ b/zerver/data_import/mattermost.py
@@ -249,12 +249,12 @@ def build_reactions(realm_id: int, total_reactions: List[ZerverFieldsT], reactio
for realm_emoji in zerver_realmemoji:
realmemoji[realm_emoji['name']] = realm_emoji['id']
- # For the unicode emoji codes, we use equivalent of
+ # For the Unicode emoji codes, we use equivalent of
# function 'emoji_name_to_emoji_code' in 'zerver/lib/emoji' here
for mattermost_reaction in reactions:
emoji_name = mattermost_reaction['emoji_name']
username = mattermost_reaction["user"]
- # Check in unicode emoji
+ # Check in Unicode emoji
if emoji_name in name_to_codepoint:
emoji_code = name_to_codepoint[emoji_name]
reaction_type = Reaction.UNICODE_EMOJI
diff --git a/zerver/data_import/slack.py b/zerver/data_import/slack.py
index 7780dbe244..d7ed8530c4 100755
--- a/zerver/data_import/slack.py
+++ b/zerver/data_import/slack.py
@@ -73,15 +73,15 @@ def slack_workspace_to_realm(domain_name: str, realm_id: int, user_list: List[Ze
ZerverFieldsT]:
"""
Returns:
- 1. realm, Converted Realm data
- 2. slack_user_id_to_zulip_user_id, which is a dictionary to map from slack user id to zulip user id
- 3. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from slack recipient
- name(channel names, mpim names, usernames, etc) to zulip recipient id
- 4. added_channels, which is a dictionary to map from channel name to channel id, zulip stream_id
- 5. added_mpims, which is a dictionary to map from MPIM name to MPIM id, zulip huddle_id
+ 1. realm, converted realm data
+ 2. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user id to Zulip user id
+ 3. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient
+ name (channel names, mpim names, usernames, etc.) to Zulip recipient id
+ 4. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
+ 5. added_mpims, which is a dictionary to map from MPIM name to MPIM id, Zulip huddle_id
6. dm_members, which is a dictionary to map from DM id to tuple of DM participants.
- 7. avatars, which is list to map avatars to zulip avatar records.json
- 8. emoji_url_map, which is maps emoji name to its slack url
+ 7. avatars, which is a list to map avatars to Zulip avatar records.json
+ 8. emoji_url_map, which maps emoji name to its Slack URL
"""
NOW = float(timezone_now().timestamp())
@@ -116,7 +116,7 @@ def build_realmemoji(custom_emoji_list: ZerverFieldsT,
emoji_id = 0
for emoji_name, url in custom_emoji_list.items():
if 'emoji.slack-edge.com' in url:
- # Some of the emojis we get from the api have invalid links
+ # Some of the emojis we get from the API have invalid links
# this is to prevent errors related to them
realmemoji = RealmEmoji(
name=emoji_name,
@@ -142,8 +142,8 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT],
"""
Returns:
1. zerver_userprofile, which is a list of user profile
- 2. avatar_list, which is list to map avatars to zulip avatard records.json
- 3. slack_user_id_to_zulip_user_id, which is a dictionary to map from slack user id to zulip
+ 2. avatar_list, which is a list to map avatars to Zulip avatar records.json
+ 3. slack_user_id_to_zulip_user_id, which is a dictionary to map from Slack user id to Zulip
user id
4. zerver_customprofilefield, which is a list of all custom profile fields
5. zerver_customprofilefield_values, which is a list of user profile fields
@@ -155,8 +155,8 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT],
avatar_list: List[ZerverFieldsT] = []
slack_user_id_to_zulip_user_id = {}
- # The user data we get from the slack api does not contain custom profile data
- # Hence we get it from the slack zip file
+ # The user data we get from the Slack API does not contain custom profile data
+ # Hence we get it from the Slack zip file
slack_data_file_user_list = get_data_file(slack_data_dir + '/users.json')
slack_user_id_to_custom_profile_fields: ZerverFieldsT = {}
@@ -165,7 +165,7 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT],
for user in slack_data_file_user_list:
process_slack_custom_fields(user, slack_user_id_to_custom_profile_fields)
- # We have only one primary owner in slack, see link
+ # We have only one primary owner in Slack, see link
# https://get.slack.help/hc/en-us/articles/201912948-Owners-and-Administrators
# This is to import the primary owner first from all the users
user_id_count = custom_profile_field_value_id_count = custom_profile_field_id_count = 0
@@ -239,7 +239,7 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields:
custom_profile_field_id: int, realm_id: int,
slack_custom_field_name_to_zulip_custom_field_id: ZerverFieldsT) \
-> Tuple[ZerverFieldsT, int]:
- # The name of the custom profile field is not provided in the slack data
+ # The name of the custom profile field is not provided in the Slack data
# Hash keys of the fields are provided
# Reference: https://api.slack.com/methods/users.profile.set
for field, value in fields.items():
@@ -248,7 +248,7 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields:
if field in slack_custom_fields:
field_name = field
else:
- field_name = f"slack custom field {str(custom_profile_field_id + 1)}"
+ field_name = f"Slack custom field {str(custom_profile_field_id + 1)}"
customprofilefield = CustomProfileField(
id=custom_profile_field_id,
name=field_name,
@@ -359,12 +359,12 @@ def channels_to_zerver_stream(slack_data_dir: str, realm_id: int,
DMMembersT, SlackToZulipRecipientT]:
"""
Returns:
- 1. realm, Converted Realm data
- 2. added_channels, which is a dictionary to map from channel name to channel id, zulip stream_id
- 3. added_mpims, which is a dictionary to map from MPIM(multiparty IM) name to MPIM id, zulip huddle_id
+ 1. realm, converted realm data
+ 2. added_channels, which is a dictionary to map from channel name to channel id, Zulip stream_id
+ 3. added_mpims, which is a dictionary to map from MPIM(multiparty IM) name to MPIM id, Zulip huddle_id
4. dm_members, which is a dictionary to map from DM id to tuple of DM participants.
- 5. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from slack recipient
- name(channel names, mpim names, usernames etc) to zulip recipient_id
+ 5. slack_recipient_name_to_zulip_recipient_id, which is a dictionary to map from Slack recipient
+ name (channel names, mpim names, usernames, etc.) to Zulip recipient_id
"""
logging.info('######### IMPORTING CHANNELS STARTED #########\n')
@@ -709,7 +709,7 @@ def channel_message_to_zerver_message(realm_id: int,
slack_user_id = get_message_sending_user(message)
if not slack_user_id:
# Ignore messages without slack_user_id
- # These are Sometimes produced by slack
+ # These are sometimes produced by Slack
continue
subtype = message.get('subtype', False)
@@ -787,7 +787,7 @@ def channel_message_to_zerver_message(realm_id: int,
has_attachment = file_info['has_attachment']
has_image = file_info['has_image']
- topic_name = 'imported from slack'
+ topic_name = 'imported from Slack'
zulip_message = build_message(topic_name, float(message['ts']), message_id, content,
rendered_content, slack_user_id_to_zulip_user_id[slack_user_id],
@@ -861,7 +861,7 @@ def process_message_files(message: ZerverFieldsT,
url = fileinfo['url_private']
if 'files.slack.com' in url:
- # For attachments with slack download link
+ # For attachments with Slack download link
has_attachment = True
has_link = True
has_image = True if 'image' in fileinfo['mimetype'] else False
@@ -878,7 +878,7 @@ def process_message_files(message: ZerverFieldsT,
build_attachment(realm_id, {message_id}, slack_user_id_to_zulip_user_id[slack_user_id],
fileinfo, s3_path, zerver_attachment)
else:
- # For attachments with link not from slack
+ # For attachments with link not from Slack
# Example: Google drive integration
has_link = True
if 'title' in fileinfo:
@@ -920,7 +920,7 @@ def build_reactions(reaction_list: List[ZerverFieldsT], reactions: List[ZerverFi
for realm_emoji in zerver_realmemoji:
realmemoji[realm_emoji['name']] = realm_emoji['id']
- # For the unicode emoji codes, we use equivalent of
+ # For the Unicode emoji codes, we use equivalent of
# function 'emoji_name_to_emoji_code' in 'zerver/lib/emoji' here
for slack_reaction in reactions:
emoji_name = slack_reaction['name']
@@ -951,7 +951,7 @@ def build_reactions(reaction_list: List[ZerverFieldsT], reactions: List[ZerverFi
def build_uploads(user_id: int, realm_id: int, email: str, fileinfo: ZerverFieldsT, s3_path: str,
uploads_list: List[ZerverFieldsT]) -> None:
upload = dict(
- path=fileinfo['url_private'], # Save slack's url here, which is used later while processing
+ path=fileinfo['url_private'], # Save Slack's URL here, which is used later while processing
realm_id=realm_id,
content_type=None,
user_profile_id=user_id,
@@ -1068,7 +1068,7 @@ def do_convert_data(slack_zip_file: str, output_dir: str, token: str, threads: i
subprocess.check_call(['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
- # We get the user data from the legacy token method of slack api, which is depreciated
+ # We get the user data from the legacy token method of the Slack API, which is deprecated
# but we use it as the user email data is provided only in this method
user_list = get_slack_api_data("https://slack.com/api/users.list", "members", token=token)
fetch_shared_channel_users(user_list, slack_data_dir, token)
diff --git a/zerver/data_import/slack_message_conversion.py b/zerver/data_import/slack_message_conversion.py
index 837bbbd64f..93bfd5b468 100644
--- a/zerver/data_import/slack_message_conversion.py
+++ b/zerver/data_import/slack_message_conversion.py
@@ -13,7 +13,7 @@ LINK_REGEX = r"""
([a-z0-9]+([\-\.]{1}[a-z0-9]+)*)(\.) # domain name
([a-z]{2,63}(:[0-9]{1,5})?) # domain
(\/[^>]*)? # path
- (\|)?(?:\|([^>]+))? # char after pipe (for slack links)
+ (\|)?(?:\|([^>]+))? # char after pipe (for Slack links)
(>)
"""
@@ -32,7 +32,7 @@ SLACK_USERMENTION_REGEX = r"""
(>) # ends with '>'
"""
# Slack doesn't have mid-word message-formatting like Zulip.
-# Hence, ~stri~ke doesn't format the word in slack, but ~~stri~~ke
+# Hence, ~stri~ke doesn't format the word in Slack, but ~~stri~~ke
# formats the word in Zulip
SLACK_STRIKETHROUGH_REGEX = r"""
(^|[ -(]|[+-/]|\*|\_|[:-?]|\{|\[|\||\^) # Start after specified characters
diff --git a/zerver/decorator.py b/zerver/decorator.py
index a650aae19d..3b55eaa4c5 100644
--- a/zerver/decorator.py
+++ b/zerver/decorator.py
@@ -66,7 +66,7 @@ def cachify(method: FuncT) -> FuncT:
def update_user_activity(request: HttpRequest, user_profile: UserProfile,
query: Optional[str]) -> None:
- # update_active_status also pushes to rabbitmq, and it seems
+ # update_active_status also pushes to RabbitMQ, and it seems
# redundant to log that here as well.
if request.META["PATH_INFO"] == '/json/users/me/presence':
return
@@ -350,7 +350,7 @@ def user_passes_test(test_func: Callable[[HttpResponse], bool], login_url: Optio
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
- # If the login url is the same scheme and net location then just
+ # If the login URL is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urllib.parse.urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urllib.parse.urlparse(path)[:2]
@@ -504,7 +504,7 @@ def authenticated_uploads_api_view(
return _wrapped_func_arguments
return _wrapped_view_func
-# A more REST-y authentication decorator, using, in particular, HTTP Basic
+# A more REST-y authentication decorator, using, in particular, HTTP Basic
# authentication.
#
# If webhook_client_name is specific, the request is a webhook view
diff --git a/zerver/lib/avatar.py b/zerver/lib/avatar.py
index 80cfc41afb..94493eb99e 100644
--- a/zerver/lib/avatar.py
+++ b/zerver/lib/avatar.py
@@ -29,7 +29,7 @@ def avatar_url_from_dict(userdict: Dict[str, Any], medium: bool=False) -> str:
DEPRECATED: We should start using
get_avatar_field to populate users,
particularly for codepaths where the
- client can compute gravatar URLS
+ client can compute gravatar URLs
on the client side.
'''
url = _get_unversioned_avatar_url(
diff --git a/zerver/lib/cache.py b/zerver/lib/cache.py
index 56bfac40f5..894a0a8959 100644
--- a/zerver/lib/cache.py
+++ b/zerver/lib/cache.py
@@ -72,7 +72,7 @@ def get_or_create_key_prefix() -> str:
# This sets the prefix for the benefit of the Puppeteer tests.
#
# Having a fixed key is OK since we don't support running
- # multiple copies of the puppeteer tests at the same time anyway.
+ # multiple copies of the Puppeteer tests at the same time anyway.
return 'puppeteer_tests:'
elif settings.TEST_SUITE:
# The Python tests overwrite KEY_PREFIX on each test, but use
diff --git a/zerver/lib/camo.py b/zerver/lib/camo.py
index 5b57daa120..94e0ee1aad 100644
--- a/zerver/lib/camo.py
+++ b/zerver/lib/camo.py
@@ -15,7 +15,7 @@ def generate_camo_url(url: str) -> str:
# Encodes the provided URL using the same algorithm used by the camo
# caching https image proxy
def get_camo_url(url: str) -> str:
- # Only encode the url if Camo is enabled
+ # Only encode the URL if Camo is enabled
if settings.CAMO_URI == '':
return url
return f"{settings.CAMO_URI}{generate_camo_url(url)}"
diff --git a/zerver/lib/data_types.py b/zerver/lib/data_types.py
index a654dac72a..b0f2e8996d 100644
--- a/zerver/lib/data_types.py
+++ b/zerver/lib/data_types.py
@@ -63,7 +63,7 @@ class DictType:
raise AssertionError(f"Unknown key {k} in {var_name}")
def schema(self, var_name: str) -> str:
- # Our current schema is lossy, since our openapi configs
+ # Our current schema is lossy, since our OpenAPI configs
# aren't rigorous about "required" fields yet.
keys = sorted(list(self.required_keys) + list(self.optional_keys))
@@ -89,7 +89,7 @@ class Equals:
def __init__(self, expected_value: Any) -> None:
self.expected_value = expected_value
- # super hack for openapi workaround
+ # super hack for OpenAPI workaround
if self.expected_value is None:
self.equalsNone = True
@@ -99,7 +99,7 @@ class Equals:
def schema(self, var_name: str) -> str:
# Treat Equals as the degenerate case of EnumType, which
- # matches how we do things with openapi.
+ # matches how we do things with OpenAPI.
return f"{var_name} in {repr([self.expected_value])}"
@@ -164,7 +164,7 @@ class OptionalType:
check_data(self.sub_type, var_name, val)
def schema(self, var_name: str) -> str:
- # our openapi spec doesn't support optional types very well yet,
+ # our OpenAPI spec doesn't support optional types very well yet,
# so we just return the schema for our subtype
return schema(var_name, self.sub_type)
@@ -212,7 +212,7 @@ class UnionType:
raise AssertionError(f"{var_name} does not pass the union type check")
def schema(self, var_name: str) -> str:
- # We hack around our openapi specs not accounting for None.
+ # We hack around our OpenAPI specs not accounting for None.
sub_schemas = "\n".join(
sorted(
schema("type", sub_type)
@@ -231,7 +231,7 @@ class UrlType:
raise AssertionError(f"{var_name} is not a URL")
def schema(self, var_name: str) -> str:
- # just report str to match openapi
+ # just report str to match OpenAPI
return f"{var_name}: str"
@@ -278,7 +278,7 @@ def schema(
schema is a glorified repr of a data type, but it also includes a
var_name you pass in, plus we dumb things down a bit to match our
- current openapi spec.
+ current OpenAPI spec.
"""
if hasattr(data_type, "schema"):
return data_type.schema(var_name)
diff --git a/zerver/lib/email_notifications.py b/zerver/lib/email_notifications.py
index 3dad2c5fa6..acc29ba0f2 100644
--- a/zerver/lib/email_notifications.py
+++ b/zerver/lib/email_notifications.py
@@ -343,7 +343,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
Send a reminder email to a user if she's missed some PMs by being offline.
The email will have its reply to address set to a limited used email
- address that will send a zulip message to the correct recipient. This
+ address that will send a Zulip message to the correct recipient. This
allows the user to respond to missed PMs, huddles, and @-mentions directly
from the email.
diff --git a/zerver/lib/external_accounts.py b/zerver/lib/external_accounts.py
index a7a55dbeab..88e44e5255 100644
--- a/zerver/lib/external_accounts.py
+++ b/zerver/lib/external_accounts.py
@@ -18,7 +18,7 @@ from zerver.lib.validator import (
# text: Field text for admins - custom profile field in org settngs view
# name: Field label or name - user profile in user settings view
# hint: Field hint for realm users
-# url_patter: Field url linkifier
+# url_pattern: Field URL linkifier
DEFAULT_EXTERNAL_ACCOUNTS = {
"twitter": {
"text": "Twitter",
@@ -45,7 +45,7 @@ def validate_external_account_field_data(field_data: ProfileFieldData) -> Profil
if field_subtype not in DEFAULT_EXTERNAL_ACCOUNTS.keys():
if field_subtype == "custom":
if 'url_pattern' not in field_data.keys():
- raise ValidationError(_("Custom external account must define url pattern"))
+ raise ValidationError(_("Custom external account must define URL pattern"))
else:
raise ValidationError(_("Invalid external account type"))
diff --git a/zerver/lib/import_realm.py b/zerver/lib/import_realm.py
index 0287d73af6..61bb0290de 100644
--- a/zerver/lib/import_realm.py
+++ b/zerver/lib/import_realm.py
@@ -355,8 +355,8 @@ def idseq(model_class: Any) -> str:
def allocate_ids(model_class: Any, count: int) -> List[int]:
"""
Increases the sequence number for a given table by the amount of objects being
- imported into that table. Hence, this gives a reserved range of ids to import the
- converted slack objects into the tables.
+ imported into that table. Hence, this gives a reserved range of IDs to import the
+ converted Slack objects into the tables.
"""
conn = connection.cursor()
sequence = idseq(model_class)
@@ -1030,7 +1030,7 @@ def do_import_realm(import_dir: Path, subdomain: str, processes: int=1) -> Realm
import_uploads(realm, os.path.join(import_dir, "uploads"), processes)
# We need to have this check as the emoji files are only present in the data
- # importer from slack
+ # importer from Slack
# For Zulip export, this doesn't exist
if os.path.exists(os.path.join(import_dir, "emoji")):
import_uploads(realm, os.path.join(import_dir, "emoji"), processes, processing_emojis=True)
@@ -1198,7 +1198,7 @@ def import_message_data(realm: Realm,
re_map_foreign_keys(data, 'zerver_message', 'recipient', related_table="recipient")
re_map_foreign_keys(data, 'zerver_message', 'sending_client', related_table='client')
fix_datetime_fields(data, 'zerver_message')
- # Parser to update message content with the updated attachment urls
+ # Parser to update message content with the updated attachment URLs
fix_upload_links(data, 'zerver_message')
# We already create mappings for zerver_message ids
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py
index 2900ff57ce..d057802e9f 100644
--- a/zerver/lib/integrations.py
+++ b/zerver/lib/integrations.py
@@ -271,7 +271,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
WebhookIntegration(
'alertmanager',
['monitoring'],
- display_name='Prometheus AlertManager',
+ display_name='Prometheus Alertmanager',
logo='images/integrations/logos/prometheus.svg',
),
WebhookIntegration('ansibletower', ['deployment'], display_name='Ansible Tower'),
diff --git a/zerver/lib/markdown/__init__.py b/zerver/lib/markdown/__init__.py
index 6b8d010cd0..4b2436b3cf 100644
--- a/zerver/lib/markdown/__init__.py
+++ b/zerver/lib/markdown/__init__.py
@@ -162,7 +162,7 @@ def get_web_link_regex() -> str:
# A link starts at a word boundary, and ends at space, punctuation, or end-of-input.
#
- # We detect a url either by the `https?://` or by building around the TLD.
+ # We detect a URL either by the `https?://` or by building around the TLD.
# In lieu of having a recursive regex (which python doesn't support) to match
# arbitrary numbers of nested matching parenthesis, we manually build a regexp that
@@ -668,7 +668,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
description_elm.text = description
def get_actual_image_url(self, url: str) -> str:
- # Add specific per-site cases to convert image-preview urls to image urls.
+ # Add specific per-site cases to convert image-preview URLs to image URLs.
# See https://github.com/zulip/zulip/issues/4658 for more information
parsed_url = urllib.parse.urlparse(url)
if (parsed_url.netloc == 'github.com' or parsed_url.netloc.endswith('.github.com')):
@@ -685,7 +685,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
if not self.md.image_preview_enabled:
return False
parsed_url = urllib.parse.urlparse(url)
- # remove html urls which end with img extensions that can not be shorted
+ # remove HTML URLs which end with image extensions that cannot be shortened
if parsed_url.netloc == 'pasteboard.co':
return False
@@ -696,15 +696,15 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
return False
def corrected_image_source(self, url: str) -> Optional[str]:
- # This function adjusts any urls from linx.li and
- # wikipedia.org to point to the actual image url. It's
+ # This function adjusts any URLs from linx.li and
+ # wikipedia.org to point to the actual image URL. It's
# structurally very similar to dropbox_image, and possibly
# should be rewritten to use open graph, but has some value.
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc.lower().endswith('.wikipedia.org'):
# Redirecting from "/wiki/File:" to "/wiki/Special:FilePath/File:"
# A possible alternative, that avoids the redirect after hitting "Special:"
- # is using the first characters of md5($filename) to generate the url
+ # is using the first characters of md5($filename) to generate the URL
domain = parsed_url.scheme + "://" + parsed_url.netloc
correct_url = domain + parsed_url.path[:6] + 'Special:FilePath' + parsed_url.path[5:]
return correct_url
@@ -760,7 +760,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
def youtube_id(self, url: str) -> Optional[str]:
if not self.md.image_preview_enabled:
return None
- # Youtube video id extraction regular expression from https://pastebin.com/KyKAFv1s
+ # YouTube video id extraction regular expression from https://pastebin.com/KyKAFv1s
# Slightly modified to support URLs of the forms
# - youtu.be/
# - youtube.com/playlist?v=&list=
@@ -817,11 +817,11 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
user_mentions: List[Dict[str, Any]],
media: List[Dict[str, Any]]) -> Element:
"""
- Use data from the twitter API to turn links, mentions and media into A
- tags. Also convert unicode emojis to images.
+ Use data from the Twitter API to turn links, mentions and media into A
+ tags. Also convert Unicode emojis to images.
- This works by using the urls, user_mentions and media data from
- the twitter API and searching for unicode emojis in the text using
+ This works by using the URLs, user_mentions and media data from
+ the Twitter API and searching for Unicode emojis in the text using
`unicode_emoji_regex`.
The first step is finding the locations of the URLs, mentions, media and
@@ -1167,7 +1167,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
if not self.is_absolute_url(url):
if self.is_image(url):
self.handle_image_inlining(root, found_url)
- # We don't have a strong use case for doing url preview for relative links.
+ # We don't have a strong use case for doing URL preview for relative links.
continue
dropbox_image = self.dropbox_image(url)
@@ -1276,7 +1276,7 @@ class Timestamp(markdown.inlinepatterns.Pattern):
time_element.text = markdown.util.AtomicString(time_input_string)
return time_element
-# All of our emojis(non ZWJ sequences) belong to one of these unicode blocks:
+# All of our emojis(non ZWJ sequences) belong to one of these Unicode blocks:
# \U0001f100-\U0001f1ff - Enclosed Alphanumeric Supplement
# \U0001f200-\U0001f2ff - Enclosed Ideographic Supplement
# \U0001f300-\U0001f5ff - Miscellaneous Symbols and Pictographs
@@ -1311,13 +1311,13 @@ unicode_emoji_regex = '(?P['\
# The equivalent JS regex is \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f]|\ud83d[\ude80-\udeff]|
# \ud83e[\udd00-\uddff]|[\u2000-\u206f]|[\u2300-\u27bf]|[\u2b00-\u2bff]|[\u3000-\u303f]|
# [\u3200-\u32ff]. See below comments for explanation. The JS regex is used by marked.js for
-# frontend unicode emoji processing.
+# frontend Unicode emoji processing.
# The JS regex \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f] represents U0001f100-\U0001f64f
# The JS regex \ud83d[\ude80-\udeff] represents \U0001f680-\U0001f6ff
# The JS regex \ud83e[\udd00-\uddff] represents \U0001f900-\U0001f9ff
# The JS regex [\u2000-\u206f] represents \u2000-\u206f
# The JS regex [\u2300-\u27bf] represents \u2300-\u27bf
-# Similarly other JS regexes can be mapped to the respective unicode blocks.
+# Similarly other JS regexes can be mapped to the respective Unicode blocks.
# For more information, please refer to the following article:
# http://crocodillon.com/blog/parsing-emoji-unicode-in-javascript
@@ -1418,14 +1418,14 @@ class Tex(markdown.inlinepatterns.Pattern):
def sanitize_url(url: str) -> Optional[str]:
"""
- Sanitize a url against xss attacks.
+ Sanitize a URL against XSS attacks.
See the docstring on markdown.inlinepatterns.LinkPattern.sanitize_url.
"""
try:
parts = urllib.parse.urlparse(url.replace(' ', '%20'))
scheme, netloc, path, params, query, fragment = parts
except ValueError:
- # Bad url - so bad it couldn't be parsed.
+ # Bad URL - so bad it couldn't be parsed.
return ''
# If there is no scheme or netloc and there is a '@' in the path,
@@ -1467,7 +1467,7 @@ def sanitize_url(url: str) -> Optional[str]:
# We already converted an empty scheme to http:// above, so we skip
# the colon check, which would also forbid a lot of legitimate URLs.
- # Url passes all tests. Return url as-is.
+ # URL passes all tests. Return URL as-is.
return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
def url_to_a(db_data: Optional[DbData], url: str, text: Optional[str]=None) -> Union[Element, str]:
@@ -1530,7 +1530,7 @@ class ListIndentProcessor(markdown.blockprocessors.ListIndentProcessor):
parser.md.tab_length = 4
class HashHeaderProcessor(markdown.blockprocessors.HashHeaderProcessor):
- """ Process Hash Headers.
+ """ Process hash headers.
Based on markdown.blockprocessors.HashHeaderProcessor, but requires space for heading.
"""
@@ -1540,7 +1540,7 @@ class HashHeaderProcessor(markdown.blockprocessors.HashHeaderProcessor):
RE = re.compile(r'(?:^|\n)(?P#{1,6})\s(?P(?:\\.|[^\\])*?)#*(?:\n|$)')
class BlockQuoteProcessor(markdown.blockprocessors.BlockQuoteProcessor):
- """ Process BlockQuotes.
+ """ Process block quotes.
Based on markdown.blockprocessors.BlockQuoteProcessor, but with 2-space indent
"""
@@ -1812,7 +1812,7 @@ class LinkInlineProcessor(markdown.inlinepatterns.LinkInlineProcessor):
href = el.get('href')
assert href is not None
- # Sanitize url or don't parse link. See linkify_tests in markdown_test_cases for banned syntax.
+ # Sanitize URL or don't parse link. See linkify_tests in markdown_test_cases for banned syntax.
href = sanitize_url(self.unescape(href.strip()))
if href is None:
return None # no-op; the link is not processed.
@@ -1841,7 +1841,7 @@ class LinkInlineProcessor(markdown.inlinepatterns.LinkInlineProcessor):
return el, match_start, index
def get_sub_registry(r: markdown.util.Registry, keys: List[str]) -> markdown.util.Registry:
- # Registry is a new class added by py-markdown to replace Ordered List.
+ # Registry is a new class added by Python-Markdown to replace OrderedDict.
# Since Registry doesn't support .keys(), it is easier to make a new
# object instead of removing keys from the existing object.
new_r = markdown.util.Registry()
@@ -1869,11 +1869,11 @@ class Markdown(markdown.Markdown):
self.set_output_format('html')
def build_parser(self) -> markdown.Markdown:
- # Build the parser using selected default features from py-markdown.
+ # Build the parser using selected default features from Python-Markdown.
# The complete list of all available processors can be found in the
# super().build_parser() function.
#
- # Note: for any py-markdown updates, manually check if we want any
+ # Note: for any Python-Markdown updates, manually check if we want any
# of the new features added upstream or not; they wouldn't get
# included by default.
self.preprocessors = self.build_preprocessors()
@@ -1948,7 +1948,7 @@ class Markdown(markdown.Markdown):
ENTITY_RE = markdown.inlinepatterns.ENTITY_RE
STRONG_EM_RE = r'(\*\*\*)(?!\s+)([^\*^\n]+)(? List[str]:
for m in re.finditer(pattern, topic_name):
matches += [realm_filter[1] % m.groupdict()]
- # Also make raw urls navigable.
+ # Also make raw URLs navigable.
for sub_string in basic_link_splitter.split(topic_name):
link_match = re.match(get_web_link_regex(), sub_string)
if link_match:
diff --git a/zerver/lib/markdown/tabbed_sections.py b/zerver/lib/markdown/tabbed_sections.py
index f337d093f5..6eb918aea9 100644
--- a/zerver/lib/markdown/tabbed_sections.py
+++ b/zerver/lib/markdown/tabbed_sections.py
@@ -58,7 +58,7 @@ TAB_DISPLAY_NAMES = {
'mm-default': 'Default installation',
'mm-docker': 'Docker',
- 'mm-gitlab-omnibus': 'Gitlab Omnibus',
+ 'mm-gitlab-omnibus': 'GitLab Omnibus',
'send-email-invitations': 'Send email invitations',
'share-an-invite-link': 'Share an invite link',
diff --git a/zerver/lib/migrate.py b/zerver/lib/migrate.py
index 8fb5399261..95b9723209 100644
--- a/zerver/lib/migrate.py
+++ b/zerver/lib/migrate.py
@@ -13,7 +13,7 @@ def do_batch_update(cursor: CursorObj,
batch_size: int=10000,
sleep: float=0.1) -> None: # nocoverage
# The string substitution below is complicated by our need to
- # support multiple postgres versions.
+ # support multiple Postgres versions.
stmt = SQL('''
UPDATE {}
SET {}
diff --git a/zerver/lib/name_restrictions.py b/zerver/lib/name_restrictions.py
index 151cfb44f1..53b8c67da9 100644
--- a/zerver/lib/name_restrictions.py
+++ b/zerver/lib/name_restrictions.py
@@ -18,7 +18,7 @@ def is_disposable_domain(domain: str) -> bool:
return domain.lower() in DISPOSABLE_DOMAINS
ZULIP_RESERVED_SUBDOMAINS = {
- # zulip terms
+ # Zulip terms
'stream', 'channel', 'topic', 'thread', 'installation', 'organization', 'realm',
'team', 'subdomain', 'activity', 'octopus', 'acme', 'push',
# machines
@@ -29,7 +29,7 @@ ZULIP_RESERVED_SUBDOMAINS = {
# competitor pages
'slack', 'mattermost', 'rocketchat', 'irc', 'twitter', 'zephyr', 'flowdock', 'spark',
'skype', 'microsoft', 'twist', 'ryver', 'matrix', 'discord', 'email', 'usenet',
- # zulip names
+ # Zulip names
'zulip', 'tulip', 'humbug',
# platforms
'plan9', 'electron', 'linux', 'mac', 'windows', 'cli', 'ubuntu', 'android', 'ios',
diff --git a/zerver/lib/push_notifications.py b/zerver/lib/push_notifications.py
index ef71a32517..1c2faa4b61 100644
--- a/zerver/lib/push_notifications.py
+++ b/zerver/lib/push_notifications.py
@@ -504,7 +504,7 @@ def get_gcm_alert(message: Message) -> str:
def get_mobile_push_content(rendered_content: str) -> str:
def get_text(elem: lxml.html.HtmlElement) -> str:
- # Convert default emojis to their unicode equivalent.
+ # Convert default emojis to their Unicode equivalent.
classes = elem.get("class", "")
if "emoji" in classes:
match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
@@ -571,9 +571,9 @@ def get_mobile_push_content(rendered_content: str) -> str:
return plain_text
def truncate_content(content: str) -> Tuple[str, bool]:
- # We use unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
+ # We use Unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
# of three dots as this saves two extra characters for textual
- # content. This function will need to be updated to handle unicode
+ # content. This function will need to be updated to handle Unicode
# combining characters and tags when we start supporting themself.
if len(content) <= 200:
return content, False
diff --git a/zerver/lib/rate_limiter.py b/zerver/lib/rate_limiter.py
index 78826dc48d..ad2d5c754c 100644
--- a/zerver/lib/rate_limiter.py
+++ b/zerver/lib/rate_limiter.py
@@ -394,7 +394,7 @@ class RedisRateLimiterBackend(RateLimiterBackend):
list_key, set_key, _ = cls.get_keys(entity_key)
now = time.time()
- # Start redis transaction
+ # Start Redis transaction
with client.pipeline() as pipe:
count = 0
while True:
diff --git a/zerver/lib/redis_utils.py b/zerver/lib/redis_utils.py
index ab0233a01e..e1d37bb836 100644
--- a/zerver/lib/redis_utils.py
+++ b/zerver/lib/redis_utils.py
@@ -47,7 +47,7 @@ def get_dict_from_redis(redis_client: redis.StrictRedis, key_format: str, key: s
# This function requires inputting the intended key_format to validate
# that the key fits it, as an additionally security measure. This protects
# against bugs where a caller requests a key based on user input and doesn't
- # validate it - which could potentially allow users to poke around arbitrary redis keys.
+ # validate it - which could potentially allow users to poke around arbitrary Redis keys.
if len(key) > MAX_KEY_LENGTH:
error_msg = "Requested key too long in get_dict_from_redis: %s"
raise ZulipRedisKeyTooLongError(error_msg % (key,))
diff --git a/zerver/lib/rest.py b/zerver/lib/rest.py
index 8487bfbc28..6a29088b6d 100644
--- a/zerver/lib/rest.py
+++ b/zerver/lib/rest.py
@@ -43,7 +43,7 @@ def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
Unauthenticated endpoints should not use this, as authentication is verified
in the following ways:
- * for paths beginning with /api, HTTP Basic auth
+ * for paths beginning with /api, HTTP basic auth
* for paths beginning with /json (used by the web client), the session token
This calls the function named in kwargs[request.method], if that request
@@ -108,7 +108,7 @@ def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
# as we should worst-case fail closed if we miscategorise a request.
# for some special views (e.g. serving a file that has been
- # uploaded), we support using the same url for web and API clients.
+ # uploaded), we support using the same URL for web and API clients.
if ('override_api_url_scheme' in view_flags and
request.META.get('HTTP_AUTHORIZATION', None) is not None):
# This request uses standard API based authentication.
@@ -132,7 +132,7 @@ def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
auth_kwargs["skip_rate_limiting"] = True
target_function = csrf_protect(authenticated_json_view(target_function, **auth_kwargs))
- # most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls, where instead of
+ # most clients (mobile, bots, etc) use HTTP basic auth and REST calls, where instead of
# username:password, we use email:apiKey
elif request.META.get('HTTP_AUTHORIZATION', None):
# Wrap function with decorator to authenticate the user before
diff --git a/zerver/lib/soft_deactivation.py b/zerver/lib/soft_deactivation.py
index ee6e955f28..c67a9a01db 100644
--- a/zerver/lib/soft_deactivation.py
+++ b/zerver/lib/soft_deactivation.py
@@ -82,7 +82,7 @@ def filter_by_subscription_history(user_profile: UserProfile,
if stream_messages[-1]['id'] <= log_entry.event_last_message_id:
stream_messages = []
else:
- raise AssertionError(f'{log_entry.event_type} is not a Subscription Event.')
+ raise AssertionError(f'{log_entry.event_type} is not a subscription event.')
if len(stream_messages) > 0:
# We do this check for last event since if the last subscription
@@ -218,7 +218,7 @@ def do_soft_deactivate_user(user_profile: UserProfile) -> None:
user_profile.save(update_fields=[
'long_term_idle',
'last_active_message_id'])
- logger.info('Soft Deactivated user %s', user_profile.id)
+ logger.info('Soft deactivated user %s', user_profile.id)
def do_soft_deactivate_users(users: List[UserProfile]) -> List[UserProfile]:
BATCH_SIZE = 100
diff --git a/zerver/lib/test_classes.py b/zerver/lib/test_classes.py
index ec0fc9ff2d..1fb9eb0c1b 100644
--- a/zerver/lib/test_classes.py
+++ b/zerver/lib/test_classes.py
@@ -82,7 +82,7 @@ if settings.ZILENCER_ENABLED:
class UploadSerializeMixin(SerializeMixin):
"""
We cannot use override_settings to change upload directory because
- because settings.LOCAL_UPLOADS_DIR is used in url pattern and urls
+ settings.LOCAL_UPLOADS_DIR is used in the URL pattern and URLs
are compiled only once. Otherwise using a different upload directory
for conflicting test cases would have provided better performance
while providing the required isolation.
@@ -117,7 +117,7 @@ class ZulipTestCase(TestCase):
flush_per_request_caches()
translation.activate(settings.LANGUAGE_CODE)
- # Clean up after using fakeldap in ldap tests:
+ # Clean up after using fakeldap in LDAP tests:
if hasattr(self, 'mock_ldap') and hasattr(self, 'mock_initialize'):
if self.mock_ldap is not None:
self.mock_ldap.reset()
@@ -192,8 +192,8 @@ Output:
def extract_api_suffix_url(self, url: str) -> Tuple[str, Dict[str, Any]]:
"""
- Function that extracts the url after `/api/v1` or `/json` and also
- returns the query data in the url, if there is any.
+ Function that extracts the URL after `/api/v1` or `/json` and also
+ returns the query data in the URL, if there is any.
"""
url_split = url.split('?')
data: Dict[str, Any] = {}
@@ -229,7 +229,7 @@ Output:
json_url = True
url, query_data = self.extract_api_suffix_url(url)
if len(query_data) != 0:
- # In some cases the query parameters are defined in the url itself. In such cases
+ # In some cases the query parameters are defined in the URL itself. In such cases
# The `data` argument of our function is not used. Hence get `data` argument
# from url.
data = query_data
@@ -1016,13 +1016,13 @@ Output:
for dn, attrs in directory.items():
if 'uid' in attrs:
- # Generate a password for the ldap account:
+ # Generate a password for the LDAP account:
attrs['userPassword'] = [self.ldap_password(attrs['uid'][0])]
# Load binary attributes. If in "directory", an attribute as its value
# has a string starting with "file:", the rest of the string is assumed
# to be a path to the file from which binary data should be loaded,
- # as the actual value of the attribute in ldap.
+ # as the actual value of the attribute in LDAP.
for attr, value in attrs.items():
if isinstance(value, str) and value.startswith("file:"):
with open(value[5:], 'rb') as f:
@@ -1054,9 +1054,9 @@ Output:
def ldap_username(self, username: str) -> str:
"""
- Maps zulip username to the name of the corresponding ldap user
+ Maps Zulip username to the name of the corresponding LDAP user
in our test directory at zerver/tests/fixtures/ldap/directory.json,
- if the ldap user exists.
+ if the LDAP user exists.
"""
return self.example_user_ldap_username_map[username]
@@ -1068,7 +1068,7 @@ class WebhookTestCase(ZulipTestCase):
Common for all webhooks tests
Override below class attributes and run send_and_test_message
- If you create your url in uncommon way you can override build_webhook_url method
+ If you create your URL in an uncommon way you can override build_webhook_url method
In case that you need modify body or create it without using fixture you can also override get_body method
"""
STREAM_NAME: Optional[str] = None
diff --git a/zerver/lib/test_runner.py b/zerver/lib/test_runner.py
index f584c2947b..cf1d26bf90 100644
--- a/zerver/lib/test_runner.py
+++ b/zerver/lib/test_runner.py
@@ -205,7 +205,7 @@ def init_worker(counter: Synchronized) -> None:
create_test_databases(_worker_id)
initialize_worker_path(_worker_id)
- # We manually update the upload directory path in the url regex.
+ # We manually update the upload directory path in the URL regex.
from zproject.dev_urls import avatars_url
new_root = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")
avatars_url.default_args['document_root'] = new_root
diff --git a/zerver/lib/thumbnail.py b/zerver/lib/thumbnail.py
index 115589a208..db3c112db5 100644
--- a/zerver/lib/thumbnail.py
+++ b/zerver/lib/thumbnail.py
@@ -69,7 +69,7 @@ def generate_thumbnail_url(path: str,
if settings.THUMBOR_URL == 'http://127.0.0.1:9995':
# If THUMBOR_URL is the default then thumbor is hosted on same machine
# as the Zulip server and we should serve a relative URL.
- # We add a /thumbor in front of the relative url because we make
+ # We add a /thumbor in front of the relative URL because we make
# use of a proxy pass to redirect request internally in Nginx to 9995
# port where thumbor is running.
thumbnail_url = '/thumbor' + encrypted_url
diff --git a/zerver/lib/topic.py b/zerver/lib/topic.py
index 51bc59cc89..b8a0fc2bad 100644
--- a/zerver/lib/topic.py
+++ b/zerver/lib/topic.py
@@ -66,7 +66,7 @@ DB_TOPIC_NAME = "subject"
MESSAGE__TOPIC = 'message__subject'
def topic_match_sa(topic_name: str) -> Any:
- # _sa is short for Sql Alchemy, which we use mostly for
+ # _sa is short for SQLAlchemy, which we use mostly for
# queries that search messages
topic_cond = func.upper(column("subject")) == func.upper(literal(topic_name))
return topic_cond
diff --git a/zerver/lib/upload.py b/zerver/lib/upload.py
index eeb4ba383f..e075a1259e 100644
--- a/zerver/lib/upload.py
+++ b/zerver/lib/upload.py
@@ -59,7 +59,7 @@ INLINE_MIME_TYPES = [
# image/svg+xml, text/html, or text/xml.
]
-# Performance Note:
+# Performance note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
@@ -80,7 +80,7 @@ class RealmUploadQuotaError(JsonableError):
def sanitize_name(value: str) -> str:
"""
Sanitizes a value to be safe to store in a Linux filesystem, in
- S3, and in a URL. So unicode is allowed, but not special
+ S3, and in a URL. So Unicode is allowed, but not special
characters other than ".", "-", and "_".
This implementation is based on django.utils.text.slugify; it is
diff --git a/zerver/logging_handlers.py b/zerver/logging_handlers.py
index c8b4c5263a..273a568e6d 100644
--- a/zerver/logging_handlers.py
+++ b/zerver/logging_handlers.py
@@ -105,7 +105,7 @@ class AdminNotifyHandler(logging.Handler):
# recursive exception loops are made).
#
# We initialize is_markdown_rendering_exception to `True` to
- # prevent the infinite loop of zulip messages by ERROR_BOT if
+ # prevent the infinite loop of Zulip messages by ERROR_BOT if
# the outer try block here throws an exception before we have
# a chance to check the exception for whether it comes from
# markdown.
diff --git a/zerver/management/commands/check_redis.py b/zerver/management/commands/check_redis.py
index dc9f4e0838..39e626c606 100644
--- a/zerver/management/commands/check_redis.py
+++ b/zerver/management/commands/check_redis.py
@@ -10,8 +10,8 @@ from zerver.models import get_user_profile_by_id
class Command(BaseCommand):
- help = """Checks redis to make sure our rate limiting system hasn't grown a bug
- and left redis with a bunch of data
+ help = """Checks Redis to make sure our rate limiting system hasn't grown a bug
+ and left Redis with a bunch of data
Usage: ./manage.py [--trim] check_redis"""
@@ -41,7 +41,7 @@ than max_api_calls! (trying to trim) %s %s", key, count)
def handle(self, *args: Any, **options: Any) -> None:
if not settings.RATE_LIMITING:
- raise CommandError("This machine is not using redis or rate limiting, aborting")
+ raise CommandError("This machine is not using Redis or rate limiting, aborting")
# Find all keys, and make sure they're all within size constraints
wildcard_list = "ratelimit:*:*:list"
diff --git a/zerver/management/commands/convert_gitter_data.py b/zerver/management/commands/convert_gitter_data.py
index cc5da9fc40..f55e8dd200 100644
--- a/zerver/management/commands/convert_gitter_data.py
+++ b/zerver/management/commands/convert_gitter_data.py
@@ -41,5 +41,5 @@ class Command(BaseCommand):
if not os.path.exists(path):
raise CommandError(f"Gitter data file not found: '{path}'")
# TODO add json check
- print("Converting Data ...")
+ print("Converting data ...")
do_convert_data(path, output_dir, num_threads)
diff --git a/zerver/management/commands/convert_hipchat_data.py b/zerver/management/commands/convert_hipchat_data.py
index 615f1f6626..ce2154ae9e 100644
--- a/zerver/management/commands/convert_hipchat_data.py
+++ b/zerver/management/commands/convert_hipchat_data.py
@@ -27,12 +27,12 @@ from zerver.data_import.hipchat import do_convert_data
class Command(BaseCommand):
- help = """Convert the Hipchat data into Zulip data format."""
+ help = """Convert the HipChat data into Zulip data format."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('hipchat_tar', nargs='+',
metavar='',
- help="tar of Hipchat data")
+ help="tar of HipChat data")
parser.add_argument('--output', dest='output_dir',
help='Directory to write exported data to.')
@@ -71,7 +71,7 @@ class Command(BaseCommand):
if not os.path.exists(path):
raise CommandError(f"Tar file not found: '{path}'")
- print("Converting Data ...")
+ print("Converting data ...")
do_convert_data(
input_tar_file=path,
output_dir=output_dir,
diff --git a/zerver/management/commands/convert_mattermost_data.py b/zerver/management/commands/convert_mattermost_data.py
index 887c3bef02..65a31226dc 100644
--- a/zerver/management/commands/convert_mattermost_data.py
+++ b/zerver/management/commands/convert_mattermost_data.py
@@ -56,7 +56,7 @@ class Command(BaseCommand):
raise CommandError(f"Directory not found: '{data_dir}'")
data_dir = os.path.realpath(data_dir)
- print("Converting Data ...")
+ print("Converting data ...")
do_convert_data(
mattermost_data_dir=data_dir,
output_dir=output_dir,
diff --git a/zerver/management/commands/convert_slack_data.py b/zerver/management/commands/convert_slack_data.py
index d0ac853daf..d429a1da6b 100644
--- a/zerver/management/commands/convert_slack_data.py
+++ b/zerver/management/commands/convert_slack_data.py
@@ -14,8 +14,8 @@ class Command(BaseCommand):
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('slack_data_zip', nargs='+',
- metavar='',
- help="Zipped slack data")
+ metavar='',
+ help="Zipped Slack data")
parser.add_argument('--token', metavar='',
help='Slack legacy token of the organsation')
@@ -38,7 +38,7 @@ class Command(BaseCommand):
token = options['token']
if token is None:
- raise CommandError("Enter slack legacy token!")
+ raise CommandError("Enter Slack legacy token!")
num_threads = int(options['threads'])
if num_threads < 1:
@@ -48,5 +48,5 @@ class Command(BaseCommand):
if not os.path.exists(path):
raise CommandError(f"Slack data directory not found: '{path}'")
- print("Converting Data ...")
+ print("Converting data ...")
do_convert_data(path, output_dir, token, threads=num_threads)
diff --git a/zerver/management/commands/create_large_indexes.py b/zerver/management/commands/create_large_indexes.py
index a77dd24419..c49160d426 100644
--- a/zerver/management/commands/create_large_indexes.py
+++ b/zerver/management/commands/create_large_indexes.py
@@ -7,7 +7,7 @@ from zerver.lib.management import ZulipBaseCommand
def create_indexes() -> None:
# Creating concurrent indexes is kind of a pain with current versions
- # of Django/postgres, because you will get this error with seemingly
+ # of Django/Postgres, because you will get this error with seemingly
# reasonable code:
#
# CREATE INDEX CONCURRENTLY cannot be executed from a function or multi-command string
diff --git a/zerver/management/commands/delete_old_unclaimed_attachments.py b/zerver/management/commands/delete_old_unclaimed_attachments.py
index 1e6ccf74f0..f73380a44a 100644
--- a/zerver/management/commands/delete_old_unclaimed_attachments.py
+++ b/zerver/management/commands/delete_old_unclaimed_attachments.py
@@ -38,4 +38,4 @@ class Command(BaseCommand):
do_delete_old_unclaimed_attachments(delta_weeks)
print("")
- print("Unclaimed Files deleted.")
+ print("Unclaimed files deleted.")
diff --git a/zerver/management/commands/email_mirror.py b/zerver/management/commands/email_mirror.py
index 2d3b904b03..251f550766 100644
--- a/zerver/management/commands/email_mirror.py
+++ b/zerver/management/commands/email_mirror.py
@@ -72,7 +72,7 @@ class Command(BaseCommand):
if (not settings.EMAIL_GATEWAY_BOT or not settings.EMAIL_GATEWAY_LOGIN or
not settings.EMAIL_GATEWAY_PASSWORD or not settings.EMAIL_GATEWAY_IMAP_SERVER or
not settings.EMAIL_GATEWAY_IMAP_PORT or not settings.EMAIL_GATEWAY_IMAP_FOLDER):
- raise CommandError("Please configure the Email Mirror Gateway in /etc/zulip/, "
+ raise CommandError("Please configure the email mirror gateway in /etc/zulip/, "
"or specify $ORIGINAL_RECIPIENT if piping a single mail.")
for message in get_imap_messages():
process_message(message)
diff --git a/zerver/management/commands/export.py b/zerver/management/commands/export.py
index 43bfb94c77..6ad07a2b23 100644
--- a/zerver/management/commands/export.py
+++ b/zerver/management/commands/export.py
@@ -30,15 +30,15 @@ class Command(ZulipBaseCommand):
* Sessions (everyone will need to log in again post-export)
* Users' passwords and API keys (users will need to use SSO or reset password)
* Mobile tokens for APNS/GCM (users will need to reconnect their mobile devices)
- * ScheduledEmail (Not relevant on a new server)
- * RemoteZulipServer (Unlikely to be migrated)
+ * ScheduledEmail (not relevant on a new server)
+ * RemoteZulipServer (unlikely to be migrated)
* third_party_api_results cache (this means rerending all old
messages could be expensive)
Things that will break as a result of the export:
* Passwords will not be transferred. They will all need to go
through the password reset flow to obtain a new password (unless
- they intend to only use e.g. Google Auth).
+ they intend to only use e.g. Google auth).
* Users will need to log out and re-log in to the Zulip desktop and
mobile apps. The apps now all have an option on the login page
where you can specify which Zulip server to use; your users
diff --git a/zerver/management/commands/rate_limit.py b/zerver/management/commands/rate_limit.py
index 032e5e6efb..45a1e702dc 100644
--- a/zerver/management/commands/rate_limit.py
+++ b/zerver/management/commands/rate_limit.py
@@ -41,7 +41,7 @@ class Command(ZulipBaseCommand):
try:
user_profile = get_user_profile_by_api_key(options['api_key'])
except UserProfile.DoesNotExist:
- raise CommandError("Unable to get user profile for api key {}".format(options['api_key']))
+ raise CommandError("Unable to get user profile for API key {}".format(options['api_key']))
users = [user_profile]
if options['bots']:
diff --git a/zerver/management/commands/realm_filters.py b/zerver/management/commands/realm_filters.py
index 8140dd3b17..6a68e7d77e 100644
--- a/zerver/management/commands/realm_filters.py
+++ b/zerver/management/commands/realm_filters.py
@@ -28,7 +28,7 @@ Example: ./manage.py realm_filters --realm=zulip --op=show
help='What operation to do (add, show, remove).')
parser.add_argument('pattern', metavar='', nargs='?',
help="regular expression to match")
- parser.add_argument('url_format_string', metavar='', nargs='?',
+ parser.add_argument('url_format_string', metavar='', nargs='?',
help="format string to substitute")
self.add_realm_args(parser, True)
diff --git a/zerver/management/commands/send_webhook_fixture_message.py b/zerver/management/commands/send_webhook_fixture_message.py
index 125fa6b040..b6ea82d1e3 100644
--- a/zerver/management/commands/send_webhook_fixture_message.py
+++ b/zerver/management/commands/send_webhook_fixture_message.py
@@ -36,7 +36,7 @@ approach shown above.
'into Zulip')
parser.add_argument('-u', '--url',
- help='The url on your Zulip server that you want '
+ help='The URL on your Zulip server that you want '
'to post the fixture to')
parser.add_argument('-H', '--custom-headers',
diff --git a/zerver/management/commands/soft_deactivate_users.py b/zerver/management/commands/soft_deactivate_users.py
index 7cd6f99f05..d9b29c5b70 100644
--- a/zerver/management/commands/soft_deactivate_users.py
+++ b/zerver/management/commands/soft_deactivate_users.py
@@ -82,7 +82,7 @@ class Command(ZulipBaseCommand):
else:
users_deactivated = do_auto_soft_deactivate_users(int(options['inactive_for']),
realm)
- logger.info('Soft Deactivated %d user(s)', len(users_deactivated))
+ logger.info('Soft deactivated %d user(s)', len(users_deactivated))
else:
self.print_help("./manage.py", "soft_deactivate_users")
diff --git a/zerver/management/commands/sync_ldap_user_data.py b/zerver/management/commands/sync_ldap_user_data.py
index e4b956bc9c..ca3106af85 100644
--- a/zerver/management/commands/sync_ldap_user_data.py
+++ b/zerver/management/commands/sync_ldap_user_data.py
@@ -32,7 +32,7 @@ def sync_ldap_user_data(user_profiles: List[UserProfile], deactivation_protectio
if deactivation_protection:
if not UserProfile.objects.filter(is_bot=False, is_active=True).exists():
error_msg = ("Ldap sync would have deactivated all users. This is most likely due " +
- "to a misconfiguration of ldap settings. Rolling back...\n" +
+ "to a misconfiguration of LDAP settings. Rolling back...\n" +
"Use the --force option if the mass deactivation is intended.")
logger.error(error_msg)
# Raising an exception in this atomic block will rollback the transaction.
@@ -42,7 +42,7 @@ def sync_ldap_user_data(user_profiles: List[UserProfile], deactivation_protectio
role__gte=UserProfile.ROLE_REALM_ADMINISTRATOR).exists():
error_msg = ("Ldap sync would have deactivated all administrators of realm %s. " +
"This is most likely due " +
- "to a misconfiguration of ldap settings. Rolling back...\n" +
+ "to a misconfiguration of LDAP settings. Rolling back...\n" +
"Use the --force option if the mass deactivation is intended.")
error_msg = error_msg % (string_id,)
logger.error(error_msg)
diff --git a/zerver/middleware.py b/zerver/middleware.py
index 7d96acb375..29cc9bccb5 100644
--- a/zerver/middleware.py
+++ b/zerver/middleware.py
@@ -390,7 +390,7 @@ class RateLimitMiddleware(MiddlewareMixin):
# The limit on the action that was requested is the minimum of the limits that get applied:
limit = min(result.entity.max_api_calls() for result in rate_limit_results)
response['X-RateLimit-Limit'] = str(limit)
- # Same principle applies to remaining api calls:
+ # Same principle applies to remaining API calls:
remaining_api_calls = min(result.remaining for result in rate_limit_results)
response['X-RateLimit-Remaining'] = str(remaining_api_calls)
diff --git a/zerver/migrations/0260_missed_message_addresses_from_redis_to_db.py b/zerver/migrations/0260_missed_message_addresses_from_redis_to_db.py
index 6b0c987360..03c15ae7ad 100644
--- a/zerver/migrations/0260_missed_message_addresses_from_redis_to_db.py
+++ b/zerver/migrations/0260_missed_message_addresses_from_redis_to_db.py
@@ -5,7 +5,7 @@ from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
-# Imported to avoid needing to duplicate redis-related code.
+# Imported to avoid needing to duplicate Redis-related code.
from zerver.lib.redis_utils import get_redis_client
@@ -41,12 +41,12 @@ def move_missed_message_addresses_to_database(apps: StateApps, schema_editor: Da
topic_name = subject_b.decode('utf-8')
# The data model for missed-message emails has changed in two
- # key ways: We're moving it from redis to the database for
+ # key ways: We're moving it from Redis to the database for
# better persistence, and also replacing the stream + topic
# (as the reply location) with a message to reply to. Because
- # the redis data structure only had stream/topic pairs, we use
+ # the Redis data structure only had stream/topic pairs, we use
# the following migration logic to find the latest message in
- # the thread indicated by the redis data (if it exists).
+ # the thread indicated by the Redis data (if it exists).
try:
user_profile = UserProfile.objects.get(id=user_profile_id)
recipient = Recipient.objects.get(id=recipient_id)
@@ -75,19 +75,19 @@ def move_missed_message_addresses_to_database(apps: StateApps, schema_editor: Da
# The timestamp will be set to the default (now) which means
# the address will take longer to expire than it would have in
- # redis, but this small issue is probably worth the simplicity
+ # Redis, but this small issue is probably worth the simplicity
# of not having to figure out the precise timestamp.
MissedMessageEmailAddress.objects.create(message=message,
user_profile=user_profile,
email_token=generate_missed_message_token())
# We successfully transferred this missed-message email's data
- # to the database, so this message can be deleted from redis.
+ # to the database, so this message can be deleted from Redis.
redis_client.delete(key)
class Migration(migrations.Migration):
- # Atomicity is not feasible here, since we're doing operations on redis too.
- # It's better to be non-atomic on both redis and database, than atomic
- # on the database and not on redis.
+ # Atomicity is not feasible here, since we're doing operations on Redis too.
+ # It's better to be non-atomic on both Redis and database, than atomic
+ # on the database and not on Redis.
atomic = False
dependencies = [
diff --git a/zerver/models.py b/zerver/models.py
index 77e0ef351d..a955f9dba8 100644
--- a/zerver/models.py
+++ b/zerver/models.py
@@ -2490,7 +2490,7 @@ def is_cross_realm_bot_email(email: str) -> bool:
return email.lower() in settings.CROSS_REALM_BOT_EMAILS
# The Huddle class represents a group of individuals who have had a
-# Group Private Message conversation together. The actual membership
+# group private message conversation together. The actual membership
# of the Huddle is stored in the Subscription table just like with
# Streams, and a hash of that list is stored in the huddle_hash field
# below, to support efficiently mapping from a set of users to the
@@ -3025,7 +3025,7 @@ class CustomProfileFieldValue(models.Model):
return f""
# Interfaces for services
-# They provide additional functionality like parsing message to obtain query url, data to be sent to url,
+# They provide additional functionality like parsing message to obtain query URL, data to be sent to URL,
# and parsing the response.
GENERIC_INTERFACE = 'GenericService'
SLACK_INTERFACE = 'SlackOutgoingWebhookService'
diff --git a/zerver/openapi/curl_param_value_generators.py b/zerver/openapi/curl_param_value_generators.py
index c4f5e78180..4550433e7d 100644
--- a/zerver/openapi/curl_param_value_generators.py
+++ b/zerver/openapi/curl_param_value_generators.py
@@ -30,7 +30,7 @@ helpers = ZulipTestCase()
def openapi_param_value_generator(
endpoints: List[str],
) -> Callable[[Callable[[], Dict[str, object]]], Callable[[], Dict[str, object]]]:
- """This decorator is used to register openapi param value genarator functions
+ """This decorator is used to register OpenAPI param value generator functions
with endpoints. Example usage:
@openapi_param_value_generator(["/messages/render:post"])
diff --git a/zerver/openapi/python_examples.py b/zerver/openapi/python_examples.py
index 1df6d28eb5..9ba0ef5312 100644
--- a/zerver/openapi/python_examples.py
+++ b/zerver/openapi/python_examples.py
@@ -33,7 +33,7 @@ CALLED_TEST_FUNCTIONS: Set[str] = set()
FuncT = TypeVar("FuncT", bound=Callable[..., object])
def openapi_test_function(endpoint: str) -> Callable[[FuncT], FuncT]:
- """This decorator is used to register an openapi test function with
+ """This decorator is used to register an OpenAPI test function with
its endpoint. Example usage:
@openapi_test_function("/messages/render:post")
diff --git a/zerver/openapi/test_curl_examples.py b/zerver/openapi/test_curl_examples.py
index 81f4b8b93b..12e877da6f 100644
--- a/zerver/openapi/test_curl_examples.py
+++ b/zerver/openapi/test_curl_examples.py
@@ -72,7 +72,7 @@ Error verifying the success of the API documentation curl example.
File: {file_name}
Line: {line}
-Curl Command:
+Curl command:
{curl_command}
Response:
{response}
diff --git a/zerver/openapi/zulip.yaml b/zerver/openapi/zulip.yaml
index 9cb960f3cb..55081f1388 100644
--- a/zerver/openapi/zulip.yaml
+++ b/zerver/openapi/zulip.yaml
@@ -292,7 +292,7 @@ paths:
avatar_url:
type: string
description: |
- The url of the new avatar URL for the user.
+ The URL of the new avatar for the user.
avatar_source:
type: string
description: |
@@ -2311,7 +2311,7 @@ paths:
additionalProperties: false
description: |
Event sent to all users in a Zulip organization when the
- set of configured [Linkifiers](/help/add-a-custom-linkification-filter)
+ set of configured [linkifiers](/help/add-a-custom-linkification-filter)
for the organization has changed.
Processing this event is important to doing Markdown local echo
@@ -2335,7 +2335,7 @@ paths:
An array of tuples, where each tuple describes a linkifier.
The first element of the tuple is a
string regex pattern which represents the pattern that should
- be linkified on matching. The second element is the url with which the
+ be linkified on matching. The second element is the URL with which the
pattern matching string should be linkified with and the third element
is the ID of the realm filter.
example:
@@ -3076,7 +3076,7 @@ paths:
supported by Zulip's powerful full-text search backend.
When a narrow is not specified, it can be used to fetch a user's
- message history (We recommend paginating to 1000 messages at a time).
+ message history. (We recommend paginating to 1000 messages at a time.)
In either case, you specify an `anchor` message (or ask the server to
calculate the first unread message for you and use that as the
@@ -4675,7 +4675,7 @@ paths:
type: array
items:
type: object
- example: [{"name": "Verona", "description": "Italian City"}]
+ example: [{"name": "Verona", "description": "Italian city"}]
required: true
- $ref: "#/components/parameters/Principals"
- name: authorization_errors_fatal
@@ -5416,7 +5416,7 @@ paths:
*`email_notifications`: Whether to trigger an email notification for all
messages sent to the stream.
- *`in_home_view`: Whether to mute the stream (Legacy property)
+ *`in_home_view`: Whether to mute the stream (legacy property)
*`wildcard_mentions_notify`: whether wildcard mentions trigger notifications
as though they were personal mentions in this stream.
@@ -6040,11 +6040,11 @@ paths:
Present if `realm_filters` is present in `fetch_event_types`.
An array of tuples (fixed-length arrays) where each tuple describes
- a single realm filter ([Linkifier](/help/add-a-custom-linkification-filter).
+ a single realm filter ([linkifier](/help/add-a-custom-linkification-filter)).
The first element of the tuple is a string regex pattern which represents
the pattern that should be linkified on matching.
- The second element is the url with which the
+ The second element is the URL with which the
pattern matching string should be linkified with and the third element
is the id of the realm filter.
realm_user_groups:
@@ -6875,7 +6875,7 @@ paths:
Present if `realm` is present in `fetch_event_types`.
Members whose accounts have been created at least this many days ago
- will be treated as [Full Members](/help/restrict-permissions-of-new-members)
+ will be treated as [full members](/help/restrict-permissions-of-new-members)
for the purpose of settings that restrict access to new members.
realm_digest_weekday:
type: integer
@@ -6971,7 +6971,7 @@ paths:
description: |
Present if `realm` is present in `fetch_event_types`.
- The url of the organization's [profile icon](/help/create-your-organization-profile).
+ The URL of the organization's [profile icon](/help/create-your-organization-profile).
realm_icon_source:
type: string
description: |
@@ -6995,7 +6995,7 @@ paths:
description: |
Present if `realm` is present in `fetch_event_types`.
- The url of the organization's wide logo configured in the
+ The URL of the organization's wide logo configured in the
[organization profile](/help/create-your-organization-profile).
realm_logo_source:
type: string
@@ -7014,7 +7014,7 @@ paths:
description: |
Present if `realm` is present in `fetch_event_types`.
- The url of the organization's night theme wide-format logo configured in the
+ The URL of the organization's night theme wide-format logo configured in the
[organization profile](/help/create-your-organization-profile).
realm_night_logo_source:
type: string
@@ -7195,7 +7195,7 @@ paths:
url_pattern:
type: string
description: |
- The regex pattern of the url of a profile page
+ The regex pattern of the URL of a profile page
on the external site.
jitsi_server_url:
type: string
@@ -7348,7 +7348,7 @@ paths:
description: |
Present if `realm_user` is present in `fetch_event_types`.
- The url of the avatar URL for the current user at 100x100
+ The URL of the avatar for the current user at 100x100
resolution. See also `avatar_url_medium`.
can_create_streams:
type: boolean
@@ -7517,7 +7517,7 @@ paths:
type: boolean
dev:
description: |
- Whether the user can authenticate using development api key.
+ Whether the user can authenticate using development API key.
type: boolean
email:
description: |
@@ -7525,35 +7525,35 @@ paths:
type: boolean
ldap:
description: |
- Whether the user can authenticate using ldap.
+ Whether the user can authenticate using LDAP.
type: boolean
remoteuser:
description: |
- Whether the user can authenticate using remoteuser.
+ Whether the user can authenticate using REMOTE_USER.
type: boolean
github:
description: |
- Whether the user can authenticate using their github account.
+ Whether the user can authenticate using their GitHub account.
type: boolean
azuread:
description: |
- Whether the user can authenticate using their azuread account.
+ Whether the user can authenticate using their Azure Active Directory account.
type: boolean
gitlab:
description: |
- Whether the user can authenticate using their gitlab account.
+ Whether the user can authenticate using their GitLab account.
type: boolean
apple:
description: |
- Whether the user can authenticate using their apple account.
+ Whether the user can authenticate using their Apple account.
type: boolean
google:
description: |
- Whether the user can authenticate using their google account.
+ Whether the user can authenticate using their Google account.
type: boolean
saml:
description: |
- Whether the user can authenticate using saml.
+ Whether the user can authenticate using SAML.
type: boolean
external_authentication_methods:
type: array
@@ -8249,7 +8249,7 @@ paths:
The user_ids of the recipients of the message being typed. Typing
notifications are only supported for private messages. Send a
JSON-encoded list of user_ids. (Use a list even if there is only one
- recipient.).
+ recipient.)
**Changes**: Before Zulip 2.0, this parameter accepted only a JSON-encoded
list of email addresses. Support for the email address-based format was
@@ -8480,7 +8480,7 @@ paths:
members:
type: array
description: |
- The integer User IDs of the user group members.
+ The integer user IDs of the user group members.
items:
type: integer
name:
@@ -8548,7 +8548,7 @@ paths:
operationId: zulip_outgoing_webhooks
tags: ["webhooks"]
description: |
- Outgoing Webhooks allows to build or set up Zulip integrations which are
+ Outgoing webhooks allow you to build or set up Zulip integrations which are
notified when certain types of messages are sent in Zulip.
responses:
"200":
@@ -8612,7 +8612,7 @@ paths:
The content/body of the message rendered in HTML.
example:
{
- "data": "@**Outgoing Webhook Test** Zulip is the world\u2019s most productive group chat!",
+ "data": "@**Outgoing webhook test** Zulip is the world\u2019s most productive group chat!",
"trigger": "mention",
"token": "xvOzfurIutdRRVLzpXrIIHXJvNfaJLJ0",
"message":
@@ -8626,10 +8626,10 @@ paths:
"topic_links": [],
"sender_full_name": "Iago",
"avatar_url": "https://secure.gravatar.com/avatar/1f4f1575bf002ae562fea8fc4b861b09?d=identicon&version=1",
- "rendered_content": "
@Outgoing Webhook Test Zulip is the world\u2019s most productive group chat!
",
+ "rendered_content": "
@Outgoing webhook test Zulip is the world\u2019s most productive group chat!
",
"sender_id": 5,
"stream_id": 5,
- "content": "@**Outgoing Webhook Test** Zulip is the world\u2019s most productive group chat!",
+ "content": "@**Outgoing webhook test** Zulip is the world\u2019s most productive group chat!",
"display_recipient": "Verona",
"type": "stream",
"id": 112,
@@ -8645,7 +8645,7 @@ paths:
tags: ["streams"]
operationId: create_big_blue_button_video_call
description: |
- Create a video call url for a Big Blue Button video call.
+ Create a video call URL for a Big Blue Button video call.
Requires Big Blue Button to be configured on the Zulip server.
responses:
"200":
@@ -8661,7 +8661,7 @@ paths:
msg: {}
url:
description: |
- The url for the Big Blue Button video call.
+ The URL for the Big Blue Button video call.
type: string
example: "/calls/bbb/join?meeting_id=%22zulip-something%22&password=%22something%22&checksum=%22somechecksum%22"
example:
@@ -8936,7 +8936,7 @@ components:
base_url:
type: string
description: |
- The url the outgoing webhook is configured to post to.
+ The URL the outgoing webhook is configured to post to.
token:
type: string
description: |
@@ -9160,7 +9160,7 @@ components:
type: string
nullable: true
description: |
- The url of the export. Null if there's no url.
+ The URL of the export. `null` if there's no URL.
pending:
type: boolean
description: |
@@ -9450,7 +9450,7 @@ components:
within the namespace of the `reaction_type`.
For example, for `unicode_emoji`, this will be an encoding of the
- unicode codepoint.
+ Unicode codepoint.
emoji_name:
type: string
description: |
@@ -9460,7 +9460,7 @@ components:
description: |
One of the following values:
- * `unicode_emoji`: Unicode emoji (`emoji_code` will be its unicode
+ * `unicode_emoji`: Unicode emoji (`emoji_code` will be its Unicode
codepoint).
* `realm_emoji`: [Custom emoji](/help/add-custom-emoji).
(`emoji_code` will be its ID).
@@ -10324,7 +10324,7 @@ components:
from the streams specified in the `subscriptions` parameter. If
not provided, then the requesting user/bot is subscribed.
- **Changes**: The integer format is new in Zulip 3.0 (Feature level 9).
+ **Changes**: The integer format is new in Zulip 3.0 (feature level 9).
content:
application/json:
schema:
@@ -10342,7 +10342,7 @@ components:
it should pass this parameter using the value the server provided
for the existing reaction for specificity. Supported values:
- * `unicode_emoji`: Unicode emoji (`emoji_code` will be its unicode codepoint).
+ * `unicode_emoji`: Unicode emoji (`emoji_code` will be its Unicode codepoint).
* `realm_emoji`: Custom emoji. (`emoji_code` will be its ID).
* `zulip_extra_emoji`: Special emoji included with Zulip. Exists to
namespace the `zulip` emoji.
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json
index 5dd482675a..6fb15c0d4c 100644
--- a/zerver/tests/fixtures/markdown_test_cases.json
+++ b/zerver/tests/fixtures/markdown_test_cases.json
@@ -427,10 +427,10 @@
},
{
"name": "only_named_inline_image",
- "input": "[Google Link](https://www.google.com/images/srpr/logo4w.png)",
- "expected_output": "
",
"backend_only_rendering": true,
- "text_content": "Google Link\n"
+ "text_content": "Google link\n"
},
{
"name": "only_non_image_link",
diff --git a/zerver/tests/fixtures/mattermost_fixtures/direct_channel/export.json b/zerver/tests/fixtures/mattermost_fixtures/direct_channel/export.json
index c4b056c699..010a15b66e 100644
--- a/zerver/tests/fixtures/mattermost_fixtures/direct_channel/export.json
+++ b/zerver/tests/fixtures/mattermost_fixtures/direct_channel/export.json
@@ -15,7 +15,7 @@
{"type":"direct_channel","direct_channel":{"members":["ron","harry", "ginny"],"favorited_by":null,"header":""}}
{"type":"direct_post","direct_post":{"channel_members":["ron","harry"],"user":"ron","message":"hey harry","create_at":1566376137676,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
{"type":"direct_post","direct_post":{"channel_members":["ron","harry"],"user":"harry","message":"what's up","create_at":1566376318568,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
-{"type":"direct_post","direct_post":{"channel_members":["ron","harry","ginny"],"user":"ginny","message":"Who is going to Hogesmead this weekend?","create_at":1566376226493,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
+{"type":"direct_post","direct_post":{"channel_members":["ron","harry","ginny"],"user":"ginny","message":"Who is going to Hogsmeade this weekend?","create_at":1566376226493,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
{"type":"direct_post","direct_post":{"channel_members":["ron","harry","ginny"],"user":"harry","message":"I am going.","create_at":1566376311350,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
{"type":"direct_post","direct_post":{"channel_members":["ron","harry","ginny"],"user":"ron","message":"I am going as well","create_at":1566376286363,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
{"type":"direct_post","direct_post":{"channel_members":["harry","voldemort"],"user":"voldemort","message":"Hey Harry.","create_at":1566376318569,"flagged_by":null,"reactions":null,"replies":null,"attachments":null}}
diff --git a/zerver/tests/test_alert_words.py b/zerver/tests/test_alert_words.py
index a72eda0956..09d3eeba5d 100644
--- a/zerver/tests/test_alert_words.py
+++ b/zerver/tests/test_alert_words.py
@@ -184,8 +184,8 @@ class AlertWordTests(ZulipTestCase):
self.assertTrue(self.message_does_alert(user, "Case of ONE, won't stop me"))
# We don't cause alerts for matches in URLs.
- self.assertFalse(self.message_does_alert(user, "Don't alert on http://t.co/one/ urls"))
- self.assertFalse(self.message_does_alert(user, "Don't alert on http://t.co/one urls"))
+ self.assertFalse(self.message_does_alert(user, "Don't alert on http://t.co/one/ URLs"))
+ self.assertFalse(self.message_does_alert(user, "Don't alert on http://t.co/one URLs"))
def test_update_alert_words(self) -> None:
user = self.get_user()
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py
index 7b7e4be5c1..d0355ef232 100644
--- a/zerver/tests/test_auth_backends.py
+++ b/zerver/tests/test_auth_backends.py
@@ -858,7 +858,7 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
# redirected back to the registered redirect uri.
# We register callbacks for the key URLs on Identity Provider that
- # auth completion url will call
+ # auth completion URL will call
with responses.RequestsMock(assert_all_requests_are_fired=False) as requests_mock:
requests_mock.add(
requests_mock.POST,
@@ -1368,11 +1368,11 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
result = self.social_auth_test(account_data_dict,
expect_choose_email_screen=True,
subdomain=subdomain, is_signup=True)
- # Full name should get populated from ldap:
+ # Full name should get populated from LDAP:
self.stage_two_of_registration(result, realm, subdomain, email, name, "New LDAP fullname",
skip_registration_form=True)
- # Now try a user that doesn't exist in ldap:
+ # Now try a user that doesn't exist in LDAP:
email = self.nonreg_email("alice")
name = "Alice Social"
account_data_dict = self.get_account_data_dict(email=email, name=name)
@@ -1380,7 +1380,7 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
expect_choose_email_screen=True,
subdomain=subdomain, is_signup=True)
# Full name should get populated as provided by the social backend, because
- # this user isn't in the ldap dictionary:
+ # this user isn't in the LDAP dictionary:
self.stage_two_of_registration(result, realm, subdomain, email, name, name,
skip_registration_form=self.BACKEND_CLASS.full_name_validated)
self.assertEqual(log_warn.output, [f'WARNING:root:New account email {email} could not be found in LDAP'])
@@ -1388,8 +1388,8 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
@override_settings(TERMS_OF_SERVICE=None)
def test_social_auth_with_ldap_auth_registration_from_confirmation(self) -> None:
"""
- This test checks that in configurations that use the ldap authentication backend
- and a social backend, it is possible to create non-ldap users via the social backend.
+ This test checks that in configurations that use the LDAP authentication backend
+ and a social backend, it is possible to create non-LDAP users via the social backend.
"""
self.init_default_ldap_database()
email = self.nonreg_email("alice")
@@ -1413,11 +1413,11 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
expect_choose_email_screen=True,
subdomain=subdomain, is_signup=True)
# Full name should get populated as provided by the social backend, because
- # this user isn't in the ldap dictionary:
+ # this user isn't in the LDAP dictionary:
self.stage_two_of_registration(result, realm, subdomain, email, name, name,
skip_registration_form=self.BACKEND_CLASS.full_name_validated)
self.assertEqual(log_warn.output, [f'WARNING:root:New account email {email} could not be found in LDAP'])
- self.assertEqual(log_debug.output, [f'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No ldap user matching django_to_ldap_username result: {email}. Input username: {email}'])
+ self.assertEqual(log_debug.output, [f'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: {email}. Input username: {email}'])
def test_social_auth_complete(self) -> None:
with mock.patch('social_core.backends.oauth.BaseOAuth2.process_error',
@@ -1867,7 +1867,7 @@ class SAMLAuthBackendTest(SocialAuthBase):
self.assert_in_success_response(["Configuration error", "SAML authentication"], result)
result = self.client_get(f'/accounts/{action}/social/saml/')
- # No matching url pattern.
+ # No matching URL pattern.
self.assertEqual(result.status_code, 404)
def test_social_auth_saml_require_limit_to_subdomains(self) -> None:
@@ -2178,7 +2178,7 @@ class AppleIdAuthBackendTest(AppleAuthMixin, SocialAuthBase):
self.assertEqual(result.status_code, 302)
self.assertIn('login', result.url)
- # (2) Check if auth fails when a state sent has no valid data stored in redis.
+ # (2) Check if auth fails when a state sent has no valid data stored in Redis.
fake_state = "fa42e4ccdb630f0070c1daab70ad198d8786d4b639cd7a1b4db4d5a13c623060"
result = self.client_post('/complete/apple/', {'state': fake_state})
self.assertEqual(result.status_code, 302)
@@ -2392,7 +2392,7 @@ class GitHubAuthBackendTest(SocialAuthBase):
# that requires "choose email" screen;
self.assert_in_success_response(["Select account"], result)
# Verify that all the emails returned by GitHub auth
- # are in the "choose email" screen.
+ # are in the "choose email" screen.
all_emails_verified = True
for email_data_dict in self.email_data:
email = email_data_dict["email"]
@@ -3353,19 +3353,19 @@ class ExternalMethodDictsTests(ZulipTestCase):
):
# Calling get_external_method_dicts without a realm returns all methods configured on the server:
external_auth_methods = get_external_method_dicts()
- # 1 IdP enabled for all realms + a dict for github auth
+ # 1 IdP enabled for all realms + a dict for GitHub auth
self.assert_length(external_auth_methods, 2)
self.assertEqual([external_auth_methods[0]['name'], external_auth_methods[1]['name']],
['saml:test_idp', 'github'])
external_auth_methods = get_external_method_dicts(get_realm("zulip"))
- # Only test_idp enabled for the zulip realm, + github auth.
+ # Only test_idp enabled for the zulip realm, + GitHub auth.
self.assert_length(external_auth_methods, 2)
self.assertEqual([external_auth_methods[0]['name'], external_auth_methods[1]['name']],
['saml:test_idp', 'github'])
external_auth_methods = get_external_method_dicts(get_realm("zephyr"))
- # Both idps enabled for the zephyr realm, + github auth.
+ # Both IdPs enabled for the zephyr realm, + GitHub auth.
self.assert_length(external_auth_methods, 3)
self.assertEqual({external_auth_methods[0]['name'], external_auth_methods[1]['name']},
{'saml:test_idp', 'saml:test_idp2'})
@@ -4074,7 +4074,7 @@ class DjangoToLDAPUsernameTests(ZulipTestCase):
def test_authenticate_to_ldap_via_email(self) -> None:
"""
With AUTH_LDAP_REVERSE_EMAIL_SEARCH configured, django_to_ldap_username
- should be able to translate an email to ldap username,
+ should be able to translate an email to LDAP username,
and thus it should be possible to authenticate through user_profile.delivery_email.
"""
realm = get_realm("zulip")
@@ -4252,7 +4252,7 @@ class TestLDAP(ZulipLDAPTestCase):
password="doesnt_matter",
realm=get_realm('zulip'))
self.assertEqual(log_debug.output, [
- 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No ldap user matching django_to_ldap_username result: nonexistent. Input username: nonexistent@zulip.com'
+ 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: nonexistent. Input username: nonexistent@zulip.com'
])
self.assertIs(user, None)
@@ -4822,7 +4822,7 @@ class TestQueryLDAP(ZulipLDAPTestCase):
with self.settings(AUTH_LDAP_USER_ATTR_MAP={'full_name': 'cn'},
LDAP_EMAIL_ATTR='mail'):
# This will look up the user by email in our test dictionary,
- # should successfully find hamlet's ldap entry.
+ # should successfully find hamlet's LDAP entry.
values = query_ldap(self.example_email('hamlet'))
self.assertEqual(len(values), 2)
self.assertIn('full_name: King Hamlet', values)
diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py
index 4afea31b43..2aab06ee80 100644
--- a/zerver/tests/test_bots.py
+++ b/zerver/tests/test_bots.py
@@ -341,7 +341,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
list of subscriptions and confirm the right number of events
are generated.
When 'principals' has a bot, no notification message event or invitation email
- is sent when add_subscriptions_backend is called in the above api call.
+ is sent when add_subscriptions_backend is called in the above API call.
"""
hamlet = self.example_user('hamlet')
iago = self.example_user('iago')
diff --git a/zerver/tests/test_camo.py b/zerver/tests/test_camo.py
index 3ef7c1e28a..441e1c9adb 100644
--- a/zerver/tests/test_camo.py
+++ b/zerver/tests/test_camo.py
@@ -3,12 +3,12 @@ from zerver.lib.test_classes import ZulipTestCase
class CamoURLTest(ZulipTestCase):
def test_legacy_camo_url(self) -> None:
- # Test with valid hex and url pair
+ # Test with valid hex and URL pair
result = self.client_get("/external_content/0f50f0bda30b6e65e9442c83ddb4076c74e75f96/687474703a2f2f7777772e72616e646f6d2e736974652f696d616765732f666f6f6261722e6a706567")
self.assertEqual(result.status_code, 302, result)
self.assertIn('/filters:no_upscale():quality(100)/aHR0cDovL3d3dy5yYW5kb20uc2l0ZS9pbWFnZXMvZm9vYmFyLmpwZWc=/source_type/external', result.url)
- # Test with invalid hex and url pair
+ # Test with invalid hex and URL pair
result = self.client_get("/external_content/074c5e6c9c6d4ce97db1c740d79dc561cf7eb379/687474703a2f2f7777772e72616e646f6d2e736974652f696d616765732f666f6f6261722e6a706567")
self.assertEqual(result.status_code, 403, result)
self.assert_in_response("Not a valid URL.", result)
diff --git a/zerver/tests/test_custom_profile_data.py b/zerver/tests/test_custom_profile_data.py
index eecf414205..9f5b24670e 100644
--- a/zerver/tests/test_custom_profile_data.py
+++ b/zerver/tests/test_custom_profile_data.py
@@ -237,7 +237,7 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase):
'subtype': 'custom',
}).decode()
result = self.client_post("/json/realm/profile_fields", info=data)
- self.assert_json_error(result, 'Custom external account must define url pattern')
+ self.assert_json_error(result, 'Custom external account must define URL pattern')
data["field_data"] = orjson.dumps({
'subtype': 'custom',
diff --git a/zerver/tests/test_event_queue.py b/zerver/tests/test_event_queue.py
index 3c47909a4b..f3f8beff47 100644
--- a/zerver/tests/test_event_queue.py
+++ b/zerver/tests/test_event_queue.py
@@ -197,7 +197,7 @@ class MissedMessageNotificationsTest(ZulipTestCase):
all_public_streams=True,
apply_markdown=True,
client_gravatar=True,
- client_type_name='home grown api program',
+ client_type_name='home grown API program',
event_types=['message'],
last_connection_time=time.time(),
queue_timeout=0,
diff --git a/zerver/tests/test_external.py b/zerver/tests/test_external.py
index 5d35b50b7d..c601a9e451 100644
--- a/zerver/tests/test_external.py
+++ b/zerver/tests/test_external.py
@@ -48,7 +48,7 @@ class RateLimitTests(ZulipTestCase):
# Some tests here can be somewhat timing-sensitive in a way
# that can't be eliminated, e.g. due to testing things that rely
- # on redis' internal timing mechanism which we can't mock.
+ # on Redis' internal timing mechanism which we can't mock.
# The first API request when running a suite of tests is slow
# and can take multiple seconds. This is not a problem when running
# multiple tests, but if an individual, time-sensitive test from this class
diff --git a/zerver/tests/test_home.py b/zerver/tests/test_home.py
index 0e5d13a8dc..346181046d 100644
--- a/zerver/tests/test_home.py
+++ b/zerver/tests/test_home.py
@@ -905,7 +905,7 @@ class HomeTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_log:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_log.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
@@ -930,7 +930,7 @@ class HomeTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_log:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_log.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
@@ -959,7 +959,7 @@ class HomeTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_log:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_log.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
diff --git a/zerver/tests/test_i18n.py b/zerver/tests/test_i18n.py
index 781a95e95d..1b4415015f 100644
--- a/zerver/tests/test_i18n.py
+++ b/zerver/tests/test_i18n.py
@@ -87,7 +87,7 @@ class TranslationTestCase(ZulipTestCase):
]
for lang, word in languages:
- # Applying str function to LANGUAGE_COOKIE_NAME to convert unicode
+ # Applying str function to LANGUAGE_COOKIE_NAME to convert Unicode
# into an ascii otherwise SimpleCookie will raise an exception
self.client.cookies = SimpleCookie({str(settings.LANGUAGE_COOKIE_NAME): lang})
diff --git a/zerver/tests/test_integrations_dev_panel.py b/zerver/tests/test_integrations_dev_panel.py
index 8aab019845..bd2155402e 100644
--- a/zerver/tests/test_integrations_dev_panel.py
+++ b/zerver/tests/test_integrations_dev_panel.py
@@ -87,7 +87,7 @@ class TestIntegrationsDevPanel(ZulipTestCase):
def test_check_send_webhook_fixture_message_for_success_with_headers_and_non_json_fixtures(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
- url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=Wordpress Notifications"
+ url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=WordPress Notifications"
target_url = "/devtools/integrations/check_send_webhook_fixture_message"
with open("zerver/webhooks/wordpress/fixtures/publish_post_no_data_provided.txt") as f:
body = f.read()
@@ -106,7 +106,7 @@ class TestIntegrationsDevPanel(ZulipTestCase):
expected_message = "New post published:\n* [New WordPress Post](WordPress Post URL)"
self.assertEqual(latest_msg.content, expected_message)
self.assertEqual(Stream.objects.get(id=latest_msg.recipient.type_id).name, "Denmark")
- self.assertEqual(latest_msg.topic_name(), "Wordpress Notifications")
+ self.assertEqual(latest_msg.topic_name(), "WordPress Notifications")
def test_get_fixtures_for_nonexistant_integration(self) -> None:
target_url = "/devtools/integrations/somerandomnonexistantintegration/fixtures"
@@ -182,7 +182,7 @@ class TestIntegrationsDevPanel(ZulipTestCase):
def test_send_all_webhook_fixture_messages_for_success_with_non_json_fixtures(self) -> None:
bot = get_user('webhook-bot@zulip.com', self.zulip_realm)
- url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=Wordpress Bulk Notifications"
+ url = f"/api/v1/external/wordpress?api_key={bot.api_key}&stream=Denmark&topic=WordPress Bulk Notifications"
target_url = "/devtools/integrations/send_all_webhook_fixture_messages"
data = {
diff --git a/zerver/tests/test_link_embed.py b/zerver/tests/test_link_embed.py
index 31d731fb2e..1a302ec1ac 100644
--- a/zerver/tests/test_link_embed.py
+++ b/zerver/tests/test_link_embed.py
@@ -431,7 +431,7 @@ class PreviewTestCase(ZulipTestCase):
@override_settings(INLINE_URL_EMBED_PREVIEW=True)
def test_inline_relative_url_embed_preview(self) -> None:
- # Relative urls should not be sent for url preview.
+ # Relative URLs should not be sent for URL preview.
with mock_queue_publish('zerver.lib.actions.queue_json_publish') as patched:
self.send_personal_message(
self.example_user('prospero'),
@@ -442,7 +442,7 @@ class PreviewTestCase(ZulipTestCase):
def test_inline_url_embed_preview_with_relative_image_url(self) -> None:
with_preview_relative = '
'
- # Try case where the opengraph image is a relative url.
+ # Try case where the Open Graph image is a relative URL.
msg = self._send_message_with_test_org_url(sender=self.example_user('prospero'), relative_url=True)
self.assertEqual(msg.rendered_content, with_preview_relative)
@@ -744,7 +744,7 @@ class PreviewTestCase(ZulipTestCase):
@override_settings(INLINE_URL_EMBED_PREVIEW=True)
def test_custom_title_replaces_youtube_url_title(self) -> None:
- url = '[Youtube link](https://www.youtube.com/watch?v=eSJTXC7Ixgg)'
+ url = '[YouTube link](https://www.youtube.com/watch?v=eSJTXC7Ixgg)'
with mock_queue_publish('zerver.lib.actions.queue_json_publish'):
msg_id = self.send_personal_message(
self.example_user('hamlet'),
@@ -766,9 +766,9 @@ class PreviewTestCase(ZulipTestCase):
lambda *args, **kwargs: mocked_data):
FetchLinksEmbedData().consume(event)
self.assertTrue(
- 'INFO:root:Time spent on get_link_embed_data for [Youtube link](https://www.youtube.com/watch?v=eSJTXC7Ixgg):' in info_logs.output[0]
+ 'INFO:root:Time spent on get_link_embed_data for [YouTube link](https://www.youtube.com/watch?v=eSJTXC7Ixgg):' in info_logs.output[0]
)
msg.refresh_from_db()
- expected_content = '
'
self.assertEqual(expected_content, msg.rendered_content)
diff --git a/zerver/tests/test_logging_handlers.py b/zerver/tests/test_logging_handlers.py
index 4ecbbe62a8..bffdad6f97 100644
--- a/zerver/tests/test_logging_handlers.py
+++ b/zerver/tests/test_logging_handlers.py
@@ -63,7 +63,7 @@ class AdminNotifyHandlerTest(ZulipTestCase):
"""A random exception passes happily through AdminNotifyHandler"""
handler = self.get_admin_zulip_handler()
try:
- raise Exception("Testing Error!")
+ raise Exception("Testing error!")
except Exception:
exc_info = sys.exc_info()
record = self.logger.makeRecord('name', logging.ERROR, 'function', 16,
@@ -162,7 +162,7 @@ class AdminNotifyHandlerTest(ZulipTestCase):
# Now simulate a DisallowedHost exception
def get_host_error() -> None:
- raise Exception("Get Host Failure!")
+ raise Exception("Get host failure!")
orig_get_host = record.request.get_host
record.request.get_host = get_host_error
report = self.run_handler(record)
diff --git a/zerver/tests/test_management_commands.py b/zerver/tests/test_management_commands.py
index 6d6bc554a1..b2f80b45c7 100644
--- a/zerver/tests/test_management_commands.py
+++ b/zerver/tests/test_management_commands.py
@@ -421,7 +421,7 @@ class TestConvertMattermostData(ZulipTestCase):
output_dir=os.path.realpath(output_dir),
)
self.assertEqual(mock_print.mock_calls, [
- call('Converting Data ...')
+ call('Converting data ...')
])
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
diff --git a/zerver/tests/test_markdown.py b/zerver/tests/test_markdown.py
index 96c344ff15..804347af1d 100644
--- a/zerver/tests/test_markdown.py
+++ b/zerver/tests/test_markdown.py
@@ -516,7 +516,7 @@ class MarkdownTest(ZulipTestCase):
converted = markdown_convert_wrapper(msg)
self.assertIn(thumbnail_img, converted)
- # Any url which is not an external link and doesn't start with
+ # Any URL which is not an external link and doesn't start with
# /user_uploads/ is not thumbnailed
msg = '[foobar](/static/images/cute/turtle.png)'
thumbnail_img = '
'
@@ -603,7 +603,7 @@ class MarkdownTest(ZulipTestCase):
@override_settings(INLINE_IMAGE_PREVIEW=True)
def test_corrected_image_source(self) -> None:
- # testing only wikipedia because linx.li urls can be expected to expire
+ # testing only Wikipedia because linx.li URLs can be expected to expire
content = 'https://en.wikipedia.org/wiki/File:Wright_of_Derby,_The_Orrery.jpg'
expected = '
'
@@ -721,17 +721,17 @@ class MarkdownTest(ZulipTestCase):
self.assertEqual(converted, '
')
msg = 'https://www.youtube.com/watch?v=0c46YHS3RY8\n\nSample text\n\nhttps://www.youtube.com/watch?v=lXFO2ULktEI'
converted = markdown_convert_wrapper(msg)
@@ -892,7 +892,7 @@ class MarkdownTest(ZulipTestCase):
make_link('http://twitter.com/wdaher/status/287977969287315460'),
make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315460', emoji_in_tweet_html)))
- # Test twitter previews in spoiler tags.
+ # Test Twitter previews in spoiler tags.
msg = '```spoiler secret tweet\nTweet: http://twitter.com/wdaher/status/287977969287315456\n```'
converted = markdown_convert_wrapper(msg)
diff --git a/zerver/tests/test_mattermost_importer.py b/zerver/tests/test_mattermost_importer.py
index 6b8bd48021..910ee8c697 100644
--- a/zerver/tests/test_mattermost_importer.py
+++ b/zerver/tests/test_mattermost_importer.py
@@ -670,7 +670,7 @@ class MatterMostImporter(ZulipTestCase):
self.assertEqual(len(huddle_messages), 3)
self.assertEqual(len(set(huddle_recipients)), 1)
self.assertEqual(huddle_messages[0].sender.email, "ginny@zulip.com")
- self.assertEqual(huddle_messages[0].content, "Who is going to Hogesmead this weekend?\n\n")
+ self.assertEqual(huddle_messages[0].content, "Who is going to Hogsmeade this weekend?\n\n")
personal_messages = messages.filter(recipient__type=Recipient.PERSONAL).order_by("date_sent")
personal_recipients = personal_messages.values_list("recipient", flat=True)
diff --git a/zerver/tests/test_message_fetch.py b/zerver/tests/test_message_fetch.py
index 9080aa1f55..b1f1527bde 100644
--- a/zerver/tests/test_message_fetch.py
+++ b/zerver/tests/test_message_fetch.py
@@ -1614,7 +1614,7 @@ class GetOldMessagesTest(ZulipTestCase):
def test_get_messages_with_narrow_stream_mit_unicode_regex(self) -> None:
"""
- A request for old messages for a user in the mit.edu relam with unicode
+ A request for old messages for a user in the mit.edu realm with Unicode
stream name should be correctly escaped in the database query.
"""
user = self.mit_user('starnine')
@@ -1648,7 +1648,7 @@ class GetOldMessagesTest(ZulipTestCase):
def test_get_messages_with_narrow_topic_mit_unicode_regex(self) -> None:
"""
- A request for old messages for a user in the mit.edu realm with unicode
+ A request for old messages for a user in the mit.edu realm with Unicode
topic name should be correctly escaped in the database query.
"""
mit_user_profile = self.mit_user("starnine")
@@ -1875,7 +1875,7 @@ class GetOldMessagesTest(ZulipTestCase):
self.assertEqual(len(multi_search_result['messages']), 1)
self.assertEqual(multi_search_result['messages'][0]['match_content'], '
discuss lunch after lunch
')
- # Test searching in messages with unicode characters
+ # Test searching in messages with Unicode characters
narrow = [
dict(operator='search', operand='日本'),
]
@@ -1910,7 +1910,7 @@ class GetOldMessagesTest(ZulipTestCase):
english_message['match_content'],
'
I want to go to 日本!
')
- # Multiple search operands with unicode
+ # Multiple search operands with Unicode
multi_search_narrow = [
dict(operator='search', operand='ちは'),
dict(operator='search', operand='今日は'),
@@ -2052,7 +2052,7 @@ class GetOldMessagesTest(ZulipTestCase):
self.assertEqual(multi_search_result['messages'][0]['match_content'],
'
')
- # Search operands with HTML Special Characters
+ # Search operands with HTML special characters
special_search_narrow = [
dict(operator='search', operand='butter'),
]
@@ -2522,7 +2522,7 @@ class GetOldMessagesTest(ZulipTestCase):
self.exercise_bad_narrow_operand_using_dict_api('search', [''], error_msg)
# The exercise_bad_narrow_operand helper method uses legacy tuple format to
- # test bad narrow, this method uses the current dict api format
+ # test bad narrow, this method uses the current dict API format
def exercise_bad_narrow_operand_using_dict_api(self, operator: str,
operands: Sequence[Any],
error_msg: str) -> None:
@@ -3236,7 +3236,7 @@ class MessageHasKeywordsTest(ZulipTestCase):
msg_contents = ["Link: foo.org",
"Image: https://www.google.com/images/srpr/logo4w.png",
"Image: https://www.google.com/images/srpr/logo4w.pdf",
- "[Google Link](https://www.google.com/images/srpr/logo4w.png)"]
+ "[Google link](https://www.google.com/images/srpr/logo4w.png)"]
for msg_content in msg_contents:
msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
'Denmark', content=msg_content))
@@ -3245,7 +3245,7 @@ class MessageHasKeywordsTest(ZulipTestCase):
self.update_message(msgs[0], 'https://www.google.com/images/srpr/logo4w.png')
self.assertTrue(msgs[0].has_image)
- self.update_message(msgs[0], 'No Image Again')
+ self.update_message(msgs[0], 'No image again')
self.assertFalse(msgs[0].has_image)
def test_has_attachment(self) -> None:
@@ -3259,7 +3259,7 @@ class MessageHasKeywordsTest(ZulipTestCase):
msg_id = self.send_stream_message(hamlet, "Denmark", body, "test")
msg = Message.objects.get(id=msg_id)
self.assertTrue(msg.has_attachment)
- self.update_message(msg, 'No Attachments')
+ self.update_message(msg, 'No attachments')
self.assertFalse(msg.has_attachment)
self.update_message(msg, body)
self.assertTrue(msg.has_attachment)
diff --git a/zerver/tests/test_middleware.py b/zerver/tests/test_middleware.py
index 10fc5ce693..a0e5895d15 100644
--- a/zerver/tests/test_middleware.py
+++ b/zerver/tests/test_middleware.py
@@ -105,7 +105,7 @@ class OpenGraphTest(ZulipTestCase):
self.check_title_and_description(
'/api/',
- "Zulip API Documentation",
+ "Zulip API documentation",
[("Zulip's APIs allow you to integrate other services with Zulip. This "
"guide should help you find the API you need:")], [])
diff --git a/zerver/tests/test_openapi.py b/zerver/tests/test_openapi.py
index 3c56d47658..b0d6f4a562 100644
--- a/zerver/tests/test_openapi.py
+++ b/zerver/tests/test_openapi.py
@@ -327,14 +327,14 @@ class OpenAPIArgumentsTest(ZulipTestCase):
if not msg: # nocoverage
msg = f"""
We found some OpenAPI documentation for {method} {url_pattern},
-so maybe we shouldn't mark it as intentionally undocumented in the urls.
+so maybe we shouldn't mark it as intentionally undocumented in the URLs.
"""
raise AssertionError(msg) # nocoverage
except KeyError:
return
def check_for_non_existant_openapi_endpoints(self) -> None:
- """ Here, we check to see if every endpoint documented in the openapi
+ """ Here, we check to see if every endpoint documented in the OpenAPI
documentation actually exists in urls.py and thus in actual code.
Note: We define this as a helper called at the end of
test_openapi_arguments instead of as a separate test to ensure that
@@ -500,7 +500,7 @@ do not match the types declared in the implementation of {function.__name__}.\n"
# validator in these cases, but it does happen.
#
# If the REQ type is not string then, insert the
- # REQ and OPENAPI data types of the variable in
+ # REQ and OpenAPI data types of the variable in
# the respective sets so that they can be dealt
# with later. In either case remove the variable
# from `json_params`.
@@ -536,7 +536,7 @@ do not match the types declared in the implementation of {function.__name__}.\n"
has_request_variables decorator).
At the end, we perform a reverse mapping test that verifies that
- every url pattern defined in the openapi documentation actually exists
+ every URL pattern defined in the OpenAPI documentation actually exists
in code.
"""
diff --git a/zerver/tests/test_push_notifications.py b/zerver/tests/test_push_notifications.py
index df7878d475..dcd92139d3 100644
--- a/zerver/tests/test_push_notifications.py
+++ b/zerver/tests/test_push_notifications.py
@@ -1092,7 +1092,7 @@ class HandlePushNotificationTest(PushNotificationTest):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([self.user_profile])
self.assertEqual(info_logs.output, [
- f"INFO:{logger_string}:Soft Deactivated user {self.user_profile.id}",
+ f"INFO:{logger_string}:Soft deactivated user {self.user_profile.id}",
f"INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process"
])
sender = self.example_user('iago')
diff --git a/zerver/tests/test_rate_limiter.py b/zerver/tests/test_rate_limiter.py
index 7cc6fa71fc..d0c23be25d 100644
--- a/zerver/tests/test_rate_limiter.py
+++ b/zerver/tests/test_rate_limiter.py
@@ -156,7 +156,7 @@ class RedisRateLimiterBackendTest(RateLimiterBackendBase):
"""
This test cannot verify that the user will get unblocked
after the correct amount of time, because that event happens
- inside redis, so we're not able to mock the timer. Making the test
+ inside Redis, so we're not able to mock the timer. Making the test
sleep for 1s is also too costly to be worth it.
"""
obj = self.create_object('test', [(2, 5)])
diff --git a/zerver/tests/test_reactions.py b/zerver/tests/test_reactions.py
index e747423b6f..7b13a44ea7 100644
--- a/zerver/tests/test_reactions.py
+++ b/zerver/tests/test_reactions.py
@@ -205,12 +205,12 @@ class ReactionEmojiTest(ZulipTestCase):
self.assertEqual(emoji_code, 'zulip')
self.assertEqual(reaction_type, 'zulip_extra_emoji')
- # Test unicode emoji.
+ # Test Unicode emoji.
emoji_code, reaction_type = emoji_name_to_emoji_code(realm, 'astonished')
self.assertEqual(emoji_code, '1f632')
self.assertEqual(reaction_type, 'unicode_emoji')
- # Test override unicode emoji.
+ # Test override Unicode emoji.
overriding_emoji = RealmEmoji.objects.create(
name='astonished', realm=realm, file_name='astonished')
emoji_code, reaction_type = emoji_name_to_emoji_code(realm, 'astonished')
diff --git a/zerver/tests/test_settings.py b/zerver/tests/test_settings.py
index 64e85f7f97..ebfc1e947c 100644
--- a/zerver/tests/test_settings.py
+++ b/zerver/tests/test_settings.py
@@ -282,7 +282,7 @@ class ChangeSettingsTest(ZulipTestCase):
result = self.client_patch(
"/json/settings",
dict(
- old_password=self.ldap_password("hamlet"), # hamlet's password in ldap
+ old_password=self.ldap_password("hamlet"), # hamlet's password in LDAP
new_password="ignored",
))
self.assert_json_error(result, "Your Zulip password is managed in LDAP")
diff --git a/zerver/tests/test_signup.py b/zerver/tests/test_signup.py
index d276b89847..64ed4fe0e7 100644
--- a/zerver/tests/test_signup.py
+++ b/zerver/tests/test_signup.py
@@ -510,7 +510,7 @@ class PasswordResetTest(ZulipTestCase):
'zproject.backends.EmailAuthBackend',
'zproject.backends.ZulipDummyBackend'))
def test_ldap_and_email_auth(self) -> None:
- """If both email and ldap auth backends are enabled, limit password
+ """If both email and LDAP auth backends are enabled, limit password
reset to users outside the LDAP domain"""
# If the domain matches, we don't generate an email
with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
@@ -1524,7 +1524,7 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
self.assertEqual(ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1)
# make sure users can't take a valid confirmation key from another
- # pathway and use it with the invitation url route
+ # pathway and use it with the invitation URL route
def test_confirmation_key_of_wrong_type(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm('zulip')
@@ -3613,15 +3613,15 @@ class UserSignUpTest(InviteUserBase):
HTTP_HOST=subdomain + ".testserver")
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because emails matching LDAP_APPEND_DOMAIN,
- # aren't allowed to create non-ldap accounts.
+ # aren't allowed to create non-LDAP accounts.
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
self.assertEqual(debug_log.output, [
- 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No ldap user matching django_to_ldap_username result: newuser. Input username: newuser@zulip.com'
+ 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: newuser. Input username: newuser@zulip.com'
])
- # If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-ldap account,
- # with the password managed in the zulip database.
+ # If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-LDAP account,
+ # with the password managed in the Zulip database.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN='example.com',
@@ -3739,7 +3739,7 @@ class UserSignUpTest(InviteUserBase):
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver")
self.assertEqual(debug_log.output, [
- 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No ldap user matching django_to_ldap_username result: nonexistent@zulip.com. Input username: nonexistent@zulip.com'
+ 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: nonexistent@zulip.com. Input username: nonexistent@zulip.com'
])
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
@@ -3764,7 +3764,7 @@ class UserSignUpTest(InviteUserBase):
# Invite user.
self.login('iago')
self.assertEqual(debug_log.output, [
- 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No ldap user matching django_to_ldap_username result: iago. Input username: iago@zulip.com'
+ 'DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: iago. Input username: iago@zulip.com'
])
response = self.invite(invitee_emails='newuser@zulip.com',
stream_names=streams,
diff --git a/zerver/tests/test_slack_importer.py b/zerver/tests/test_slack_importer.py
index 155c32b08c..e74723c05d 100644
--- a/zerver/tests/test_slack_importer.py
+++ b/zerver/tests/test_slack_importer.py
@@ -309,7 +309,7 @@ class SlackImporter(ZulipTestCase):
cpf_name.remove('phone')
cpf_name.remove('skype')
for name in cpf_name:
- self.assertTrue(name.startswith('slack custom field '))
+ self.assertTrue(name.startswith('Slack custom field '))
self.assertEqual(len(customprofilefield_value), 6)
self.assertEqual(customprofilefield_value[0]['field'], 0)
@@ -460,7 +460,7 @@ class SlackImporter(ZulipTestCase):
self.assertDictEqual(test_added_mpims, added_mpims)
self.assertDictEqual(test_dm_members, dm_members)
- # We can't do an assertDictEqual since during the construction of Personal
+ # We can't do an assertDictEqual since during the construction of personal
# recipients, slack_user_id_to_zulip_user_id are iterated in different order in Python 3.5 and 3.6.
self.assertEqual(set(slack_recipient_name_to_zulip_recipient_id.keys()), slack_recipient_names)
self.assertEqual(set(slack_recipient_name_to_zulip_recipient_id.values()), set(range(11)))
@@ -654,10 +654,10 @@ class SlackImporter(ZulipTestCase):
self.assertEqual(zerver_message[5]['has_link'], False)
self.assertEqual(zerver_message[7]['has_link'], False)
- self.assertEqual(zerver_message[3][EXPORT_TOPIC_NAME], 'imported from slack')
+ self.assertEqual(zerver_message[3][EXPORT_TOPIC_NAME], 'imported from Slack')
self.assertEqual(zerver_message[3]['content'], '/me added bot')
self.assertEqual(zerver_message[4]['recipient'], slack_recipient_name_to_zulip_recipient_id['general'])
- self.assertEqual(zerver_message[2][EXPORT_TOPIC_NAME], 'imported from slack')
+ self.assertEqual(zerver_message[2][EXPORT_TOPIC_NAME], 'imported from Slack')
self.assertEqual(zerver_message[1]['recipient'], slack_recipient_name_to_zulip_recipient_id['random'])
self.assertEqual(zerver_message[5]['recipient'], slack_recipient_name_to_zulip_recipient_id['mpdm-user9--user2--user10-1'])
self.assertEqual(zerver_message[6]['recipient'], slack_recipient_name_to_zulip_recipient_id['mpdm-user6--user7--user4-1'])
diff --git a/zerver/tests/test_soft_deactivation.py b/zerver/tests/test_soft_deactivation.py
index 88fb38afa1..2be89c902f 100644
--- a/zerver/tests/test_soft_deactivation.py
+++ b/zerver/tests/test_soft_deactivation.py
@@ -43,7 +43,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m:
do_soft_deactivate_user(user)
- self.assertEqual(m.output, [f"INFO:{logger_string}:Soft Deactivated user {user.id}"])
+ self.assertEqual(m.output, [f"INFO:{logger_string}:Soft deactivated user {user.id}"])
user.refresh_from_db()
self.assertTrue(user.long_term_idle)
@@ -66,7 +66,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
log_output = []
for user in users:
- log_output.append(f"INFO:{logger_string}:Soft Deactivated user {user.id}")
+ log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
log_output.append(f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process")
self.assertEqual(m.output, log_output)
@@ -119,7 +119,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
log_output = []
for user in users:
- log_output.append(f"INFO:{logger_string}:Soft Deactivated user {user.id}")
+ log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
log_output.append(f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process")
self.assertEqual(m.output, log_output)
@@ -179,7 +179,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
log_output = []
for user in users:
- log_output.append(f"INFO:{logger_string}:Soft Deactivated user {user.id}")
+ log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
log_output.append(f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process")
self.assertEqual(m.output, log_output)
@@ -237,7 +237,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
log_output = []
for user in users:
- log_output.append(f"INFO:{logger_string}:Soft Deactivated user {user.id}")
+ log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
log_output.append(f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process")
log_output.append(f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users")
self.assertEqual(set(m.output), set(log_output))
@@ -307,7 +307,7 @@ class SoftDeactivationMessageTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_logs.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
@@ -357,7 +357,7 @@ class SoftDeactivationMessageTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_logs.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
@@ -464,7 +464,7 @@ class SoftDeactivationMessageTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_logs.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
send_fake_message('Test Message 10', stream)
@@ -524,7 +524,7 @@ class SoftDeactivationMessageTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_logs.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
@@ -573,7 +573,7 @@ class SoftDeactivationMessageTest(ZulipTestCase):
with self.assertLogs(logger_string, level='INFO') as info_logs:
do_soft_deactivate_users([long_term_idle_user])
self.assertEqual(info_logs.output, [
- f'INFO:{logger_string}:Soft Deactivated user {long_term_idle_user.id}',
+ f'INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}',
f'INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process'
])
diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py
index cf9e395e39..a44951ca58 100644
--- a/zerver/tests/test_subs.py
+++ b/zerver/tests/test_subs.py
@@ -710,18 +710,18 @@ class StreamAdminTest(ZulipTestCase):
self.assertNotIn(self.example_user('polonius').id,
notified_user_ids)
- # Test case to handle unicode stream name change
- # *NOTE: Here Encoding is needed when Unicode string is passed as an argument*
+ # Test case to handle Unicode stream name change
+ # *NOTE: Here encoding is needed when Unicode string is passed as an argument*
with tornado_redirected_to_list(events):
stream_id = stream_name2_exists.id
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': orjson.dumps('नया नाम').decode()})
self.assert_json_success(result)
- # While querying, system can handle unicode strings.
+ # While querying, system can handle Unicode strings.
stream_name_uni_exists = get_stream('नया नाम', realm)
self.assertTrue(stream_name_uni_exists)
- # Test case to handle changing of unicode stream name to newer name
+ # Test case to handle changing of Unicode stream name to newer name
# NOTE: Unicode string being part of URL is handled cleanly
# by client_patch call, encoding of URL is not needed.
with tornado_redirected_to_list(events):
@@ -729,7 +729,7 @@ class StreamAdminTest(ZulipTestCase):
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': orjson.dumps('नाम में क्या रक्खा हे').decode()})
self.assert_json_success(result)
- # While querying, system can handle unicode strings.
+ # While querying, system can handle Unicode strings.
self.assertRaises(Stream.DoesNotExist, get_stream, 'नया नाम', realm)
stream_name_new_uni_exists = get_stream('नाम में क्या रक्खा हे', realm)
diff --git a/zerver/tests/test_thumbnail.py b/zerver/tests/test_thumbnail.py
index 7f440c7184..66ba1df24f 100644
--- a/zerver/tests/test_thumbnail.py
+++ b/zerver/tests/test_thumbnail.py
@@ -60,7 +60,7 @@ class ThumbnailTest(ZulipTestCase):
expected_part_url = get_file_path_urlpart(uri, '0x300')
self.assertIn(expected_part_url, result.url)
- # Test custom emoji urls in Zulip messages.
+ # Test custom emoji URLs in Zulip messages.
user_profile = self.example_user("hamlet")
image_file = get_test_image_file("img.png")
file_name = "emoji.png"
@@ -77,7 +77,7 @@ class ThumbnailTest(ZulipTestCase):
self.assertEqual(result.status_code, 302, result)
self.assertIn(custom_emoji_url, result.url)
- # Tests the /api/v1/thumbnail api endpoint with standard API auth
+ # Tests the /api/v1/thumbnail API endpoint with standard API auth
self.logout()
result = self.api_get(
hamlet,
@@ -109,7 +109,7 @@ class ThumbnailTest(ZulipTestCase):
expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external'
self.assertIn(expected_part_url, result.url)
- # Test api endpoint with standard API authentication.
+ # Test API endpoint with standard API authentication.
self.logout()
user_profile = self.example_user("hamlet")
result = self.api_get(user_profile,
@@ -118,7 +118,7 @@ class ThumbnailTest(ZulipTestCase):
expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external'
self.assertIn(expected_part_url, result.url)
- # Test api endpoint with legacy API authentication.
+ # Test API endpoint with legacy API authentication.
user_profile = self.example_user("hamlet")
result = self.client_get(f"/thumbnail?url={quoted_url}&size=thumbnail&api_key={get_api_key(user_profile)}")
self.assertEqual(result.status_code, 302, result)
@@ -186,7 +186,7 @@ class ThumbnailTest(ZulipTestCase):
expected_part_url = get_file_path_urlpart(uri, '0x300')
self.assertIn(expected_part_url, result.url)
- # Test with a unicode filename.
+ # Test with a Unicode filename.
fp = StringIO("zulip!")
fp.name = "μένει.jpg"
@@ -221,7 +221,7 @@ class ThumbnailTest(ZulipTestCase):
self.assertEqual(result.status_code, 302, result)
self.assertIn(custom_emoji_url, result.url)
- # Tests the /api/v1/thumbnail api endpoint with HTTP basic auth.
+ # Tests the /api/v1/thumbnail API endpoint with HTTP basic auth.
self.logout()
user_profile = self.example_user("hamlet")
result = self.api_get(
@@ -231,7 +231,7 @@ class ThumbnailTest(ZulipTestCase):
expected_part_url = get_file_path_urlpart(uri)
self.assertIn(expected_part_url, result.url)
- # Tests the /api/v1/thumbnail api endpoint with ?api_key
+ # Tests the /api/v1/thumbnail API endpoint with ?api_key
# auth.
user_profile = self.example_user("hamlet")
result = self.client_get(
diff --git a/zerver/tests/test_upload.py b/zerver/tests/test_upload.py
index ff8da24864..35a58e64ad 100644
--- a/zerver/tests/test_upload.py
+++ b/zerver/tests/test_upload.py
@@ -81,7 +81,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
def test_rest_endpoint(self) -> None:
"""
- Tests the /api/v1/user_uploads api endpoint. Here a single file is uploaded
+ Tests the /api/v1/user_uploads API endpoint. Here a single file is uploaded
and downloaded using a username and api_key
"""
fp = StringIO("zulip!")
@@ -107,7 +107,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
def test_mobile_api_endpoint(self) -> None:
"""
- Tests the /api/v1/user_uploads api endpoint with ?api_key
+ Tests the /api/v1/user_uploads API endpoint with ?api_key
auth. Here a single file is uploaded and downloaded using a
username and api_key
"""
@@ -186,7 +186,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
self.assert_json_error(result, "You must specify a file to upload")
# This test will go through the code path for uploading files onto LOCAL storage
- # when zulip is in DEVELOPMENT mode.
+ # when Zulip is in DEVELOPMENT mode.
def test_file_upload_authed(self) -> None:
"""
A call to /json/user_uploads should return a uri and actually create an
@@ -216,7 +216,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")
- # Now try the endpoint that's supposed to return a temporary url for access
+ # Now try the endpoint that's supposed to return a temporary URL for access
# to the file.
result = self.client_get('/json' + uri)
self.assert_json_success(result)
@@ -226,7 +226,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
self.assertNotEqual(url_only_url, uri)
self.assertIn('user_uploads/temporary/', url_only_url)
self.assertTrue(url_only_url.endswith('zulip.txt'))
- # The generated url has a token authorizing the requestor to access the file
+ # The generated URL has a token authorizing the requestor to access the file
# without being logged in.
self.logout()
self.assert_url_serves_contents_of_file(url_only_url, b"zulip!")
@@ -1041,7 +1041,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase):
def test_valid_avatars(self) -> None:
"""
- A PUT request to /json/users/me/avatar with a valid file should return a url and actually create an avatar.
+ A PUT request to /json/users/me/avatar with a valid file should return a URL and actually create an avatar.
"""
version = 2
for fname, rfname in self.correct_files:
@@ -1281,7 +1281,7 @@ class RealmIconTest(UploadSerializeMixin, ZulipTestCase):
def test_valid_icons(self) -> None:
"""
- A PUT request to /json/realm/icon with a valid file should return a url
+ A PUT request to /json/realm/icon with a valid file should return a URL
and actually create an realm icon.
"""
for fname, rfname in self.correct_files:
@@ -1437,7 +1437,7 @@ class RealmLogoTest(UploadSerializeMixin, ZulipTestCase):
def test_valid_logos(self) -> None:
"""
- A PUT request to /json/realm/logo with a valid file should return a url
+ A PUT request to /json/realm/logo with a valid file should return a URL
and actually create an realm logo.
"""
for fname, rfname in self.correct_files:
@@ -1687,7 +1687,7 @@ class S3Test(ZulipTestCase):
key = path[1:]
self.assertEqual(b"zulip!", bucket.Object(key).get()['Body'].read())
- # Now try the endpoint that's supposed to return a temporary url for access
+ # Now try the endpoint that's supposed to return a temporary URL for access
# to the file.
result = self.client_get('/json' + uri)
self.assert_json_success(result)
diff --git a/zerver/tests/test_users.py b/zerver/tests/test_users.py
index fac178a719..321b4c195e 100644
--- a/zerver/tests/test_users.py
+++ b/zerver/tests/test_users.py
@@ -771,7 +771,7 @@ class BulkCreateUserTest(ZulipTestCase):
realm.save()
name_list = [
- ('Fred Flinstone', 'fred@zulip.com'),
+ ('Fred Flintstone', 'fred@zulip.com'),
('Lisa Simpson', 'lisa@zulip.com'),
]
@@ -1232,7 +1232,7 @@ class ActivateTest(ZulipTestCase):
def test_api_with_nonexistent_user(self) -> None:
self.login('iago')
- # Organization Administrator cannot deactivate organization owner.
+ # Organization administrator cannot deactivate organization owner.
result = self.client_delete(f'/json/users/{self.example_user("desdemona").id}')
self.assert_json_error(result, 'Must be an organization owner')
@@ -1643,7 +1643,7 @@ class GetProfileTest(ZulipTestCase):
self.assertTrue(result['is_owner'])
self.assertFalse(result['is_guest'])
- # Tests the GET ../users/{id} api endpoint.
+ # Tests the GET ../users/{id} API endpoint.
user = self.example_user('hamlet')
result = orjson.loads(self.client_get(f'/json/users/{user.id}').content)
self.assertEqual(result['user']['email'], user.email)
diff --git a/zerver/tornado/application.py b/zerver/tornado/application.py
index a1fee11ac1..10361fcfe9 100644
--- a/zerver/tornado/application.py
+++ b/zerver/tornado/application.py
@@ -9,7 +9,7 @@ from zerver.tornado.handlers import AsyncDjangoHandler
def setup_tornado_rabbitmq() -> None: # nocoverage
- # When tornado is shut down, disconnect cleanly from rabbitmq
+ # When tornado is shut down, disconnect cleanly from RabbitMQ
if settings.USING_RABBITMQ:
queue_client = get_queue_client()
atexit.register(lambda: queue_client.close())
diff --git a/zerver/views/auth.py b/zerver/views/auth.py
index 2a2ea35e1d..ceb171bf7c 100644
--- a/zerver/views/auth.py
+++ b/zerver/views/auth.py
@@ -550,7 +550,7 @@ def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
"""Given a valid authentication token (generated by
redirect_and_log_into_subdomain called on auth.zulip.example.com),
call login_or_register_remote_user, passing all the authentication
- result data that has been stored in redis, associated with this token.
+ result data that has been stored in Redis, associated with this token.
"""
# The tokens are intended to have the same format as API keys.
if not has_api_key_format(token):
diff --git a/zerver/views/development/registration.py b/zerver/views/development/registration.py
index 849151234f..ad2295279b 100644
--- a/zerver/views/development/registration.py
+++ b/zerver/views/development/registration.py
@@ -12,7 +12,7 @@ from zerver.views.auth import create_preregistration_user
from zerver.views.registration import accounts_register
-# This is used only by the puppeteer test in 00-realm-creation.js.
+# This is used only by the Puppeteer test in 00-realm-creation.js.
def confirmation_key(request: HttpRequest) -> HttpResponse:
return json_success(request.session.get('confirmation_key'))
diff --git a/zerver/views/documentation.py b/zerver/views/documentation.py
index b2cdba721f..71938c1080 100644
--- a/zerver/views/documentation.py
+++ b/zerver/views/documentation.py
@@ -96,7 +96,7 @@ class MarkdownDirectoryView(ApiURLView):
context["page_is_api_center"] = True
context["doc_root"] = "/api/"
(sidebar_index, http_status_ignored) = self.get_path("sidebar_index")
- title_base = "Zulip API Documentation"
+ title_base = "Zulip API documentation"
# The following is a somewhat hacky approach to extract titles from articles.
# Hack: `context["article"] has a leading `/`, so we use + to add directories.
diff --git a/zerver/views/message_fetch.py b/zerver/views/message_fetch.py
index abf60857b3..b68f06f0f7 100644
--- a/zerver/views/message_fetch.py
+++ b/zerver/views/message_fetch.py
@@ -227,15 +227,15 @@ class NarrowBuilder:
"""
Escape user input to place in a regex
- Python's re.escape escapes unicode characters in a way which postgres
+ Python's re.escape escapes Unicode characters in a way which Postgres
fails on, '\u03bb' to '\\\u03bb'. This function will correctly escape
- them for postgres, '\u03bb' to '\\u03bb'.
+ them for Postgres, '\u03bb' to '\\u03bb'.
"""
s = list(pattern)
for i, c in enumerate(s):
if c not in self._alphanum:
if ord(c) >= 128:
- # convert the character to hex postgres regex will take
+ # convert the character to hex Postgres regex will take
# \uXXXX
s[i] = f'\\u{ord(c):0>4x}'
else:
@@ -325,7 +325,7 @@ class NarrowBuilder:
topic_match_sa('(instance "").d.d.d.d'),
)
else:
- # We limit `.d` counts, since postgres has much better
+ # We limit `.d` counts, since Postgres has much better
# query planning for this than they do for a regular
# expression (which would sometimes table scan).
cond = or_(
diff --git a/zerver/views/realm_icon.py b/zerver/views/realm_icon.py
index 2a4c270159..25eae53d36 100644
--- a/zerver/views/realm_icon.py
+++ b/zerver/views/realm_icon.py
@@ -49,7 +49,7 @@ def delete_icon_backend(request: HttpRequest, user_profile: UserProfile) -> Http
def get_icon_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
url = realm_icon_url(user_profile.realm)
- # We can rely on the url already having query parameters. Because
+ # We can rely on the URL already having query parameters. Because
# our templates depend on being able to use the ampersand to
# add query parameters to our url, get_icon_url does '?version=version_number'
# hacks to prevent us from having to jump through decode/encode hoops.
diff --git a/zerver/views/realm_logo.py b/zerver/views/realm_logo.py
index de43224160..cb00357ee2 100644
--- a/zerver/views/realm_logo.py
+++ b/zerver/views/realm_logo.py
@@ -46,7 +46,7 @@ def get_logo_backend(request: HttpRequest, user_profile: UserProfile,
night: bool=REQ(validator=check_bool)) -> HttpResponse:
url = get_realm_logo_url(user_profile.realm, night)
- # We can rely on the url already having query parameters. Because
+ # We can rely on the URL already having query parameters. Because
# our templates depend on being able to use the ampersand to
# add query parameters to our url, get_logo_url does '?version=version_number'
# hacks to prevent us from having to jump through decode/encode hoops.
diff --git a/zerver/views/registration.py b/zerver/views/registration.py
index 35e16d0643..62f62362c7 100644
--- a/zerver/views/registration.py
+++ b/zerver/views/registration.py
@@ -333,7 +333,7 @@ def accounts_register(request: HttpRequest) -> HttpResponse:
if user_profile is None:
can_use_different_backend = email_auth_enabled(realm) or any_social_backend_enabled(realm)
if settings.LDAP_APPEND_DOMAIN:
- # In LDAP_APPEND_DOMAIN configurations, we don't allow making a non-ldap account
+ # In LDAP_APPEND_DOMAIN configurations, we don't allow making a non-LDAP account
# if the email matches the ldap domain.
can_use_different_backend = can_use_different_backend and (
not email_belongs_to_ldap(realm, email))
diff --git a/zerver/views/users.py b/zerver/views/users.py
index be5c3dcf3a..93b1056c6f 100644
--- a/zerver/views/users.py
+++ b/zerver/views/users.py
@@ -199,7 +199,7 @@ def avatar(request: HttpRequest, user_profile: UserProfile,
avatar_version = 1
url = get_gravatar_url(email, avatar_version, medium)
- # We can rely on the url already having query parameters. Because
+ # We can rely on the URL already having query parameters. Because
# our templates depend on being able to use the ampersand to
# add query parameters to our url, get_avatar_url does '?x=x'
# hacks to prevent us from having to jump through decode/encode hoops.
diff --git a/zerver/views/video_calls.py b/zerver/views/video_calls.py
index 867f5e1e1e..78b8ad1218 100644
--- a/zerver/views/video_calls.py
+++ b/zerver/views/video_calls.py
@@ -162,7 +162,7 @@ def deauthorize_zoom_user(request: HttpRequest) -> HttpResponse:
def get_bigbluebutton_url(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
- # https://docs.bigbluebutton.org/dev/api.html#create for reference on the api calls
+ # https://docs.bigbluebutton.org/dev/api.html#create for reference on the API calls
# https://docs.bigbluebutton.org/dev/api.html#usage for reference for checksum
id = "zulip-" + str(random.randint(100000000000, 999999999999))
password = b32encode(secrets.token_bytes(7))[:10].decode()
diff --git a/zerver/views/zephyr.py b/zerver/views/zephyr.py
index 61d50cda36..fabdeb162a 100644
--- a/zerver/views/zephyr.py
+++ b/zerver/views/zephyr.py
@@ -55,7 +55,7 @@ def webathena_kerberos_login(request: HttpRequest, user_profile: UserProfile,
except Exception:
return json_error(_("Invalid Kerberos cache"))
- # TODO: Send these data via (say) rabbitmq
+ # TODO: Send these data via (say) RabbitMQ
try:
api_key = get_api_key(user_profile)
command = [
diff --git a/zerver/webhooks/alertmanager/doc.md b/zerver/webhooks/alertmanager/doc.md
index 6bf8063e27..e278497666 100644
--- a/zerver/webhooks/alertmanager/doc.md
+++ b/zerver/webhooks/alertmanager/doc.md
@@ -1,4 +1,4 @@
-Get Zulip notifications from AlertManager!
+Get Zulip notifications from Alertmanager!
1. {!create-stream.md!}
@@ -10,12 +10,12 @@ Get Zulip notifications from AlertManager!
{{ api_url }}{{ integration_url }}?api_key=abcdefgh&stream=stream%20name&name=host&desc=alertname
-1. In your AlertManager config, set up a new webhook receiver, like so:
+1. In your Alertmanager config, set up a new webhook receiver, like so:
```
- name: ops-zulip
webhook_configs:
- - url: ""
+ - url: ""
```
{!congrats.md!}
diff --git a/zerver/webhooks/alertmanager/view.py b/zerver/webhooks/alertmanager/view.py
index 2ec11e5ccb..8e651fe312 100644
--- a/zerver/webhooks/alertmanager/view.py
+++ b/zerver/webhooks/alertmanager/view.py
@@ -10,7 +10,7 @@ from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
-@webhook_view('AlertManager')
+@webhook_view('Alertmanager')
@has_request_variables
def api_alertmanager_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any] = REQ(argument_type='body')) -> HttpResponse:
diff --git a/zerver/webhooks/ansibletower/view.py b/zerver/webhooks/ansibletower/view.py
index 40a2355d63..5e5f5f2833 100644
--- a/zerver/webhooks/ansibletower/view.py
+++ b/zerver/webhooks/ansibletower/view.py
@@ -18,7 +18,7 @@ ANSIBLETOWER_JOB_MESSAGE_TEMPLATE = """
ANSIBLETOWER_JOB_HOST_ROW_TEMPLATE = '* {hostname}: {status}\n'
-@webhook_view('Ansibletower')
+@webhook_view('AnsibleTower')
@has_request_variables
def api_ansibletower_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
diff --git a/zerver/webhooks/beanstalk/view.py b/zerver/webhooks/beanstalk/view.py
index 8f888f879c..7125021ecc 100644
--- a/zerver/webhooks/beanstalk/view.py
+++ b/zerver/webhooks/beanstalk/view.py
@@ -40,7 +40,7 @@ def _transform_commits_list_to_common_format(commits: List[Dict[str, Any]]) -> L
})
return new_commits_list
-# Beanstalk's web hook UI rejects url with a @ in the username section of a url
+# Beanstalk's web hook UI rejects URL with a @ in the username section
# So we ask the user to replace them with %40
# We manually fix the username here before passing it along to @authenticated_rest_api_view
def beanstalk_decoder(view_func: ViewFuncT) -> ViewFuncT:
@@ -66,9 +66,9 @@ def beanstalk_decoder(view_func: ViewFuncT) -> ViewFuncT:
def api_beanstalk_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(validator=check_dict([])),
branches: Optional[str]=REQ(default=None)) -> HttpResponse:
- # Beanstalk supports both SVN and git repositories
+ # Beanstalk supports both SVN and Git repositories
# We distinguish between the two by checking for a
- # 'uri' key that is only present for git repos
+ # 'uri' key that is only present for Git repos
git_repo = 'uri' in payload
if git_repo:
if branches is not None and branches.find(payload['branch']) == -1:
diff --git a/zerver/webhooks/beeminder/view.py b/zerver/webhooks/beeminder/view.py
index e75b8243f0..8873cdcd03 100644
--- a/zerver/webhooks/beeminder/view.py
+++ b/zerver/webhooks/beeminder/view.py
@@ -19,7 +19,7 @@ def get_time(payload: Dict[str, Any]) -> Any:
time_remaining = (losedate - time.time())/3600
return time_remaining
-@webhook_view("beeminder")
+@webhook_view("Beeminder")
@has_request_variables
def api_beeminder_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
diff --git a/zerver/webhooks/bitbucket3/tests.py b/zerver/webhooks/bitbucket3/tests.py
index 4fb156ca71..4941784e73 100644
--- a/zerver/webhooks/bitbucket3/tests.py
+++ b/zerver/webhooks/bitbucket3/tests.py
@@ -8,7 +8,7 @@ class Bitbucket3HookTests(WebhookTestCase):
URL_TEMPLATE = "/api/v1/external/bitbucket3?stream={stream}&api_key={api_key}"
FIXTURE_DIR_NAME = "bitbucket3"
- # Diagnostics Events:
+ # Diagnostics events:
def test_ping(self) -> None:
expected_message = "Congratulations! The Bitbucket Server webhook was configured successfully!"
self.check_webhook("diagnostics_ping", "Bitbucket Server Ping", expected_message)
@@ -18,7 +18,7 @@ class Bitbucket3HookTests(WebhookTestCase):
expected_message = "Congratulations! The Bitbucket Server webhook was configured successfully!"
self.check_webhook("diagnostics_ping", "my topic", expected_message)
- # Core Repo Events:
+ # Core repo events:
def test_commit_comment_added(self) -> None:
expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) commented on [508d1b6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907):\n~~~ quote\nJust an arbitrary comment on a commit.\n~~~"""
self.check_webhook("commit_comment_added", TOPIC, expected_message)
@@ -40,7 +40,7 @@ class Bitbucket3HookTests(WebhookTestCase):
expected_topic = "sandbox v2"
self.check_webhook("repo_modified", expected_topic, expected_message)
- # Repo Push Events:
+ # Repo push events:
def test_push_add_branch(self) -> None:
expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) created branch2 branch."""
expected_topic = TOPIC_BRANCH_EVENTS.format(branch="branch2")
@@ -105,7 +105,7 @@ class Bitbucket3HookTests(WebhookTestCase):
expected_topic = TOPIC_BRANCH_EVENTS.format(branch="branch1")
self.check_webhook("repo_push_update_multiple_branches", expected_topic, expected_message)
- # Core PR Events:
+ # Core PR events:
def test_pr_opened_without_reviewers(self) -> None:
expected_topic = "sandbox / PR #1 Branch1"
expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1) from `branch1` to `master`:\n\n~~~ quote\n* Add file2.txt\r\n* Add file3.txt\n~~~"""
@@ -175,7 +175,7 @@ class Bitbucket3HookTests(WebhookTestCase):
expected_message = """[zura](http://139.59.64.214:7990/users/zura) merged [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)."""
self.check_webhook("pull_request_merged", expected_topic, expected_message)
- # PR Reviewer Events:
+ # PR reviewer events:
def test_pr_approved(self) -> None:
expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt."
expected_message = """[zura](http://139.59.64.214:7990/users/zura) approved [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)."""
@@ -225,7 +225,7 @@ class Bitbucket3HookTests(WebhookTestCase):
self.url = self.build_webhook_url(topic='custom_topic')
self.check_webhook("pull_request_remove_reviewer", expected_topic, expected_message)
- # PR Comment Events:
+ # PR comment events:
def test_pull_request_comment_added(self) -> None:
expected_message = """[zura](http://139.59.64.214:7990/users/zura) commented on [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6):\n\n~~~ quote\nThis seems like a pretty good idea.\n~~~"""
expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt."
diff --git a/zerver/webhooks/circleci/view.py b/zerver/webhooks/circleci/view.py
index 268c6bf2d1..74e49a915c 100644
--- a/zerver/webhooks/circleci/view.py
+++ b/zerver/webhooks/circleci/view.py
@@ -49,7 +49,7 @@ def get_commit_range_info(payload: Dict[str, Any]) -> str:
commit_range_url = f"{vcs_url}/compare/{first_commit_id}...{last_commit_id}"
return f"- **Commits ({num_commits}):** [{shortened_first_commit_id} ... {shortened_last_commit_id}]({commit_range_url})"
else:
- # BitBucket doesn't have a good commit range url feature like GitHub does.
+ # Bitbucket doesn't have a good commit range URL feature like GitHub does.
# So let's just show the two separately.
# https://community.atlassian.com/t5/Bitbucket-questions/BitBucket-4-14-diff-between-any-two-commits/qaq-p/632974
first_commit_url = commits[0]["commit_url"]
diff --git a/zerver/webhooks/deskdotcom/tests.py b/zerver/webhooks/deskdotcom/tests.py
index 3bcc603552..b66cddad62 100644
--- a/zerver/webhooks/deskdotcom/tests.py
+++ b/zerver/webhooks/deskdotcom/tests.py
@@ -2,7 +2,7 @@ from zerver.lib.test_classes import WebhookTestCase
# Tests for the Desk.com webhook integration.
#
-# The stream name must be provided in the url-encoded test fixture data,
+# The stream name must be provided in the URL-encoded test fixture data,
# and must match STREAM_NAME set here.
#
# Example:
diff --git a/zerver/webhooks/dialogflow/tests.py b/zerver/webhooks/dialogflow/tests.py
index 74249c8df3..4db4b2564d 100644
--- a/zerver/webhooks/dialogflow/tests.py
+++ b/zerver/webhooks/dialogflow/tests.py
@@ -38,5 +38,5 @@ class DialogflowHookTests(WebhookTestCase):
username="aaron",
user_ip="127.0.0.1",
)
- expected_message = "DialogFlow couldn't process your query."
+ expected_message = "Dialogflow couldn't process your query."
self.send_and_test_private_message("exception", expected_message)
diff --git a/zerver/webhooks/dialogflow/view.py b/zerver/webhooks/dialogflow/view.py
index 8617294bbd..f0380262d7 100644
--- a/zerver/webhooks/dialogflow/view.py
+++ b/zerver/webhooks/dialogflow/view.py
@@ -10,7 +10,7 @@ from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
-@webhook_view("dialogflow")
+@webhook_view("Dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
@@ -22,7 +22,7 @@ def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
- body = "DialogFlow couldn't process your query."
+ body = "Dialogflow couldn't process your query."
else:
body = alternate_result
else:
diff --git a/zerver/webhooks/gci/view.py b/zerver/webhooks/gci/view.py
index cba4c9aa25..ab420f95ed 100644
--- a/zerver/webhooks/gci/view.py
+++ b/zerver/webhooks/gci/view.py
@@ -100,7 +100,7 @@ def get_outoftime_event_body(payload: Dict[str, Any]) -> str:
task_url=build_instance_url(payload['task_instance']),
)
-@webhook_view("Google-Code-In")
+@webhook_view("GoogleCodeIn")
@has_request_variables
def api_gci_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
diff --git a/zerver/webhooks/github/tests.py b/zerver/webhooks/github/tests.py
index 132e6ff507..d7d49c07c8 100644
--- a/zerver/webhooks/github/tests.py
+++ b/zerver/webhooks/github/tests.py
@@ -13,7 +13,7 @@ TOPIC_ORGANIZATION = "baxterandthehackers organization"
TOPIC_BRANCH = "public-repo / changes"
TOPIC_WIKI = "public-repo / Wiki Pages"
-class GithubWebhookTest(WebhookTestCase):
+class GitHubWebhookTest(WebhookTestCase):
STREAM_NAME = 'github'
URL_TEMPLATE = "/api/v1/external/github?stream={stream}&api_key={api_key}"
FIXTURE_DIR_NAME = 'github'
@@ -212,7 +212,7 @@ class GithubWebhookTest(WebhookTestCase):
self.check_webhook("release", TOPIC_REPO, expected_message)
def test_page_build_msg(self) -> None:
- expected_message = "Github Pages build, triggered by baxterthehacker, has finished building."
+ expected_message = "GitHub Pages build, triggered by baxterthehacker, has finished building."
self.check_webhook("page_build", TOPIC_REPO, expected_message)
def test_status_msg(self) -> None:
diff --git a/zerver/webhooks/github/view.py b/zerver/webhooks/github/view.py
index 62664ac7a3..00921f5559 100644
--- a/zerver/webhooks/github/view.py
+++ b/zerver/webhooks/github/view.py
@@ -330,7 +330,7 @@ def get_page_build_body(helper: Helper) -> str:
CONTENT_MESSAGE_TEMPLATE.format(message=build['error']['message']),
)
- return "Github Pages build, triggered by {}, {}.".format(
+ return "GitHub Pages build, triggered by {}, {}.".format(
payload['build']['pusher']['login'],
action,
)
@@ -605,7 +605,7 @@ def api_github_webhook(
branches: Optional[str]=REQ(default=None),
user_specified_topic: Optional[str]=REQ("topic", default=None)) -> HttpResponse:
"""
- Github sends the event as an HTTP header. We have our
+ GitHub sends the event as an HTTP header. We have our
own Zulip-specific concept of an event that often maps
directly to the X_GITHUB_EVENT header's event, but we sometimes
refine it based on the payload.
diff --git a/zerver/webhooks/gitlab/view.py b/zerver/webhooks/gitlab/view.py
index a8ecf57579..0025d56588 100644
--- a/zerver/webhooks/gitlab/view.py
+++ b/zerver/webhooks/gitlab/view.py
@@ -361,7 +361,7 @@ EVENT_FUNCTION_MAPPER = {
'Pipeline Hook': get_pipeline_event_body,
}
-@webhook_view("Gitlab")
+@webhook_view("GitLab")
@has_request_variables
def api_gitlab_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
@@ -441,7 +441,7 @@ def get_subject_based_on_event(event: str, payload: Dict[str, Any], use_merge_re
def get_event(request: HttpRequest, payload: Dict[str, Any], branches: Optional[str]) -> Optional[str]:
event = validate_extract_webhook_http_header(request, 'X_GITLAB_EVENT', 'GitLab')
if event == "System Hook":
- # Convert the event name to a Gitlab event title
+ # Convert the event name to a GitLab event title
event_name = payload.get('event_name', payload.get('object_kind'))
event = event_name.split("__")[0].replace("_", " ").title()
event = f"{event} Hook"
diff --git a/zerver/webhooks/jira/view.py b/zerver/webhooks/jira/view.py
index d5c660c678..81f6de892d 100644
--- a/zerver/webhooks/jira/view.py
+++ b/zerver/webhooks/jira/view.py
@@ -101,7 +101,7 @@ def get_in(payload: Dict[str, Any], keys: List[str], default: str='') -> Any:
def get_issue_string(payload: Dict[str, Any], issue_id: Optional[str]=None, with_title: bool=False) -> str:
# Guess the URL as it is not specified in the payload
# We assume that there is a /browse/BUG-### page
- # from the REST url of the issue itself
+ # from the REST URL of the issue itself
if issue_id is None:
issue_id = get_issue_id(payload)
diff --git a/zerver/webhooks/opsgenie/tests.py b/zerver/webhooks/opsgenie/tests.py
index 5503355553..4f88b9d086 100644
--- a/zerver/webhooks/opsgenie/tests.py
+++ b/zerver/webhooks/opsgenie/tests.py
@@ -9,7 +9,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_acknowledge_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: Acknowledge
* **Message**: test alert
* **Tags**: `tag1`, `tag2`
@@ -25,7 +25,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_addnote_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: AddNote
* **Note**: note to test alert
* **Message**: test alert
@@ -42,7 +42,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_addrecipient_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: AddRecipient
* **Recipient**: team2_escalation
* **Message**: test alert
@@ -59,7 +59,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_addtags_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: AddTags
* **Tags added**: tag1,tag2,tag3
* **Message**: test alert
@@ -76,7 +76,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_addteam_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: AddTeam
* **Team added**: team2
* **Message**: test alert
@@ -93,7 +93,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_assignownership_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: AssignOwnership
* **Assigned owner**: user2@ifountain.com
* **Message**: test alert
@@ -110,7 +110,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_close_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: Close
* **Message**: test alert
""".strip()
@@ -125,7 +125,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_create_alert(self) -> None:
expected_topic = "Webhook"
expected_message = """
-[OpsGenie Alert for Webhook](https://app.opsgenie.com/alert/V2#/show/ec03dad6-62c8-4c94-b38b-d88f398e900f):
+[OpsGenie alert for Webhook](https://app.opsgenie.com/alert/V2#/show/ec03dad6-62c8-4c94-b38b-d88f398e900f):
* **Type**: Create
* **Message**: another alert
* **Tags**: `vip`
@@ -141,7 +141,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_customaction_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: TestAction
* **Message**: test alert
* **Tags**: `tag1`, `tag2`
@@ -157,7 +157,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_delete_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: Delete
* **Message**: test alert
""".strip()
@@ -172,7 +172,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_escalate_alert(self) -> None:
expected_topic = "Webhook_Test"
expected_message = """
-[OpsGenie Alert for Webhook_Test](https://app.opsgenie.com/alert/V2#/show/7ba97e3a-d328-4b5e-8f9a-39e945a3869a):
+[OpsGenie alert for Webhook_Test](https://app.opsgenie.com/alert/V2#/show/7ba97e3a-d328-4b5e-8f9a-39e945a3869a):
* **Type**: Escalate
* **Escalation**: test_esc
""".strip()
@@ -187,7 +187,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_removetags_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: RemoveTags
* **Tags removed**: tag3
* **Message**: test alert
@@ -204,7 +204,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_takeownership_alert(self) -> None:
expected_topic = "Webhook"
expected_message = """
-[OpsGenie Alert for Webhook](https://app.opsgenie.com/alert/V2#/show/8a745a79-3ed3-4044-8427-98e067c0623c):
+[OpsGenie alert for Webhook](https://app.opsgenie.com/alert/V2#/show/8a745a79-3ed3-4044-8427-98e067c0623c):
* **Type**: TakeOwnership
* **Message**: message test
* **Tags**: `tag1`, `tag2`
@@ -220,7 +220,7 @@ class OpsGenieHookTests(WebhookTestCase):
def test_unacknowledge_alert(self) -> None:
expected_topic = "Integration1"
expected_message = """
-[OpsGenie Alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
+[OpsGenie alert for Integration1](https://app.opsgenie.com/alert/V2#/show/052652ac-5d1c-464a-812a-7dd18bbfba8c):
* **Type**: UnAcknowledge
* **Message**: test alert
* **Tags**: `tag1`, `tag2`
diff --git a/zerver/webhooks/opsgenie/view.py b/zerver/webhooks/opsgenie/view.py
index 76e1e72dca..acfbde7190 100644
--- a/zerver/webhooks/opsgenie/view.py
+++ b/zerver/webhooks/opsgenie/view.py
@@ -73,7 +73,7 @@ def api_opsgenie_webhook(request: HttpRequest, user_profile: UserProfile,
)
body_template = """
-[OpsGenie Alert for {integration_name}](https://app.opsgenie.com/alert/V2#/show/{alert_id}):
+[OpsGenie alert for {integration_name}](https://app.opsgenie.com/alert/V2#/show/{alert_id}):
* **Type**: {alert_type}
{additional_info}
""".strip()
diff --git a/zerver/webhooks/papertrail/doc.md b/zerver/webhooks/papertrail/doc.md
index 62a5e55d7c..8dee64f8d5 100644
--- a/zerver/webhooks/papertrail/doc.md
+++ b/zerver/webhooks/papertrail/doc.md
@@ -4,7 +4,7 @@ Get Zulip notifications for your Papertrail logs!
1. {!create-bot-construct-url-indented.md!}
-1. On your Papertrail Dashboard, search for the logs you'd like
+1. On your Papertrail dashboard, search for the logs you'd like
to set up alerts for, and click on **Save Search**. Provide a
name for the search, and click **Save & Setup an Alert**.
Under **Create an alert**, click on **Webhook**.
diff --git a/zerver/webhooks/pivotal/view.py b/zerver/webhooks/pivotal/view.py
index 62e0fdde7d..4a336d2360 100644
--- a/zerver/webhooks/pivotal/view.py
+++ b/zerver/webhooks/pivotal/view.py
@@ -31,7 +31,7 @@ def api_pivotal_webhook_v3(request: HttpRequest, user_profile: UserProfile) -> T
description = payload.find('description').text
project_id = payload.find('project_id').text
story_id = get_text(['stories', 'story', 'id'])
- # Ugh, the URL in the XML data is not a clickable url that works for the user
+ # Ugh, the URL in the XML data is not a clickable URL that works for the user
# so we try to build one that the user can actually click on
url = f"https://www.pivotaltracker.com/s/projects/{project_id}/stories/{story_id}"
diff --git a/zerver/webhooks/semaphore/tests.py b/zerver/webhooks/semaphore/tests.py
index 8ea4075b2c..80b491073a 100644
--- a/zerver/webhooks/semaphore/tests.py
+++ b/zerver/webhooks/semaphore/tests.py
@@ -44,7 +44,7 @@ class SemaphoreHookTests(WebhookTestCase):
content_type="application/x-www-form-urlencoded",
)
- # Tests For Semaphore 2.0
+ # Tests for Semaphore 2.0
def test_semaphore2_push(self) -> None:
expected_topic = "notifications/rw/webhook_impl" # repo/branch
diff --git a/zerver/webhooks/solano/view.py b/zerver/webhooks/solano/view.py
index 85c36d863c..6ddb4d2c8a 100644
--- a/zerver/webhooks/solano/view.py
+++ b/zerver/webhooks/solano/view.py
@@ -44,7 +44,7 @@ def api_solano_webhook(request: HttpRequest, user_profile: UserProfile,
elif status in neutral_status:
emoji = ':arrows_counterclockwise:'
- # If the service is not one of the following, the url is of the repository home, not the individual
+ # If the service is not one of the following, the URL is of the repository home, not the individual
# commit itself.
commit_url = repository.split('@')[1]
if 'github' in repository:
diff --git a/zerver/webhooks/wordpress/view.py b/zerver/webhooks/wordpress/view.py
index ae1d16211f..af9eaa19e4 100644
--- a/zerver/webhooks/wordpress/view.py
+++ b/zerver/webhooks/wordpress/view.py
@@ -19,7 +19,7 @@ New blog user registered:
""".strip()
WP_LOGIN_TEMPLATE = 'User {name} logged in.'
-@webhook_view("Wordpress", notify_bot_owner_on_invalid_json=False)
+@webhook_view("WordPress", notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_wordpress_webhook(request: HttpRequest, user_profile: UserProfile,
hook: str=REQ(default="WordPress Action"),
diff --git a/zerver/webhooks/zabbix/doc.md b/zerver/webhooks/zabbix/doc.md
index ec49bf0af7..8ab3c72ed5 100644
--- a/zerver/webhooks/zabbix/doc.md
+++ b/zerver/webhooks/zabbix/doc.md
@@ -19,7 +19,7 @@ Receive Zabbix notifications in Zulip!
the exact path might differ depending on your environment. Make sure the
script is executable by your Zabbix environment.
-1. Go to your Zabbix Web Interface, and click **Administration**. Click on
+1. Go to your Zabbix web interface, and click **Administration**. Click on
**Media Types**, and click **Create Media Type**.
1. Set **name** to a name of your choice, such as `Zulip`. Set **type** to **Script**,
@@ -31,7 +31,7 @@ Receive Zabbix notifications in Zulip!
Check the **Enabled** option, and click **Update**.
-1. Go back to your Zabbix Web Interface, and click **Administration**. Click
+1. Go back to your Zabbix web interface, and click **Administration**. Click
on **Users**, and select the alias of the user you would like
to use to set the notification. Click **Media**, and click **Add**.
@@ -39,7 +39,7 @@ Receive Zabbix notifications in Zulip!
Tweak the severity for notifications as appropriate, and check the
**Enabled** option.
-1. Go back to your Zabbix Web Interface, and click **Configuration**.
+1. Go back to your Zabbix web interface, and click **Configuration**.
Click **Actions**, and click **Create Action**.
1. Set **Name** to a name of your choice, such as `Zulip`. Under
diff --git a/zilencer/README.md b/zilencer/README.md
index ecd7dc37a0..e1da509a9d 100644
--- a/zilencer/README.md
+++ b/zilencer/README.md
@@ -1,4 +1,4 @@
-ZILENCER -- The Zulip License Manager
+ZILENCER -- The Zulip license manager
========
This app is the place for storing state about various deployments of
diff --git a/zilencer/management/commands/queue_rate.py b/zilencer/management/commands/queue_rate.py
index 2641659b4b..260300f097 100644
--- a/zilencer/management/commands/queue_rate.py
+++ b/zilencer/management/commands/queue_rate.py
@@ -8,7 +8,7 @@ from zerver.worker.queue_processors import BatchNoopWorker, NoopWorker, QueuePro
class Command(BaseCommand):
- help = """Times the overhead of enqueuing and dequeuing messages from rabbitmq."""
+ help = """Times the overhead of enqueuing and dequeuing messages from RabbitMQ."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument(
@@ -22,7 +22,7 @@ class Command(BaseCommand):
)
parser.add_argument(
"--prefetch",
- help="Limits the prefetch size; rabbitmq defaults to unbounded (0)",
+ help="Limits the prefetch size; RabbitMQ defaults to unbounded (0)",
default=0,
type=int,
)
diff --git a/zproject/backends.py b/zproject/backends.py
index fc64d31d36..e78495754a 100644
--- a/zproject/backends.py
+++ b/zproject/backends.py
@@ -333,7 +333,7 @@ def check_password_strength(password: str) -> bool:
class EmailAuthBackend(ZulipAuthMixin):
"""
- Email+Password Authentication Backend (the default).
+ Email+Password authentication backend (the default).
Allows a user to sign in using an email/password pair.
"""
@@ -464,8 +464,8 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
def django_to_ldap_username(self, username: str) -> str:
"""
Translates django username (user_profile.delivery_email or whatever the user typed in the login
- field when authenticating via the ldap backend) into ldap username.
- Guarantees that the username it returns actually has an entry in the ldap directory.
+ field when authenticating via the LDAP backend) into LDAP username.
+ Guarantees that the username it returns actually has an entry in the LDAP directory.
Raises ZulipLDAPExceptionNoMatchingLDAPUser if that's not possible.
"""
result = username
@@ -490,9 +490,9 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
result = username
if _LDAPUser(self, result).attrs is None:
- # Check that there actually is an ldap entry matching the result username
+ # Check that there actually is an LDAP entry matching the result username
# we want to return. Otherwise, raise an exception.
- error_message = "No ldap user matching django_to_ldap_username result: {}. Input username: {}"
+ error_message = "No LDAP user matching django_to_ldap_username result: {}. Input username: {}"
raise ZulipLDAPExceptionNoMatchingLDAPUser(
error_message.format(result, username),
)
@@ -509,7 +509,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
return "@".join((username, settings.LDAP_APPEND_DOMAIN))
if settings.LDAP_EMAIL_ATTR is not None:
- # Get email from ldap attributes.
+ # Get email from LDAP attributes.
if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
raise ZulipLDAPException(f"LDAP user doesn't have the needed {settings.LDAP_EMAIL_ATTR} attribute")
else:
@@ -690,7 +690,7 @@ class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
email address for this user obtained from LDAP has an active
account in this Zulip realm. If so, it will log them in.
- Otherwise, to provide a seamless Single Sign-On experience
+ Otherwise, to provide a seamless single sign-on experience
with LDAP, this function can automatically create a new Zulip
user account in the realm (assuming the realm is configured to
allow that email address to sign up).
@@ -767,9 +767,9 @@ class ZulipLDAPUser(_LDAPUser):
"""
This is an extension of the _LDAPUser class, with a realm attribute
attached to it. It's purpose is to call its inherited method
- populate_user() which will sync the ldap data with the corresponding
+ populate_user() which will sync the LDAP data with the corresponding
UserProfile. The realm attribute serves to uniquely identify the UserProfile
- in case the ldap user is registered to multiple realms.
+ in case the LDAP user is registered to multiple realms.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
@@ -961,7 +961,7 @@ def external_auth_method(cls: Type[ExternalAuthMethod]) -> Type[ExternalAuthMeth
EXTERNAL_AUTH_METHODS.append(cls)
return cls
-# We want to be able to store this data in redis, so it has to be easy to serialize.
+# We want to be able to store this data in Redis, so it has to be easy to serialize.
# That's why we avoid having fields that could pose a problem for that.
class ExternalAuthDataDict(TypedDict, total=False):
subdomain: str
@@ -1093,8 +1093,8 @@ def redirect_deactivated_user_to_login() -> HttpResponseRedirect:
def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any],
*args: Any, **kwargs: Any) -> Union[HttpResponse, Optional[UserProfile]]:
- """Responsible for doing the Zulip-account lookup and validation parts
- of the Zulip Social auth pipeline (similar to the authenticate()
+ """Responsible for doing the Zulip account lookup and validation parts
+ of the Zulip social auth pipeline (similar to the authenticate()
methods in most other auth backends in this file).
Returns a UserProfile object for successful authentication, and None otherwise.
@@ -1586,7 +1586,7 @@ class AppleAuthBackend(SocialAuthMixin, AppleIdAuth):
return self.strategy.request_data().get('native_flow', False)
# This method replaces a method from python-social-auth; it is adapted to store
- # the state_token data in redis.
+ # the state_token data in Redis.
def get_or_create_state(self) -> str:
'''Creates the Oauth2 state parameter in first step of the flow,
before redirecting the user to the IdP (aka Apple).
@@ -1600,10 +1600,10 @@ class AppleAuthBackend(SocialAuthMixin, AppleIdAuth):
POST request coming from Apple.
To work around this, we replace python-social-auth's default
- session-based storage with storing the parameters in redis
+ session-based storage with storing the parameters in Redis
under a random token derived from the state. That will allow
us to validate the state and retrieve the params after the
- redirect - by querying redis for the key derived from the
+ redirect - by querying Redis for the key derived from the
state sent in the POST redirect.
'''
request_data = self.strategy.request_data().dict()
@@ -1622,7 +1622,7 @@ class AppleAuthBackend(SocialAuthMixin, AppleIdAuth):
def validate_state(self) -> Optional[str]:
"""
This method replaces a method from python-social-auth; it is
- adapted to retrieve the data stored in redis, save it in
+ adapted to retrieve the data stored in Redis, save it in
the session so that it can be accessed by the social pipeline.
"""
request_state = self.get_request_state()
@@ -1772,7 +1772,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
#
# To protect against network eavesdropping of these
# parameters, we send just a random token to the IdP in
- # RelayState, which is used as a key into our redis data store
+ # RelayState, which is used as a key into our Redis data store
# for fetching the actual parameters after the IdP has
# returned a successful authentication.
params_to_relay = self.standard_relay_params
@@ -1794,7 +1794,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
def get_data_from_redis(cls, key: str) -> Optional[Dict[str, Any]]:
data = None
if key.startswith('saml_token_'):
- # Safety if statement, to not allow someone to poke around arbitrary redis keys here.
+ # Safety if statement, to not allow someone to poke around arbitrary Redis keys here.
data = get_dict_from_redis(redis_client, "saml_token_{token}", key)
return data
diff --git a/zproject/computed_settings.py b/zproject/computed_settings.py
index 83d5066141..6a9bc195ed 100644
--- a/zproject/computed_settings.py
+++ b/zproject/computed_settings.py
@@ -193,7 +193,7 @@ MIDDLEWARE = (
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
# Make sure 2FA middlewares come after authentication middleware.
- 'django_otp.middleware.OTPMiddleware', # Required by Two Factor auth.
+ 'django_otp.middleware.OTPMiddleware', # Required by two-factor auth.
'two_factor.middleware.threadlocals.ThreadLocals', # Required by Twilio
# Needs to be after CommonMiddleware, which sets Content-Length
'zerver.middleware.FinalizeOpenGraphDescription',
@@ -255,7 +255,7 @@ SILENCED_SYSTEM_CHECKS = [
########################################################################
# Zulip's Django configuration supports 4 different ways to do
-# postgres authentication:
+# Postgres authentication:
#
# * The development environment uses the `local_database_password`
# secret from `zulip-secrets.conf` to authenticate with a local
@@ -264,18 +264,18 @@ SILENCED_SYSTEM_CHECKS = [
#
# The remaining 3 options are for production use:
#
-# * Using postgres' "peer" authentication to authenticate to a
+# * Using Postgres' "peer" authentication to authenticate to a
# database on the local system using one's user ID (processes
# running as user `zulip` on the system are automatically
# authenticated as database user `zulip`). This is the default in
# production. We don't use this in the development environment,
# because it requires the developer's user to be called `zulip`.
#
-# * Using password authentication with a remote postgres server using
+# * Using password authentication with a remote Postgres server using
# the `REMOTE_POSTGRES_HOST` setting and the password from the
# `postgres_password` secret.
#
-# * Using passwordless authentication with a remote postgres server
+# * Using passwordless authentication with a remote Postgres server
# using the `REMOTE_POSTGRES_HOST` setting and a client certificate
# under `/home/zulip/.postgresql/`.
#
@@ -411,7 +411,7 @@ if PRODUCTION:
SESSION_COOKIE_NAME = "__Host-sessionid"
CSRF_COOKIE_NAME = "__Host-csrftoken"
-# Prevent Javascript from reading the CSRF token from cookies. Our code gets
+# Prevent JavaScript from reading the CSRF token from cookies. Our code gets
# the token from the DOM, which means malicious code could too. But hiding the
# cookie will slow down some attackers.
CSRF_COOKIE_HTTPONLY = True
@@ -1056,7 +1056,7 @@ SOCIAL_AUTH_GITHUB_TEAM_SECRET = SOCIAL_AUTH_GITHUB_SECRET
SOCIAL_AUTH_GOOGLE_SECRET = get_secret('social_auth_google_secret')
# Fallback to google-oauth settings in case social auth settings for
-# google are missing; this is for backwards-compatibility with older
+# Google are missing; this is for backwards-compatibility with older
# Zulip versions where /etc/zulip/settings.py has not been migrated yet.
GOOGLE_OAUTH2_CLIENT_SECRET = get_secret('google_oauth2_client_secret')
SOCIAL_AUTH_GOOGLE_KEY = SOCIAL_AUTH_GOOGLE_KEY or GOOGLE_OAUTH2_CLIENT_ID
diff --git a/zproject/default_settings.py b/zproject/default_settings.py
index 856e5d621d..16c6ac37e9 100644
--- a/zproject/default_settings.py
+++ b/zproject/default_settings.py
@@ -173,7 +173,7 @@ RATE_LIMITING_AUTHENTICATE = True
SEND_LOGIN_EMAILS = True
EMBEDDED_BOTS_ENABLED = False
-# Two Factor Authentication is not yet implementation-complete
+# Two-factor authentication is not yet implementation-complete
TWO_FACTOR_AUTHENTICATION_ENABLED = False
# This is used to send all hotspots for convenient manual testing
@@ -252,7 +252,7 @@ REALM_HOSTS: Dict[str, str] = {}
# mean that a different hostname must be used for mobile access.
REALM_MOBILE_REMAP_URIS: Dict[str, str] = {}
-# Whether the server is using the Pgroonga full-text search
+# Whether the server is using the PGroonga full-text search
# backend. Plan is to turn this on for everyone after further
# testing.
USING_PGROONGA = False
diff --git a/zproject/dev_settings.py b/zproject/dev_settings.py
index 631615024d..2478094d8b 100644
--- a/zproject/dev_settings.py
+++ b/zproject/dev_settings.py
@@ -167,7 +167,7 @@ SEARCH_PILLS_ENABLED = bool(os.getenv('SEARCH_PILLS_ENABLED', False))
BILLING_ENABLED = True
LANDING_PAGE_NAVBAR_MESSAGE = None
-# Test Custom TOS template rendering
+# Test custom TOS template rendering
TERMS_OF_SERVICE = 'corporate/terms.md'
# Our run-dev.py proxy uses X-Forwarded-Port to communicate to Django
diff --git a/zproject/legacy_urls.py b/zproject/legacy_urls.py
index 1366f13be0..1ad5f18b83 100644
--- a/zproject/legacy_urls.py
+++ b/zproject/legacy_urls.py
@@ -6,7 +6,7 @@ import zerver.views.report
import zerver.views.streams
import zerver.views.tutorial
-# Future endpoints should add to urls.py, which includes these legacy urls
+# Future endpoints should add to urls.py, which includes these legacy URLs
legacy_urls = [
# These are json format views used by the web client. They require a logged in browser.
diff --git a/zproject/prod_settings_template.py b/zproject/prod_settings_template.py
index 422255abad..0abc61992d 100644
--- a/zproject/prod_settings_template.py
+++ b/zproject/prod_settings_template.py
@@ -434,7 +434,7 @@ ENABLE_GRAVATAR = True
# and uncomment the following line.
#DEFAULT_AVATAR_URI = '/local-static/default-avatar.png'
-# To access an external postgres database you should define the host name in
+# To access an external Postgres database you should define the host name in
# REMOTE_POSTGRES_HOST, port in REMOTE_POSTGRES_PORT, password in the secrets file in the
# property postgres_password, and the SSL connection mode in REMOTE_POSTGRES_SSLMODE
# Valid values for REMOTE_POSTGRES_SSLMODE are documented in the
@@ -471,7 +471,7 @@ ENABLE_GRAVATAR = True
################
# Email gateway integration.
#
-# The Email gateway integration supports sending messages into Zulip
+# The email gateway integration supports sending messages into Zulip
# by sending an email.
# For details, see the documentation:
# https://zulip.readthedocs.io/en/latest/production/settings.html#email-gateway
@@ -599,11 +599,11 @@ CAMO_URI = '/external_content/'
# RabbitMQ configuration
#
-# By default, Zulip connects to rabbitmq running locally on the machine,
+# By default, Zulip connects to RabbitMQ running locally on the machine,
# but Zulip also supports connecting to RabbitMQ over the network;
# to use a remote RabbitMQ instance, set RABBITMQ_HOST to the hostname here.
# RABBITMQ_HOST = "127.0.0.1"
-# To use another rabbitmq user than the default 'zulip', set RABBITMQ_USERNAME here.
+# To use another RabbitMQ user than the default 'zulip', set RABBITMQ_USERNAME here.
# RABBITMQ_USERNAME = 'zulip'
# Memcached configuration
@@ -619,14 +619,14 @@ CAMO_URI = '/external_content/'
# Redis configuration
#
-# By default, Zulip connects to redis running locally on the machine,
-# but Zulip also supports connecting to redis over the network;
+# By default, Zulip connects to Redis running locally on the machine,
+# but Zulip also supports connecting to Redis over the network;
# to use a remote Redis instance, set REDIS_HOST here.
# REDIS_HOST = '127.0.0.1'
-# For a different redis port set the REDIS_PORT here.
+# For a different Redis port set the REDIS_PORT here.
# REDIS_PORT = 6379
# If you set redis_password in zulip-secrets.conf, Zulip will use that password
-# to connect to the redis server.
+# to connect to the Redis server.
# Controls whether Zulip will rate-limit user requests.
# RATE_LIMITING = True
diff --git a/zproject/test_extra_settings.py b/zproject/test_extra_settings.py
index f4c89cb248..647734a5c7 100644
--- a/zproject/test_extra_settings.py
+++ b/zproject/test_extra_settings.py
@@ -87,7 +87,7 @@ AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch("ou=users,dc=zulip,dc=com",
TEST_SUITE = True
RATE_LIMITING = False
RATE_LIMITING_AUTHENTICATE = False
-# Don't use rabbitmq from the test suite -- the user_profile_ids for
+# Don't use RabbitMQ from the test suite -- the user_profile_ids for
# any generated queue elements won't match those being used by the
# real app.
USING_RABBITMQ = False
diff --git a/zproject/urls.py b/zproject/urls.py
index 732321e126..eccf5dda8b 100644
--- a/zproject/urls.py
+++ b/zproject/urls.py
@@ -511,7 +511,7 @@ v1_api_and_json_patterns = [
rest_path('streams',
GET=get_streams_backend),
- # GET returns `stream_id`, stream name should be encoded in the url query (in `stream` param)
+ # GET returns `stream_id`, stream name should be encoded in the URL query (in `stream` param)
rest_path('get_stream_id',
GET=json_get_stream_id),
@@ -591,7 +591,7 @@ v1_api_and_json_patterns = [
integrations_view = IntegrationView.as_view()
# These views serve pages (HTML). As such, their internationalization
-# must depend on the url.
+# must depend on the URL.
#
# If you're adding a new page to the website (as opposed to a new
# endpoint for use by code), you should add it here.
@@ -685,12 +685,12 @@ i18n_urls = [
# Go to organization subdomain
path('accounts/go/', realm_redirect, name='realm_redirect'),
- # Realm Creation
+ # Realm creation
path('new/', create_realm),
path('new/',
create_realm, name='create_realm'),
- # Realm Reactivation
+ # Realm reactivation
path('reactivate/', realm_reactivation,
name='realm_reactivation'),
@@ -759,7 +759,7 @@ urls += [
# user_uploads -> zerver.views.upload.serve_file_backend
#
-# This url is an exception to the url naming schemes for endpoints. It
+# This URL is an exception to the URL naming schemes for endpoints. It
# supports both API and session cookie authentication, using a single
# URL for both (not 'api/v1/' or 'json/' prefix). This is required to
# easily support the mobile apps fetching uploaded files without
@@ -788,13 +788,13 @@ urls += [
{'override_api_url_scheme'})),
]
-# This url serves as a way to receive CSP violation reports from the users.
+# This URL serves as a way to receive CSP violation reports from the users.
# We use this endpoint to just log these reports.
urls += [
path('report/csp_violations', report_csp_violations),
]
-# This url serves as a way to provide backward compatibility to messages
+# This URL serves as a way to provide backward compatibility to messages
# rendered at the time Zulip used camo for doing http -> https conversion for
# such links with images previews. Now thumbor can be used for serving such
# images.
@@ -804,7 +804,7 @@ urls += [
]
# Incoming webhook URLs
-# We don't create urls for particular git integrations here
+# We don't create URLs for particular Git integrations here
# because of generic one below
for incoming_webhook in WEBHOOK_INTEGRATIONS:
if incoming_webhook.url_object:
@@ -890,7 +890,7 @@ urls += [
path('api/', api_documentation_view),
]
-# Two Factor urls
+# Two-factor URLs
if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
urls += [path('', include(tf_urls)),
path('', include(tf_twilio_urls))]
@@ -899,7 +899,7 @@ if settings.DEVELOPMENT:
urls += dev_urls.urls
i18n_urls += dev_urls.i18n_urls
-# The sequence is important; if i18n urls don't come first then
-# reverse url mapping points to i18n urls which causes the frontend
+# The sequence is important; if i18n URLs don't come first then
+# reverse URL mapping points to i18n URLs which causes the frontend
# tests to fail
urlpatterns = i18n_patterns(*i18n_urls) + urls + legacy_urls