from typing import Any, Optional

import sqlalchemy
from django.db import connection

from zerver.lib.db import TimeTrackingConnection


# This is a Pool that doesn't close connections. Therefore it can be used with
# existing Django database connections.
class NonClosingPool(sqlalchemy.pool.NullPool):
    def status(self) -> str:
        return "NonClosingPool"

    def _do_return_conn(self, conn: sqlalchemy.engine.base.Connection) -> None:
        pass

    def recreate(self) -> "NonClosingPool":
        return self.__class__(
            creator=self._creator,  # type: ignore[attr-defined] # implementation detail
            recycle=self._recycle,  # type: ignore[attr-defined] # implementation detail
            use_threadlocal=self._use_threadlocal,  # type: ignore[attr-defined] # implementation detail
            reset_on_return=self._reset_on_return,  # type: ignore[attr-defined] # implementation detail
            echo=self.echo,
            logging_name=self._orig_logging_name,  # type: ignore[attr-defined] # implementation detail
            _dispatch=self.dispatch,  # type: ignore[attr-defined] # implementation detail
        )
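
# A sketch of the behavior this pool enables (nothing below is executed here;
# it only illustrates the intent of the no-op _do_return_conn): closing the
# SQLAlchemy connection hands the raw DBAPI connection back to this pool, and
# since the pool does nothing with it, Django's connection stays open and
# remains usable by the ORM afterwards.
#
#     sa_conn = get_sqlalchemy_connection()
#     ...                      # run SQLAlchemy queries
#     sa_conn.close()          # no-op at the DBAPI level
#     connection.cursor()      # Django's connection still works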


sqlalchemy_engine: Optional[Any] = None


def get_sqlalchemy_connection() -> sqlalchemy.engine.base.Connection:
    global sqlalchemy_engine
    if sqlalchemy_engine is None:

        def get_dj_conn() -> TimeTrackingConnection:
            connection.ensure_connection()
            return connection.connection

        # The engine hands out Django's already-open connection (via
        # get_dj_conn) instead of opening its own; NonClosingPool and
        # pool_reset_on_return=False keep SQLAlchemy from closing it or
        # rolling back Django's transaction state when it is "returned".
        sqlalchemy_engine = sqlalchemy.create_engine(
            "postgresql://",
            creator=get_dj_conn,
            poolclass=NonClosingPool,
            pool_reset_on_return=False,
        )
    sa_connection = sqlalchemy_engine.connect()
    sa_connection.execution_options(autocommit=False)
    return sa_connection
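
# Example usage (a hedged sketch; the table and column names below are
# illustrative and not defined or checked by this module):
#
#     from sqlalchemy.sql import column, select, table
#
#     zerver_realm = table("zerver_realm", column("id"), column("string_id"))
#     sa_conn = get_sqlalchemy_connection()
#     rows = sa_conn.execute(select([zerver_realm.c.string_id])).fetchall()
#
# Repeated calls reuse the cached sqlalchemy_engine, so every SQLAlchemy query
# runs on the same underlying connection as Django's ORM.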