import os
import re
from typing import Dict, List, Optional

import sourcemap

from zerver.lib.pysa import mark_sanitized


class SourceMap:
    """Map (line, column) pairs from generated to source file."""

    def __init__(self, sourcemap_dirs: List[str]) -> None:
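        # The directories are searched in order; _index_for uses the first
        # matching .map file it finds.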
        self._dirs = sourcemap_dirs
        self._indices: Dict[str, sourcemap.SourceMapDecoder] = {}

    def _index_for(self, minified_src: str) -> Optional[sourcemap.SourceMapDecoder]:
        """Return the source map index for minified_src, loading it if not
        already loaded."""

        # Prevent path traversal
        assert ".." not in minified_src and "/" not in minified_src

        if minified_src not in self._indices:
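            # Lazily decode the source map the first time this bundle is seen
            # and cache the result for later lookups.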
            for source_dir in self._dirs:
                filename = os.path.join(source_dir, minified_src + ".map")
                if os.path.isfile(filename):
                    # Use 'mark_sanitized' to force Pysa to ignore the fact that
                    # 'filename' is user controlled. While putting user
                    # controlled data into a filesystem operation is bad, in
                    # this case it's benign because 'filename' can't traverse
                    # directories outside of the pre-configured 'sourcemap_dirs'
                    # (due to the above assertions) and will always end in
                    # '.map'. Additionally, the result of this function is used
                    # for error logging and not returned to the user, so
                    # controlling the loaded file would not be useful to an
                    # attacker.
                    with open(mark_sanitized(filename)) as fp:
                        self._indices[minified_src] = sourcemap.load(fp)
                        break

        return self._indices.get(minified_src)

    def annotate_stacktrace(self, stacktrace: str) -> str:
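        """Annotate a JavaScript stack trace: after each frame that points into
        a minified webpack bundle, append the original source file, line, and
        column when a source map is available."""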
        out: str = ""
        for ln in stacktrace.splitlines():
            out += ln + "\n"
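            # Browser stack frames referencing minified assets look roughly like
            # "    at render (https://example.com/webpack-bundles/app.js:1:5310)";
            # capture the bundle name plus the generated line and column numbers.
            # (The host and file names here are illustrative.)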
            match = re.search(r"/webpack-bundles/([^:]+):(\d+):(\d+)", ln)
            if match:
                # Get the appropriate source map for the minified file.
                minified_src = match.groups()[0]
                index = self._index_for(minified_src)
                if index is None:
                    out += "       [Unable to look up in source map]\n"
                    continue

                gen_line, gen_col = list(map(int, match.groups()[1:3]))
                # The sourcemap lib is 0-based, so subtract 1 from line and col.
                try:
                    result = index.lookup(line=gen_line - 1, column=gen_col - 1)
                    display_src = result.src
                    if display_src is not None:
                        webpack_prefix = "webpack:///"
                        if display_src.startswith(webpack_prefix):
                            display_src = display_src[len(webpack_prefix) :]
                        out += f"       = {display_src} line {result.src_line+1} column {result.src_col+1}\n"
                except IndexError:
                    out += "       [Unable to look up in source map]\n"

            if ln.startswith("    at"):
                out += "\n"
        return out
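

# A minimal usage sketch under assumed inputs: the directory and the sample
# trace below are illustrative stand-ins, not values used by Zulip itself.
# Given a directory containing "app.js.map", annotate_stacktrace() appends the
# original (file, line, column) under each frame pointing into /webpack-bundles/.
if __name__ == "__main__":
    sample_dirs = ["/srv/zulip/prod-static/webpack-bundles"]
    sample_trace = (
        "TypeError: Cannot read property 'foo' of undefined\n"
        "    at render (https://example.com/webpack-bundles/app.js:1:5310)\n"
    )
    print(SourceMap(sample_dirs).annotate_stacktrace(sample_trace))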