instruction | response |
---|---|
Fetch a summary from the full spec object. | def get_openapi_summary(endpoint: str, method: str) -> str:
"""Fetch a summary from the full spec object."""
return openapi_spec.openapi()["paths"][endpoint][method.lower()]["summary"] |
Compare a "content" dict with the defined schema for a specific method
in an endpoint. Return true if validated and false if skipped. | def validate_test_response(request: Request, response: Response) -> bool:
"""Compare a "content" dict with the defined schema for a specific method
in an endpoint. Return true if validated and false if skipped.
"""
if request.path.startswith("/json/"):
path = request.path[len("/json") :]
elif request.path.startswith("/api/v1/"):
path = request.path[len("/api/v1") :]
else:
return False
assert request.method is not None
method = request.method.lower()
status_code = str(response.status_code)
    # This first set of checks is primarily training wheels that we
    # hope to eliminate over time as we improve our API documentation.
if path not in openapi_spec.openapi()["paths"]:
endpoint = find_openapi_endpoint(path)
# If it doesn't match it hasn't been documented yet.
if endpoint is None:
return False
else:
endpoint = path
# Excluded endpoint/methods
if (endpoint, method) in EXCLUDE_UNDOCUMENTED_ENDPOINTS:
return False
# Return true for endpoints with only response documentation remaining
if (endpoint, method) in EXCLUDE_DOCUMENTED_ENDPOINTS: # nocoverage
return True
    # The "code" field is not declared in the schema but appears in various
    # 400 responses. If common, it can be added to the 400 response schema.
if status_code.startswith("4"):
        # This return statement ideally should not be here. But since
        # we have not defined 400 responses for various paths, and all
        # 400 responses share the same schema, we skip validation here.
        # Once all 400 responses have been defined, this should be removed.
return True
try:
openapi_spec.spec().validate_response(request, response)
except OpenAPIValidationError as error:
message = f"Response validation error at {method} /api/v1{path} ({status_code}):"
message += f"\n\n{type(error).__name__}: {error}"
message += (
"\n\nFor help debugging these errors see: "
"https://zulip.readthedocs.io/en/latest/documentation/api.html#debugging-schema-validation-errors"
)
raise SchemaError(message) from None
return True |
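The exclusion lists consulted above are flat sets of (endpoint, method) pairs. A hedged sketch of their shape (the entries below are hypothetical, not the real Zulip lists):

# Hypothetical entries; the real sets live alongside this code in Zulip.
EXCLUDE_UNDOCUMENTED_ENDPOINTS = {
    ("/dev_fetch_api_key", "post"),
}
# Endpoints that are documented except for their response schemas.
EXCLUDE_DOCUMENTED_ENDPOINTS = {
    ("/settings/notifications", "patch"),
}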
Check if opaque objects are present in the OpenAPI spec; this is an
important part of our policy for ensuring every detail of Zulip's
API responses is correct.
This is done by checking for the presence of the
`additionalProperties` attribute for all objects (dictionaries). | def validate_schema(schema: Dict[str, Any]) -> None:
"""Check if opaque objects are present in the OpenAPI spec; this is an
important part of our policy for ensuring every detail of Zulip's
API responses is correct.
This is done by checking for the presence of the
`additionalProperties` attribute for all objects (dictionaries).
"""
if "oneOf" in schema:
for subschema in schema["oneOf"]:
validate_schema(subschema)
elif schema["type"] == "array":
validate_schema(schema["items"])
elif schema["type"] == "object":
if "additionalProperties" not in schema:
raise SchemaError(
"additionalProperties needs to be defined for objects to make sure they have no"
" additional properties left to be documented."
)
for property_schema in schema.get("properties", {}).values():
validate_schema(property_schema)
if schema["additionalProperties"]:
validate_schema(schema["additionalProperties"]) |
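A minimal illustration of the invariant validate_schema enforces, using hypothetical schemas:

# Passes: every object (including nested ones) declares additionalProperties.
ok_schema = {
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "name": {"type": "string"},
        "tags": {"type": "array", "items": {"type": "string"}},
    },
}
validate_schema(ok_schema)  # no error

# Raises SchemaError: the nested "owner" object omits additionalProperties,
# so undocumented fields could hide inside it.
bad_schema = {
    "type": "object",
    "additionalProperties": False,
    "properties": {"owner": {"type": "object", "properties": {}}},
}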
This decorator is used to register an OpenAPI test function with
its endpoint. Example usage:
@openapi_test_function("/messages/render:post")
def ... | def openapi_test_function(
endpoint: str,
) -> Callable[[Callable[ParamT, ReturnT]], Callable[ParamT, ReturnT]]:
"""This decorator is used to register an OpenAPI test function with
its endpoint. Example usage:
@openapi_test_function("/messages/render:post")
def ...
"""
def wrapper(test_func: Callable[ParamT, ReturnT]) -> Callable[ParamT, ReturnT]:
@wraps(test_func)
def _record_calls_wrapper(*args: ParamT.args, **kwargs: ParamT.kwargs) -> ReturnT:
CALLED_TEST_FUNCTIONS.add(test_func.__name__)
return test_func(*args, **kwargs)
REGISTERED_TEST_FUNCTIONS.add(test_func.__name__)
TEST_FUNCTIONS[endpoint] = _record_calls_wrapper
return _record_calls_wrapper
return wrapper |
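A usage sketch for the decorator; the endpoint and the `client` object are illustrative (Zulip's real suite passes a zulip.Client), not copied from the codebase:

@openapi_test_function("/mark_all_as_read:post")
def mark_all_as_read(client: Client) -> None:
    # Registration in REGISTERED_TEST_FUNCTIONS happens at import time;
    # CALLED_TEST_FUNCTIONS is only updated when the test runs, so the
    # suite can detect registered-but-never-exercised tests by diffing
    # the two sets.
    result = client.mark_all_as_read()
    assert result["result"] == "success"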
The has_alert_word flag can be ignored for most tests. | def check_flags(flags: List[str], expected: Set[str]) -> None:
"""
The has_alert_word flag can be ignored for most tests.
"""
assert "has_alert_word" not in expected
flag_set = set(flags)
flag_set.discard("has_alert_word")
if flag_set != expected:
raise AssertionError(f"expected flags (ignoring has_alert_word) to be {expected}") |
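For example, under these semantics (a hedged illustration):

# Passes: has_alert_word in the actual flags is discarded before comparing.
check_flags(["read", "has_alert_word"], {"read"})
# Would raise AssertionError immediately: expected flags must never
# include has_alert_word, since this helper always ignores it.
# check_flags(["read"], {"read", "has_alert_word"})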
A helper to let us ignore the view function's signature | def call_endpoint(
view: Callable[..., T], request: HttpRequest, *args: object, **kwargs: object
) -> T:
"""A helper to let us ignore the view function's signature"""
return view(request, *args, **kwargs) |
`users` is a list of user IDs, or in some special cases like message
send/update or embeds, dictionaries containing extra data. | def send_event(
realm: Realm, event: Mapping[str, Any], users: Union[Iterable[int], Iterable[Mapping[str, Any]]]
) -> None:
"""`users` is a list of user IDs, or in some special cases like message
send/update or embeds, dictionaries containing extra data."""
realm_ports = get_realm_tornado_ports(realm)
if len(realm_ports) == 1:
port_user_map = {realm_ports[0]: list(users)}
else:
port_user_map = defaultdict(list)
for user in users:
user_id = user if isinstance(user, int) else user["id"]
port_user_map[get_user_id_tornado_port(realm_ports, user_id)].append(user)
for port, port_users in port_user_map.items():
queue_json_publish(
notify_tornado_queue_name(port),
dict(event=event, users=port_users),
partial(send_notification_http, port),
) |
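The two accepted shapes of `users`, illustrated with hypothetical IDs (`realm` is an existing Realm object and `message_event` a message event payload):

# Plain user IDs, the common case:
send_event(realm, {"type": "restart"}, [10, 11, 12])
# Dictionaries carrying per-user extra data, as used for message events;
# each dict must include at least the user's "id":
send_event(realm, message_event, [{"id": 10, "flags": ["mentioned"]}])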
Prunes the internal_data data structures, which are not intended to
be exposed to API clients. | def prune_internal_data(events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""Prunes the internal_data data structures, which are not intended to
be exposed to API clients.
"""
events = copy.deepcopy(events)
for event in events:
if event["type"] == "message" and "internal_data" in event:
del event["internal_data"]
return events |
The receiver_is_off_zulip logic used to determine whether a user
has no active client suffers from a somewhat fundamental race
condition. If the client is no longer on the Internet,
receiver_is_off_zulip will still return False for
DEFAULT_EVENT_QUEUE_TIMEOUT_SECS, until the queue is
garbage-collected. This would cause us to reliably miss
push/email notifying users for messages arriving during the
DEFAULT_EVENT_QUEUE_TIMEOUT_SECS after they suspend their laptop (for
example). We address this by, when the queue is garbage-collected
at the end of those 10 minutes, checking to see if it's the last
one, and if so, potentially triggering notifications to the user
at that time, resulting in at most a DEFAULT_EVENT_QUEUE_TIMEOUT_SECS
delay in the arrival of their notifications.
As Zulip's APIs get more popular and the mobile apps start using
long-lived event queues for perf optimization, future versions of
this will likely need to replace checking `last_for_client` with
something more complicated, so that we only consider clients like
web browsers, not the mobile apps or random API scripts. | def missedmessage_hook(
user_profile_id: int, client: ClientDescriptor, last_for_client: bool
) -> None:
"""The receiver_is_off_zulip logic used to determine whether a user
has no active client suffers from a somewhat fundamental race
condition. If the client is no longer on the Internet,
receiver_is_off_zulip will still return False for
DEFAULT_EVENT_QUEUE_TIMEOUT_SECS, until the queue is
garbage-collected. This would cause us to reliably miss
push/email notifying users for messages arriving during the
DEFAULT_EVENT_QUEUE_TIMEOUT_SECS after they suspend their laptop (for
example). We address this by, when the queue is garbage-collected
at the end of those 10 minutes, checking to see if it's the last
one, and if so, potentially triggering notifications to the user
at that time, resulting in at most a DEFAULT_EVENT_QUEUE_TIMEOUT_SECS
delay in the arrival of their notifications.
As Zulip's APIs get more popular and the mobile apps start using
long-lived event queues for perf optimization, future versions of
this will likely need to replace checking `last_for_client` with
something more complicated, so that we only consider clients like
web browsers, not the mobile apps or random API scripts.
"""
# Only process missedmessage hook when the last queue for a
# client has been garbage collected
if not last_for_client:
return
for event in client.event_queue.contents(include_internal_data=True):
if event["type"] != "message":
continue
internal_data = event.get("internal_data", {})
sender_id = event["message"]["sender_id"]
# TODO/compatibility: Translation code for the rename of
# `pm_push_notify` to `dm_push_notify`. Remove this when
# one can no longer directly upgrade from 7.x to main.
dm_push_notify = False
if "dm_push_notify" in internal_data:
dm_push_notify = internal_data.get("dm_push_notify")
elif "pm_push_notify" in internal_data:
dm_push_notify = internal_data.get("pm_push_notify")
# TODO/compatibility: Translation code for the rename of
# `pm_email_notify` to `dm_email_notify`. Remove this when
# one can no longer directly upgrade from 7.x to main.
dm_email_notify = False
if "dm_email_notify" in internal_data:
dm_email_notify = internal_data.get("dm_email_notify")
elif "pm_email_notify" in internal_data:
dm_email_notify = internal_data.get("pm_email_notify")
# TODO/compatibility: Translation code for the rename of
# `wildcard_mention_push_notify` to `stream_wildcard_mention_push_notify`.
# Remove this when one can no longer directly upgrade from 7.x to main.
stream_wildcard_mention_push_notify = False
if "stream_wildcard_mention_push_notify" in internal_data:
stream_wildcard_mention_push_notify = internal_data.get(
"stream_wildcard_mention_push_notify"
)
elif "wildcard_mention_push_notify" in internal_data:
stream_wildcard_mention_push_notify = internal_data.get("wildcard_mention_push_notify")
# TODO/compatibility: Translation code for the rename of
# `wildcard_mention_email_notify` to `stream_wildcard_mention_email_notify`.
# Remove this when one can no longer directly upgrade from 7.x to main.
stream_wildcard_mention_email_notify = False
if "stream_wildcard_mention_email_notify" in internal_data:
stream_wildcard_mention_email_notify = internal_data.get(
"stream_wildcard_mention_email_notify"
)
elif "wildcard_mention_email_notify" in internal_data:
stream_wildcard_mention_email_notify = internal_data.get(
"wildcard_mention_email_notify"
)
user_notifications_data = UserMessageNotificationsData(
user_id=user_profile_id,
sender_is_muted=internal_data.get("sender_is_muted", False),
dm_push_notify=dm_push_notify,
dm_email_notify=dm_email_notify,
mention_push_notify=internal_data.get("mention_push_notify", False),
mention_email_notify=internal_data.get("mention_email_notify", False),
topic_wildcard_mention_push_notify=internal_data.get(
"topic_wildcard_mention_push_notify", False
),
topic_wildcard_mention_email_notify=internal_data.get(
"topic_wildcard_mention_email_notify", False
),
stream_wildcard_mention_push_notify=stream_wildcard_mention_push_notify,
stream_wildcard_mention_email_notify=stream_wildcard_mention_email_notify,
stream_push_notify=internal_data.get("stream_push_notify", False),
stream_email_notify=internal_data.get("stream_email_notify", False),
followed_topic_push_notify=internal_data.get("followed_topic_push_notify", False),
followed_topic_email_notify=internal_data.get("followed_topic_email_notify", False),
topic_wildcard_mention_in_followed_topic_push_notify=internal_data.get(
"topic_wildcard_mention_in_followed_topic_push_notify", False
),
topic_wildcard_mention_in_followed_topic_email_notify=internal_data.get(
"topic_wildcard_mention_in_followed_topic_email_notify", False
),
stream_wildcard_mention_in_followed_topic_push_notify=internal_data.get(
"stream_wildcard_mention_in_followed_topic_push_notify", False
),
stream_wildcard_mention_in_followed_topic_email_notify=internal_data.get(
"stream_wildcard_mention_in_followed_topic_email_notify", False
),
# Since one is by definition idle, we don't need to check online_push_enabled
online_push_enabled=False,
disable_external_notifications=internal_data.get(
"disable_external_notifications", False
),
)
mentioned_user_group_id = internal_data.get("mentioned_user_group_id")
# Since we just GC'd the last event queue, the user is definitely idle.
idle = True
message_id = event["message"]["id"]
# Pass on the information on whether a push or email notification was already sent.
already_notified = dict(
push_notified=internal_data.get("push_notified", False),
email_notified=internal_data.get("email_notified", False),
)
maybe_enqueue_notifications(
user_notifications_data=user_notifications_data,
acting_user_id=sender_id,
message_id=message_id,
mentioned_user_group_id=mentioned_user_group_id,
idle=idle,
already_notified=already_notified,
) |
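The three rename-translation blocks above share one shape; a hedged refactoring sketch (not the helper Zulip actually uses):

def renamed_notify_setting(internal_data: Dict[str, Any], new_key: str, old_key: str) -> bool:
    # Prefer the post-rename key; fall back to the pre-rename key for
    # events queued by a 7.x server; default to False if neither is set.
    if new_key in internal_data:
        return internal_data[new_key]
    return internal_data.get(old_key, False)

dm_push_notify = renamed_notify_setting(internal_data, "dm_push_notify", "pm_push_notify")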
This function has a complete unit test suite in
`test_enqueue_notifications` that should be expanded as we add
more features here.
See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
for high-level design documentation. | def maybe_enqueue_notifications(
*,
user_notifications_data: UserMessageNotificationsData,
acting_user_id: int,
message_id: int,
mentioned_user_group_id: Optional[int],
idle: bool,
already_notified: Dict[str, bool],
) -> Dict[str, bool]:
"""This function has a complete unit test suite in
`test_enqueue_notifications` that should be expanded as we add
more features here.
See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
for high-level design documentation.
"""
notified: Dict[str, bool] = {}
if user_notifications_data.is_push_notifiable(acting_user_id, idle):
notice = build_offline_notification(user_notifications_data.user_id, message_id)
notice["trigger"] = user_notifications_data.get_push_notification_trigger(
acting_user_id, idle
)
notice["type"] = "add"
notice["mentioned_user_group_id"] = mentioned_user_group_id
if not already_notified.get("push_notified"):
queue_json_publish("missedmessage_mobile_notifications", notice)
notified["push_notified"] = True
# Send missed_message emails if a direct message or a
# mention. Eventually, we'll add settings to allow email
# notifications to match the model of push notifications
# above.
if user_notifications_data.is_email_notifiable(acting_user_id, idle):
notice = build_offline_notification(user_notifications_data.user_id, message_id)
notice["trigger"] = user_notifications_data.get_email_notification_trigger(
acting_user_id, idle
)
notice["mentioned_user_group_id"] = mentioned_user_group_id
if not already_notified.get("email_notified"):
queue_json_publish("missedmessage_emails", notice, lambda notice: None)
notified["email_notified"] = True
return notified |
Return client info for all the clients interested in a message.
This basically includes clients for users who are recipients
of the message, with some nuances for bots that auto-subscribe
to all streams, plus users who may be mentioned, etc. | def get_client_info_for_message_event(
event_template: Mapping[str, Any], users: Iterable[Mapping[str, Any]]
) -> Dict[str, ClientInfo]:
"""
Return client info for all the clients interested in a message.
This basically includes clients for users who are recipients
of the message, with some nuances for bots that auto-subscribe
to all streams, plus users who may be mentioned, etc.
"""
send_to_clients: Dict[str, ClientInfo] = {}
sender_queue_id: Optional[str] = event_template.get("sender_queue_id", None)
def is_sender_client(client: ClientDescriptor) -> bool:
return (sender_queue_id is not None) and client.event_queue.id == sender_queue_id
# If we're on a public stream, look for clients (typically belonging to
# bots) that are registered to get events for ALL streams.
if "stream_name" in event_template and not event_template.get("invite_only"):
realm_id = event_template["realm_id"]
for client in get_client_descriptors_for_realm_all_streams(realm_id):
send_to_clients[client.event_queue.id] = dict(
client=client,
flags=[],
is_sender=is_sender_client(client),
)
for user_data in users:
user_profile_id: int = user_data["id"]
flags: Collection[str] = user_data.get("flags", [])
for client in get_client_descriptors_for_user(user_profile_id):
send_to_clients[client.event_queue.id] = dict(
client=client,
flags=flags,
is_sender=is_sender_client(client),
)
return send_to_clients |
See
https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
for high-level documentation on this subsystem. | def process_message_event(
event_template: Mapping[str, Any], users: Collection[Mapping[str, Any]]
) -> None:
"""See
https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
for high-level documentation on this subsystem.
"""
send_to_clients = get_client_info_for_message_event(event_template, users)
presence_idle_user_ids = set(event_template.get("presence_idle_user_ids", []))
online_push_user_ids = set(event_template.get("online_push_user_ids", []))
# TODO/compatibility: Translation code for the rename of
# `pm_mention_push_disabled_user_ids` to `dm_mention_push_disabled_user_ids`.
# Remove this when one can no longer directly upgrade from 7.x to main.
dm_mention_push_disabled_user_ids = set()
if "dm_mention_push_disabled_user_ids" in event_template:
dm_mention_push_disabled_user_ids = set(
event_template.get("dm_mention_push_disabled_user_ids", [])
)
elif "pm_mention_push_disabled_user_ids" in event_template:
dm_mention_push_disabled_user_ids = set(
event_template.get("pm_mention_push_disabled_user_ids", [])
)
# TODO/compatibility: Translation code for the rename of
# `pm_mention_email_disabled_user_ids` to `dm_mention_email_disabled_user_ids`.
# Remove this when one can no longer directly upgrade from 7.x to main.
dm_mention_email_disabled_user_ids = set()
if "dm_mention_email_disabled_user_ids" in event_template:
dm_mention_email_disabled_user_ids = set(
event_template.get("dm_mention_email_disabled_user_ids", [])
)
elif "pm_mention_email_disabled_user_ids" in event_template:
dm_mention_email_disabled_user_ids = set(
event_template.get("pm_mention_email_disabled_user_ids", [])
)
stream_push_user_ids = set(event_template.get("stream_push_user_ids", []))
stream_email_user_ids = set(event_template.get("stream_email_user_ids", []))
topic_wildcard_mention_user_ids = set(event_template.get("topic_wildcard_mention_user_ids", []))
# TODO/compatibility: Translation code for the rename of
# `wildcard_mention_user_ids` to `stream_wildcard_mention_user_ids`.
# Remove this when one can no longer directly upgrade from 7.x to main.
stream_wildcard_mention_user_ids = set()
if "stream_wildcard_mention_user_ids" in event_template:
stream_wildcard_mention_user_ids = set(
event_template.get("stream_wildcard_mention_user_ids", [])
)
elif "wildcard_mention_user_ids" in event_template:
stream_wildcard_mention_user_ids = set(event_template.get("wildcard_mention_user_ids", []))
followed_topic_push_user_ids = set(event_template.get("followed_topic_push_user_ids", []))
followed_topic_email_user_ids = set(event_template.get("followed_topic_email_user_ids", []))
topic_wildcard_mention_in_followed_topic_user_ids = set(
event_template.get("topic_wildcard_mention_in_followed_topic_user_ids", [])
)
stream_wildcard_mention_in_followed_topic_user_ids = set(
event_template.get("stream_wildcard_mention_in_followed_topic_user_ids", [])
)
muted_sender_user_ids = set(event_template.get("muted_sender_user_ids", []))
all_bot_user_ids = set(event_template.get("all_bot_user_ids", []))
disable_external_notifications = event_template.get("disable_external_notifications", False)
user_ids_without_access_to_sender = event_template.get("user_ids_without_access_to_sender", [])
realm_host = event_template.get("realm_host", "")
wide_dict: Dict[str, Any] = event_template["message_dict"]
# Temporary transitional code: Zulip servers that have message
# events in their event queues and upgrade to the new version
# that expects sender_delivery_email in these events will
# throw errors processing events. We can remove this block
# once we don't expect anyone to be directly upgrading from
# 2.0.x to the latest Zulip.
if "sender_delivery_email" not in wide_dict: # nocoverage
wide_dict["sender_delivery_email"] = wide_dict["sender_email"]
sender_id: int = wide_dict["sender_id"]
message_id: int = wide_dict["id"]
recipient_type_name: str = wide_dict["type"]
sending_client: str = wide_dict["client"]
@cache
def get_client_payload(
apply_markdown: bool, client_gravatar: bool, can_access_sender: bool
) -> Dict[str, Any]:
return MessageDict.finalize_payload(
wide_dict,
apply_markdown=apply_markdown,
client_gravatar=client_gravatar,
can_access_sender=can_access_sender,
realm_host=realm_host,
)
# Extra user-specific data to include
extra_user_data: Dict[int, Any] = {}
for user_data in users:
user_profile_id: int = user_data["id"]
flags: Collection[str] = user_data.get("flags", [])
mentioned_user_group_id: Optional[int] = user_data.get("mentioned_user_group_id")
        # If the recipient was offline and the message was a (1:1 or group) direct message
        # to them, or they were @-notified, potentially notify them more immediately.
private_message = recipient_type_name == "private"
user_notifications_data = UserMessageNotificationsData.from_user_id_sets(
user_id=user_profile_id,
flags=flags,
private_message=private_message,
disable_external_notifications=disable_external_notifications,
online_push_user_ids=online_push_user_ids,
dm_mention_push_disabled_user_ids=dm_mention_push_disabled_user_ids,
dm_mention_email_disabled_user_ids=dm_mention_email_disabled_user_ids,
stream_push_user_ids=stream_push_user_ids,
stream_email_user_ids=stream_email_user_ids,
topic_wildcard_mention_user_ids=topic_wildcard_mention_user_ids,
stream_wildcard_mention_user_ids=stream_wildcard_mention_user_ids,
followed_topic_push_user_ids=followed_topic_push_user_ids,
followed_topic_email_user_ids=followed_topic_email_user_ids,
topic_wildcard_mention_in_followed_topic_user_ids=topic_wildcard_mention_in_followed_topic_user_ids,
stream_wildcard_mention_in_followed_topic_user_ids=stream_wildcard_mention_in_followed_topic_user_ids,
muted_sender_user_ids=muted_sender_user_ids,
all_bot_user_ids=all_bot_user_ids,
)
# Calling asdict would be slow, as it does a deep copy; pull
# the attributes out directly and perform a shallow copy, as
# we do intend to adjust the dict.
internal_data = {**vars(user_notifications_data)}
# Remove fields sent through other pipes to save some space.
internal_data.pop("user_id")
internal_data["mentioned_user_group_id"] = mentioned_user_group_id
extra_user_data[user_profile_id] = dict(internal_data=internal_data)
        # If the message wouldn't be notifiable even if the user were idle,
        # then the user shouldn't receive notifications regardless of whether
        # they were online. In that case, we can avoid the more expensive
        # `receiver_is_off_zulip` call and move on to process the next user.
if not user_notifications_data.is_notifiable(acting_user_id=sender_id, idle=True):
continue
idle = receiver_is_off_zulip(user_profile_id) or (user_profile_id in presence_idle_user_ids)
extra_user_data[user_profile_id]["internal_data"].update(
maybe_enqueue_notifications(
user_notifications_data=user_notifications_data,
acting_user_id=sender_id,
message_id=message_id,
mentioned_user_group_id=mentioned_user_group_id,
idle=idle,
already_notified={},
)
)
for client_data in send_to_clients.values():
client = client_data["client"]
flags = client_data["flags"]
is_sender: bool = client_data.get("is_sender", False)
extra_data: Optional[Mapping[str, bool]] = extra_user_data.get(client.user_profile_id, None)
if not client.accepts_messages():
# The actual check is the accepts_event() check below;
# this line is just an optimization to avoid copying
# message data unnecessarily
continue
can_access_sender = client.user_profile_id not in user_ids_without_access_to_sender
message_dict = get_client_payload(
client.apply_markdown, client.client_gravatar, can_access_sender
)
# Make sure Zephyr mirroring bots know whether stream is invite-only
if "mirror" in client.client_type_name and event_template.get("invite_only"):
message_dict = message_dict.copy()
message_dict["invite_only_stream"] = True
user_event: Dict[str, Any] = dict(type="message", message=message_dict, flags=flags)
if extra_data is not None:
user_event.update(extra_data)
if is_sender:
local_message_id = event_template.get("local_id", None)
if local_message_id is not None:
user_event["local_message_id"] = local_message_id
if not client.accepts_event(user_event):
continue
# The below prevents (Zephyr) mirroring loops.
if "mirror" in sending_client and sending_client.lower() == client.client_type_name.lower():
continue
client.add_event(user_event) |
Given a successful authentication for an email address (i.e. we've
confirmed the user controls the email address) that does not
currently have a Zulip account in the target realm, send them to
the registration flow or the "continue to registration" flow,
depending on is_signup, whether the email address can join the
organization (checked in HomepageForm), and similar details. | def maybe_send_to_registration(
request: HttpRequest,
email: str,
full_name: str = "",
mobile_flow_otp: Optional[str] = None,
desktop_flow_otp: Optional[str] = None,
is_signup: bool = False,
multiuse_object_key: str = "",
full_name_validated: bool = False,
params_to_store_in_authenticated_session: Optional[Dict[str, str]] = None,
) -> HttpResponse:
"""Given a successful authentication for an email address (i.e. we've
confirmed the user controls the email address) that does not
currently have a Zulip account in the target realm, send them to
the registration flow or the "continue to registration" flow,
depending on is_signup, whether the email address can join the
organization (checked in HomepageForm), and similar details.
"""
# In the desktop and mobile registration flows, the sign up
# happens in the browser so the user can use their
# already-logged-in social accounts. Then at the end, with the
# user account created, we pass the appropriate data to the app
# via e.g. a `zulip://` redirect. We store the OTP keys for the
# mobile/desktop flow in the session with 1-hour expiry, because
# we want this configuration of having a successful authentication
# result in being logged into the app to persist if the user makes
# mistakes while trying to authenticate (E.g. clicks the wrong
# Google account, hits back, etc.) during a given browser session,
# rather than just logging into the web app in the target browser.
#
# We can't use our usual pre-account-creation state storage
# approach of putting something in PreregistrationUser, because
# that would apply to future registration attempts on other
# devices, e.g. just creating an account on the web on their laptop.
assert not (mobile_flow_otp and desktop_flow_otp)
if mobile_flow_otp:
set_expirable_session_var(
request.session,
"registration_mobile_flow_otp",
mobile_flow_otp,
expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
)
elif desktop_flow_otp:
set_expirable_session_var(
request.session,
"registration_desktop_flow_otp",
desktop_flow_otp,
expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
)
if params_to_store_in_authenticated_session:
set_expirable_session_var(
request.session,
"registration_desktop_flow_params_to_store_in_authenticated_session",
orjson.dumps(params_to_store_in_authenticated_session).decode(),
expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
)
try:
# TODO: This should use get_realm_from_request, but a bunch of tests
# rely on mocking get_subdomain here, so they'll need to be tweaked first.
realm: Optional[Realm] = get_realm(get_subdomain(request))
except Realm.DoesNotExist:
realm = None
multiuse_obj: Optional[MultiuseInvite] = None
from_multiuse_invite = False
if multiuse_object_key:
from_multiuse_invite = True
try:
confirmation_obj = get_object_from_key(
multiuse_object_key, [Confirmation.MULTIUSE_INVITE], mark_object_used=False
)
except ConfirmationKeyError as exception:
return render_confirmation_key_error(request, exception)
assert isinstance(confirmation_obj, MultiuseInvite)
multiuse_obj = confirmation_obj
if realm != multiuse_obj.realm:
return render(request, "confirmation/link_does_not_exist.html", status=404)
invited_as = multiuse_obj.invited_as
else:
invited_as = PreregistrationUser.INVITE_AS["MEMBER"]
form = HomepageForm(
{"email": email},
realm=realm,
from_multiuse_invite=from_multiuse_invite,
invited_as=invited_as,
)
if form.is_valid():
# If the email address is allowed to sign up for an account in
# this organization, construct a PreregistrationUser and
# Confirmation objects, and then send the user to account
# creation or confirm-continue-registration depending on
# is_signup.
try:
# If there's an existing, valid PreregistrationUser for this
# user, we want to fetch it since some values from it will be used
# as defaults for creating the signed up user.
existing_prereg_user = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(email__iexact=email, realm=realm)
).latest("invited_at")
except PreregistrationUser.DoesNotExist:
existing_prereg_user = None
        # The full_name passed here as an argument should take precedence
        # over the defaults with which the existing PreregistrationUser that
        # we've just fetched was created.
prereg_user = create_preregistration_user(
email,
realm,
password_required=False,
full_name=full_name,
full_name_validated=full_name_validated,
multiuse_invite=multiuse_obj,
)
streams_to_subscribe = None
if multiuse_obj is not None:
# If the user came here explicitly via a multiuse invite link, then
# we use the defaults implied by the invite.
streams_to_subscribe = list(multiuse_obj.streams.all())
elif existing_prereg_user:
# Otherwise, the user is doing this signup not via any invite link,
# but we can use the pre-existing PreregistrationUser for these values
# since it tells how they were intended to be, when the user was invited.
streams_to_subscribe = list(existing_prereg_user.streams.all())
invited_as = existing_prereg_user.invited_as
if streams_to_subscribe:
prereg_user.streams.set(streams_to_subscribe)
prereg_user.invited_as = invited_as
prereg_user.multiuse_invite = multiuse_obj
prereg_user.save()
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
if is_signup:
return redirect(confirmation_link)
context = {"email": email, "continue_link": confirmation_link, "full_name": full_name}
return render(request, "zerver/confirm_continue_registration.html", context=context)
    # This email address is not allowed to join this organization, so
    # just send the user back to the registration page.
url = reverse("register")
context = login_context(request)
extra_context: Mapping[str, Any] = {
"form": form,
"current_url": lambda: url,
"from_multiuse_invite": from_multiuse_invite,
"multiuse_object_key": multiuse_object_key,
"mobile_flow_otp": mobile_flow_otp,
"desktop_flow_otp": desktop_flow_otp,
}
context.update(extra_context)
return render(request, "zerver/accounts_home.html", context=context) |
Given a successful authentication showing the user controls the given
email address (email) and potentially a UserProfile
object (if the user already has a Zulip account), redirect the
browser to the appropriate place:
* The logged-in app if the user already has a Zulip account and is
trying to log in, potentially to an initial narrow or page that had been
saved in the `redirect_to` parameter.
* The registration form if is_signup was set (i.e. the user is
trying to create a Zulip account)
* A special `confirm_continue_registration.html` "do you want to
register or try another account" if the user doesn't have a
Zulip account but is_signup is False (i.e. the user tried to log in
and then did social authentication selecting an email address that does
not have a Zulip account in this organization).
* A zulip:// URL to send control back to the mobile or desktop apps if they
are doing authentication using the mobile_flow_otp or desktop_flow_otp flow. | def login_or_register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
"""Given a successful authentication showing the user controls given
email address (email) and potentially a UserProfile
object (if the user already has a Zulip account), redirect the
browser to the appropriate place:
* The logged-in app if the user already has a Zulip account and is
trying to log in, potentially to an initial narrow or page that had been
saved in the `redirect_to` parameter.
* The registration form if is_signup was set (i.e. the user is
trying to create a Zulip account)
* A special `confirm_continue_registration.html` "do you want to
register or try another account" if the user doesn't have a
Zulip account but is_signup is False (i.e. the user tried to log in
and then did social authentication selecting an email address that does
not have a Zulip account in this organization).
* A zulip:// URL to send control back to the mobile or desktop apps if they
are doing authentication using the mobile_flow_otp or desktop_flow_otp flow.
"""
params_to_store_in_authenticated_session = result.data_dict.get(
"params_to_store_in_authenticated_session", {}
)
mobile_flow_otp = result.data_dict.get("mobile_flow_otp")
desktop_flow_otp = result.data_dict.get("desktop_flow_otp")
if not mobile_flow_otp and not desktop_flow_otp:
# We don't want to store anything in the browser session if we're doing
# mobile or desktop flows, since that's just an intermediary step and the
# browser session is not to be used any further. Storing extra data in
# it just risks bugs or leaking the data.
for key, value in params_to_store_in_authenticated_session.items():
request.session[key] = value
user_profile = result.user_profile
if user_profile is None or user_profile.is_mirror_dummy:
return register_remote_user(request, result)
# Otherwise, the user has successfully authenticated to an
    # account, and we need to do the right thing depending on whether
    # they're using the mobile OTP flow or want a browser session.
is_realm_creation = result.data_dict.get("is_realm_creation")
if mobile_flow_otp is not None:
return finish_mobile_flow(request, user_profile, mobile_flow_otp)
elif desktop_flow_otp is not None:
return finish_desktop_flow(
request, user_profile, desktop_flow_otp, params_to_store_in_authenticated_session
)
do_login(request, user_profile)
redirect_to = result.data_dict.get("redirect_to", "")
if is_realm_creation is not None and settings.BILLING_ENABLED:
from corporate.lib.stripe import is_free_trial_offer_enabled
if is_free_trial_offer_enabled(False):
redirect_to = reverse("upgrade_page")
redirect_to = get_safe_redirect_to(redirect_to, user_profile.realm.uri)
return HttpResponseRedirect(redirect_to) |
The desktop OTP flow returns to the app (through the clipboard)
a token that allows obtaining (through log_into_subdomain) a logged-in session
for the user account we authenticated in this flow.
The token can only be used once and within ExternalAuthResult.LOGIN_KEY_EXPIRATION_SECONDS
of being created, as nothing more powerful is needed for the desktop flow
and this ensures the key can only be used for completing this authentication attempt. | def finish_desktop_flow(
request: HttpRequest,
user_profile: UserProfile,
otp: str,
params_to_store_in_authenticated_session: Optional[Dict[str, str]] = None,
) -> HttpResponse:
"""
    The desktop OTP flow returns to the app (through the clipboard)
    a token that allows obtaining (through log_into_subdomain) a logged-in session
for the user account we authenticated in this flow.
The token can only be used once and within ExternalAuthResult.LOGIN_KEY_EXPIRATION_SECONDS
of being created, as nothing more powerful is needed for the desktop flow
and this ensures the key can only be used for completing this authentication attempt.
"""
data_dict = None
if params_to_store_in_authenticated_session:
data_dict = ExternalAuthDataDict(
params_to_store_in_authenticated_session=params_to_store_in_authenticated_session
)
result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
token = result.store_data()
key = bytes.fromhex(otp)
iv = secrets.token_bytes(12)
desktop_data = (iv + AESGCM(key).encrypt(iv, token.encode(), b"")).hex()
context = {
"desktop_data": desktop_data,
"browser_url": reverse("login_page", kwargs={"template_name": "zerver/login.html"}),
"realm_icon_url": realm_icon_url(user_profile.realm),
}
return TemplateResponse(request, "zerver/desktop_redirect.html", context=context) |
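On the receiving end, the desktop app can invert this encryption with the same one-time key; a minimal sketch, assuming the app still holds the hex `otp` it originally generated:

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

def decrypt_desktop_data(desktop_data: str, otp: str) -> str:
    # Mirror of the encryption above: the first 12 bytes are the IV and
    # the remainder is the ciphertext plus the GCM authentication tag.
    raw = bytes.fromhex(desktop_data)
    iv, ciphertext = raw[:12], raw[12:]
    return AESGCM(bytes.fromhex(otp)).decrypt(iv, ciphertext, b"").decode()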
The purpose of this endpoint is to provide an initial step in the flow
on which we can handle the special behavior for the desktop app.
/accounts/login/sso may have Apache intercepting requests to it
to do authentication, so we need this additional endpoint. | def start_remote_user_sso(request: HttpRequest) -> HttpResponse:
"""
The purpose of this endpoint is to provide an initial step in the flow
on which we can handle the special behavior for the desktop app.
/accounts/login/sso may have Apache intercepting requests to it
to do authentication, so we need this additional endpoint.
"""
query = request.META["QUERY_STRING"]
return redirect(append_url_query_string(reverse(remote_user_sso), query)) |
Given a valid authentication token (generated by
redirect_and_log_into_subdomain called on auth.zulip.example.com),
call login_or_register_remote_user, passing all the authentication
result data that has been stored in Redis, associated with this token. | def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
"""Given a valid authentication token (generated by
redirect_and_log_into_subdomain called on auth.zulip.example.com),
call login_or_register_remote_user, passing all the authentication
result data that has been stored in Redis, associated with this token.
"""
# The tokens are intended to have the same format as API keys.
if not has_api_key_format(token):
logging.warning("log_into_subdomain: Malformed token given: %s", token)
return HttpResponse(status=400)
try:
result = ExternalAuthResult(request=request, login_token=token)
except ExternalAuthResult.InvalidTokenError:
logging.warning("log_into_subdomain: Invalid token given: %s", token)
return render(request, "zerver/log_into_subdomain_token_invalid.html", status=400)
subdomain = get_subdomain(request)
if result.data_dict["subdomain"] != subdomain:
raise JsonableError(_("Invalid subdomain"))
return login_or_register_remote_user(request, result) |
Returns which authentication methods are enabled on the server | def get_auth_backends_data(request: HttpRequest) -> Dict[str, Any]:
"""Returns which authentication methods are enabled on the server"""
subdomain = get_subdomain(request)
try:
realm = Realm.objects.get(string_id=subdomain)
except Realm.DoesNotExist:
# If not the root subdomain, this is an error
if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
raise JsonableError(_("Invalid subdomain"))
        # With the root subdomain, whether it's an error depends on
        # whether ROOT_DOMAIN_LANDING_PAGE (which indicates whether
# there are some realms without subdomains on this server)
# is set.
if settings.ROOT_DOMAIN_LANDING_PAGE:
raise JsonableError(_("Subdomain required"))
else:
realm = None
result = {
"password": password_auth_enabled(realm),
}
for auth_backend_name in AUTH_BACKEND_NAME_MAP:
key = auth_backend_name.lower()
result[key] = auth_enabled_helper([auth_backend_name], realm)
return result |
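The resulting payload might look roughly like this (which backends appear depends on AUTH_BACKEND_NAME_MAP and server configuration; the values here are hypothetical):

{
    "password": True,
    "dev": False,
    "email": True,
    "github": True,
    "google": False,
    "ldap": False,
    "saml": False,
}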
This is the view function for generating our SP metadata
for SAML authentication. It's meant for helping check the correctness
of the configuration when setting up SAML, or for obtaining the XML metadata
if the IdP requires it.
Taken from https://python-social-auth.readthedocs.io/en/latest/backends/saml.html | def saml_sp_metadata(request: HttpRequest) -> HttpResponse: # nocoverage
"""
This is the view function for generating our SP metadata
for SAML authentication. It's meant for helping check the correctness
of the configuration when setting up SAML, or for obtaining the XML metadata
if the IdP requires it.
Taken from https://python-social-auth.readthedocs.io/en/latest/backends/saml.html
"""
if not saml_auth_enabled():
return config_error(request, "saml")
complete_url = reverse("social:complete", args=("saml",))
saml_backend = load_backend(load_strategy(request), "saml", complete_url)
metadata, errors = saml_backend.generate_metadata_xml()
if not errors:
return HttpResponse(content=metadata, content_type="text/xml")
return HttpResponseServerError(content=", ".join(errors)) |
This function implements Zulip's support for a mini Zulip window
that just handles messages from a single narrow | def detect_narrowed_window(
request: HttpRequest, user_profile: Optional[UserProfile]
) -> Tuple[List[NarrowTerm], Optional[Stream], Optional[str]]:
"""This function implements Zulip's support for a mini Zulip window
that just handles messages from a single narrow"""
if user_profile is None:
return [], None, None
narrow: List[NarrowTerm] = []
narrow_stream = None
narrow_topic_name = request.GET.get("topic")
if "stream" in request.GET:
try:
# TODO: We should support stream IDs and direct messages here as well.
narrow_stream_name = request.GET.get("stream")
assert narrow_stream_name is not None
(narrow_stream, ignored_sub) = access_stream_by_name(user_profile, narrow_stream_name)
narrow = [NarrowTerm(operator="stream", operand=narrow_stream.name)]
except Exception:
logging.warning("Invalid narrow requested, ignoring", extra=dict(request=request))
if narrow_stream is not None and narrow_topic_name is not None:
narrow.append(NarrowTerm(operator="topic", operand=narrow_topic_name))
return narrow, narrow_stream, narrow_topic_name |
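For example, a request like `/?stream=design&topic=colors` would, assuming the user can access that stream, yield narrow terms roughly like:

narrow = [
    NarrowTerm(operator="stream", operand="design"),
    NarrowTerm(operator="topic", operand="colors"),
]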
Reset our don't-spam-users-with-email counter now that the
user has logged in. | def update_last_reminder(user_profile: Optional[UserProfile]) -> None:
"""Reset our don't-spam-users-with-email counter since the
user has since logged in
"""
if user_profile is None:
return
if user_profile.last_reminder is not None: # nocoverage
# TODO: Look into the history of last_reminder; we may have
# eliminated that as a useful concept for non-bot users.
user_profile.last_reminder = None
user_profile.save(update_fields=["last_reminder"]) |
This fills out the message edit history entries from the database
to have the current topic + content as of that time, plus data on
whatever changed. This makes it much simpler to do future
processing. | def fill_edit_history_entries(
raw_edit_history: List[EditHistoryEvent], message: Message
) -> List[FormattedEditHistoryEvent]:
"""
This fills out the message edit history entries from the database
to have the current topic + content as of that time, plus data on
whatever changed. This makes it much simpler to do future
processing.
"""
prev_content = message.content
prev_rendered_content = message.rendered_content
prev_topic_name = message.topic_name()
# Make sure that the latest entry in the history corresponds to the
# message's last edit time
if len(raw_edit_history) > 0:
assert message.last_edit_time is not None
assert datetime_to_timestamp(message.last_edit_time) == raw_edit_history[0]["timestamp"]
formatted_edit_history: List[FormattedEditHistoryEvent] = []
for edit_history_event in raw_edit_history:
formatted_entry: FormattedEditHistoryEvent = {
"content": prev_content,
"rendered_content": prev_rendered_content,
"timestamp": edit_history_event["timestamp"],
"topic": prev_topic_name,
"user_id": edit_history_event["user_id"],
}
if "prev_topic" in edit_history_event:
prev_topic_name = edit_history_event["prev_topic"]
formatted_entry["prev_topic"] = prev_topic_name
# Fill current values for content/rendered_content.
if "prev_content" in edit_history_event:
formatted_entry["prev_content"] = edit_history_event["prev_content"]
prev_content = formatted_entry["prev_content"]
formatted_entry["prev_rendered_content"] = edit_history_event["prev_rendered_content"]
prev_rendered_content = formatted_entry["prev_rendered_content"]
assert prev_rendered_content is not None
rendered_content = formatted_entry["rendered_content"]
assert rendered_content is not None
formatted_entry["content_html_diff"] = highlight_html_differences(
prev_rendered_content, rendered_content, message.id
)
if "prev_stream" in edit_history_event:
formatted_entry["prev_stream"] = edit_history_event["prev_stream"]
formatted_entry["stream"] = edit_history_event["stream"]
formatted_edit_history.append(formatted_entry)
initial_message_history: FormattedEditHistoryEvent = {
"content": prev_content,
"rendered_content": prev_rendered_content,
"timestamp": datetime_to_timestamp(message.date_sent),
"topic": prev_topic_name,
"user_id": message.sender_id,
}
formatted_edit_history.append(initial_message_history)
return formatted_edit_history |
This endpoint is used by the web app running in the browser. We serve HTML
error pages, and in case of success a simple redirect to the remote billing
access link received from the bouncer. | def self_hosting_auth_redirect_endpoint(
request: HttpRequest,
*,
next_page: Optional[str] = None,
) -> HttpResponse:
"""
This endpoint is used by the web app running in the browser. We serve HTML
error pages, and in case of success a simple redirect to the remote billing
access link received from the bouncer.
"""
user = request.user
assert user.is_authenticated
assert isinstance(user, UserProfile)
try:
redirect_url = self_hosting_auth_view_common(request, user, next_page)
except ResourceNotFoundError:
return render(request, "404.html", status=404)
except RemoteRealmServerMismatchError:
return render(request, "zerver/remote_realm_server_mismatch_error.html", status=403)
return HttpResponseRedirect(redirect_url) |
This endpoint is used by the desktop application. It makes an API request here,
expecting a JSON response with either the billing access link, or appropriate
error information. | def self_hosting_auth_json_endpoint(
request: HttpRequest,
user_profile: UserProfile,
*,
next_page: Optional[str] = None,
) -> HttpResponse:
"""
This endpoint is used by the desktop application. It makes an API request here,
expecting a JSON response with either the billing access link, or appropriate
error information.
"""
redirect_url = self_hosting_auth_view_common(request, user_profile, next_page)
return json_success(request, data={"billing_access_url": redirect_url}) |
The purpose of this little endpoint is primarily to take a GET
request to a long URL containing a confirmation key, and render
a page that will via JavaScript immediately do a POST request to
/accounts/register, so that the user can create their account on
a page with a cleaner URL (and with the browser security and UX
benefits of an HTTP POST having generated the page).
The only thing it does before rendering that page is to check
the validity of the confirmation link. This is redundant with a
similar check in accounts_register, but it provides a slightly nicer
user-facing error handling experience if the URL you visited is
displayed in the browser. (E.g. you can see that you
accidentally added an extra character after pasting). | def get_prereg_key_and_redirect(
request: HttpRequest, confirmation_key: str, full_name: Optional[str] = REQ(default=None)
) -> HttpResponse:
"""
The purpose of this little endpoint is primarily to take a GET
request to a long URL containing a confirmation key, and render
a page that will via JavaScript immediately do a POST request to
/accounts/register, so that the user can create their account on
a page with a cleaner URL (and with the browser security and UX
benefits of an HTTP POST having generated the page).
The only thing it does before rendering that page is to check
the validity of the confirmation link. This is redundant with a
similar check in accounts_register, but it provides a slightly nicer
user-facing error handling experience if the URL you visited is
    displayed in the browser. (E.g. you can see that you
    accidentally added an extra character after pasting).
"""
try:
prereg_object, realm_creation = check_prereg_key(request, confirmation_key)
except ConfirmationKeyError as e:
return render_confirmation_key_error(request, e)
registration_url = reverse("accounts_register")
if realm_creation:
registration_url = reverse("realm_register")
return render(
request,
"confirmation/confirm_preregistrationuser.html",
context={
"key": confirmation_key,
"full_name": full_name,
"registration_url": registration_url,
},
) |
Checks if the Confirmation key is valid, returning the PreregistrationUser or
PreregistrationRealm object in case of success and raising an appropriate
ConfirmationKeyError otherwise. | def check_prereg_key(
request: HttpRequest, confirmation_key: str
) -> Tuple[Union[PreregistrationUser, PreregistrationRealm], bool]:
"""
Checks if the Confirmation key is valid, returning the PreregistrationUser or
PreregistrationRealm object in case of success and raising an appropriate
ConfirmationKeyError otherwise.
"""
confirmation_types = [
Confirmation.USER_REGISTRATION,
Confirmation.INVITATION,
Confirmation.REALM_CREATION,
]
prereg_object = get_object_from_key(
confirmation_key, confirmation_types, mark_object_used=False
)
assert isinstance(prereg_object, (PreregistrationRealm, PreregistrationUser))
confirmation_obj = prereg_object.confirmation.get()
realm_creation = confirmation_obj.type == Confirmation.REALM_CREATION
if realm_creation:
assert isinstance(prereg_object, PreregistrationRealm)
# Defensive assert to make sure no mix-up in how .status is set leading to reuse
# of a PreregistrationRealm object.
assert prereg_object.created_realm is None
else:
assert isinstance(prereg_object, PreregistrationUser)
# Defensive assert to make sure no mix-up in how .status is set leading to reuse
# of a PreregistrationUser object.
assert prereg_object.created_user is None
return prereg_object, realm_creation |
Create and return a confirmation-link (activation) URL for the provided
email address so the user can complete their registration. | def prepare_activation_url(
email: str,
session: SessionBase,
*,
realm: Optional[Realm],
streams: Optional[Iterable[Stream]] = None,
invited_as: Optional[int] = None,
multiuse_invite: Optional[MultiuseInvite] = None,
) -> str:
"""
    Create and return a confirmation-link (activation) URL for the provided
    email address so the user can complete their registration.
"""
prereg_user = create_preregistration_user(email, realm, multiuse_invite=multiuse_invite)
if streams is not None:
prereg_user.streams.set(streams)
if invited_as is not None:
prereg_user.invited_as = invited_as
prereg_user.save()
confirmation_type = Confirmation.USER_REGISTRATION
activation_url = create_confirmation_link(prereg_user, confirmation_type)
return activation_url |
Returns whether the target user is either the current user or a bot
owned by the current user | def user_directly_controls_user(user_profile: UserProfile, target: UserProfile) -> bool:
"""Returns whether the target user is either the current user or a bot
owned by the current user"""
if user_profile == target:
return True
if target.is_bot and target.bot_owner_id == user_profile.id:
return True
return False |
This takes a series of thunks and calls them in sequence, and it
smushes all the json results into a single response when
everything goes right. (This helps clients avoid extra latency
hops.) It rolls back the transaction when things go wrong in any
one of the composed methods. | def compose_views(thunks: List[Callable[[], HttpResponse]]) -> Dict[str, Any]:
"""
This takes a series of thunks and calls them in sequence, and it
smushes all the json results into a single response when
everything goes right. (This helps clients avoid extra latency
hops.) It rolls back the transaction when things go wrong in any
one of the composed methods.
"""
json_dict: Dict[str, Any] = {}
with transaction.atomic():
for thunk in thunks:
response = thunk()
json_dict.update(orjson.loads(response.content))
return json_dict |
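A usage sketch, assuming `request`, `user_profile`, and the stream lists come from an enclosing bulk-update view (the thunk names are illustrative):

def thunk_add() -> HttpResponse:
    return add_subscriptions_backend(request, user_profile, streams_raw=to_add)

def thunk_remove() -> HttpResponse:
    return remove_subscriptions_backend(request, user_profile, streams_raw=to_remove)

# Either both mutations commit and their JSON bodies are merged into one
# payload, or the shared transaction rolls back.
data = compose_views([thunk_add, thunk_remove])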
If you are subscribing lots of new users to new streams,
this function can be pretty expensive in terms of generating
lots of queries and sending lots of messages. We isolate
the code partly to make it easier to test things like
excessive query counts by mocking this function so that it
doesn't drown out query counts from other code. | def send_messages_for_new_subscribers(
user_profile: UserProfile,
subscribers: Set[UserProfile],
new_subscriptions: Dict[str, List[str]],
email_to_user_profile: Dict[str, UserProfile],
created_streams: List[Stream],
announce: bool,
) -> None:
"""
If you are subscribing lots of new users to new streams,
this function can be pretty expensive in terms of generating
lots of queries and sending lots of messages. We isolate
the code partly to make it easier to test things like
excessive query counts by mocking this function so that it
doesn't drown out query counts from other code.
"""
bots = {subscriber.email: subscriber.is_bot for subscriber in subscribers}
newly_created_stream_names = {s.name for s in created_streams}
realm = user_profile.realm
mention_backend = MentionBackend(realm.id)
# Inform the user if someone else subscribed them to stuff,
# or if a new stream was created with the "announce" option.
notifications = []
if new_subscriptions:
for email, subscribed_stream_names in new_subscriptions.items():
if email == user_profile.email:
# Don't send a Zulip if you invited yourself.
continue
if bots[email]:
# Don't send invitation Zulips to bots
continue
# For each user, we notify them about newly subscribed streams, except for
# streams that were newly created.
notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names
if not notify_stream_names:
continue
recipient_user = email_to_user_profile[email]
sender = get_system_bot(settings.NOTIFICATION_BOT, recipient_user.realm_id)
msg = you_were_just_subscribed_message(
acting_user=user_profile,
recipient_user=recipient_user,
stream_names=notify_stream_names,
)
notifications.append(
internal_prep_private_message(
sender=sender,
recipient_user=recipient_user,
content=msg,
mention_backend=mention_backend,
)
)
if announce and len(created_streams) > 0:
new_stream_announcements_stream = user_profile.realm.get_new_stream_announcements_stream()
if new_stream_announcements_stream is not None:
with override_language(new_stream_announcements_stream.realm.default_language):
if len(created_streams) > 1:
content = _("{user_name} created the following channels: {new_channels}.")
else:
content = _("{user_name} created a new channel {new_channels}.")
topic_name = _("new channels")
content = content.format(
user_name=silent_mention_syntax_for_user(user_profile),
new_channels=", ".join(f"#**{s.name}**" for s in created_streams),
)
sender = get_system_bot(
settings.NOTIFICATION_BOT, new_stream_announcements_stream.realm_id
)
notifications.append(
internal_prep_stream_message(
sender=sender,
stream=new_stream_announcements_stream,
topic_name=topic_name,
content=content,
),
)
if not user_profile.realm.is_zephyr_mirror_realm and len(created_streams) > 0:
sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
for stream in created_streams:
with override_language(stream.realm.default_language):
if stream.description == "":
stream_description = "*" + _("No description.") + "*"
else:
stream_description = stream.description
notifications.append(
internal_prep_stream_message(
sender=sender,
stream=stream,
topic_name=str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC_NAME),
content=_(
"**{policy}** channel created by {user_name}. **Description:**"
).format(
user_name=silent_mention_syntax_for_user(user_profile),
policy=get_stream_permission_policy_name(
invite_only=stream.invite_only,
history_public_to_subscribers=stream.history_public_to_subscribers,
is_web_public=stream.is_web_public,
),
)
+ f"\n```` quote\n{stream_description}\n````",
),
)
if len(notifications) > 0:
do_send_messages(notifications, mark_as_read=[user_profile.id]) |
This is the entry point to changing subscription properties. This
is a bulk endpoint: requesters always provide a subscription_data
list containing dictionaries for each stream of interest.
Requests are of the form:
[{"stream_id": "1", "property": "is_muted", "value": False},
{"stream_id": "1", "property": "color", "value": "#c2c2c2"}] | def update_subscription_properties_backend(
request: HttpRequest,
user_profile: UserProfile,
subscription_data: List[Dict[str, Any]] = REQ(
json_validator=check_list(
check_dict(
[
("stream_id", check_int),
("property", check_string),
("value", check_union([check_string, check_bool])),
]
),
),
),
) -> HttpResponse:
"""
This is the entry point to changing subscription properties. This
is a bulk endpoint: requesters always provide a subscription_data
list containing dictionaries for each stream of interest.
Requests are of the form:
[{"stream_id": "1", "property": "is_muted", "value": False},
{"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
"""
property_converters = {
"color": check_color,
"in_home_view": check_bool,
"is_muted": check_bool,
"desktop_notifications": check_bool,
"audible_notifications": check_bool,
"push_notifications": check_bool,
"email_notifications": check_bool,
"pin_to_top": check_bool,
"wildcard_mentions_notify": check_bool,
}
for change in subscription_data:
stream_id = change["stream_id"]
property = change["property"]
value = change["value"]
if property not in property_converters:
raise JsonableError(
_("Unknown subscription property: {property}").format(property=property)
)
(stream, sub) = access_stream_by_id(user_profile, stream_id)
if sub is None:
raise JsonableError(
_("Not subscribed to channel ID {channel_id}").format(channel_id=stream_id)
)
try:
value = property_converters[property](property, value)
except ValidationError as error:
raise JsonableError(error.message)
do_change_subscription_property(
user_profile, sub, stream, property, value, acting_user=user_profile
)
return json_success(request) |
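For reference, here is a minimal self-contained sketch of the converter-dispatch pattern the endpoint above relies on; the simplified checker and names are illustrative stand-ins, not Zulip's actual validator API.

from typing import Any, Callable, Dict

def check_bool_simple(name: str, value: Any) -> bool:
    # Reject anything that is not a real boolean, mirroring check_bool's role.
    if not isinstance(value, bool):
        raise ValueError(f"{name} is not a boolean")
    return value

converters: Dict[str, Callable[[str, Any], Any]] = {"is_muted": check_bool_simple}

change = {"stream_id": 1, "property": "is_muted", "value": False}
prop = str(change["property"])
if prop not in converters:
    raise ValueError(f"Unknown subscription property: {prop}")
validated = converters[prop](prop, change["value"])
assert validated is False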
We should return a signed, short-lived URL
that the client can use for native mobile download, rather than serving a redirect. | def serve_file_url_backend(
request: HttpRequest, user_profile: UserProfile, realm_id_str: str, filename: str
) -> HttpResponseBase:
"""
We should return a signed, short-lived URL
that the client can use for native mobile download, rather than serving a redirect.
"""
return serve_file(request, user_profile, realm_id_str, filename, url_only=True) |
Serves avatar images off disk, via nginx (or directly in dev), with no auth.
This is done unauthed because these need to be accessed from HTML
emails, where the client does not have any auth. We rely on the
URL being generated using the AVATAR_SALT secret. | def serve_local_avatar_unauthed(request: HttpRequest, path: str) -> HttpResponseBase:
"""Serves avatar images off disk, via nginx (or directly in dev), with no auth.
This is done unauthed because these need to be accessed from HTML
emails, where the client does not have any auth. We rely on the
URL being generated using the AVATAR_SALT secret.
"""
if settings.LOCAL_AVATARS_DIR is None:
# We do not expect clients to hit this URL when using the S3
# backend; however, there is no reason to not serve the
# redirect to S3 where the content lives.
return redirect(
get_public_upload_root_url() + path + "?" + request.GET.urlencode(), permanent=True
)
local_path = os.path.join(settings.LOCAL_AVATARS_DIR, path)
assert_is_local_storage_path("avatars", local_path)
if not os.path.isfile(local_path):
return HttpResponseNotFound("<p>File not found</p>")
if settings.DEVELOPMENT:
response: HttpResponseBase = FileResponse(open(local_path, "rb")) # noqa: SIM115
else:
response = internal_nginx_redirect(quote(f"/internal/local/user_avatars/{path}"))
# We do _not_ mark the contents as immutable for caching purposes,
# since the path for avatar images is hashed only by their user-id
# and a salt, and as such are reused when a user's avatar is
# updated.
return response |
Accepts an email address or user ID and returns the avatar | def avatar(
request: HttpRequest,
maybe_user_profile: Union[UserProfile, AnonymousUser],
email_or_id: str,
medium: bool = False,
) -> HttpResponse:
"""Accepts an email address or user ID and returns the avatar"""
is_email = False
try:
int(email_or_id)
except ValueError:
is_email = True
if not maybe_user_profile.is_authenticated:
# Allow anonymous access to avatars only if spectators are
# enabled in the organization.
realm = get_valid_realm_from_request(request)
if not realm.allow_web_public_streams_access():
raise MissingAuthenticationError
# We only allow the ID format for accessing a user's avatar
# for spectators. This is mainly for defense in depth, since
# email_address_visibility should mean spectators only
# interact with fake email addresses anyway.
if is_email:
raise MissingAuthenticationError
if settings.RATE_LIMITING:
unique_avatar_key = f"{realm.id}/{email_or_id}/{medium}"
rate_limit_spectator_attachment_access_by_file(unique_avatar_key)
else:
realm = maybe_user_profile.realm
try:
if is_email:
avatar_user_profile = get_user_including_cross_realm(email_or_id, realm)
else:
avatar_user_profile = get_user_by_id_in_realm_including_cross_realm(
int(email_or_id), realm
)
url: Optional[str] = None
if maybe_user_profile.is_authenticated and not check_can_access_user(
avatar_user_profile, maybe_user_profile
):
url = get_avatar_for_inaccessible_user()
else:
# If there is a valid user account passed in, use its avatar
url = avatar_url(avatar_user_profile, medium=medium)
assert url is not None
except UserProfile.DoesNotExist:
# If there is no such user, treat it as a new gravatar
email = email_or_id
avatar_version = 1
url = get_gravatar_url(email, avatar_version, medium)
# We can rely on the URL already having query parameters. Because
# our templates depend on being able to use the ampersand to
# add query parameters to our url, get_avatar_url does '?x=x'
# hacks to prevent us from having to jump through decode/encode hoops.
assert url is not None
url = append_url_query_string(url, request.META["QUERY_STRING"])
return redirect(url) |
The client_gravatar field here is set to True by default assuming that clients
can compute their own gravatars, which saves bandwidth. This is a more important
optimization than it might seem, because gravatar URLs contain MD5 hashes that
compress very poorly compared to other data. | def get_user_data(
user_profile: UserProfile,
include_custom_profile_fields: bool,
client_gravatar: bool,
target_user: Optional[UserProfile] = None,
) -> Dict[str, Any]:
"""
The client_gravatar field here is set to True by default assuming that clients
can compute their own gravatars, which saves bandwidth. This is a more important
optimization than it might seem, because gravatar URLs contain MD5 hashes that
compress very poorly compared to other data.
"""
realm = user_profile.realm
members = get_users_for_api(
realm,
user_profile,
target_user=target_user,
client_gravatar=client_gravatar,
user_avatar_url_field_optional=False,
include_custom_profile_fields=include_custom_profile_fields,
)
if target_user is not None:
data: Dict[str, Any] = {"user": members[target_user.id]}
else:
data = {"members": [members[k] for k in members]}
return data |
This function allows logging in without a password on the Zulip
mobile apps when connecting to a Zulip development environment. It
requires DevAuthBackend to be included in settings.AUTHENTICATION_BACKENDS. | def api_dev_fetch_api_key(request: HttpRequest, username: str = REQ()) -> HttpResponse:
"""This function allows logging in without a password on the Zulip
mobile apps when connecting to a Zulip development environment. It
requires DevAuthBackend to be included in settings.AUTHENTICATION_BACKENDS.
"""
check_dev_auth_backend()
# Django dispatches authenticate() calls to backends by matching keyword
# arguments, so this authentication flow will never invoke the LDAP
# backend; there is no need to check whether the LDAP backend is
# enabled.
validate_login_email(username)
realm = get_realm_from_request(request)
if realm is None:
raise InvalidSubdomainError
return_data: Dict[str, bool] = {}
user_profile = authenticate(dev_auth_username=username, realm=realm, return_data=return_data)
if return_data.get("inactive_realm"):
raise RealmDeactivatedError
if return_data.get("inactive_user"):
raise UserDeactivatedError
if return_data.get("invalid_subdomain"): # nocoverage
raise InvalidSubdomainError
if user_profile is None:
# Since we're not actually checking passwords, this condition
# is when someone attempts to log in with an email address that
# doesn't have an account, i.e., it's definitely an invalid username.
raise AuthenticationFailedError
assert isinstance(user_profile, UserProfile)
do_login(request, user_profile)
api_key = get_api_key(user_profile)
return json_success(
request,
data={"api_key": api_key, "email": user_profile.delivery_email, "user_id": user_profile.id},
) |
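A hedged client-side usage sketch; the endpoint path, development-server port, and account name below are assumptions typical of a Zulip development environment and may differ in yours.

import requests

resp = requests.post(
    "http://localhost:9991/api/v1/dev_fetch_api_key",
    data={"username": "iago@zulip.com"},
)
resp.raise_for_status()
# json_success above returns a payload shaped roughly like:
# {"result": "success", "api_key": "...", "email": "...", "user_id": 5}
print(resp.json()["api_key"])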
Construct a response to a webhook event from a Thinkst canarytoken from
canarytokens.org. Canarytokens from Thinkst's paid product have a different
schema and should use the "thinkst" integration. See linked documentation
below for a schema:
https://help.canary.tools/hc/en-gb/articles/360002426577-How-do-I-configure-notifications-for-a-Generic-Webhook- | def api_canarytoken_webhook(
request: HttpRequest,
user_profile: UserProfile,
*,
message: JsonBodyPayload[WildValue],
user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
"""
Construct a response to a webhook event from a Thinkst canarytoken from
canarytokens.org. Canarytokens from Thinkst's paid product have a different
schema and should use the "thinkst" integration. See linked documentation
below for a schema:
https://help.canary.tools/hc/en-gb/articles/360002426577-How-do-I-configure-notifications-for-a-Generic-Webhook-
"""
topic_name = "canarytoken alert"
body = (
f"**:alert: Canarytoken has been triggered on {message['time'].tame(check_string)}!**\n\n"
f"{message['memo'].tame(check_string)} \n\n"
f"[Manage this canarytoken]({message['manage_url'].tame(check_string)})"
)
if user_specified_topic:
topic_name = user_specified_topic
check_send_webhook_message(request, user_profile, topic_name, body)
return json_success(request) |
The Freshdesk API is currently pretty broken: statuses are customizable
but the API will only tell you the number associated with the status, not
the name. While we engage the Freshdesk developers about exposing this
information through the API, since only FlightCar uses this integration,
hardcode their statuses. | def property_name(property: str, index: int) -> str:
"""The Freshdesk API is currently pretty broken: statuses are customizable
but the API will only tell you the number associated with the status, not
the name. While we engage the Freshdesk developers about exposing this
information through the API, since only FlightCar uses this integration,
hardcode their statuses.
"""
statuses = [
"",
"",
"Open",
"Pending",
"Resolved",
"Closed",
"Waiting on Customer",
"Job Application",
"Monthly",
]
priorities = ["", "Low", "Medium", "High", "Urgent"]
name = ""
if property == "status":
name = statuses[index] if index < len(statuses) else str(index)
elif property == "priority":
name = priorities[index] if index < len(priorities) else str(index)
return name |
These are always of the form "{ticket_action:created}" or
"{status:{from:4,to:6}}". Note the lack of string quoting: this isn't
valid JSON so we have to parse it ourselves. | def parse_freshdesk_event(event_string: str) -> List[str]:
"""These are always of the form "{ticket_action:created}" or
"{status:{from:4,to:6}}". Note the lack of string quoting: this isn't
valid JSON so we have to parse it ourselves.
"""
data = event_string.replace("{", "").replace("}", "").replace(",", ":").split(":")
if len(data) == 2:
# This is a simple ticket action event, like
# {ticket_action:created}.
return data
else:
# This is a property change event, like {status:{from:4,to:6}}. Pull out
# the property, from, and to states.
property, _, from_state, _, to_state = data
return [
property,
property_name(property, int(from_state)),
property_name(property, int(to_state)),
] |
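A small sketch of both event forms flowing through the parser above; the inputs are hypothetical examples in Freshdesk's non-JSON format.

# Simple ticket action: two fields, returned as-is.
assert parse_freshdesk_event("{ticket_action:created}") == ["ticket_action", "created"]

# Property change: braces and commas are stripped, leaving
# ["status", "from", "4", "to", "6"], which property_name() maps
# to the hardcoded human-readable states.
assert parse_freshdesk_event("{status:{from:4,to:6}}") == [
    "status",
    "Resolved",
    "Waiting on Customer",
]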
There are public (visible to customers) and private note types. | def format_freshdesk_note_message(ticket: WildValue, event_info: List[str]) -> str:
"""There are public (visible to customers) and private note types."""
note_type = event_info[1]
content = NOTE_TEMPLATE.format(
name=ticket["requester_name"].tame(check_string),
email=ticket["requester_email"].tame(check_string),
note_type=note_type,
ticket_id=ticket["ticket_id"].tame(check_string),
ticket_url=ticket["ticket_url"].tame(check_string),
)
return content |
Freshdesk will only tell us the first event to match our webhook
configuration, so if we change multiple properties, we only get the before
and after data for the first one. | def format_freshdesk_property_change_message(ticket: WildValue, event_info: List[str]) -> str:
"""Freshdesk will only tell us the first event to match our webhook
configuration, so if we change multiple properties, we only get the before
and after data for the first one.
"""
content = PROPERTY_CHANGE_TEMPLATE.format(
name=ticket["requester_name"].tame(check_string),
email=ticket["requester_email"].tame(check_string),
ticket_id=ticket["ticket_id"].tame(check_string),
ticket_url=ticket["ticket_url"].tame(check_string),
property_name=event_info[0].capitalize(),
old=event_info[1],
new=event_info[2],
)
return content |
They send us the description as HTML. | def format_freshdesk_ticket_creation_message(ticket: WildValue) -> str:
"""They send us the description as HTML."""
cleaned_description = convert_html_to_markdown(ticket["ticket_description"].tame(check_string))
content = TICKET_CREATION_TEMPLATE.format(
name=ticket["requester_name"].tame(check_string),
email=ticket["requester_email"].tame(check_string),
ticket_id=ticket["ticket_id"].tame(check_string),
ticket_url=ticket["ticket_url"].tame(check_string),
description=cleaned_description,
type=ticket["ticket_type"].tame(check_string),
priority=ticket["ticket_priority"].tame(check_string),
status=ticket["ticket_status"].tame(check_string),
)
return content |
GitHub sends the event as an HTTP header. We have our
own Zulip-specific concept of an event that often maps
directly to the X-GitHub-Event header's event, but we sometimes
refine it based on the payload. | def api_github_webhook(
request: HttpRequest,
user_profile: UserProfile,
*,
payload: JsonBodyPayload[WildValue],
branches: Optional[str] = None,
user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
"""
GitHub sends the event as an HTTP header. We have our
own Zulip-specific concept of an event that often maps
directly to the X-GitHub-Event header's event, but we sometimes
refine it based on the payload.
"""
header_event = validate_extract_webhook_http_header(request, "X-GitHub-Event", "GitHub")
event = get_zulip_event_name(header_event, payload, branches)
if event is None:
# This is nothing to worry about--get_zulip_event_name() returns None
# for events that are valid but not yet handled by us.
# See IGNORED_EVENTS, for example.
return json_success(request)
topic_name = get_topic_based_on_type(payload, event)
body_function = EVENT_FUNCTION_MAPPER[event]
helper = Helper(
request=request,
payload=payload,
include_title=user_specified_topic is not None,
)
body = body_function(helper)
check_send_webhook_message(request, user_profile, topic_name, body, event)
return json_success(request) |
Usually, we return an event name that is a key in EVENT_FUNCTION_MAPPER.
We return None for an event that we know we don't want to handle. | def get_zulip_event_name(
header_event: str,
payload: WildValue,
branches: Optional[str],
) -> Optional[str]:
"""
Usually, we return an event name that is a key in EVENT_FUNCTION_MAPPER.
We return None for an event that we know we don't want to handle.
"""
if header_event == "pull_request":
action = payload["action"].tame(check_string)
if action in ("opened", "reopened"):
return "opened_pull_request"
elif action in ("synchronize", "edited"):
return "updated_pull_request"
if action in ("assigned", "unassigned"):
return "assigned_or_unassigned_pull_request"
if action == "closed":
return "closed_pull_request"
if action == "review_requested":
return "pull_request_review_requested"
if action == "ready_for_review":
return "pull_request_ready_for_review"
if action in ("locked", "unlocked"):
return "locked_or_unlocked_pull_request"
if action in ("auto_merge_enabled", "auto_merge_disabled"):
return "pull_request_auto_merge"
if action in IGNORED_PULL_REQUEST_ACTIONS:
return None
elif header_event == "push":
if is_merge_queue_push_event(payload):
return None
if is_commit_push_event(payload):
if branches is not None:
branch = get_branch_name_from_ref(payload["ref"].tame(check_string))
if branches.find(branch) == -1:
return None
return "push_commits"
else:
return "push_tags"
elif header_event == "check_run":
if payload["check_run"]["status"].tame(check_string) != "completed":
return None
return header_event
elif header_event == "team":
action = payload["action"].tame(check_string)
if action == "edited":
return "team"
if action in IGNORED_TEAM_ACTIONS:
# no need to spam our logs, we just haven't implemented it yet
return None
else:
# this means GH has actually added new actions since September 2020,
# so it's a bit more cause for alarm
raise UnsupportedWebhookEventTypeError(f"unsupported team action {action}")
elif header_event == "issues":
action = payload["action"].tame(check_string)
if action in ("labeled", "unlabeled"):
return "issue_labeled_or_unlabeled"
if action in ("milestoned", "demilestoned"):
return "issue_milestoned_or_demilestoned"
else:
return "issues"
elif header_event in EVENT_FUNCTION_MAPPER:
return header_event
elif header_event in IGNORED_EVENTS:
return None
complete_event = "{}:{}".format(
header_event, payload.get("action", "???").tame(check_string)
) # nocoverage
raise UnsupportedWebhookEventTypeError(complete_event) |
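Note that the `branches` filter above is a plain substring test against the comma-separated setting value; a small illustration with hypothetical branch names:

branches = "main,develop"
assert branches.find("main") != -1     # pushes to "main" are kept
assert branches.find("release") == -1  # pushes to "release" are filtered out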
Replace the username of each assignee with their (full) name.
This is a hack-like adaptor so that when assignees are passed to
`get_pull_request_event_message` we can use the assignee's name
and not their username (for more consistency). | def replace_assignees_username_with_name(
assignees: Union[List[WildValue], WildValue],
) -> List[Dict[str, str]]:
"""Replace the username of each assignee with their (full) name.
This is a hack-like adaptor so that when assignees are passed to
`get_pull_request_event_message` we can use the assignee's name
and not their username (for more consistency).
"""
formatted_assignees = []
for assignee in assignees:
formatted_assignee = {}
formatted_assignee["username"] = assignee["name"].tame(check_string)
formatted_assignees.append(formatted_assignee)
return formatted_assignees |
This uses the subject name from Opbeat to make the topic,
and the summary from Opbeat as the message body, with
details about the object mentioned. | def api_opbeat_webhook(
request: HttpRequest,
user_profile: UserProfile,
*,
payload: JsonBodyPayload[WildValue],
) -> HttpResponse:
"""
This uses the subject name from Opbeat to make the topic,
and the summary from Opbeat as the message body, with
details about the object mentioned.
"""
topic_name = payload["title"].tame(check_string)
message = format_object(payload, "base", "")
check_send_webhook_message(request, user_profile, topic_name, message)
return json_success(request) |
Usually, we return an event name that is a key in EVENT_FUNCTION_MAPPER.
We return None for an event that we know we don't want to handle. | def get_zulip_event_name(
header_event: str,
payload: WildValue,
) -> Optional[str]:
"""
Usually, we return an event name that is a key in EVENT_FUNCTION_MAPPER.
We return None for an event that we know we don't want to handle.
"""
if header_event in EVENT_FUNCTION_MAPPER:
return header_event
elif header_event in IGNORED_EVENTS:
return None
raise UnsupportedWebhookEventTypeError(header_event) |
Creates a stat chunk about total occurrences and users affected for the
error.
Example: usersAffected: 2, totalOccurrences: 10
Output: 2 users affected with 10 total occurrences
:param error_dict: The error dictionary containing the error keys and
values
:returns: A message chunk that will be added to the main message | def make_user_stats_chunk(error_dict: WildValue) -> str:
"""Creates a stat chunk about total occurrences and users affected for the
error.
Example: usersAffected: 2, totalOccurrences: 10
Output: 2 users affected with 10 total occurrences
:param error_dict: The error dictionary containing the error keys and
values
:returns: A message chunk that will be added to the main message
"""
users_affected = error_dict["usersAffected"].tame(check_int)
total_occurrences = error_dict["totalOccurrences"].tame(check_int)
# One line is subjectively better than two lines for this.
return f"* {users_affected} users affected with {total_occurrences} total occurrences\n" |
Creates a time message chunk.
Example: firstOccurredOn: "X", lastOccurredOn: "Y"
Output:
First occurred: X
Last occurred: Y
:param error_dict: The error dictionary containing the error keys and
values
:returns: A message chunk that will be added to the main message | def make_time_chunk(error_dict: WildValue) -> str:
"""Creates a time message chunk.
Example: firstOccurredOn: "X", lastOccurredOn: "Y"
Output:
First occurred: X
Last occurred: Y
:param error_dict: The error dictionary containing the error keys and
values
:returns: A message chunk that will be added to the main message
"""
# Make the timestamp more readable to a human.
time_first = parse_time(error_dict["firstOccurredOn"].tame(check_string))
time_last = parse_time(error_dict["lastOccurredOn"].tame(check_string))
# Provide time information about this error.
return f"* **First occurred**: {time_first}\n* **Last occurred**: {time_last}\n" |
Creates a message chunk if a message exists.
Example: message: "This is an example message" returns "Message: This is an
example message". Whereas message: "" returns "".
:param message: The value of message inside of the error dictionary
:returns: A message chunk if there exists an additional message, otherwise
returns an empty string. | def make_message_chunk(message: str) -> str:
"""Creates a message chunk if exists.
Example: message: "This is an example message" returns "Message: This is an
example message". Whereas message: "" returns "".
:param message: The value of message inside of the error dictionary
:returns: A message chunk if there exists an additional message, otherwise
returns an empty string.
"""
# "Message" shouldn't be included if there is none supplied.
return f"* **Message**: {message}\n" if message != "" else "" |
Creates a message chunk that contains the application info and the link
to the Raygun dashboard about the application.
:param app_dict: The application dictionary obtained from the payload
:returns: A message chunk that will be added to the main message | def make_app_info_chunk(app_dict: WildValue) -> str:
"""Creates a message chunk that contains the application info and the link
to the Raygun dashboard about the application.
:param app_dict: The application dictionary obtained from the payload
:returns: A message chunk that will be added to the main message
"""
app_name = app_dict["name"].tame(check_string)
app_url = app_dict["url"].tame(check_string)
return f"* **Application details**: [{app_name}]({app_url})\n" |
Creates a message for a repeating error follow up
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted | def notification_message_follow_up(payload: WildValue) -> str:
"""Creates a message for a repeating error follow up
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted
"""
message = ""
# Link to Raygun about the follow up
followup_link_md = "[follow-up error]({})".format(payload["error"]["url"].tame(check_string))
followup_type = payload["eventType"].tame(check_string)
if followup_type == "HourlyFollowUp":
prefix = "Hourly"
else:
# Cut the "MinuteFollowUp" from the possible event types, then add "
# minute" after that. So prefix for "OneMinuteFollowUp" is "One
# minute", where "FiveMinuteFollowUp" is "Five minute".
prefix = followup_type[: len(followup_type) - 14] + " minute"
message += f"{prefix} {followup_link_md}:\n"
# Get the message of the error.
payload_msg = payload["error"]["message"].tame(check_string)
message += make_message_chunk(payload_msg)
message += make_time_chunk(payload["error"])
message += make_user_stats_chunk(payload["error"])
message += make_app_info_chunk(payload["application"])
return message |
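The prefix slicing above relies on the suffix "MinuteFollowUp" being exactly 14 characters long; a quick illustration:

followup_type = "FiveMinuteFollowUp"
# len("MinuteFollowUp") == 14, so the slice drops exactly that suffix.
prefix = followup_type[: len(followup_type) - 14] + " minute"
assert prefix == "Five minute"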
Creates a message for a new error or reoccurred error
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted | def notification_message_error_occurred(payload: WildValue) -> str:
"""Creates a message for a new error or reoccurred error
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted
"""
message = ""
# Provide a clickable link that goes to Raygun about this error.
error_link_md = "[Error]({})".format(payload["error"]["url"].tame(check_string))
# Stylize the message based on the event type of the error.
if payload["eventType"].tame(check_string) == "NewErrorOccurred":
message += "{}:\n".format(f"New {error_link_md} occurred")
elif payload["eventType"].tame(check_string) == "ErrorReoccurred":
message += "{}:\n".format(f"{error_link_md} reoccurred")
# Get the message of the error. This value can be empty (as in "").
payload_msg = payload["error"]["message"].tame(check_string)
message += make_message_chunk(payload_msg)
message += make_time_chunk(payload["error"])
message += make_user_stats_chunk(payload["error"])
# Only NewErrorOccurred and ErrorReoccurred contain an error instance.
error_instance = payload["error"]["instance"]
# Extract each of the keys and values in error_instance for easier handling.
# Contains list of tags for the error. Can be empty (null)
tags = error_instance["tags"]
# Contains the identity of the affected user at the moment this error
# happened. This surprisingly can be null. Somehow.
affected_user = error_instance["affectedUser"]
# Contains custom data for this particular error (if supplied). Can be
# null.
custom_data = error_instance["customData"]
if tags is not None:
message += "* **Tags**: {}\n".format(", ".join(tags.tame(check_list(check_string))))
if affected_user is not None:
user_uuid = affected_user["UUID"].tame(check_string)
message += f"* **Affected user**: {user_uuid[:6]}...{user_uuid[-5:]}\n"
if custom_data is not None:
# We don't know the keys and values beforehand, so we are forced
# to iterate.
for key in sorted(custom_data.keys()):
message += f"* **{key}**: {custom_data[key].tame(check_anything)}\n"
message += make_app_info_chunk(payload["application"])
return message |
Composes a message that contains information on the error
:param payload: Raygun payload
:return: Returns a response message | def compose_notification_message(payload: WildValue) -> str:
"""Composes a message that contains information on the error
:param payload: Raygun payload
:return: Returns a response message
"""
# Get the event type of the error. This can be "NewErrorOccurred",
# "ErrorReoccurred", "OneMinuteFollowUp", "FiveMinuteFollowUp", ...,
# "HourlyFollowUp" for notification error.
event_type = payload["eventType"].tame(check_string)
# "NewErrorOccurred" and "ErrorReoccurred" contain error instance
# information, meaning that it has payload['error']['instance']. The other
# event type (the follow ups) doesn't have this instance.
# We now split this main function again into two functions. One is for
# "NewErrorOccurred" and "ErrorReoccurred", and one is for the rest. Both
# functions will return a text message that is formatted for the chat.
if event_type in ("NewErrorOccurred", "ErrorReoccurred"):
return notification_message_error_occurred(payload)
elif "FollowUp" in event_type:
return notification_message_follow_up(payload)
else:
raise UnsupportedWebhookEventTypeError(event_type) |
Creates a message from an action that is being taken on an error
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted | def activity_message(payload: WildValue) -> str:
"""Creates a message from an activity that is being taken for an error
:param payload: Raygun payload
:return: Returns the message, somewhat beautifully formatted
"""
message = ""
error_link_md = "[Error]({})".format(payload["error"]["url"].tame(check_string))
event_type = payload["eventType"].tame(check_string)
user = payload["error"]["user"].tame(check_string)
if event_type == "StatusChanged":
error_status = payload["error"]["status"].tame(check_string)
message += f"{error_link_md} status changed to **{error_status}** by {user}:\n"
elif event_type == "CommentAdded":
comment = payload["error"]["comment"].tame(check_string)
message += f"{user} commented on {error_link_md}:\n\n``` quote\n{comment}\n```\n"
elif event_type == "AssignedToUser":
assigned_to = payload["error"]["assignedTo"].tame(check_string)
message += f"{user} assigned {error_link_md} to {assigned_to}:\n"
message += "* **Timestamp**: {}\n".format(
parse_time(payload["error"]["activityDate"].tame(check_string))
)
message += make_app_info_chunk(payload["application"])
return message |
Composes a message that describes an action being taken on an error,
such as commenting, assigning the error to a user, ignoring it, etc.
:param payload: Raygun payload
:return: Returns a response message | def compose_activity_message(payload: WildValue) -> str:
"""Composes a message that contains an activity that is being taken to
an error, such as commenting, assigning an error to a user, ignoring the
error, etc.
:param payload: Raygun payload
:return: Returns a response message
"""
event_type = payload["eventType"].tame(check_string)
# Activity is separated into three main categories: status changes
# (ignored, resolved), an error being assigned to a user, and a comment
# being added to an error.
# They are all almost identical, and the only differences between them
# are the keys at line 9 (check fixtures). So there's no need to split
# the function like the notification one.
if event_type in ("StatusChanged", "AssignedToUser", "CommentAdded"):
return activity_message(payload)
else:
raise UnsupportedWebhookEventTypeError(event_type) |
Parses and returns the timestamp provided
:param timestamp: The timestamp provided by the payload
:returns: A string containing the time | def parse_time(timestamp: str) -> str:
"""Parses and returns the timestamp provided
:param timestamp: The timestamp provided by the payload
:returns: A string containing the time
"""
# Raygun provides two timestamp formats: one with a Z at the end,
# and one without the Z.
format = "%Y-%m-%dT%H:%M:%S"
format += "Z" if timestamp[-1:] == "Z" else ""
parsed_time = time.strftime("%c", time.strptime(timestamp, format))
return parsed_time |
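A usage sketch of the two accepted timestamp variants; `%c` rendering is locale-dependent, so the exact output strings will vary.

parse_time("2024-01-15T10:30:00Z")  # variant with the trailing Z
parse_time("2024-01-15T10:30:00")   # variant without the Z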
Handle either an exception type event or a message type event payload. | def handle_event_payload(event: Dict[str, Any]) -> Tuple[str, str]:
"""Handle either an exception type event or a message type event payload."""
topic_name = event["title"]
platform_name = event["platform"]
syntax_highlight_as = syntax_highlight_as_map.get(platform_name, "")
if syntax_highlight_as == "": # nocoverage
logging.info("Unknown Sentry platform: %s", platform_name)
# We shouldn't support the officially deprecated Raven series of
# Python SDKs.
if platform_name == "python" and int(event["version"]) < 7 and not is_sample_event(event):
# The sample event is still an old "version" -- accept it even
# though we don't accept events from the old Python SDK.
raise UnsupportedWebhookEventTypeError("Raven SDK")
context = {
"title": topic_name,
"level": event["level"],
"web_link": event["web_url"],
"datetime": event["datetime"].split(".")[0].replace("T", " "),
}
if "exception" in event:
# The event was triggered by a sentry.capture_exception() call
# (in the Python Sentry SDK) or something similar.
filename = event["metadata"].get("filename", None)
stacktrace = None
for value in reversed(event["exception"]["values"]):
if "stacktrace" in value:
stacktrace = value["stacktrace"]
break
if stacktrace and filename:
exception_frame = None
for frame in reversed(stacktrace["frames"]):
if frame.get("filename", None) == filename:
exception_frame = frame
break
if (
exception_frame
and "context_line" in exception_frame
and exception_frame["context_line"] is not None
):
pre_context = convert_lines_to_traceback_string(
exception_frame.get("pre_context", None)
)
context_line = exception_frame["context_line"] + "\n"
post_context = convert_lines_to_traceback_string(
exception_frame.get("post_context", None)
)
context.update(
syntax_highlight_as=syntax_highlight_as,
filename=filename,
pre_context=pre_context,
context_line=context_line,
post_context=post_context,
)
body = EXCEPTION_EVENT_TEMPLATE_WITH_TRACEBACK.format(**context)
return (topic_name, body)
context.update(filename=filename) # nocoverage
body = EXCEPTION_EVENT_TEMPLATE.format(**context) # nocoverage
return (topic_name, body) # nocoverage
elif "logentry" in event:
# The event was triggered by a sentry.capture_message() call
# (in the Python Sentry SDK) or something similar.
body = MESSAGE_EVENT_TEMPLATE.format(**context)
else:
raise UnsupportedWebhookEventTypeError("unknown-event type")
return (topic_name, body) |
Handle an issue type event. | def handle_issue_payload(
action: str, issue: Dict[str, Any], actor: Dict[str, Any]
) -> Tuple[str, str]:
"""Handle either an issue type event."""
topic_name = issue["title"]
datetime = issue["lastSeen"].split(".")[0].replace("T", " ")
if issue["assignedTo"]:
if issue["assignedTo"]["type"] == "team":
assignee = "team {}".format(issue["assignedTo"]["name"])
else:
assignee = issue["assignedTo"]["name"]
else:
assignee = "No one"
if action == "created":
context = {
"title": topic_name,
"level": issue["level"],
"datetime": datetime,
"assignee": assignee,
}
body = ISSUE_CREATED_MESSAGE_TEMPLATE.format(**context)
elif action == "resolved":
context = {
"title": topic_name,
"actor": actor["name"],
}
body = ISSUE_RESOLVED_MESSAGE_TEMPLATE.format(**context)
elif action == "assigned":
context = {
"title": topic_name,
"assignee": assignee,
"actor": actor["name"],
}
body = ISSUE_ASSIGNED_MESSAGE_TEMPLATE.format(**context)
elif action == "ignored":
context = {
"title": topic_name,
"actor": actor["name"],
}
body = ISSUE_IGNORED_MESSAGE_TEMPLATE.format(**context)
else:
raise UnsupportedWebhookEventTypeError(f"{action} action")
return (topic_name, body) |
Attempt to use webhook payload for the notification.
When the integration is configured as a webhook, instead of being added as
an internal integration, the payload is slightly different, but has all the
required information for sending a notification. We transform this payload to
look like the payload from a "properly configured" integration. | def transform_webhook_payload(payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
"""Attempt to use webhook payload for the notification.
When the integration is configured as a webhook, instead of being added as
an internal integration, the payload is slightly different, but has all the
required information for sending a notification. We transform this payload to
look like the payload from a "properly configured" integration.
"""
event = payload.get("event", {})
# deprecated payloads don't have event_id
event_id = event.get("event_id")
if not event_id:
return None
event_path = f"events/{event_id}/"
event["web_url"] = urljoin(payload["url"], event_path)
timestamp = event.get("timestamp", event["received"])
event["datetime"] = datetime.fromtimestamp(timestamp, timezone.utc).isoformat(
timespec="microseconds"
)
return payload |
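A hypothetical minimal payload walking through the transformation above; all field values are invented for illustration.

payload = {
    "url": "https://sentry.example.com/organizations/acme/issues/42/",
    "event": {"event_id": "abc123", "received": 1700000000},
}
transformed = transform_webhook_payload(payload)
assert transformed is not None
# web_url is the base URL joined with "events/<event_id>/".
assert transformed["event"]["web_url"].endswith("/events/abc123/")
# datetime is an ISO-8601 UTC string derived from the "received" epoch.
assert transformed["event"]["datetime"].startswith("2023-11-14T")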
Parses the payload and finds previous and current value of change_type. | def get_old_and_new_values(change_type: str, message: WildValue) -> ReturnType:
"""Parses the payload and finds previous and current value of change_type."""
old = message["change"]["diff"][change_type].get("from")
new = message["change"]["diff"][change_type].get("to")
return old, new |
Parses a comment on an issue, task, or user story (US). | def parse_comment(
message: WildValue,
) -> EventType:
"""Parses the comment to issue, task or US."""
return {
"event": "commented",
"type": message["type"].tame(check_string),
"values": {
"user": get_owner_name(message),
"user_link": get_owner_link(message),
"subject": get_subject(message),
},
} |
Parses create or delete event. | def parse_create_or_delete(
message: WildValue,
) -> EventType:
"""Parses create or delete event."""
if message["type"].tame(check_string) == "relateduserstory":
return {
"type": message["type"].tame(check_string),
"event": message["action"].tame(check_string),
"values": {
"user": get_owner_name(message),
"user_link": get_owner_link(message),
"epic_subject": get_epic_subject(message),
"userstory_subject": get_userstory_subject(message),
},
}
return {
"type": message["type"].tame(check_string),
"event": message["action"].tame(check_string),
"values": {
"user": get_owner_name(message),
"user_link": get_owner_link(message),
"subject": get_subject(message),
},
} |
Parses change event. | def parse_change_event(change_type: str, message: WildValue) -> Optional[EventType]:
"""Parses change event."""
evt: EventType = {}
values: Dict[str, Optional[Union[str, bool]]] = {
"user": get_owner_name(message),
"user_link": get_owner_link(message),
"subject": get_subject(message),
}
if change_type in ["description_diff", "points"]:
event_type = change_type
elif change_type in ["milestone", "assigned_to"]:
old, new = get_old_and_new_values(change_type, message)
tamed_old = old.tame(check_none_or(check_string))
tamed_new = new.tame(check_none_or(check_string))
if not tamed_old:
event_type = "set_" + change_type
values["new"] = tamed_new
elif not tamed_new:
event_type = "unset_" + change_type
values["old"] = tamed_old
else:
event_type = "changed_" + change_type
values.update(old=tamed_old, new=tamed_new)
elif change_type == "is_blocked":
if message["change"]["diff"]["is_blocked"]["to"].tame(check_bool):
event_type = "blocked"
else:
event_type = "unblocked"
elif change_type == "is_closed":
if message["change"]["diff"]["is_closed"]["to"].tame(check_bool):
event_type = "closed"
else:
event_type = "reopened"
elif change_type == "user_story":
old, new = get_old_and_new_values(change_type, message)
event_type = "changed_us"
tamed_old = old.tame(check_none_or(check_string))
tamed_new = new.tame(check_none_or(check_string))
values.update(old=tamed_old, new=tamed_new)
elif change_type in ["subject", "name"]:
event_type = "renamed"
old, new = get_old_and_new_values(change_type, message)
tamed_old = old.tame(check_none_or(check_string))
tamed_new = new.tame(check_none_or(check_string))
values.update(old=tamed_old, new=tamed_new)
elif change_type in ["estimated_finish", "estimated_start", "due_date"]:
old, new = get_old_and_new_values(change_type, message)
tamed_old = old.tame(check_none_or(check_string))
tamed_new = new.tame(check_none_or(check_string))
if not tamed_old:
event_type = "set_" + change_type
values["new"] = tamed_new
elif tamed_old != tamed_new:
event_type = change_type
values.update(old=tamed_old, new=tamed_new)
else:
# date hasn't changed
return None
elif change_type in ["priority", "severity", "type", "status"]:
event_type = "changed_" + change_type
old, new = get_old_and_new_values(change_type, message)
tamed_old = old.tame(check_none_or(check_string))
tamed_new = new.tame(check_none_or(check_string))
values.update(old=tamed_old, new=tamed_new)
else:
# we are not supporting this type of event
return None
evt.update(type=message["type"].tame(check_string), event=event_type, values=values)
return evt |
Parses the payload by delegating to specialized functions. | def parse_message(
message: WildValue,
) -> List[EventType]:
"""Parses the payload by delegating to specialized functions."""
events: List[EventType] = []
if message["action"].tame(check_string) in ["create", "delete"]:
events.append(parse_create_or_delete(message))
elif message["action"].tame(check_string) == "change":
if message["change"]["diff"]:
for value in message["change"]["diff"].keys(): # noqa: SIM118
parsed_event = parse_change_event(value, message)
if parsed_event:
events.append(parsed_event)
if message["change"]["comment"].tame(check_string):
events.append(parse_comment(message))
elif message["action"].tame(check_string) == "test":
events.append(parse_webhook_test(message))
return events |
Gets the template string and formats it with parsed data. | def generate_content(data: EventType) -> str:
"""Gets the template string and formats it with parsed data."""
assert isinstance(data["type"], str) and isinstance(data["event"], str)
template = templates[data["type"]][data["event"]]
assert isinstance(data["values"], dict)
content = template.format(**data["values"])
return content |
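An illustration of the template dispatch above, using a made-up two-level template table in place of the integration's real `templates` mapping.

templates_example = {
    "task": {"renamed": "{user} renamed the task from {old} to {new}."}
}
data = {
    "type": "task",
    "event": "renamed",
    "values": {"user": "Ada", "old": "Fix bug", "new": "Fix crash"},
}
content = templates_example[data["type"]][data["event"]].format(**data["values"])
assert content == "Ada renamed the task from Fix bug to Fix crash."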
Requests sent from Thinkst canaries are either from canarytokens or
canaries, which can be differentiated by the value of the `AlertType`
field. | def is_canarytoken(message: WildValue) -> bool:
"""
Requests sent from Thinkst canaries are either from canarytokens or
canaries, which can be differentiated by the value of the `AlertType`
field.
"""
return message["AlertType"].tame(check_string) == "CanarytokenIncident" |
Returns the name of the canary or canarytoken. | def canary_name(message: WildValue) -> str:
"""
Returns the name of the canary or canarytoken.
"""
if is_canarytoken(message):
return message["Reminder"].tame(check_string)
else:
return message["CanaryName"].tame(check_string) |
Returns a description of the kind of request - canary or canarytoken. | def canary_kind(message: WildValue) -> str:
"""
Returns a description of the kind of request - canary or canarytoken.
"""
if is_canarytoken(message):
return "canarytoken"
else:
return "canary" |
Extract the source IP and reverse DNS information from a canary request. | def source_ip_and_reverse_dns(message: WildValue) -> Tuple[Optional[str], Optional[str]]:
"""
Extract the source IP and reverse DNS information from a canary request.
"""
reverse_dns, source_ip = (None, None)
if "SourceIP" in message:
source_ip = message["SourceIP"].tame(check_string)
# `ReverseDNS` can sometimes exist and still be empty.
if "ReverseDNS" in message and message["ReverseDNS"].tame(check_string) != "":
reverse_dns = message["ReverseDNS"].tame(check_string)
return (source_ip, reverse_dns) |
Construct the response to a canary or canarytoken request. | def body(message: WildValue) -> str:
"""
Construct the response to a canary or canarytoken request.
"""
title = canary_kind(message).title()
name = canary_name(message)
body = f"**:alert: {title} *{name}* has been triggered!**\n\n{message['Intro'].tame(check_string)}\n\n"
if "IncidentHash" in message:
body += f"**Incident ID:** `{message['IncidentHash'].tame(check_string)}`\n"
if "Token" in message:
body += f"**Token:** `{message['Token'].tame(check_string)}`\n"
if "Description" in message:
body += f"**Kind:** {message['Description'].tame(check_string)}\n"
if "Timestamp" in message:
body += f"**Timestamp:** {message['Timestamp'].tame(check_string)}\n"
if "CanaryIP" in message:
body += f"**Canary IP:** `{message['CanaryIP'].tame(check_string)}`\n"
if "CanaryLocation" in message:
body += f"**Canary location:** {message['CanaryLocation'].tame(check_string)}\n"
if "Triggered" in message:
unit = "times" if message["Triggered"].tame(check_int) > 1 else "time"
body += f"**Triggered:** {message['Triggered'].tame(check_int)} {unit}\n"
source_ip, reverse_dns = source_ip_and_reverse_dns(message)
if source_ip:
body += f"**Source IP:** `{source_ip}`\n"
if reverse_dns:
body += f"**Reverse DNS:** `{reverse_dns}`\n"
if "AdditionalDetails" in message:
for detail in message["AdditionalDetails"]:
key = detail[0].tame(check_string)
value = detail[1].tame(check_union([check_string, check_int]))
if isinstance(value, str) and "*" in value:
# Thinkst sends passwords as a series of stars which can mess with
# formatting, so wrap these in backticks.
body += f"**{key}:** `{value}`\n"
else:
body += f"**{key}:** {value}\n"
return body |
Construct a response to a webhook event from a Thinkst canary or canarytoken.
Thinkst offers public canarytokens with canarytokens.org and with their canary
product, but the schemas returned by these identically named services are
completely different - canarytokens from canarytokens.org are handled by a
different Zulip integration.
Thinkst's documentation for the schema is linked below, but in practice the JSON
received doesn't always conform.
https://help.canary.tools/hc/en-gb/articles/360002426577-How-do-I-configure-notifications-for-a-Generic-Webhook- | def api_thinkst_webhook(
request: HttpRequest,
user_profile: UserProfile,
*,
message: JsonBodyPayload[WildValue],
user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
"""
Construct a response to a webhook event from a Thinkst canary or canarytoken.
Thinkst offers public canarytokens with canarytokens.org and with their canary
product, but the schemas returned by these identically named services are
completely different - canarytokens from canarytokens.org are handled by a
different Zulip integration.
Thinkst's documentation for the schema is linked below, but in practice the JSON
received doesn't always conform.
https://help.canary.tools/hc/en-gb/articles/360002426577-How-do-I-configure-notifications-for-a-Generic-Webhook-
"""
response = body(message)
topic_name = None
if user_specified_topic:
topic_name = user_specified_topic
else:
name = canary_name(message)
kind = canary_kind(message)
topic_name = f"{kind} alert - {name}"
check_send_webhook_message(request, user_profile, topic_name, response)
return json_success(request) |
Zendesk uses triggers with message templates. This webhook uses the
ticket_id and ticket_title to create a topic, and passes the Zendesk
user's configured message along to Zulip. | def api_zendesk_webhook(
request: HttpRequest,
user_profile: UserProfile,
*,
ticket_title: str,
ticket_id: str,
message: str,
) -> HttpResponse:
"""
Zendesk uses triggers with message templates. This webhook uses the
ticket_id and ticket_title to create a topic, and passes the Zendesk
user's configured message along to Zulip.
"""
topic_name = truncate(f"#{ticket_id}: {ticket_title}", 60)
check_send_webhook_message(request, user_profile, topic_name, message)
return json_success(request) |
Returns all (either test, or real) worker queues. | def get_active_worker_queues(only_test_queues: bool = False) -> List[str]:
"""Returns all (either test, or real) worker queues."""
for module_info in pkgutil.iter_modules(zerver.worker.__path__, "zerver.worker."):
importlib.import_module(module_info.name)
return [
queue_name
for queue_name in worker_classes
if bool(queue_name in test_queues) == only_test_queues
] |
When migrating to support registration by UUID, we introduced a bug where duplicate
registrations for the same device+user could be created - one by user_id and one by
user_uuid. Given no good way of detecting these duplicates at the database level, we need to
take advantage of the fact that when a remote server sends a push notification request
to us, it sends both user_id and user_uuid of the user.
See https://github.com/zulip/zulip/issues/24969 for reference.
This function, knowing the user_id and user_uuid of the user, can detect duplicates
and delete the legacy user_id registration if appropriate.
Return the list of registrations with the user_id-based duplicates removed. | def delete_duplicate_registrations(
registrations: List[RemotePushDeviceToken], server_id: int, user_id: int, user_uuid: str
) -> List[RemotePushDeviceToken]:
"""
When migrating to support registration by UUID, we introduced a bug where duplicate
registrations for the same device+user could be created - one by user_id and one by
user_uuid. Given no good way of detecting these duplicates at the database level, we need to
take advantage of the fact that when a remote server sends a push notification request
to us, it sends both user_id and user_uuid of the user.
See https://github.com/zulip/zulip/issues/24969 for reference.
This function, knowing the user_id and user_uuid of the user, can detect duplicates
and delete the legacy user_id registration if appropriate.
Return the list of registrations with the user_id-based duplicates removed.
"""
# All registrations passed here should be of the same kind (apple vs android).
assert len({registration.kind for registration in registrations}) == 1
kind = registrations[0].kind
tokens_counter = Counter(device.token for device in registrations)
tokens_to_deduplicate = []
for key in tokens_counter:
if tokens_counter[key] <= 1:
continue
if tokens_counter[key] > 2:
raise AssertionError(
f"More than two registrations for token {key} for user id:{user_id} uuid:{user_uuid}, shouldn't be possible"
)
assert tokens_counter[key] == 2
tokens_to_deduplicate.append(key)
if not tokens_to_deduplicate:
return registrations
logger.info(
"Deduplicating push registrations for server id:%s user id:%s uuid:%s and tokens:%s",
server_id,
user_id,
user_uuid,
sorted(tokens_to_deduplicate),
)
RemotePushDeviceToken.objects.filter(
token__in=tokens_to_deduplicate, kind=kind, server_id=server_id, user_id=user_id
).delete()
deduplicated_registrations_to_return = []
for registration in registrations:
if registration.token in tokens_to_deduplicate and registration.user_id is not None:
# user_id registrations are the ones we deleted
continue
deduplicated_registrations_to_return.append(registration)
return deduplicated_registrations_to_return |
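The duplicate detection above hinges on counting tokens; a toy sketch of that step with hypothetical token values:

from collections import Counter

tokens = ["token-1", "token-2", "token-2"]
counts = Counter(tokens)
# A token seen exactly twice means one user_id-based and one
# user_uuid-based registration for the same device.
duplicates = [token for token, n in counts.items() if n == 2]
assert duplicates == ["token-2"]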
Tries to fetch RemoteRealm for the given realm_uuid and server. Otherwise,
returns None and logs what happened using request and user_uuid args to make
the output more informative. | def get_remote_realm_helper(
request: HttpRequest, server: RemoteZulipServer, realm_uuid: str, user_uuid: str
) -> Optional[RemoteRealm]:
"""
Tries to fetch RemoteRealm for the given realm_uuid and server. Otherwise,
returns None and logs what happened using request and user_uuid args to make
the output more informative.
"""
try:
remote_realm = RemoteRealm.objects.get(uuid=realm_uuid)
except RemoteRealm.DoesNotExist:
logger.info(
"%s: Received request for unknown realm %s, server %s, user %s",
request.path,
realm_uuid,
server.id,
user_uuid,
)
return None
if remote_realm.server_id != server.id:
logger.warning(
"%s: Realm %s exists, but not registered to server %s",
request.path,
realm_uuid,
server.id,
)
raise RemoteRealmServerMismatchError
return remote_realm |
The remote server sends us a list of (tokens of) devices that it
believes it has registered. However some of them may have been
deleted by us due to errors received in the low level code
responsible for directly sending push notifications.
Query the database for the RemotePushDeviceTokens from these lists
that we do indeed have and return a list of the ones that we don't
have and thus presumably have already deleted - the remote server
will want to delete them too. | def get_deleted_devices(
user_identity: UserPushIdentityCompat,
server: RemoteZulipServer,
android_devices: List[str],
apple_devices: List[str],
) -> DevicesToCleanUpDict:
"""The remote server sends us a list of (tokens of) devices that it
believes it has registered. However some of them may have been
deleted by us due to errors received in the low level code
responsible for directly sending push notifications.
Query the database for the RemotePushDeviceTokens from these lists
that we do indeed have and return a list of the ones that we don't
have and thus presumably have already deleted - the remote server
will want to delete them too.
"""
android_devices_we_have = RemotePushDeviceToken.objects.filter(
user_identity.filter_q(),
token__in=android_devices,
kind=RemotePushDeviceToken.GCM,
server=server,
).values_list("token", flat=True)
apple_devices_we_have = RemotePushDeviceToken.objects.filter(
user_identity.filter_q(),
token__in=apple_devices,
kind=RemotePushDeviceToken.APNS,
server=server,
).values_list("token", flat=True)
return DevicesToCleanUpDict(
android_devices=list(set(android_devices) - set(android_devices_we_have)),
apple_devices=list(set(apple_devices) - set(apple_devices_we_have)),
) |
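The cleanup computation above reduces to a set difference per device kind; a toy illustration with hypothetical tokens:

claimed_by_server = ["tokenA", "tokenB", "tokenC"]
still_in_our_db = ["tokenA", "tokenC"]
# Tokens the remote server believes exist but we have already deleted:
to_clean_up = sorted(set(claimed_by_server) - set(still_in_our_db))
assert to_clean_up == ["tokenB"]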
Finds the RemoteRealmCount and RemoteRealmAuditLog entries without .remote_realm
set and sets it based on the "realms" data received from the remote server,
if possible. | def fix_remote_realm_foreign_keys(
server: RemoteZulipServer, realms: List[RealmDataForAnalytics]
) -> None:
"""
Finds the RemoteRealmCount and RemoteRealmAuditLog entries without .remote_realm
set and sets it based on the "realms" data received from the remote server,
if possible.
"""
if (
not RemoteRealmCount.objects.filter(server=server, remote_realm=None).exists()
and not RemoteRealmAuditLog.objects.filter(server=server, remote_realm=None).exists()
):
return
realm_id_to_remote_realm = build_realm_id_to_remote_realm_dict(server, realms)
for realm_id in realm_id_to_remote_realm:
RemoteRealmCount.objects.filter(server=server, remote_realm=None, realm_id=realm_id).update(
remote_realm=realm_id_to_remote_realm[realm_id]
)
RemoteRealmAuditLog.objects.filter(
server=server, remote_realm=None, realm_id=realm_id
).update(remote_realm=realm_id_to_remote_realm[realm_id]) |
We want to keep these flags mostly intact after we create
messages. The is_private flag, for example, would be bad to overwrite.
So we're careful to only toggle the read flag.
We exclude marking messages as read for bots, since bots, by
default, never mark messages as read. | def mark_all_messages_as_read() -> None:
"""
We want to keep these flags mostly intact after we create
messages. The is_private flag, for example, would be bad to overwrite.
So we're careful to only toggle the read flag.
We exclude marking messages as read for bots, since bots, by
default, never mark messages as read.
"""
# Mark all messages as read
UserMessage.objects.filter(user_profile__is_bot=False).update(
flags=F("flags").bitor(UserMessage.flags.read),
) |
Clean up duplicated RemoteRealmCount and RemoteInstallationCount rows.
This is the equivalent of analytics' 0015_clear_duplicate_counts
migration -- but it also has additional duplicates if there are
multiple servers submitting information with the same UUID.
We drop the behaviour of rolling up and updating the value to the
sum, since the active_users_log:is_bot:day field has a subgroup
(and is thus not affected by the bug), and the few cases for
`invites_sent::day` seem more likely to be re-submissions of the
same data, not duplicates to roll up.
We must do this step before switching the non-unique indexes to be
unique, as there are currently violations. | def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
"""Clean up duplicated RemoteRealmCount and RemoteInstallationCount rows.
This is the equivalent of analytics' 0015_clear_duplicate_counts
migration -- but it also has additional duplicates if there are
multiple servers submitting information with the same UUID.
We drop the behaviour of rolling up and updating the value to the
sum, since the active_users_log:is_bot:day field has a subgroup
(and is thus not affected by the bug), and the few cases for
`invites_sent::day` seem more likely to be re-submissions of the
same data, not duplicates to roll up.
We must do this step before switching the non-unique indexes to be
unique, as there are currently violations.
"""
count_tables = dict(
realm=apps.get_model("zilencer", "RemoteRealmCount"),
installation=apps.get_model("zilencer", "RemoteInstallationCount"),
)
for name, count_table in count_tables.items():
value = ["realm_id", "server_id", "property", "end_time"]
if name == "installation":
value = ["server_id", "property", "end_time"]
duplicated_rows = (
count_table.objects.filter(subgroup=None)
.values(*value)
.annotate(Count("id"), Min("id"))
.filter(id__count__gt=1)
)
for duplicated_row in duplicated_rows:
duplicated_row.pop("id__count")
first_id = duplicated_row.pop("id__min")
count_table.objects.filter(**duplicated_row, id__gt=first_id).delete() |
Pads an authentication methods dict to contain all auth backends
supported by the software, regardless of whether they are
configured on this server | def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
"""Pads an authentication methods dict to contain all auth backends
supported by the software, regardless of whether they are
configured on this server"""
for key in AUTH_BACKEND_NAME_MAP:
if key not in method_dict:
method_dict[key] = False
return method_dict |
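A self-contained sketch of the padding behavior; the backend names below are hypothetical stand-ins for the keys of AUTH_BACKEND_NAME_MAP.

backend_names = ["Email", "LDAP", "GitHub"]

method_dict = {"Email": True}
for key in backend_names:
    method_dict.setdefault(key, False)
assert method_dict == {"Email": True, "LDAP": False, "GitHub": False}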
realm_authentication_methods can be passed if already fetched to avoid
a database query. | def auth_enabled_helper(
backends_to_check: List[str],
realm: Optional[Realm],
realm_authentication_methods: Optional[Dict[str, bool]] = None,
) -> bool:
"""
realm_authentication_methods can be passed if already fetched to avoid
a database query.
"""
if realm is not None:
if realm_authentication_methods is not None:
# Copy the dict to avoid mutating the original if it was passed in as argument.
enabled_method_dict = realm_authentication_methods.copy()
else:
enabled_method_dict = realm.authentication_methods_dict()
else:
enabled_method_dict = dict.fromkeys(AUTH_BACKEND_NAME_MAP, True)
pad_method_dict(enabled_method_dict)
for supported_backend in supported_auth_backends():
for backend_name in backends_to_check:
backend = AUTH_BACKEND_NAME_MAP[backend_name]
if enabled_method_dict[backend_name] and isinstance(supported_backend, backend):
return True
return False |
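# Illustrative usage (a sketch; "Email" names the email/password backend
# in AUTH_BACKEND_NAME_MAP, but treat the key as an assumption here):
def email_auth_enabled_example(
    realm: Optional[Realm],
    realm_authentication_methods: Optional[Dict[str, bool]] = None,
) -> bool:
    # Passing prefetched methods through avoids a second database query.
    return auth_enabled_helper(["Email"], realm, realm_authentication_methods)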
This is the core common function used by essentially all
authentication backends to check if there's an active user account
with a given email address in the organization, handling both
user-level and realm-level deactivation correctly. | def common_get_active_user(
email: str, realm: Realm, return_data: Optional[Dict[str, Any]] = None
) -> Optional[UserProfile]:
"""This is the core common function used by essentially all
authentication backends to check if there's an active user account
with a given email address in the organization, handling both
user-level and realm-level deactivation correctly.
"""
try:
user_profile = get_user_by_delivery_email(email, realm)
except UserProfile.DoesNotExist:
# If the user doesn't have an account in the target realm, we
# check whether they might have an account in another realm,
# and if so, provide a helpful error message via
# `invalid_subdomain`.
if not UserProfile.objects.filter(delivery_email__iexact=email).exists():
return None
if return_data is not None:
return_data["invalid_subdomain"] = True
return_data["matching_user_ids_in_different_realms"] = list(
UserProfile.objects.filter(delivery_email__iexact=email).values("realm_id", "id")
)
return None
if not is_user_active(user_profile, return_data):
return None
return user_profile |
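# Illustrative sketch of how an authentication backend might consume the
# return_data side channel (the handling shown is hypothetical):
def lookup_active_user_example(email: str, realm: Realm) -> Optional[UserProfile]:
    return_data: Dict[str, Any] = {}
    user_profile = common_get_active_user(email, realm, return_data)
    if user_profile is None and return_data.get("invalid_subdomain"):
        # The address belongs to an account in a different realm;
        # return_data["matching_user_ids_in_different_realms"] has hints
        # suitable for building a helpful error message.
        pass
    return user_profile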
Returns True if the password is strong enough,
False otherwise. | def check_password_strength(password: str) -> bool:
"""
Returns True if the password is strong enough,
False otherwise.
"""
if len(password) < settings.PASSWORD_MIN_LENGTH:
return False
if password == "":
# zxcvbn throws an exception when passed the empty string, so
# we need a special case for the empty string password here.
return False
if int(zxcvbn(password)["guesses"]) < settings.PASSWORD_MIN_GUESSES:
return False
return True |
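# Illustrative sketch of a caller; whether any particular string passes
# depends on settings.PASSWORD_MIN_LENGTH and PASSWORD_MIN_GUESSES, so
# only the gating pattern is being demonstrated here.
def set_password_if_strong_example(user_profile: UserProfile, password: str) -> bool:
    if not check_password_strength(password):
        return False
    user_profile.set_password(password)
    user_profile.save(update_fields=["password"])
    return True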
Returns a list of _LDAPUsers matching the email search | def find_ldap_users_by_email(email: str) -> List[_LDAPUser]:
"""
    Returns a list of _LDAPUsers matching the email search
"""
return LDAPReverseEmailSearch().search_for_users(email) |
Used to make determinations on whether a user's email address is
managed by LDAP. For environments using both LDAP and
Email+Password authentication, we do not allow EmailAuthBackend
authentication for email addresses managed by LDAP (to avoid a
security issue where one could create separate credentials for an LDAP
user), and this function is used to enforce that rule. | def email_belongs_to_ldap(realm: Realm, email: str) -> bool:
"""Used to make determinations on whether a user's email address is
managed by LDAP. For environments using both LDAP and
Email+Password authentication, we do not allow EmailAuthBackend
authentication for email addresses managed by LDAP (to avoid a
    security issue where one could create separate credentials for an LDAP
user), and this function is used to enforce that rule.
"""
if not ldap_auth_enabled(realm):
return False
check_ldap_config()
if settings.LDAP_APPEND_DOMAIN:
# Check if the email ends with LDAP_APPEND_DOMAIN
return Address(addr_spec=email).domain.lower() == settings.LDAP_APPEND_DOMAIN
# If we don't have an LDAP domain, we have to do a lookup for the email.
    return bool(find_ldap_users_by_email(email)) |
Inside django_auth_ldap populate_user(), if LDAPError is raised,
e.g. due to invalid connection credentials, the function catches it
and emits a signal (ldap_error) to communicate this error to others.
We normally don't use signals, but here there's no choice, so in this function
we essentially convert the signal to a normal exception that will properly
propagate out of django_auth_ldap internals. | def catch_ldap_error(signal: Signal, **kwargs: Any) -> None:
"""
Inside django_auth_ldap populate_user(), if LDAPError is raised,
e.g. due to invalid connection credentials, the function catches it
and emits a signal (ldap_error) to communicate this error to others.
We normally don't use signals, but here there's no choice, so in this function
we essentially convert the signal to a normal exception that will properly
propagate out of django_auth_ldap internals.
"""
if kwargs["context"] == "populate_user":
# The exception message can contain the password (if it was invalid),
# so it seems better not to log that, and only use the original exception's name here.
raise PopulateUserLDAPError(type(kwargs["exception"]).__name__) |
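# For this handler to run, it must be registered as a receiver for
# django_auth_ldap's ldap_error signal; a minimal sketch of that wiring:
#
#     from django_auth_ldap.backend import ldap_error
#     ldap_error.connect(catch_ldap_error)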
Responsible for doing the Zulip account lookup and validation parts
of the Zulip social auth pipeline (similar to the authenticate()
methods in most other auth backends in this file).
Returns a UserProfile object for successful authentication, and None otherwise. | def social_associate_user_helper(
backend: BaseAuth, return_data: Dict[str, Any], *args: Any, **kwargs: Any
) -> Union[HttpResponse, Optional[UserProfile]]:
"""Responsible for doing the Zulip account lookup and validation parts
of the Zulip social auth pipeline (similar to the authenticate()
methods in most other auth backends in this file).
Returns a UserProfile object for successful authentication, and None otherwise.
"""
subdomain = backend.strategy.session_get("subdomain")
try:
realm = get_realm(subdomain)
except Realm.DoesNotExist:
return_data["invalid_realm"] = True
return None
return_data["realm_id"] = realm.id
return_data["realm_string_id"] = realm.string_id
if not auth_enabled_helper([backend.auth_backend_name], realm):
return_data["auth_backend_disabled"] = True
return None
if "auth_failed_reason" in kwargs.get("response", {}):
return_data["social_auth_failed_reason"] = kwargs["response"]["auth_failed_reason"]
return None
elif hasattr(backend, "get_verified_emails"):
# Some social backends, like GitHubAuthBackend, don't
# guarantee that the `details` data is validated (i.e., it's
# possible users can put any string they want in the "email"
# field of the `details` object). For those backends, we have
# custom per-backend code to properly fetch only verified
# email addresses from the appropriate third-party API.
verified_emails = backend.get_verified_emails(realm, *args, **kwargs)
verified_emails_length = len(verified_emails)
if verified_emails_length == 0:
# TODO: Provide a nice error message screen to the user
# for this case, rather than just logging a warning.
backend.logger.warning(
"Social auth (%s) failed because user has no verified emails",
backend.auth_backend_name,
)
return_data["email_not_verified"] = True
return None
if verified_emails_length == 1:
chosen_email = verified_emails[0]
else:
chosen_email = backend.strategy.request_data().get("email")
if not chosen_email:
                avatars: Dict[str, str] = {}
existing_account_emails = []
for email in verified_emails:
existing_account = common_get_active_user(email, realm, {})
if existing_account is not None:
existing_account_emails.append(email)
avatars[email] = avatar_url(existing_account)
if (
len(existing_account_emails) != 1
or backend.strategy.session_get("is_signup") == "1"
):
unverified_emails = []
if hasattr(backend, "get_unverified_emails"):
unverified_emails = backend.get_unverified_emails(realm, *args, **kwargs)
return render(
backend.strategy.request,
"zerver/social_auth_select_email.html",
context={
"primary_email": verified_emails[0],
"verified_non_primary_emails": verified_emails[1:],
"unverified_emails": unverified_emails,
"backend": "github",
"avatar_urls": avatars,
},
)
else:
chosen_email = existing_account_emails[0]
try:
validate_email(chosen_email)
except ValidationError:
return_data["invalid_email"] = True
return None
if chosen_email not in verified_emails:
# If a user edits the submit value for the choose email form, we might
# end up with a wrong email associated with the account. The below code
# takes care of that.
backend.logger.warning(
"Social auth (%s) failed because user has no verified"
" emails associated with the account",
backend.auth_backend_name,
)
return_data["email_not_associated"] = True
return None
validated_email = chosen_email
else:
try:
validate_email(kwargs["details"].get("email"))
except ValidationError:
return_data["invalid_email"] = True
return None
validated_email = kwargs["details"].get("email")
if not validated_email: # nocoverage
# This code path isn't used with GitHubAuthBackend, but may be relevant for other
# social auth backends.
return_data["invalid_email"] = True
return None
return_data["valid_attestation"] = True
return_data["validated_email"] = validated_email
user_profile = common_get_active_user(validated_email, realm, return_data)
full_name = kwargs["details"].get("fullname")
first_name = kwargs["details"].get("first_name")
last_name = kwargs["details"].get("last_name")
if all(name is None for name in [full_name, first_name, last_name]) and backend.name not in [
"apple",
"saml",
"oidc",
]:
# (1) Apple authentication provides the user's name only the very first time a user tries to log in.
# So if the user aborts login or otherwise is doing this the second time,
# we won't have any name data.
# (2) Some SAML or OIDC IdPs may not send any name value if the user doesn't
# have them set in the IdP's directory.
#
# The name will just default to the empty string in the code below.
        # We need custom code here for any social auth backends
        # that don't provide the name details feature.
raise AssertionError("Social auth backend doesn't provide name")
if full_name:
return_data["full_name"] = full_name
else:
        # Some authentication methods like Apple and SAML send
        # first name and last name as separate attributes. In that
        # case we construct the full name from them; strip() removes
        # the spurious space left behind when one part is empty.
return_data["full_name"] = f"{first_name or ''} {last_name or ''}".strip()
return_data["extra_attrs"] = kwargs["details"].get("extra_attrs", {})
return user_profile |
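# For reference, the return_data keys the helper above may set (derived
# from its code paths; not an exhaustive contract, since
# common_get_active_user can record additional keys):
# invalid_realm, realm_id, realm_string_id, auth_backend_disabled,
# social_auth_failed_reason, email_not_verified, invalid_email,
# email_not_associated, valid_attestation, validated_email, full_name,
# extra_attrs.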
A simple wrapper function to reformat the return data from
social_associate_user_helper as a dictionary. The
python-social-auth infrastructure will then pass those values into
later stages of settings.SOCIAL_AUTH_PIPELINE, such as
social_auth_finish, as kwargs. | def social_auth_associate_user(
backend: BaseAuth, *args: Any, **kwargs: Any
) -> Union[HttpResponse, Dict[str, Any]]:
"""A simple wrapper function to reformat the return data from
social_associate_user_helper as a dictionary. The
python-social-auth infrastructure will then pass those values into
later stages of settings.SOCIAL_AUTH_PIPELINE, such as
social_auth_finish, as kwargs.
"""
partial_token = backend.strategy.request_data().get("partial_token")
return_data: Dict[str, Any] = {}
user_profile = social_associate_user_helper(backend, return_data, *args, **kwargs)
if isinstance(user_profile, HttpResponse):
return user_profile
else:
return {
"user_profile": user_profile,
"return_data": return_data,
"partial_token": partial_token,
"partial_backend_name": backend,
} |
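# A sketch of how these steps plug into python-social-auth: both functions
# are listed in settings.SOCIAL_AUTH_PIPELINE, with the associate step
# running before the finish step. The module path and the surrounding
# entry shown here are assumptions of this sketch.
#
#     SOCIAL_AUTH_PIPELINE = [
#         "social_core.pipeline.partial.save_status_to_session",
#         "zproject.backends.social_auth_associate_user",
#         "zproject.backends.social_auth_finish",
#     ]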
Given the determination in social_auth_associate_user for whether
the user should be authenticated, this takes care of actually
logging in the user (if appropriate) and redirecting the browser
to the appropriate next page depending on the situation. Read the
comments below as well as login_or_register_remote_user in
`zerver/views/auth.py` for the details on how that dispatch works. | def social_auth_finish(
backend: Any, details: Dict[str, Any], response: HttpResponse, *args: Any, **kwargs: Any
) -> Optional[HttpResponse]:
"""Given the determination in social_auth_associate_user for whether
the user should be authenticated, this takes care of actually
logging in the user (if appropriate) and redirecting the browser
to the appropriate next page depending on the situation. Read the
comments below as well as login_or_register_remote_user in
`zerver/views/auth.py` for the details on how that dispatch works.
"""
from zerver.views.auth import login_or_register_remote_user, redirect_and_log_into_subdomain
user_profile = kwargs["user_profile"]
return_data = kwargs["return_data"]
no_verified_email = return_data.get("email_not_verified")
auth_backend_disabled = return_data.get("auth_backend_disabled")
inactive_user = return_data.get("inactive_user")
inactive_realm = return_data.get("inactive_realm")
invalid_realm = return_data.get("invalid_realm")
invalid_email = return_data.get("invalid_email")
auth_failed_reason = return_data.get("social_auth_failed_reason")
email_not_associated = return_data.get("email_not_associated")
if invalid_realm:
# User has passed an invalid subdomain param - this shouldn't happen in the normal flow,
# unless the user manually edits the param. In any case, it's most appropriate to just take
# them to find_account, as there isn't even an appropriate subdomain to take them to the login
# form on.
return HttpResponseRedirect(reverse("find_account"))
realm = Realm.objects.get(id=return_data["realm_id"])
if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated:
        # We can't send the user to the registration workflow with
        # these errors, so redirect them to the login page.
return redirect_to_login(realm)
if inactive_user:
backend.logger.info(
"Failed login attempt for deactivated account: %s@%s",
return_data["inactive_user_id"],
return_data["realm_string_id"],
)
return redirect_deactivated_user_to_login(realm, return_data["validated_email"])
if invalid_email:
# In case of invalid email, we will end up on registration page.
# This seems better than redirecting to login page.
backend.logger.warning(
"%s got invalid email argument.",
backend.auth_backend_name,
)
return redirect_to_signup(realm)
if auth_failed_reason:
backend.logger.info(auth_failed_reason)
return redirect_to_login(realm)
# Structurally, all the cases where we don't have an authenticated
# email for the user should be handled above; this assertion helps
# prevent any violations of that contract from resulting in a user
# being incorrectly authenticated.
assert return_data.get("valid_attestation") is True
strategy = backend.strategy
full_name_validated = backend.full_name_validated
email_address = return_data["validated_email"]
full_name = return_data["full_name"]
redirect_to = strategy.session_get("next")
multiuse_object_key = strategy.session_get("multiuse_object_key", "")
mobile_flow_otp = strategy.session_get("mobile_flow_otp")
desktop_flow_otp = strategy.session_get("desktop_flow_otp")
validate_otp_params(mobile_flow_otp, desktop_flow_otp)
if user_profile is None or user_profile.is_mirror_dummy:
is_signup = strategy.session_get("is_signup") == "1" or backend.should_auto_signup()
else:
is_signup = False
extra_attrs = return_data.get("extra_attrs", {})
attrs_by_backend = settings.SOCIAL_AUTH_SYNC_CUSTOM_ATTRS_DICT.get(realm.subdomain, {})
if user_profile is not None and extra_attrs and attrs_by_backend:
        # This is only supported for SAML right now, though the design
        # is meant to be easy to extend to other backends if desired.
# Unlike with LDAP, here we can only do syncing during the authentication
# flow, as that's when the data is provided and we don't have a way to query
# for it otherwise.
assert backend.name == "saml"
custom_profile_field_name_to_attr_name = attrs_by_backend.get(backend.name, {})
custom_profile_field_name_to_value = {}
for field_name, attr_name in custom_profile_field_name_to_attr_name.items():
custom_profile_field_name_to_value[field_name] = extra_attrs.get(attr_name)
try:
sync_user_profile_custom_fields(user_profile, custom_profile_field_name_to_value)
except SyncUserError as e:
backend.logger.warning(
"Exception while syncing custom profile fields for user %s: %s",
user_profile.id,
str(e),
)
if user_profile:
# This call to authenticate() is just to get to invoke the custom_auth_decorator logic.
# Social auth backends don't work via authenticate() in the same way as normal backends,
# so we can't just wrap their authenticate() methods. But the decorator is applied on
# ZulipDummyBackend.authenticate(), so we can invoke it here to trigger the custom logic.
#
        # Note: We're only doing this in the case where we already have a user_profile, meaning the
# account already exists and the user is just logging in. The new account registration case
# is handled in the registration codepath.
validated_user_profile = authenticate(
request=strategy.request,
username=user_profile.delivery_email,
realm=realm,
use_dummy_backend=True,
)
if validated_user_profile is None or validated_user_profile != user_profile:
            # Log this as a failure to authenticate via the social backend, since that's
# the correct way to think about this. ZulipDummyBackend is just an implementation
# tool, not an actual backend a user could be authenticating through.
log_auth_attempt(
backend.logger,
strategy.request,
realm,
username=email_address,
succeeded=False,
return_data={},
)
return redirect_to_login(realm)
# At this point, we have now confirmed that the user has
# demonstrated control over the target email address.
#
# The next step is to call login_or_register_remote_user, but
# there are two code paths here because of an optimization to save
# a redirect on mobile and desktop.
# Authentication failures happen on the external provider's side, so we don't get to log those,
# but we should log the successes at least.
log_auth_attempt(
backend.logger,
strategy.request,
realm,
username=email_address,
succeeded=True,
return_data={},
)
data_dict = ExternalAuthDataDict(
subdomain=realm.subdomain,
is_signup=is_signup,
redirect_to=redirect_to,
multiuse_object_key=multiuse_object_key,
full_name_validated=full_name_validated,
mobile_flow_otp=mobile_flow_otp,
desktop_flow_otp=desktop_flow_otp,
params_to_store_in_authenticated_session=backend.get_params_to_store_in_authenticated_session(),
)
if user_profile is None:
data_dict.update(dict(full_name=full_name, email=email_address))
result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
if mobile_flow_otp or desktop_flow_otp:
if user_profile is not None and not user_profile.is_mirror_dummy:
# For mobile and desktop app authentication, login_or_register_remote_user
# will redirect to a special zulip:// URL that is handled by
# the app after a successful authentication; so we can
# redirect directly from here, saving a round trip over what
# we need to do to create session cookies on the right domain
# in the web login flow (below).
return login_or_register_remote_user(strategy.request, result)
else:
            # The user needs to register, so we need to go to the
            # realm's subdomain for that.
pass
# If this authentication code were executing on
# subdomain.zulip.example.com, we would just call
# login_or_register_remote_user as in the mobile code path.
# However, because third-party SSO providers generally don't allow
# wildcard addresses in their redirect URLs, for multi-realm
# servers, we will have just completed authentication on e.g.
# auth.zulip.example.com (depending on
# settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on
# the subdomain.zulip.example.com domain. So instead we serve a
# redirect (encoding the authentication result data in a
# cryptographically signed token) to a route on
# subdomain.zulip.example.com that will verify the signature and
# then call login_or_register_remote_user.
return redirect_and_log_into_subdomain(result) |
wantMessagesSigned controls whether requests processed by this saml auth
object need to be signed. The default of False is often not acceptable,
because we don't want anyone to be able to submit such a request.
Callers should use this to enforce the requirement of signatures. | def patch_saml_auth_require_messages_signed(auth: OneLogin_Saml2_Auth) -> None:
"""
wantMessagesSigned controls whether requests processed by this saml auth
object need to be signed. The default of False is often not acceptable,
because we don't want anyone to be able to submit such a request.
Callers should use this to enforce the requirement of signatures.
"""
auth.get_settings().get_security_data()["wantMessagesSigned"] = True
# Defensive code to confirm the setting change above is successful,
# to catch API changes in python3-saml that would make the change not
# be applied to the actual settings of `auth` - e.g. due to us only
# receiving a copy of the dict.
assert auth.get_settings().get_security_data()["wantMessagesSigned"] is True |
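# Illustrative usage (a sketch; constructing a real OneLogin_Saml2_Auth
# requires request data and SAML settings not shown here):
#
#     auth = OneLogin_Saml2_Auth(request_data, saml_settings)
#     patch_saml_auth_require_messages_signed(auth)
#     # Subsequent processing (e.g., of a SAML LogoutRequest) will now
#     # reject unsigned messages.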
Returns a list of dictionaries that represent social backends, sorted
in the order in which they should be displayed. | def get_external_method_dicts(realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
"""
Returns a list of dictionaries that represent social backends, sorted
in the order in which they should be displayed.
"""
result: List[ExternalAuthMethodDictT] = []
for backend in EXTERNAL_AUTH_METHODS:
# EXTERNAL_AUTH_METHODS is already sorted in the correct order,
# so we don't need to worry about sorting here.
if auth_enabled_helper([backend.auth_backend_name], realm):
result.extend(backend.dict_representation(realm))
return result |
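# Illustrative sketch of the returned shape; the field names follow
# ExternalAuthMethodDictT, while the concrete values are hypothetical:
#
#     [
#         {"name": "saml:idp_name", "display_name": "SAML", ...},
#         {"name": "github", "display_name": "GitHub", ...},
#     ]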