zip (stringlengths 19–109) | filename (stringlengths 4–185) | contents (stringlengths 0–30.1M) | type_annotations (sequencelengths 0–1.97k) | type_annotation_starts (sequencelengths 0–1.97k) | type_annotation_ends (sequencelengths 0–1.97k) |
---|---|---|---|---|---|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/outgoing_webhook.py | from typing import Any, AnyStr, Iterable, Dict, Tuple, Callable, Mapping, Optional
import requests
import json
import sys
import inspect
import logging
import re
import urllib
from functools import reduce
from requests import Response
from django.utils.translation import ugettext as _
from zerver.models import Realm, UserProfile, get_user_profile_by_id, get_client, \
GENERIC_INTERFACE, Service, SLACK_INTERFACE, email_to_domain, get_service_profile
from zerver.lib.actions import check_send_message
from zerver.lib.queue import retry_event
from zerver.lib.topic import get_topic_from_message_info
from zerver.lib.url_encoding import near_message_url
from zerver.lib.validator import check_dict, check_string
from zerver.decorator import JsonableError
class OutgoingWebhookServiceInterface:
def __init__(self, token: str, user_profile: UserProfile, service_name: str) -> None:
self.token = token # type: str
self.user_profile = user_profile # type: UserProfile
self.service_name = service_name # type: str
class GenericOutgoingWebhookService(OutgoingWebhookServiceInterface):
def build_bot_request(self, event: Dict[str, Any]) -> Optional[Any]:
request_data = {"data": event['command'],
"message": event['message'],
"bot_email": self.user_profile.email,
"token": self.token,
"trigger": event['trigger']}
return json.dumps(request_data)
def send_data_to_server(self,
base_url: str,
request_data: Any) -> Response:
headers = {'content-type': 'application/json'}
response = requests.request('POST', base_url, data=request_data, headers=headers)
return response
def process_success(self, response_json: Dict[str, Any],
event: Dict[str, Any]) -> Optional[Dict[str, Any]]:
if "response_not_required" in response_json and response_json['response_not_required']:
return None
if "response_string" in response_json:
# We are deprecating response_string.
content = str(response_json['response_string'])
success_data = dict(content=content)
return success_data
if "content" in response_json:
content = str(response_json['content'])
success_data = dict(content=content)
if 'widget_content' in response_json:
success_data['widget_content'] = response_json['widget_content']
return success_data
return None
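# Illustrative payload shapes accepted by process_success above, derived from
# the checks in the method (values are examples only, not from the source):
#
#     {"response_not_required": True}   -> None (no reply is sent)
#     {"response_string": "hi"}         -> {"content": "hi"}   (deprecated)
#     {"content": "**hi**"}             -> {"content": "**hi**"}, plus
#                                          "widget_content" when present
#     {}                                -> None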
class SlackOutgoingWebhookService(OutgoingWebhookServiceInterface):
def build_bot_request(self, event: Dict[str, Any]) -> Optional[Any]:
if event['message']['type'] == 'private':
failure_message = "Slack outgoing webhooks don't support private messages."
fail_with_message(event, failure_message)
return None
request_data = [("token", self.token),
("team_id", event['message']['sender_realm_str']),
("team_domain", email_to_domain(event['message']['sender_email'])),
("channel_id", event['message']['stream_id']),
("channel_name", event['message']['display_recipient']),
("timestamp", event['message']['timestamp']),
("user_id", event['message']['sender_id']),
("user_name", event['message']['sender_full_name']),
("text", event['command']),
("trigger_word", event['trigger']),
("service_id", event['user_profile_id']),
]
return request_data
def send_data_to_server(self,
base_url: str,
request_data: Any) -> Response:
response = requests.request('POST', base_url, data=request_data)
return response
def process_success(self, response_json: Dict[str, Any],
event: Dict[str, Any]) -> Optional[Dict[str, Any]]:
if "text" in response_json:
content = response_json['text']
success_data = dict(content=content)
return success_data
return None
AVAILABLE_OUTGOING_WEBHOOK_INTERFACES = {
GENERIC_INTERFACE: GenericOutgoingWebhookService,
SLACK_INTERFACE: SlackOutgoingWebhookService,
} # type: Dict[str, Any]
def get_service_interface_class(interface: str) -> Any:
if interface is None or interface not in AVAILABLE_OUTGOING_WEBHOOK_INTERFACES:
return AVAILABLE_OUTGOING_WEBHOOK_INTERFACES[GENERIC_INTERFACE]
else:
return AVAILABLE_OUTGOING_WEBHOOK_INTERFACES[interface]
def get_outgoing_webhook_service_handler(service: Service) -> Any:
service_interface_class = get_service_interface_class(service.interface_name())
service_interface = service_interface_class(token=service.token,
user_profile=service.user_profile,
service_name=service.name)
return service_interface
def send_response_message(bot_id: str, message_info: Dict[str, Any], response_data: Dict[str, Any]) -> None:
"""
bot_id is the user_id of the bot sending the response
message_info is used to address the message and should have these fields:
type - "stream" or "private"
display_recipient - like we have in other message events
topic - see get_topic_from_message_info
response_data is what the bot wants to send back and has these fields:
content - raw markdown content for Zulip to render
"""
message_type = message_info['type']
display_recipient = message_info['display_recipient']
try:
topic_name = get_topic_from_message_info(message_info)
except KeyError:
topic_name = None
bot_user = get_user_profile_by_id(bot_id)
realm = bot_user.realm
client = get_client('OutgoingWebhookResponse')
content = response_data.get('content')
if not content:
raise JsonableError(_("Missing content"))
widget_content = response_data.get('widget_content')
if message_type == 'stream':
message_to = [display_recipient]
elif message_type == 'private':
message_to = [recipient['email'] for recipient in display_recipient]
else:
raise JsonableError(_("Invalid message type"))
check_send_message(
sender=bot_user,
client=client,
message_type_name=message_type,
message_to=message_to,
topic_name=topic_name,
message_content=content,
widget_content=widget_content,
realm=realm,
)
def fail_with_message(event: Dict[str, Any], failure_message: str) -> None:
bot_id = event['user_profile_id']
message_info = event['message']
content = "Failure! " + failure_message
response_data = dict(content=content)
send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
def get_message_url(event: Dict[str, Any]) -> str:
bot_user = get_user_profile_by_id(event['user_profile_id'])
message = event['message']
realm = bot_user.realm
return near_message_url(
realm=realm,
message=message,
)
def notify_bot_owner(event: Dict[str, Any],
request_data: Dict[str, Any],
status_code: Optional[int]=None,
response_content: Optional[AnyStr]=None,
failure_message: Optional[str]=None,
exception: Optional[Exception]=None) -> None:
message_url = get_message_url(event)
bot_id = event['user_profile_id']
bot_owner = get_user_profile_by_id(bot_id).bot_owner
notification_message = "[A message](%s) triggered an outgoing webhook." % (message_url,)
if failure_message:
notification_message += "\n" + failure_message
if status_code:
notification_message += "\nThe webhook got a response with status code *%s*." % (status_code,)
if response_content:
notification_message += "\nThe response contains the following payload:\n" \
"```\n%s\n```" % (response_content,)
if exception:
notification_message += "\nWhen trying to send a request to the webhook service, an exception " \
"of type %s occurred:\n```\n%s\n```" % (
type(exception).__name__, str(exception))
message_info = dict(
type='private',
display_recipient=[dict(email=bot_owner.email)],
)
response_data = dict(content=notification_message)
send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
def request_retry(event: Dict[str, Any],
request_data: Dict[str, Any],
failure_message: Optional[str]=None) -> None:
def failure_processor(event: Dict[str, Any]) -> None:
"""
The name of the argument is 'event' on purpose. This argument will hide
the 'event' argument of the request_retry function. Keeping the same name
results in a smaller diff.
"""
bot_user = get_user_profile_by_id(event['user_profile_id'])
fail_with_message(event, "Bot is unavailable")
notify_bot_owner(event, request_data, failure_message=failure_message)
logging.warning("Maximum retries exceeded for trigger:%s event:%s" % (
bot_user.email, event['command']))
retry_event('outgoing_webhooks', event, failure_processor)
def process_success_response(event: Dict[str, Any],
service_handler: Any,
response: Response) -> None:
try:
response_json = json.loads(response.text)
except ValueError:
fail_with_message(event, "Invalid JSON in response")
return
success_data = service_handler.process_success(response_json, event)
if success_data is None:
return
content = success_data.get('content')
if content is None:
return
widget_content = success_data.get('widget_content')
bot_id = event['user_profile_id']
message_info = event['message']
response_data = dict(content=content, widget_content=widget_content)
send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
def do_rest_call(base_url: str,
request_data: Any,
event: Dict[str, Any],
service_handler: Any) -> None:
try:
response = service_handler.send_data_to_server(
base_url=base_url,
request_data=request_data,
)
if str(response.status_code).startswith('2'):
process_success_response(event, service_handler, response)
else:
logging.warning("Message %(message_url)s triggered an outgoing webhook, returning status "
"code %(status_code)s.\n Content of response (in quotes): \""
"%(response)s\""
% {'message_url': get_message_url(event),
'status_code': response.status_code,
'response': response.content})
failure_message = "Third party responded with %d" % (response.status_code)
fail_with_message(event, failure_message)
notify_bot_owner(event, request_data, response.status_code, response.content)
except requests.exceptions.Timeout:
logging.info("Trigger event %s on %s timed out. Retrying" % (
event["command"], event['service_name']))
failure_message = "A timeout occurred."
request_retry(event, request_data, failure_message=failure_message)
except requests.exceptions.ConnectionError:
logging.info("Trigger event %s on %s resulted in a connection error. Retrying"
% (event["command"], event['service_name']))
failure_message = "A connection error occurred. Is my bot server down?"
request_retry(event, request_data, failure_message=failure_message)
except requests.exceptions.RequestException as e:
response_message = ("An exception of type *%s* occurred for message `%s`! "
"See the Zulip server logs for more information." % (
type(e).__name__, event["command"],))
logging.exception("Outhook trigger failed:\n %s" % (e,))
fail_with_message(event, response_message)
notify_bot_owner(event, request_data, exception=e)
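# Illustrative end-to-end flow, as a sketch: `service` and `base_url` are
# assumed to be supplied by the queue worker that consumes outgoing-webhook
# events; neither is defined in this file.
#
#     service_handler = get_outgoing_webhook_service_handler(service)
#     request_data = service_handler.build_bot_request(event)
#     if request_data:
#         do_rest_call(base_url, request_data, event, service_handler)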
| [
"str",
"UserProfile",
"str",
"Dict[str, Any]",
"str",
"Any",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"str",
"Any",
"Dict[str, Any]",
"Dict[str, Any]",
"str",
"Service",
"str",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"str",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"Dict[str, Any]",
"Any",
"Response",
"str",
"Any",
"Dict[str, Any]",
"Any"
] | [
832,
851,
878,
1159,
1569,
1616,
1849,
1896,
2734,
3872,
3919,
4080,
4127,
4572,
4866,
5259,
5278,
5309,
6844,
6877,
7176,
7433,
7484,
8918,
8966,
9079,
9754,
9816,
9860,
10568,
10604,
10633,
10683
] | [
835,
862,
881,
1173,
1572,
1619,
1863,
1910,
2748,
3875,
3922,
4094,
4141,
4575,
4873,
5262,
5292,
5323,
6858,
6880,
7190,
7447,
7498,
8932,
8980,
9093,
9768,
9819,
9868,
10571,
10607,
10647,
10686
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/parallel.py | from typing import Dict, Iterable, Tuple, Callable, TypeVar, Iterator
import os
import pty
import sys
import errno
JobData = TypeVar('JobData')
def run_parallel(job: Callable[[JobData], int],
data: Iterable[JobData],
threads: int=6) -> Iterator[Tuple[int, JobData]]:
pids = {} # type: Dict[int, JobData]
def wait_for_one() -> Tuple[int, JobData]:
while True:
try:
(pid, status) = os.wait()
return status, pids.pop(pid)
except KeyError:
pass
for item in data:
pid = os.fork()
if pid == 0:
sys.stdin.close()
try:
os.close(pty.STDIN_FILENO)
except OSError as e:
if e.errno != errno.EBADF:
raise
sys.stdin = open("/dev/null", "r")
os._exit(job(item))
pids[pid] = item
threads = threads - 1
if threads == 0:
(status, item) = wait_for_one()
threads += 1
yield (status, item)
if status != 0:
# Stop if any error occurred
break
while True:
try:
(status, item) = wait_for_one()
yield (status, item)
except OSError as e:
if e.errno == errno.ECHILD:
break
else:
raise
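# Illustrative semantics, assuming a hypothetical job function
# my_job: Callable[[int], int] that returns an exit status:
#
#     for status, item in run_parallel(my_job, [1, 2, 3], threads=2):
#         ...  # yields (exit_status, job_data) as each forked child finishes
#
# A non-zero status stops dispatching new jobs, though children already
# forked are still reaped by the trailing loop above.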
if __name__ == "__main__":
# run some unit tests
import time
jobs = [10, 19, 18, 6, 14, 12, 8, 2, 1, 13, 3, 17, 9, 11, 5, 16, 7, 15, 4]
expected_output = [6, 10, 12, 2, 1, 14, 8, 3, 18, 19, 5, 9, 13, 11, 4, 7, 17, 16, 15]
def wait_and_print(x: int) -> int:
time.sleep(x * 0.1)
return 0
output = []
for (status, job) in run_parallel(wait_and_print, jobs):
output.append(job)
if output == expected_output:
print("Successfully passed test!")
else:
print("Failed test!")
print(jobs)
print(expected_output)
print(output)
| [
"Callable[[JobData], int]",
"Iterable[JobData]",
"int"
] | [
169,
218,
1690
] | [
193,
235,
1693
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/profile.py |
import cProfile
from functools import wraps
from typing import Any, TypeVar, Callable
ReturnT = TypeVar('ReturnT')
def profiled(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
"""
This decorator should obviously be used only in a dev environment.
It works best when surrounding a function that you expect to be
called once. One strategy is to write a backend test and wrap the
test case with the profiled decorator.
You can run a single test case like this:
# edit zerver/tests/test_external.py and place @profiled above the test case below
./tools/test-backend zerver.tests.test_external.RateLimitTests.test_ratelimit_decrease
Then view the results like this:
./tools/show-profile-results test_ratelimit_decrease.profile
"""
@wraps(func)
def wrapped_func(*args: Any, **kwargs: Any) -> ReturnT:
fn = func.__name__ + ".profile"
prof = cProfile.Profile()
retval = prof.runcall(func, *args, **kwargs) # type: ReturnT
prof.dump_stats(fn)
return retval
return wrapped_func
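# Illustrative usage with a hypothetical function (not part of this module):
#
#     @profiled
#     def slow_sum() -> int:
#         return sum(range(10 ** 6))
#
# Calling slow_sum() writes "slow_sum.profile" to the current working
# directory, which ./tools/show-profile-results can then display.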
| [
"Callable[..., ReturnT]",
"Any",
"Any"
] | [
138,
846,
861
] | [
160,
849,
864
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/push_notifications.py | # -*- coding: utf-8 -*-
import base64
import binascii
from functools import partial
import logging
import lxml.html as LH
import os
import re
import time
import random
from typing import Any, Dict, List, Optional, SupportsInt, Tuple, Type, Union, cast
from django.conf import settings
from django.db import IntegrityError, transaction
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from gcm import GCM
import requests
import urllib
import ujson
from zerver.decorator import statsd_increment
from zerver.lib.avatar import absolute_avatar_url
from zerver.lib.exceptions import ErrorCode, JsonableError
from zerver.lib.message import access_message, huddle_users
from zerver.lib.queue import retry_event
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.utils import generate_random_token
from zerver.models import PushDeviceToken, Message, Recipient, UserProfile, \
UserMessage, get_display_recipient, receives_offline_push_notifications, \
receives_online_notifications, receives_stream_notifications, get_user_profile_by_id
from version import ZULIP_VERSION
if settings.ZILENCER_ENABLED:
from zilencer.models import RemotePushDeviceToken
else: # nocoverage -- Not convenient to add test for this.
from mock import Mock
RemotePushDeviceToken = Mock() # type: ignore # https://github.com/JukkaL/mypy/issues/1188
DeviceToken = Union[PushDeviceToken, RemotePushDeviceToken]
# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data: bytes) -> str:
return binascii.hexlify(base64.b64decode(data)).decode('utf-8')
def hex_to_b64(data: str) -> bytes:
return base64.b64encode(binascii.unhexlify(data.encode('utf-8')))
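# Illustrative round trip (values are examples only):
#
#     hex_to_b64('deadbeef')   # -> b'3q2+7w=='
#     b64_to_hex(b'3q2+7w==')  # -> 'deadbeef'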
#
# Sending to APNs, for iOS
#
_apns_client = None # type: Optional[Any]
_apns_client_initialized = False
def get_apns_client() -> Any:
# We lazily do this import as part of optimizing Zulip's base
# import time.
from apns2.client import APNsClient
global _apns_client, _apns_client_initialized
if not _apns_client_initialized:
# NB if called concurrently, this will make excess connections.
# That's a little sloppy, but harmless unless a server gets
# hammered with a ton of these all at once after startup.
if settings.APNS_CERT_FILE is not None:
_apns_client = APNsClient(credentials=settings.APNS_CERT_FILE,
use_sandbox=settings.APNS_SANDBOX)
_apns_client_initialized = True
return _apns_client
def apns_enabled() -> bool:
client = get_apns_client()
return client is not None
def modernize_apns_payload(data: Dict[str, Any]) -> Dict[str, Any]:
'''Take a payload in an unknown Zulip version's format, and return in current format.'''
# TODO this isn't super robust as is -- if a buggy remote server
# sends a malformed payload, we are likely to raise an exception.
if 'message_ids' in data:
# The format sent by 1.6.0, from the earliest pre-1.6.0
# version with bouncer support up until 613d093d7 pre-1.7.0:
# 'alert': str, # just sender, and text about PM/group-PM/mention
# 'message_ids': List[int], # always just one
return {
'alert': data['alert'],
'badge': 0,
'custom': {
'zulip': {
'message_ids': data['message_ids'],
},
},
}
else:
# Something already compatible with the current format.
# `alert` may be a string, or a dict with `title` and `body`.
# In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
# item in `custom.zulip` is `message_ids`.
return data
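# Illustrative: a 1.6.0-era payload maps to the current shape like so
# (example values only):
#
#     modernize_apns_payload({'alert': 'New message', 'message_ids': [42]})
#     # -> {'alert': 'New message', 'badge': 0,
#     #     'custom': {'zulip': {'message_ids': [42]}}}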
APNS_MAX_RETRIES = 3
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id: int, devices: List[DeviceToken],
payload_data: Dict[str, Any], remote: bool=False) -> None:
# We lazily do the APNS imports as part of optimizing Zulip's base
# import time; since these are only needed in the push
# notification queue worker, it's best to only import them in the
# code that needs them.
from apns2.payload import Payload as APNsPayload
from apns2.client import APNsClient
from hyper.http20.exceptions import HTTP20Error
client = get_apns_client() # type: APNsClient
if client is None:
logging.warning("APNs: Dropping a notification because nothing configured. "
"Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
return
if remote:
DeviceTokenClass = RemotePushDeviceToken
else:
DeviceTokenClass = PushDeviceToken
logging.info("APNs: Sending notification for user %d to %d devices",
user_id, len(devices))
payload = APNsPayload(**modernize_apns_payload(payload_data))
expiration = int(time.time() + 24 * 3600)
retries_left = APNS_MAX_RETRIES
for device in devices:
# TODO obviously this should be made to actually use the async
def attempt_send() -> Optional[str]:
stream_id = client.send_notification_async(
device.token, payload, topic='org.zulip.Zulip',
expiration=expiration)
try:
return client.get_notification_result(stream_id)
except HTTP20Error as e:
logging.warning("APNs: HTTP error sending for user %d to device %s: %s",
user_id, device.token, e.__class__.__name__)
return None
result = attempt_send()
while result is None and retries_left > 0:
retries_left -= 1
result = attempt_send()
if result is None:
result = "HTTP error, retries exhausted"
if result[0] == "Unregistered":
# For some reason, "Unregistered" result values have a
# different format, as a tuple of the pair ("Unregistered", 12345132131).
result = result[0] # type: ignore # APNS API is inconsistent
if result == 'Success':
logging.info("APNs: Success sending for user %d to device %s",
user_id, device.token)
elif result in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
logging.info("APNs: Removing invalid/expired token %s (%s)" % (device.token, result))
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
else:
logging.warning("APNs: Failed to send for user %d to device %s: %s",
user_id, device.token, result)
#
# Sending to GCM, for Android
#
if settings.ANDROID_GCM_API_KEY: # nocoverage
gcm = GCM(settings.ANDROID_GCM_API_KEY)
else:
gcm = None
def gcm_enabled() -> bool: # nocoverage
return gcm is not None
def send_android_push_notification_to_user(user_profile: UserProfile, data: Dict[str, Any]) -> None:
devices = list(PushDeviceToken.objects.filter(user=user_profile,
kind=PushDeviceToken.GCM))
send_android_push_notification(devices, data)
@statsd_increment("android_push_notification")
def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, Any],
remote: bool=False) -> None:
if not gcm:
logging.warning("Skipping sending a GCM push notification since "
"PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset")
return
reg_ids = [device.token for device in devices]
if remote:
DeviceTokenClass = RemotePushDeviceToken
else:
DeviceTokenClass = PushDeviceToken
try:
res = gcm.json_request(registration_ids=reg_ids, data=data, retries=10)
except IOError as e:
logging.warning(str(e))
return
if res and 'success' in res:
for reg_id, msg_id in res['success'].items():
logging.info("GCM: Sent %s as %s" % (reg_id, msg_id))
# res.canonical will contain results when there are duplicate registrations for the same
# device. The "canonical" registration is the latest registration made by the device.
# Ref: http://developer.android.com/google/gcm/adv.html#canonical
if 'canonical' in res:
for reg_id, new_reg_id in res['canonical'].items():
if reg_id == new_reg_id:
# I'm not sure if this should happen. In any case, not really actionable.
logging.warning("GCM: Got canonical ref but it already matches our ID %s!" % (reg_id,))
elif not DeviceTokenClass.objects.filter(token=new_reg_id,
kind=DeviceTokenClass.GCM).count():
# This case shouldn't happen; any time we get a canonical ref it should have been
# previously registered in our system.
#
# That said, recovery is easy: just update the current PDT object to use the new ID.
logging.warning(
"GCM: Got canonical ref %s replacing %s but new ID not registered! Updating." %
(new_reg_id, reg_id))
DeviceTokenClass.objects.filter(
token=reg_id, kind=DeviceTokenClass.GCM).update(token=new_reg_id)
else:
# Since we know the new ID is registered in our system we can just drop the old one.
logging.info("GCM: Got canonical ref %s, dropping %s" % (new_reg_id, reg_id))
DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
if 'errors' in res:
for error, reg_ids in res['errors'].items():
if error in ['NotRegistered', 'InvalidRegistration']:
for reg_id in reg_ids:
logging.info("GCM: Removing %s" % (reg_id,))
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
else:
for reg_id in reg_ids:
logging.warning("GCM: Delivery to %s failed: %s" % (reg_id, error))
# python-gcm handles retrying of the unsent messages.
# Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
#
# Sending to a bouncer
#
def uses_notification_bouncer() -> bool:
return settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
def send_notifications_to_bouncer(user_profile_id: int,
apns_payload: Dict[str, Any],
gcm_payload: Dict[str, Any]) -> None:
post_data = {
'user_id': user_profile_id,
'apns_payload': apns_payload,
'gcm_payload': gcm_payload,
}
# Calls zilencer.views.remote_server_notify_push
send_json_to_push_bouncer('POST', 'notify', post_data)
def send_json_to_push_bouncer(method: str, endpoint: str, post_data: Dict[str, Any]) -> None:
send_to_push_bouncer(
method,
endpoint,
ujson.dumps(post_data),
extra_headers={"Content-type": "application/json"},
)
class PushNotificationBouncerException(Exception):
pass
def send_to_push_bouncer(method: str,
endpoint: str,
post_data: Union[str, Dict[str, Any]],
extra_headers: Optional[Dict[str, Any]]=None) -> None:
"""While it does actually send the notice, this function has a lot of
code and comments around error handling for the push notifications
bouncer. There are several classes of failures, each with its own
potential solution:
* Network errors with requests.request. We let those happen normally.
* 500 errors from the push bouncer or other unexpected responses;
we don't try to parse the response, but do make clear the cause.
* 400 errors from the push bouncer. Here there are 2 categories:
Our server failed to connect to the push bouncer (should throw)
vs. client-side errors like an invalid token.
"""
url = urllib.parse.urljoin(settings.PUSH_NOTIFICATION_BOUNCER_URL,
'/api/v1/remotes/push/' + endpoint)
api_auth = requests.auth.HTTPBasicAuth(settings.ZULIP_ORG_ID,
settings.ZULIP_ORG_KEY)
headers = {"User-agent": "ZulipServer/%s" % (ZULIP_VERSION,)}
if extra_headers is not None:
headers.update(extra_headers)
res = requests.request(method,
url,
data=post_data,
auth=api_auth,
timeout=30,
verify=True,
headers=headers)
if res.status_code >= 500:
# 500s should be resolved by the people who run the push
# notification bouncer service, since they'll get an email
# too. For now we email the server admin, but we'll likely
# want to do some sort of retry logic eventually.
raise PushNotificationBouncerException(
_("Received 500 from push notification bouncer"))
elif res.status_code >= 400:
# If JSON parsing errors, just let that exception happen
result_dict = ujson.loads(res.content)
msg = result_dict['msg']
if 'code' in result_dict and result_dict['code'] == 'INVALID_ZULIP_SERVER':
# Invalid Zulip server credentials should email this server's admins
raise PushNotificationBouncerException(
_("Push notifications bouncer error: %s") % (msg,))
else:
# But most other errors coming from the push bouncer
# server are client errors (e.g. never-registered token)
# and should be handled as such.
raise JsonableError(msg)
elif res.status_code != 200:
# Anything else is unexpected and likely suggests a bug in
# this version of Zulip, so we throw an exception that will
# email the server admins.
raise PushNotificationBouncerException(
"Push notification bouncer returned unexpected status code %s" % (res.status_code,))
# If we don't throw an exception, it's a successful bounce!
#
# Managing device tokens
#
def num_push_devices_for_user(user_profile: UserProfile, kind: Optional[int]=None) -> int:
if kind is None:
return PushDeviceToken.objects.filter(user=user_profile).count()
else:
return PushDeviceToken.objects.filter(user=user_profile, kind=kind).count()
def add_push_device_token(user_profile: UserProfile,
token_str: bytes,
kind: int,
ios_app_id: Optional[str]=None) -> None:
logging.info("Registering push device: %d %r %d %r",
user_profile.id, token_str, kind, ios_app_id)
# If we're sending things to the push notification bouncer
# register this user with them here
if uses_notification_bouncer():
post_data = {
'server_uuid': settings.ZULIP_ORG_ID,
'user_id': user_profile.id,
'token': token_str,
'token_kind': kind,
}
if kind == PushDeviceToken.APNS:
post_data['ios_app_id'] = ios_app_id
logging.info("Sending new push device to bouncer: %r", post_data)
# Calls zilencer.views.register_remote_push_device
send_to_push_bouncer('POST', 'register', post_data)
return
try:
with transaction.atomic():
PushDeviceToken.objects.create(
user_id=user_profile.id,
kind=kind,
token=token_str,
ios_app_id=ios_app_id,
# last_updated is to be renamed to date_created.
last_updated=timezone_now())
except IntegrityError:
pass
def remove_push_device_token(user_profile: UserProfile, token_str: bytes, kind: int) -> None:
# If we're sending things to the push notification bouncer
# unregister this user with them here
if uses_notification_bouncer():
# TODO: Make this a remove item
post_data = {
'server_uuid': settings.ZULIP_ORG_ID,
'user_id': user_profile.id,
'token': token_str,
'token_kind': kind,
}
# Calls zilencer.views.unregister_remote_push_device
send_to_push_bouncer("POST", "unregister", post_data)
return
try:
token = PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile)
token.delete()
except PushDeviceToken.DoesNotExist:
raise JsonableError(_("Token does not exist"))
#
# Push notifications in general
#
def push_notifications_enabled() -> bool:
'''True just if this server has configured a way to send push notifications.'''
if (uses_notification_bouncer()
and settings.ZULIP_ORG_KEY is not None
and settings.ZULIP_ORG_ID is not None): # nocoverage
# We have the needed configuration to send push notifications through
# the bouncer. Better yet would be to confirm that this config actually
# works -- e.g., that we have ever successfully sent to the bouncer --
# but this is a good start.
return True
if apns_enabled() and gcm_enabled(): # nocoverage
# We have the needed configuration to send through APNs and GCM directly
# (i.e., we are the bouncer, presumably.) Again, assume it actually works.
return True
return False
def get_gcm_alert(message: Message) -> str:
"""
Determine what alert string to display based on the missed messages.
"""
sender_str = message.sender.full_name
if message.recipient.type == Recipient.HUDDLE and message.trigger == 'private_message':
return "New private group message from %s" % (sender_str,)
elif message.recipient.type == Recipient.PERSONAL and message.trigger == 'private_message':
return "New private message from %s" % (sender_str,)
elif message.is_stream_message() and message.trigger == 'mentioned':
return "New mention from %s" % (sender_str,)
else: # message.is_stream_message() and message.trigger == 'stream_push_notify'
return "New stream message from %s in %s" % (sender_str, get_display_recipient(message.recipient),)
def get_mobile_push_content(rendered_content: str) -> str:
def get_text(elem: LH.HtmlElement) -> str:
# Convert default emojis to their unicode equivalent.
classes = elem.get("class", "")
if "emoji" in classes:
match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
if match:
emoji_code = match.group('emoji_code')
char_repr = ""
for codepoint in emoji_code.split('-'):
char_repr += chr(int(codepoint, 16))
return char_repr
# Handles realm emojis, avatars etc.
if elem.tag == "img":
return elem.get("alt", "")
if elem.tag == 'blockquote':
return '' # To avoid empty line before quote text
return elem.text or ''
def format_as_quote(quote_text: str) -> str:
quote_text_list = filter(None, quote_text.split('\n')) # Remove empty lines
quote_text = '\n'.join(map(lambda x: "> "+x, quote_text_list))
quote_text += '\n'
return quote_text
def process(elem: LH.HtmlElement) -> str:
plain_text = get_text(elem)
sub_text = ''
for child in elem:
sub_text += process(child)
if elem.tag == 'blockquote':
sub_text = format_as_quote(sub_text)
plain_text += sub_text
plain_text += elem.tail or ""
return plain_text
if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
return "***REDACTED***"
else:
elem = LH.fromstring(rendered_content)
plain_text = process(elem)
return plain_text
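# Illustrative conversion (assumes PUSH_NOTIFICATION_REDACT_CONTENT is off):
#
#     get_mobile_push_content('<p>Hello <span class="emoji emoji-1f44d"></span></p>')
#     # -> 'Hello 👍'  (the emoji span becomes its unicode character, U+1F44D)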
def truncate_content(content: str) -> Tuple[str, bool]:
# We use unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
# of three dots as this saves two extra characters for textual
# content. This function will need to be updated to handle unicode
# combining characters and tags when we start supporting them.
if len(content) <= 200:
return content, False
return content[:200] + "…", True
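# Illustrative: truncate_content('a' * 250) returns ('a' * 200 + '…', True);
# content of 200 characters or fewer comes back unchanged, paired with False.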
def get_common_payload(message: Message) -> Dict[str, Any]:
data = {} # type: Dict[str, Any]
# These will let the app support logging into multiple realms and servers.
data['server'] = settings.EXTERNAL_HOST
data['realm_id'] = message.sender.realm.id
data['realm_uri'] = message.sender.realm.uri
# `sender_id` is preferred, but some existing versions use `sender_email`.
data['sender_id'] = message.sender.id
data['sender_email'] = message.sender.email
if message.recipient.type == Recipient.STREAM:
data['recipient_type'] = "stream"
data['stream'] = get_display_recipient(message.recipient)
data['topic'] = message.topic_name()
elif message.recipient.type == Recipient.HUDDLE:
data['recipient_type'] = "private"
data['pm_users'] = huddle_users(message.recipient.id)
else: # Recipient.PERSONAL
data['recipient_type'] = "private"
return data
def get_apns_alert_title(message: Message) -> str:
"""
On an iOS notification, this is the first bolded line.
"""
if message.recipient.type == Recipient.HUDDLE:
recipients = cast(List[Dict[str, Any]], get_display_recipient(message.recipient))
return ', '.join(sorted(r['full_name'] for r in recipients))
elif message.is_stream_message():
return "#%s > %s" % (get_display_recipient(message.recipient), message.topic_name(),)
# For personal PMs, we just show the sender name.
return message.sender.full_name
def get_apns_alert_subtitle(message: Message) -> str:
"""
On an iOS notification, this is the second bolded line.
"""
if message.trigger == "mentioned":
return message.sender.full_name + " mentioned you:"
elif message.recipient.type == Recipient.PERSONAL:
return ""
# For group PMs, or regular messages to a stream, just use a colon to indicate this is the sender.
return message.sender.full_name + ":"
def get_apns_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
zulip_data = get_common_payload(message)
zulip_data.update({
'message_ids': [message.id],
})
content, _ = truncate_content(get_mobile_push_content(message.rendered_content))
apns_data = {
'alert': {
'title': get_apns_alert_title(message),
'subtitle': get_apns_alert_subtitle(message),
'body': content,
},
'sound': 'default',
'badge': 0, # TODO: set badge count in a better way
'custom': {'zulip': zulip_data},
}
return apns_data
def get_gcm_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
data = get_common_payload(message)
content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
data.update({
'user': user_profile.email,
'event': 'message',
'alert': get_gcm_alert(message),
'zulip_message_id': message.id, # message_id is reserved for CCS
'time': datetime_to_timestamp(message.pub_date),
'content': content,
'content_truncated': truncated,
'sender_full_name': message.sender.full_name,
'sender_avatar_url': absolute_avatar_url(message.sender),
})
return data
def handle_remove_push_notification(user_profile_id: int, message_id: int) -> None:
"""This should be called when a message that had previously had a
mobile push notification executed is read. This triggers a push to the
mobile app when the message is read on the server, to remove the
message from the notification.
"""
user_profile = get_user_profile_by_id(user_profile_id)
message, user_message = access_message(user_profile, message_id)
if not settings.SEND_REMOVE_PUSH_NOTIFICATIONS:
# It's a little annoying that we duplicate this flag-clearing
# code (also present below), but this block is scheduled to be
# removed in a few weeks, once the app has supported the
# feature for long enough.
user_message.flags.active_mobile_push_notification = False
user_message.save(update_fields=["flags"])
return
gcm_payload = get_common_payload(message)
gcm_payload.update({
'event': 'remove',
'zulip_message_id': message_id, # message_id is reserved for CCS
})
if uses_notification_bouncer():
try:
send_notifications_to_bouncer(user_profile_id,
{},
gcm_payload)
except requests.ConnectionError: # nocoverage
def failure_processor(event: Dict[str, Any]) -> None:
logging.warning(
"Maximum retries exceeded for trigger:%s event:push_notification" % (
event['user_profile_id']))
return
android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
kind=PushDeviceToken.GCM))
if android_devices:
send_android_push_notification(android_devices, gcm_payload)
user_message.flags.active_mobile_push_notification = False
user_message.save(update_fields=["flags"])
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
"""
missed_message is the event received by the
zerver.worker.queue_processors.PushNotificationWorker.consume function.
"""
user_profile = get_user_profile_by_id(user_profile_id)
if not (receives_offline_push_notifications(user_profile) or
receives_online_notifications(user_profile)):
return
user_profile = get_user_profile_by_id(user_profile_id)
(message, user_message) = access_message(user_profile, missed_message['message_id'])
if user_message is not None:
# If the user has read the message already, don't push-notify.
#
# TODO: It feels like this is already handled when things are
# put in the queue; maybe we should centralize this logic with
# the `zerver/tornado/event_queue.py` logic?
if user_message.flags.read:
return
# Otherwise, we mark the message as having an active mobile
# push notification, so that we can send revocation messages
# later.
user_message.flags.active_mobile_push_notification = True
user_message.save(update_fields=["flags"])
else:
# Users should only be getting push notifications into this
# queue for messages they haven't received if they're
# long-term idle; anything else is likely a bug.
if not user_profile.long_term_idle:
logging.error("Could not find UserMessage with message_id %s and user_id %s" % (
missed_message['message_id'], user_profile_id))
return
message.trigger = missed_message['trigger']
apns_payload = get_apns_payload(user_profile, message)
gcm_payload = get_gcm_payload(user_profile, message)
logging.info("Sending push notification to user %s" % (user_profile_id,))
if uses_notification_bouncer():
try:
send_notifications_to_bouncer(user_profile_id,
apns_payload,
gcm_payload)
except requests.ConnectionError:
def failure_processor(event: Dict[str, Any]) -> None:
logging.warning(
"Maximum retries exceeded for trigger:%s event:push_notification" % (
event['user_profile_id']))
retry_event('missedmessage_mobile_notifications', missed_message,
failure_processor)
return
android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
kind=PushDeviceToken.GCM))
apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
kind=PushDeviceToken.APNS))
if apple_devices:
send_apple_push_notification(user_profile.id, apple_devices,
apns_payload)
if android_devices:
send_android_push_notification(android_devices, gcm_payload)
| [
"bytes",
"str",
"Dict[str, Any]",
"int",
"List[DeviceToken]",
"Dict[str, Any]",
"UserProfile",
"Dict[str, Any]",
"List[DeviceToken]",
"Dict[str, Any]",
"int",
"Dict[str, Any]",
"Dict[str, Any]",
"str",
"str",
"Dict[str, Any]",
"str",
"str",
"Union[str, Dict[str, Any]]",
"UserProfile",
"UserProfile",
"bytes",
"int",
"UserProfile",
"bytes",
"int",
"Message",
"str",
"LH.HtmlElement",
"str",
"LH.HtmlElement",
"str",
"Message",
"Message",
"Message",
"UserProfile",
"Message",
"UserProfile",
"Message",
"int",
"int",
"Dict[str, Any]",
"int",
"Dict[str, Any]",
"Dict[str, Any]"
] | [
1586,
1691,
2718,
3914,
3928,
3994,
7152,
7171,
7484,
7509,
10852,
10905,
10968,
11278,
11293,
11309,
11587,
11627,
11668,
14704,
14992,
15042,
15081,
16322,
16346,
16359,
17995,
18825,
18861,
19623,
19868,
20429,
20864,
21810,
22372,
22818,
22840,
23446,
23468,
24147,
24164,
25474,
26143,
26164,
28274
] | [
1591,
1694,
2732,
3917,
3945,
4008,
7163,
7185,
7501,
7523,
10855,
10919,
10982,
11281,
11296,
11323,
11590,
11630,
11694,
14715,
15003,
15047,
15084,
16333,
16351,
16362,
18002,
18828,
18875,
19626,
19882,
20432,
20871,
21817,
22379,
22829,
22847,
23457,
23475,
24150,
24167,
25488,
26146,
26178,
28288
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/queue.py |
from collections import defaultdict
import logging
import random
import threading
import time
from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Union
from django.conf import settings
import pika
from pika.adapters.blocking_connection import BlockingChannel
from pika.spec import Basic
from tornado import ioloop
import ujson
from zerver.lib.utils import statsd
MAX_REQUEST_RETRIES = 3
Consumer = Callable[[BlockingChannel, Basic.Deliver, pika.BasicProperties, str], None]
# This simple queuing library doesn't expose much of the power of
# rabbitmq/pika's queuing system; its purpose is to just provide an
# interface for external files to put things into queues and take them
# out from bots without having to import pika code all over our codebase.
class SimpleQueueClient:
def __init__(self,
# Disable RabbitMQ heartbeats by default because BlockingConnection can't process them
rabbitmq_heartbeat: Optional[int] = 0,
) -> None:
self.log = logging.getLogger('zulip.queue')
self.queues = set() # type: Set[str]
self.channel = None # type: Optional[BlockingChannel]
self.consumers = defaultdict(set) # type: Dict[str, Set[Consumer]]
self.rabbitmq_heartbeat = rabbitmq_heartbeat
self._connect()
def _connect(self) -> None:
start = time.time()
self.connection = pika.BlockingConnection(self._get_parameters())
self.channel = self.connection.channel()
self.log.info('SimpleQueueClient connected (connecting took %.3fs)' % (time.time() - start,))
def _reconnect(self) -> None:
self.connection = None
self.channel = None
self.queues = set()
self._connect()
def _get_parameters(self) -> pika.ConnectionParameters:
# We explicitly disable the RabbitMQ heartbeat feature, since
# it doesn't make sense with BlockingConnection
credentials = pika.PlainCredentials(settings.RABBITMQ_USERNAME,
settings.RABBITMQ_PASSWORD)
return pika.ConnectionParameters(settings.RABBITMQ_HOST,
heartbeat_interval=self.rabbitmq_heartbeat,
credentials=credentials)
def _generate_ctag(self, queue_name: str) -> str:
return "%s_%s" % (queue_name, str(random.getrandbits(16)))
def _reconnect_consumer_callback(self, queue: str, consumer: Consumer) -> None:
self.log.info("Queue reconnecting saved consumer %s to queue %s" % (consumer, queue))
self.ensure_queue(queue, lambda: self.channel.basic_consume(consumer,
queue=queue,
consumer_tag=self._generate_ctag(queue)))
def _reconnect_consumer_callbacks(self) -> None:
for queue, consumers in self.consumers.items():
for consumer in consumers:
self._reconnect_consumer_callback(queue, consumer)
def close(self) -> None:
if self.connection:
self.connection.close()
def ready(self) -> bool:
return self.channel is not None
def ensure_queue(self, queue_name: str, callback: Callable[[], None]) -> None:
'''Ensure that a given queue has been declared, and then call
the callback with no arguments.'''
if self.connection is None or not self.connection.is_open:
self._connect()
if queue_name not in self.queues:
self.channel.queue_declare(queue=queue_name, durable=True)
self.queues.add(queue_name)
callback()
def publish(self, queue_name: str, body: str) -> None:
def do_publish() -> None:
self.channel.basic_publish(
exchange='',
routing_key=queue_name,
properties=pika.BasicProperties(delivery_mode=2),
body=body)
statsd.incr("rabbitmq.publish.%s" % (queue_name,))
self.ensure_queue(queue_name, do_publish)
def json_publish(self, queue_name: str, body: Union[Mapping[str, Any], str]) -> None:
# Union because of zerver.middleware.write_log_line uses a str
try:
self.publish(queue_name, ujson.dumps(body))
return
except pika.exceptions.AMQPConnectionError:
self.log.warning("Failed to send to rabbitmq, trying to reconnect and send again")
self._reconnect()
self.publish(queue_name, ujson.dumps(body))
def register_consumer(self, queue_name: str, consumer: Consumer) -> None:
def wrapped_consumer(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
try:
consumer(ch, method, properties, body)
ch.basic_ack(delivery_tag=method.delivery_tag)
except Exception as e:
ch.basic_nack(delivery_tag=method.delivery_tag)
raise e
self.consumers[queue_name].add(wrapped_consumer)
self.ensure_queue(queue_name,
lambda: self.channel.basic_consume(wrapped_consumer, queue=queue_name,
consumer_tag=self._generate_ctag(queue_name)))
def register_json_consumer(self, queue_name: str,
callback: Callable[[Dict[str, Any]], None]) -> None:
def wrapped_callback(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
callback(ujson.loads(body))
self.register_consumer(queue_name, wrapped_callback)
def drain_queue(self, queue_name: str, json: bool=False) -> List[Dict[str, Any]]:
"Returns all messages in the desired queue"
messages = []
def opened() -> None:
while True:
(meta, _, message) = self.channel.basic_get(queue_name)
if not message:
break
self.channel.basic_ack(meta.delivery_tag)
if json:
message = ujson.loads(message)
messages.append(message)
self.ensure_queue(queue_name, opened)
return messages
def start_consuming(self) -> None:
self.channel.start_consuming()
def stop_consuming(self) -> None:
self.channel.stop_consuming()
# Patch pika.adapters.TornadoConnection so that a socket error doesn't
# throw an exception and disconnect the tornado process from the rabbitmq
# queue. Instead, just re-connect as usual
class ExceptionFreeTornadoConnection(pika.adapters.TornadoConnection):
def _adapter_disconnect(self) -> None:
try:
super()._adapter_disconnect()
except (pika.exceptions.ProbableAuthenticationError,
pika.exceptions.ProbableAccessDeniedError,
pika.exceptions.IncompatibleProtocolError) as e:
logging.warning("Caught exception '%r' in ExceptionFreeTornadoConnection when \
calling _adapter_disconnect, ignoring" % (e,))
class TornadoQueueClient(SimpleQueueClient):
# Based on:
# https://pika.readthedocs.io/en/0.9.8/examples/asynchronous_consumer_example.html
def __init__(self) -> None:
super().__init__(
# TornadoConnection can process heartbeats, so enable them.
rabbitmq_heartbeat=None)
self._on_open_cbs = [] # type: List[Callable[[], None]]
self._connection_failure_count = 0
def _connect(self) -> None:
self.log.info("Beginning TornadoQueueClient connection")
self.connection = ExceptionFreeTornadoConnection(
self._get_parameters(),
on_open_callback = self._on_open,
on_open_error_callback = self._on_connection_open_error,
on_close_callback = self._on_connection_closed,
)
def _reconnect(self) -> None:
self.connection = None
self.channel = None
self.queues = set()
self.log.warning("TornadoQueueClient attempting to reconnect to RabbitMQ")
self._connect()
CONNECTION_RETRY_SECS = 2
# When the RabbitMQ server is restarted, it's normal for it to
# take a few seconds to come back; we'll retry a few times and all
# will be well. So for the first few failures, we report only at
# "warning" level, avoiding an email to the server admin.
#
# A loss of an existing connection starts a retry loop just like a
# failed connection attempt, so it counts as the first failure.
#
# On an unloaded test system, a RabbitMQ restart takes about 6s,
# potentially causing 4 failures. We add some headroom above that.
CONNECTION_FAILURES_BEFORE_NOTIFY = 10
def _on_connection_open_error(self, connection: pika.connection.Connection,
message: Optional[str]=None) -> None:
self._connection_failure_count += 1
retry_secs = self.CONNECTION_RETRY_SECS
message = ("TornadoQueueClient couldn't connect to RabbitMQ, retrying in %d secs..."
% (retry_secs,))
if self._connection_failure_count > self.CONNECTION_FAILURES_BEFORE_NOTIFY:
self.log.critical(message)
else:
self.log.warning(message)
ioloop.IOLoop.instance().call_later(retry_secs, self._reconnect)
def _on_connection_closed(self, connection: pika.connection.Connection,
reply_code: int, reply_text: str) -> None:
self._connection_failure_count = 1
retry_secs = self.CONNECTION_RETRY_SECS
self.log.warning("TornadoQueueClient lost connection to RabbitMQ, reconnecting in %d secs..."
% (retry_secs,))
ioloop.IOLoop.instance().call_later(retry_secs, self._reconnect)
def _on_open(self, connection: pika.connection.Connection) -> None:
self._connection_failure_count = 0
try:
self.connection.channel(
on_open_callback = self._on_channel_open)
except pika.exceptions.ConnectionClosed:
# The connection didn't stay open long enough for this code to get to it.
# Let _on_connection_closed deal with trying again.
self.log.warning("TornadoQueueClient couldn't open channel: connection already closed")
def _on_channel_open(self, channel: BlockingChannel) -> None:
self.channel = channel
for callback in self._on_open_cbs:
callback()
self._reconnect_consumer_callbacks()
self.log.info('TornadoQueueClient connected')
def ensure_queue(self, queue_name: str, callback: Callable[[], None]) -> None:
def finish(frame: Any) -> None:
self.queues.add(queue_name)
callback()
if queue_name not in self.queues:
# If we're not connected yet, send this message
# once we have created the channel
if not self.ready():
self._on_open_cbs.append(lambda: self.ensure_queue(queue_name, callback))
return
self.channel.queue_declare(queue=queue_name, durable=True, callback=finish)
else:
callback()
def register_consumer(self, queue_name: str, consumer: Consumer) -> None:
def wrapped_consumer(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
consumer(ch, method, properties, body)
ch.basic_ack(delivery_tag=method.delivery_tag)
if not self.ready():
self.consumers[queue_name].add(wrapped_consumer)
return
self.consumers[queue_name].add(wrapped_consumer)
self.ensure_queue(queue_name,
lambda: self.channel.basic_consume(wrapped_consumer, queue=queue_name,
consumer_tag=self._generate_ctag(queue_name)))
queue_client = None # type: Optional[SimpleQueueClient]
def get_queue_client() -> SimpleQueueClient:
global queue_client
if queue_client is None:
if settings.RUNNING_INSIDE_TORNADO and settings.USING_RABBITMQ:
queue_client = TornadoQueueClient()
elif settings.USING_RABBITMQ:
queue_client = SimpleQueueClient()
return queue_client
# We use a simple lock to prevent multiple RabbitMQ messages from being
# sent to the SimpleQueueClient at the same time; this is a workaround
# for an issue with the pika BlockingConnection where using
# BlockingConnection for multiple queues causes the channel to
# randomly close.
queue_lock = threading.RLock()
def queue_json_publish(queue_name: str,
event: Union[Dict[str, Any], str],
processor: Callable[[Any], None]=None) -> None:
# most events are dicts, but zerver.middleware.write_log_line uses a str
with queue_lock:
if settings.USING_RABBITMQ:
get_queue_client().json_publish(queue_name, event)
elif processor:
processor(event)
else:
# Must be imported here: A top section import leads to obscure not-defined-ish errors.
from zerver.worker.queue_processors import get_worker
get_worker(queue_name).consume_wrapper(event)
def retry_event(queue_name: str,
event: Dict[str, Any],
failure_processor: Callable[[Dict[str, Any]], None]) -> None:
if 'failed_tries' not in event:
event['failed_tries'] = 0
event['failed_tries'] += 1
if event['failed_tries'] > MAX_REQUEST_RETRIES:
failure_processor(event)
else:
queue_json_publish(queue_name, event, lambda x: None)
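# Illustrative: each call bumps event['failed_tries']; once that exceeds
# MAX_REQUEST_RETRIES (3), failure_processor(event) runs instead of a requeue.
# `on_failure` below is a hypothetical Callable[[Dict[str, Any]], None]:
#
#     event = {'user_profile_id': 1}
#     retry_event('outgoing_webhooks', event, on_failure)  # requeued, failed_tries == 1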
| [
"str",
"str",
"Consumer",
"str",
"Callable[[], None]",
"str",
"str",
"str",
"Union[Mapping[str, Any], str]",
"str",
"Consumer",
"BlockingChannel",
"Basic.Deliver",
"pika.BasicProperties",
"str",
"str",
"Callable[[Dict[str, Any]], None]",
"BlockingChannel",
"Basic.Deliver",
"pika.BasicProperties",
"str",
"str",
"pika.connection.Connection",
"pika.connection.Connection",
"int",
"str",
"pika.connection.Connection",
"BlockingChannel",
"str",
"Callable[[], None]",
"Any",
"str",
"Consumer",
"BlockingChannel",
"Basic.Deliver",
"pika.BasicProperties",
"str",
"str",
"Union[Dict[str, Any], str]",
"str",
"Dict[str, Any]",
"Callable[[Dict[str, Any]], None]"
] | [
2350,
2481,
2496,
3298,
3313,
3761,
3772,
4177,
4188,
4658,
4673,
4725,
4779,
4835,
4892,
5515,
5561,
5637,
5691,
5747,
5804,
5958,
9072,
9690,
9760,
9777,
10135,
10663,
10925,
10940,
10995,
11539,
11554,
11606,
11660,
11716,
11773,
13043,
13078,
13693,
13721,
13772
] | [
2353,
2484,
2504,
3301,
3331,
3764,
3775,
4180,
4217,
4661,
4681,
4740,
4792,
4855,
4895,
5518,
5593,
5652,
5704,
5767,
5807,
5961,
9098,
9716,
9763,
9780,
10161,
10678,
10928,
10958,
10998,
11542,
11562,
11621,
11673,
11736,
11776,
13046,
13104,
13696,
13735,
13804
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/rate_limiter.py |
import os
from typing import Any, Iterator, List, Optional, Tuple
from django.conf import settings
from zerver.lib.redis_utils import get_redis_client
from zerver.models import UserProfile
import redis
import time
import logging
# Implement a rate-limiting scheme inspired by the one described here, but heavily modified
# http://blog.domaintools.com/2013/04/rate-limiting-with-redis/
client = get_redis_client()
rules = settings.RATE_LIMITING_RULES # type: List[Tuple[int, int]]
KEY_PREFIX = ''
class RateLimitedObject:
def get_keys(self) -> List[str]:
key_fragment = self.key_fragment()
return ["{}ratelimit:{}:{}".format(KEY_PREFIX, key_fragment, keytype)
for keytype in ['list', 'zset', 'block']]
def key_fragment(self) -> str:
raise NotImplementedError()
def rules(self) -> List[Tuple[int, int]]:
raise NotImplementedError()
class RateLimitedUser(RateLimitedObject):
def __init__(self, user: UserProfile, domain: str='all') -> None:
self.user = user
self.domain = domain
def key_fragment(self) -> str:
return "{}:{}:{}".format(type(self.user), self.user.id, self.domain)
def rules(self) -> List[Tuple[int, int]]:
if self.user.rate_limits != "":
result = [] # type: List[Tuple[int, int]]
for limit in self.user.rate_limits.split(','):
(seconds, requests) = limit.split(':', 2)
result.append((int(seconds), int(requests)))
return result
return rules
def bounce_redis_key_prefix_for_testing(test_name: str) -> None:
global KEY_PREFIX
KEY_PREFIX = test_name + ':' + str(os.getpid()) + ':'
def max_api_calls(entity: RateLimitedObject) -> int:
"Returns the API rate limit for the highest limit"
return entity.rules()[-1][1]
def max_api_window(entity: RateLimitedObject) -> int:
"Returns the API time window for the highest limit"
return entity.rules()[-1][0]
def add_ratelimit_rule(range_seconds: int, num_requests: int) -> None:
"Add a rate-limiting rule to the ratelimiter"
global rules
rules.append((range_seconds, num_requests))
rules.sort(key=lambda x: x[0])
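# Illustrative: add_ratelimit_rule(60, 100) allows at most 100 requests per
# rolling 60-second window; rules stay sorted by window length, shortest first.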
def remove_ratelimit_rule(range_seconds: int, num_requests: int) -> None:
global rules
rules = [x for x in rules if not (x[0] == range_seconds and x[1] == num_requests)]
def block_access(entity: RateLimitedObject, seconds: int) -> None:
"Manually blocks an entity for the desired number of seconds"
_, _, blocking_key = entity.get_keys()
with client.pipeline() as pipe:
pipe.set(blocking_key, 1)
pipe.expire(blocking_key, seconds)
pipe.execute()
def unblock_access(entity: RateLimitedObject) -> None:
_, _, blocking_key = entity.get_keys()
client.delete(blocking_key)
def clear_history(entity: RateLimitedObject) -> None:
'''
This is only used by test code now, where it's very helpful in
allowing us to run tests quickly, by giving a user a clean slate.
'''
for key in entity.get_keys():
client.delete(key)
def _get_api_calls_left(entity: RateLimitedObject, range_seconds: int, max_calls: int) -> Tuple[int, float]:
list_key, set_key, _ = entity.get_keys()
# Count the number of values in our sorted set
# that are between now and the cutoff
now = time.time()
boundary = now - range_seconds
with client.pipeline() as pipe:
# Count how many API calls in our range have already been made
pipe.zcount(set_key, boundary, now)
# Get the newest call so we can calculate when the ratelimit
# will reset to 0
pipe.lindex(list_key, 0)
results = pipe.execute()
count = results[0] # type: int
newest_call = results[1] # type: Optional[bytes]
calls_left = max_calls - count
if newest_call is not None:
time_reset = now + (range_seconds - (now - float(newest_call)))
else:
time_reset = now
return calls_left, time_reset
def api_calls_left(entity: RateLimitedObject) -> Tuple[int, float]:
"""Returns how many API calls in this range this client has, as well as when
the rate-limit will be reset to 0"""
max_window = max_api_window(entity)
max_calls = max_api_calls(entity)
return _get_api_calls_left(entity, max_window, max_calls)
def is_ratelimited(entity: RateLimitedObject) -> Tuple[bool, float]:
"Returns a tuple of (rate_limited, time_till_free)"
list_key, set_key, blocking_key = entity.get_keys()
rules = entity.rules()
if len(rules) == 0:
return False, 0.0
# Go through the rules from shortest to longest,
# seeing if this user has violated any of them. First
# get the timestamps for each nth items
with client.pipeline() as pipe:
for _, request_count in rules:
pipe.lindex(list_key, request_count - 1) # 0-indexed list
# Get blocking info
pipe.get(blocking_key)
pipe.ttl(blocking_key)
rule_timestamps = pipe.execute() # type: List[Optional[bytes]]
# Check if there is a manual block on this API key
blocking_ttl_b = rule_timestamps.pop()
key_blocked = rule_timestamps.pop()
if key_blocked is not None:
# We are manually blocked. Report for how much longer we will be
if blocking_ttl_b is None:
blocking_ttl = 0.5
else:
blocking_ttl = int(blocking_ttl_b)
return True, blocking_ttl
now = time.time()
for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules):
        # Check if the nth timestamp is still within this rule's window.
        # If so, we have hit the limit for this rule.
if timestamp is None:
continue
boundary = float(timestamp) + range_seconds
if boundary > now:
free = boundary - now
return True, free
    # Under the limit for every rule
return False, 0.0
def incr_ratelimit(entity: RateLimitedObject) -> None:
"""Increases the rate-limit for the specified entity"""
list_key, set_key, _ = entity.get_keys()
now = time.time()
# If we have no rules, we don't store anything
if len(rules) == 0:
return
# Start redis transaction
with client.pipeline() as pipe:
count = 0
while True:
try:
# To avoid a race condition between getting the element we might trim from our list
# and removing it from our associated set, we abort this whole transaction if
# another agent manages to change our list out from under us
# When watching a value, the pipeline is set to Immediate mode
pipe.watch(list_key)
# Get the last elem that we'll trim (so we can remove it from our sorted set)
last_val = pipe.lindex(list_key, max_api_calls(entity) - 1)
# Restart buffered execution
pipe.multi()
# Add this timestamp to our list
pipe.lpush(list_key, now)
# Trim our list to the oldest rule we have
pipe.ltrim(list_key, 0, max_api_calls(entity) - 1)
# Add our new value to the sorted set that we keep
                # We store the timestamp as both the score and the value,
                # since we sort by score but remove by value
pipe.zadd(set_key, now, now)
# Remove the trimmed value from our sorted set, if there was one
if last_val is not None:
pipe.zrem(set_key, last_val)
# Set the TTL for our keys as well
api_window = max_api_window(entity)
pipe.expire(list_key, api_window)
pipe.expire(set_key, api_window)
pipe.execute()
# If no exception was raised in the execution, there were no transaction conflicts
break
except redis.WatchError:
if count > 10:
logging.error("Failed to complete incr_ratelimit transaction without "
"interference 10 times in a row! Aborting rate-limit increment")
break
count += 1
continue
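# Illustrative sketch (not part of the original module): how the helpers above
# fit together around a single request. `entity` is any RateLimitedObject
# subclass instance; the error handling shown here is hypothetical, standing in
# for whatever HTTP 429 response the caller would construct.
def _example_check_and_record(entity: RateLimitedObject) -> None:
    ratelimited, time_till_free = is_ratelimited(entity)
    if ratelimited:
        # A real caller would translate this into an HTTP 429 response.
        raise Exception("Rate limited; retry in %.1f seconds" % (time_till_free,))
    # Record this request against every configured rule.
    incr_ratelimit(entity)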
| [
"UserProfile",
"str",
"RateLimitedObject",
"RateLimitedObject",
"int",
"int",
"int",
"int",
"RateLimitedObject",
"int",
"RateLimitedObject",
"RateLimitedObject",
"RateLimitedObject",
"int",
"int",
"RateLimitedObject",
"RateLimitedObject",
"RateLimitedObject"
] | [
974,
1601,
1722,
1865,
2020,
2039,
2246,
2265,
2403,
2431,
2718,
2848,
3123,
3157,
3173,
4038,
4372,
5991
] | [
985,
1604,
1739,
1882,
2023,
2042,
2249,
2268,
2420,
2434,
2735,
2865,
3140,
3160,
3176,
4055,
4389,
6008
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/realm_icon.py | from django.conf import settings
from zerver.lib.avatar_hash import gravatar_hash, user_avatar_hash
from zerver.lib.upload import upload_backend
from zerver.models import Realm
def realm_icon_url(realm: Realm) -> str:
return get_realm_icon_url(realm)
def get_realm_icon_url(realm: Realm) -> str:
if realm.icon_source == 'U':
return upload_backend.get_realm_icon_url(realm.id, realm.icon_version)
elif settings.ENABLE_GRAVATAR:
hash_key = gravatar_hash(realm.string_id)
return "https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,)
else:
return settings.DEFAULT_AVATAR_URI+'?version=0'
| [
"Realm",
"Realm"
] | [
205,
288
] | [
210,
293
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/redis_utils.py |
from django.conf import settings
import redis
def get_redis_client() -> redis.StrictRedis:
return redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
password=settings.REDIS_PASSWORD, db=0)
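# Illustrative sketch (not part of the original module): the returned client is
# a plain redis-py StrictRedis, so callers use its normal API, e.g. a pipeline
# for atomic multi-command updates (the key name here is hypothetical).
def _example_set_flag() -> None:
    client = get_redis_client()
    with client.pipeline() as pipe:
        pipe.set('example:flag', 1)
        pipe.expire('example:flag', 60)
        pipe.execute()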
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/request.py | # When adding new functions/classes to this file, you need to also add
# their types to request.pyi in this directory (a mypy stubs file that
# we use to ensure mypy does correct type inference with REQ, which it
# can't do by default due to the dynamic nature of REQ).
#
# Because request.pyi exists, the type annotations in this file are
# mostly not processed by mypy.
from functools import wraps
import ujson
from django.utils.translation import ugettext as _
from zerver.lib.exceptions import JsonableError, ErrorCode, \
InvalidJSONError
from django.http import HttpRequest, HttpResponse
from typing import Any, Callable, List, Optional, Type
class RequestConfusingParmsError(JsonableError):
code = ErrorCode.REQUEST_CONFUSING_VAR
data_fields = ['var_name1', 'var_name2']
def __init__(self, var_name1: str, var_name2: str) -> None:
self.var_name1 = var_name1 # type: str
self.var_name2 = var_name2 # type: str
@staticmethod
def msg_format() -> str:
return _("Can't decide between '{var_name1}' and '{var_name2}' arguments")
class RequestVariableMissingError(JsonableError):
code = ErrorCode.REQUEST_VARIABLE_MISSING
data_fields = ['var_name']
def __init__(self, var_name: str) -> None:
self.var_name = var_name # type: str
@staticmethod
def msg_format() -> str:
return _("Missing '{var_name}' argument")
class RequestVariableConversionError(JsonableError):
code = ErrorCode.REQUEST_VARIABLE_INVALID
data_fields = ['var_name', 'bad_value']
def __init__(self, var_name: str, bad_value: Any) -> None:
self.var_name = var_name # type: str
self.bad_value = bad_value
@staticmethod
def msg_format() -> str:
return _("Bad value for '{var_name}': {bad_value}")
# Used in conjunction with @has_request_variables, below
class REQ:
# NotSpecified is a sentinel value for determining whether a
# default value was specified for a request variable. We can't
# use None because that could be a valid, user-specified default
class _NotSpecified:
pass
NotSpecified = _NotSpecified()
def __init__(self, whence: str=None, *, converter: Callable[[Any], Any]=None,
default: Any=NotSpecified, validator: Callable[[Any], Any]=None,
str_validator: Callable[[Any], Any]=None,
argument_type: str=None, type: Type=None,
aliases: Optional[List[str]]=None) -> None:
"""whence: the name of the request variable that should be used
for this parameter. Defaults to a request variable of the
same name as the parameter.
converter: a function that takes a string and returns a new
value. If specified, this will be called on the request
variable value before passing to the function
default: a value to be used for the argument if the parameter
is missing in the request
validator: similar to converter, but takes an already parsed JSON
data structure. If specified, we will parse the JSON request
variable value before passing to the function
str_validator: Like validator, but doesn't parse JSON first.
argument_type: pass 'body' to extract the parsed JSON
corresponding to the request body
type: a hint to typing (using mypy) what the type of this parameter is.
Currently only typically necessary if default=None and the type cannot
be inferred in another way (eg. via converter).
aliases: alternate names for the POST var
"""
self.post_var_name = whence
self.func_var_name = None # type: str
self.converter = converter
self.validator = validator
self.str_validator = str_validator
self.default = default
self.argument_type = argument_type
self.aliases = aliases
if converter and (validator or str_validator):
# Not user-facing, so shouldn't be tagged for translation
raise AssertionError('converter and validator are mutually exclusive')
if validator and str_validator:
# Not user-facing, so shouldn't be tagged for translation
raise AssertionError('validator and str_validator are mutually exclusive')
# Extracts variables from the request object and passes them as
# named function arguments. The request object must be the first
# argument to the function.
#
# To use, assign a function parameter a default value that is an
# instance of the REQ class. That parameter will then be automatically
# populated from the HTTP request. The request object must be the
# first argument to the decorated function.
#
# This should generally be the innermost (syntactically bottommost)
# decorator applied to a view, since other decorators won't preserve
# the default parameter values used by has_request_variables.
#
# Note that this can't be used in helper functions which are not
# expected to call json_error or json_success, as it uses json_error
# internally when it encounters an error
def has_request_variables(view_func):
# type: (Callable[[HttpRequest, Any, Any], HttpResponse]) -> Callable[[HttpRequest, *Any, **Any], HttpResponse]
num_params = view_func.__code__.co_argcount
if view_func.__defaults__ is None:
num_default_params = 0
else:
num_default_params = len(view_func.__defaults__)
default_param_names = view_func.__code__.co_varnames[num_params - num_default_params:]
default_param_values = view_func.__defaults__
if default_param_values is None:
default_param_values = []
post_params = []
for (name, value) in zip(default_param_names, default_param_values):
if isinstance(value, REQ):
value.func_var_name = name
if value.post_var_name is None:
value.post_var_name = name
post_params.append(value)
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
for param in post_params:
if param.func_var_name in kwargs:
continue
if param.argument_type == 'body':
try:
val = ujson.loads(request.body)
except ValueError:
raise InvalidJSONError(_("Malformed JSON"))
kwargs[param.func_var_name] = val
continue
elif param.argument_type is not None:
# This is a view bug, not a user error, and thus should throw a 500.
raise Exception(_("Invalid argument type"))
post_var_names = [param.post_var_name]
if param.aliases:
post_var_names += param.aliases
default_assigned = False
post_var_name = None # type: Optional[str]
query_params = request.GET.copy()
query_params.update(request.POST)
for req_var in post_var_names:
try:
val = query_params[req_var]
except KeyError:
continue
if post_var_name is not None:
raise RequestConfusingParmsError(post_var_name, req_var)
post_var_name = req_var
if post_var_name is None:
post_var_name = param.post_var_name
if param.default is REQ.NotSpecified:
raise RequestVariableMissingError(post_var_name)
val = param.default
default_assigned = True
if param.converter is not None and not default_assigned:
try:
val = param.converter(val)
except JsonableError:
raise
except Exception:
raise RequestVariableConversionError(post_var_name, val)
# Validators are like converters, but they don't handle JSON parsing; we do.
if param.validator is not None and not default_assigned:
try:
val = ujson.loads(val)
except Exception:
raise JsonableError(_('Argument "%s" is not valid JSON.') % (post_var_name,))
error = param.validator(post_var_name, val)
if error:
raise JsonableError(error)
            # str_validator is like validator, but for direct strings (no JSON parsing).
if param.str_validator is not None and not default_assigned:
error = param.str_validator(post_var_name, val)
if error:
raise JsonableError(error)
kwargs[param.func_var_name] = val
return view_func(request, *args, **kwargs)
return _wrapped_view_func
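# Illustrative sketch (not part of the original module): a minimal view built
# on the decorator above. The parameter names are hypothetical; each REQ()
# default tells has_request_variables how to extract, convert, and default the
# corresponding request variable.
@has_request_variables
def _example_view(request: HttpRequest,
                  title: str=REQ(),
                  count: int=REQ(converter=int, default=0)) -> HttpResponse:
    # `title` is required (no default); `count` is run through int() and
    # falls back to 0 when the request omits it.
    return HttpResponse(ujson.dumps({'title': title, 'count': count}))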
| [
"str",
"str",
"str",
"str",
"Any",
"HttpRequest",
"Any",
"Any"
] | [
829,
845,
1248,
1584,
1600,
6012,
6032,
6047
] | [
832,
848,
1251,
1587,
1603,
6023,
6035,
6050
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/response.py |
from django.http import HttpResponse, HttpResponseNotAllowed
import ujson
from typing import Optional, Any, Dict, List
from zerver.lib.exceptions import JsonableError
class HttpResponseUnauthorized(HttpResponse):
status_code = 401
def __init__(self, realm: str, www_authenticate: Optional[str]=None) -> None:
HttpResponse.__init__(self)
if www_authenticate is None:
self["WWW-Authenticate"] = 'Basic realm="%s"' % (realm,)
elif www_authenticate == "session":
self["WWW-Authenticate"] = 'Session realm="%s"' % (realm,)
else:
raise AssertionError("Invalid www_authenticate value!")
def json_unauthorized(message: str, www_authenticate: Optional[str]=None) -> HttpResponse:
resp = HttpResponseUnauthorized("zulip", www_authenticate=www_authenticate)
resp.content = (ujson.dumps({"result": "error",
"msg": message}) + "\n").encode()
return resp
def json_method_not_allowed(methods: List[str]) -> HttpResponseNotAllowed:
resp = HttpResponseNotAllowed(methods)
resp.content = ujson.dumps({"result": "error",
"msg": "Method Not Allowed",
"allowed_methods": methods}).encode()
return resp
def json_response(res_type: str="success",
msg: str="",
data: Optional[Dict[str, Any]]=None,
status: int=200) -> HttpResponse:
content = {"result": res_type, "msg": msg}
if data is not None:
content.update(data)
return HttpResponse(content=ujson.dumps(content) + "\n",
content_type='application/json', status=status)
def json_success(data: Optional[Dict[str, Any]]=None) -> HttpResponse:
return json_response(data=data)
def json_response_from_error(exception: JsonableError) -> HttpResponse:
'''
This should only be needed in middleware; in app code, just raise.
When app code raises a JsonableError, the JsonErrorHandler
middleware takes care of transforming it into a response by
calling this function.
'''
return json_response('error',
msg=exception.msg,
data=exception.data,
status=exception.http_status_code)
def json_error(msg: str, data: Optional[Dict[str, Any]]=None, status: int=400) -> HttpResponse:
return json_response(res_type="error", msg=msg, data=data, status=status)
| [
"str",
"str",
"List[str]",
"JsonableError",
"str"
] | [
269,
692,
1005,
1849,
2328
] | [
272,
695,
1014,
1862,
2331
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/rest.py |
from typing import Any, Dict
from django.utils.module_loading import import_string
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from zerver.decorator import authenticated_json_view, authenticated_rest_api_view, \
process_as_post, authenticated_uploads_api_view
from zerver.lib.response import json_method_not_allowed, json_unauthorized
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.conf import settings
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
FLAGS = ('override_api_url_scheme',)
@csrf_exempt
def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse:
"""Dispatch to a REST API endpoint.
Unauthenticated endpoints should not use this, as authentication is verified
in the following ways:
* for paths beginning with /api, HTTP Basic auth
* for paths beginning with /json (used by the web client), the session token
This calls the function named in kwargs[request.method], if that request
method is supported, and after wrapping that function to:
* protect against CSRF (if the user is already authenticated through
a Django session)
* authenticate via an API key (otherwise)
* coerce PUT/PATCH/DELETE into having POST-like semantics for
retrieving variables
Any keyword args that are *not* HTTP methods are passed through to the
target function.
Never make a urls.py pattern put user input into a variable called GET, POST,
etc, as that is where we route HTTP verbs to target functions.
"""
supported_methods = {} # type: Dict[str, Any]
# duplicate kwargs so we can mutate the original as we go
for arg in list(kwargs):
if arg in METHODS:
supported_methods[arg] = kwargs[arg]
del kwargs[arg]
if request.method == 'OPTIONS':
response = HttpResponse(status=204) # No content
response['Allow'] = ', '.join(sorted(supported_methods.keys()))
response['Content-Length'] = "0"
return response
# Override requested method if magic method=??? parameter exists
method_to_use = request.method
if request.POST and 'method' in request.POST:
method_to_use = request.POST['method']
if method_to_use == "SOCKET" and "zulip.emulated_method" in request.META:
method_to_use = request.META["zulip.emulated_method"]
if method_to_use in supported_methods:
entry = supported_methods[method_to_use]
if isinstance(entry, tuple):
target_function, view_flags = entry
target_function = import_string(target_function)
else:
target_function = import_string(supported_methods[method_to_use])
view_flags = set()
# Set request._query for update_activity_user(), which is called
# by some of the later wrappers.
request._query = target_function.__name__
# We want to support authentication by both cookies (web client)
# and API keys (API clients). In the former case, we want to
# do a check to ensure that CSRF etc is honored, but in the latter
# we can skip all of that.
#
# Security implications of this portion of the code are minimal,
# as we should worst-case fail closed if we miscategorise a request.
# for some special views (e.g. serving a file that has been
# uploaded), we support using the same url for web and API clients.
if ('override_api_url_scheme' in view_flags and
request.META.get('HTTP_AUTHORIZATION', None) is not None):
# This request uses standard API based authentication.
target_function = authenticated_rest_api_view()(target_function)
elif ('override_api_url_scheme' in view_flags and
request.GET.get('api_key') is not None):
# This request uses legacy API authentication. We
# unfortunately need that in the React Native mobile
# apps, because there's no way to set
# HTTP_AUTHORIZATION in React Native.
target_function = authenticated_uploads_api_view()(target_function)
# /json views (web client) validate with a session token (cookie)
elif not request.path.startswith("/api") and request.user.is_authenticated:
# Authenticated via sessions framework, only CSRF check needed
target_function = csrf_protect(authenticated_json_view(target_function))
# most clients (mobile, bots, etc) use HTTP Basic Auth and REST calls, where instead of
# username:password, we use email:apiKey
elif request.META.get('HTTP_AUTHORIZATION', None):
# Wrap function with decorator to authenticate the user before
# proceeding
view_kwargs = {}
if 'allow_incoming_webhooks' in view_flags:
view_kwargs['is_webhook'] = True
target_function = authenticated_rest_api_view(**view_kwargs)(target_function) # type: ignore # likely mypy bug
# Pick a way to tell user they're not authed based on how the request was made
else:
# If this looks like a request from a top-level page in a
# browser, send the user to the login page
if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
# TODO: It seems like the `?next=` part is unlikely to be helpful
return HttpResponseRedirect('%s/?next=%s' % (settings.HOME_NOT_LOGGED_IN, request.path))
# Ask for basic auth (email:apiKey)
elif request.path.startswith("/api"):
return json_unauthorized(_("Not logged in: API authentication or user session required"))
# Session cookie expired, notify the client
else:
return json_unauthorized(_("Not logged in: API authentication or user session required"),
www_authenticate='session')
if request.method not in ["GET", "POST"]:
# process_as_post needs to be the outer decorator, because
# otherwise we might access and thus cache a value for
# request.REQUEST.
target_function = process_as_post(target_function)
return target_function(request, **kwargs)
return json_method_not_allowed(list(supported_methods.keys()))
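# Illustrative sketch (not part of the original module): how a urls.py entry
# typically feeds this dispatcher. The dotted view paths are hypothetical;
# rest_dispatch resolves whichever one matches the request's method via
# import_string, after applying the authentication wrappers above.
#
# from django.conf.urls import url
#
# urlpatterns = [
#     url(r'^api/v1/examples$', rest_dispatch,
#         {'GET': 'zerver.views.example.list_examples',
#          'POST': 'zerver.views.example.create_example'}),
# ]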
| [
"HttpRequest",
"Any"
] | [
659,
682
] | [
670,
685
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/retention.py |
from datetime import timedelta
from django.db import connection, transaction
from django.forms.models import model_to_dict
from django.utils.timezone import now as timezone_now
from zerver.models import Realm, Message, UserMessage, ArchivedMessage, ArchivedUserMessage, \
Attachment, ArchivedAttachment
from typing import Any, Dict, Optional, Generator, List
def get_realm_expired_messages(realm: Any) -> Optional[Dict[str, Any]]:
expired_date = timezone_now() - timedelta(days=realm.message_retention_days)
expired_messages = Message.objects.order_by('id').filter(sender__realm=realm,
pub_date__lt=expired_date)
if not expired_messages.exists():
return None
return {'realm_id': realm.id, 'expired_messages': expired_messages}
def get_expired_messages() -> Generator[Any, None, None]:
# Get all expired messages by Realm.
realms = Realm.objects.order_by('string_id').filter(
deactivated=False, message_retention_days__isnull=False)
for realm in realms:
realm_expired_messages = get_realm_expired_messages(realm)
if realm_expired_messages:
yield realm_expired_messages
def move_attachment_message_to_archive_by_message(message_ids: List[int]) -> None:
# Move attachments messages relation table data to archive.
id_list = ', '.join(str(message_id) for message_id in message_ids)
query = """
INSERT INTO zerver_archivedattachment_messages (id, archivedattachment_id,
archivedmessage_id)
SELECT zerver_attachment_messages.id, zerver_attachment_messages.attachment_id,
zerver_attachment_messages.message_id
FROM zerver_attachment_messages
LEFT JOIN zerver_archivedattachment_messages
ON zerver_archivedattachment_messages.id = zerver_attachment_messages.id
WHERE zerver_attachment_messages.message_id in ({message_ids})
AND zerver_archivedattachment_messages.id IS NULL
"""
with connection.cursor() as cursor:
cursor.execute(query.format(message_ids=id_list))
@transaction.atomic
def move_messages_to_archive(message_ids: List[int]) -> None:
messages = list(Message.objects.filter(id__in=message_ids).values())
if not messages:
raise Message.DoesNotExist
arc_messages = []
for message in messages:
arc_message = ArchivedMessage(**message)
arc_messages.append(arc_message)
ArchivedMessage.objects.bulk_create(arc_messages)
# Move user_messages to the archive.
user_messages = UserMessage.objects.filter(
message_id__in=message_ids).exclude(id__in=ArchivedUserMessage.objects.all())
archiving_messages = []
for user_message in user_messages.values():
archiving_messages.append(ArchivedUserMessage(**user_message))
ArchivedUserMessage.objects.bulk_create(archiving_messages)
# Move attachments to archive
attachments = Attachment.objects.filter(messages__id__in=message_ids).exclude(
id__in=ArchivedAttachment.objects.all()).distinct()
archiving_attachments = []
for attachment in attachments.values():
archiving_attachments.append(ArchivedAttachment(**attachment))
ArchivedAttachment.objects.bulk_create(archiving_attachments)
move_attachment_message_to_archive_by_message(message_ids)
# Remove data from main tables
Message.objects.filter(id__in=message_ids).delete()
user_messages.filter(id__in=ArchivedUserMessage.objects.all(),
message_id__isnull=True).delete()
archived_attachments = ArchivedAttachment.objects.filter(messages__id__in=message_ids).distinct()
Attachment.objects.filter(messages__isnull=True, id__in=archived_attachments).delete()
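# Illustrative sketch (not part of the original module): the expected driver
# loop for the helpers above, e.g. from a cron job or management command.
def _example_archive_expired_messages() -> None:
    for realm_batch in get_expired_messages():
        message_ids = [message.id for message in realm_batch['expired_messages']]
        # Atomically copies the messages (plus user_messages and attachments)
        # into the archive tables and removes them from the live tables.
        move_messages_to_archive(message_ids)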
| [
"Any",
"List[int]",
"List[int]"
] | [
406,
1277,
2184
] | [
409,
1286,
2193
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/send_email.py | from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.timezone import now as timezone_now
from django.template.exceptions import TemplateDoesNotExist
from zerver.models import UserProfile, ScheduledEmail, get_user_profile_by_id, \
EMAIL_TYPES, Realm
import datetime
from email.utils import parseaddr, formataddr
import logging
import ujson
import os
from typing import Any, Dict, Iterable, List, Mapping, Optional
from zerver.lib.logging_util import log_to_file
from confirmation.models import generate_key
## Logging setup ##
logger = logging.getLogger('zulip.send_email')
log_to_file(logger, settings.EMAIL_LOG_PATH)
class FromAddress:
SUPPORT = parseaddr(settings.ZULIP_ADMINISTRATOR)[1]
NOREPLY = parseaddr(settings.NOREPLY_EMAIL_ADDRESS)[1]
# Generates an unpredictable noreply address.
@staticmethod
def tokenized_no_reply_address() -> str:
if settings.ADD_TOKENS_TO_NOREPLY_ADDRESS:
return parseaddr(settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS)[1].format(token=generate_key())
return FromAddress.NOREPLY
def build_email(template_prefix: str, to_user_id: Optional[int]=None,
to_email: Optional[str]=None, from_name: Optional[str]=None,
from_address: Optional[str]=None, reply_to_email: Optional[str]=None,
context: Optional[Dict[str, Any]]=None) -> EmailMultiAlternatives:
# Callers should pass exactly one of to_user_id and to_email.
assert (to_user_id is None) ^ (to_email is None)
if to_user_id is not None:
to_user = get_user_profile_by_id(to_user_id)
# Change to formataddr((to_user.full_name, to_user.email)) once
# https://github.com/zulip/zulip/issues/4676 is resolved
to_email = to_user.delivery_email
if context is None:
context = {}
context.update({
'support_email': FromAddress.SUPPORT,
'email_images_base_uri': settings.ROOT_DOMAIN_URI + '/static/images/emails',
'physical_address': settings.PHYSICAL_ADDRESS,
})
subject = loader.render_to_string(template_prefix + '.subject',
context=context,
using='Jinja2_plaintext').strip().replace('\n', '')
message = loader.render_to_string(template_prefix + '.txt',
context=context, using='Jinja2_plaintext')
try:
html_message = loader.render_to_string(template_prefix + '.html', context)
except TemplateDoesNotExist:
emails_dir = os.path.dirname(template_prefix)
template = os.path.basename(template_prefix)
compiled_template_prefix = os.path.join(emails_dir, "compiled", template)
html_message = loader.render_to_string(compiled_template_prefix + '.html', context)
if from_name is None:
from_name = "Zulip"
if from_address is None:
from_address = FromAddress.NOREPLY
from_email = formataddr((from_name, from_address))
reply_to = None
if reply_to_email is not None:
reply_to = [reply_to_email]
# Remove the from_name in the reply-to for noreply emails, so that users
# see "noreply@..." rather than "Zulip" or whatever the from_name is
# when they reply in their email client.
elif from_address == FromAddress.NOREPLY:
reply_to = [FromAddress.NOREPLY]
mail = EmailMultiAlternatives(subject, message, from_email, [to_email], reply_to=reply_to)
if html_message is not None:
mail.attach_alternative(html_message, 'text/html')
return mail
class EmailNotDeliveredException(Exception):
pass
# When changing the arguments to this function, you may need to write a
# migration to change or remove any emails in ScheduledEmail.
def send_email(template_prefix: str, to_user_id: Optional[int]=None, to_email: Optional[str]=None,
from_name: Optional[str]=None, from_address: Optional[str]=None,
reply_to_email: Optional[str]=None, context: Dict[str, Any]={}) -> None:
mail = build_email(template_prefix, to_user_id=to_user_id, to_email=to_email, from_name=from_name,
from_address=from_address, reply_to_email=reply_to_email, context=context)
template = template_prefix.split("/")[-1]
logger.info("Sending %s email to %s" % (template, mail.to))
if mail.send() == 0:
logger.error("Error sending %s email to %s" % (template, mail.to))
raise EmailNotDeliveredException
def send_email_from_dict(email_dict: Mapping[str, Any]) -> None:
send_email(**dict(email_dict))
def send_future_email(template_prefix: str, realm: Realm, to_user_id: Optional[int]=None,
to_email: Optional[str]=None, from_name: Optional[str]=None,
from_address: Optional[str]=None, context: Dict[str, Any]={},
delay: datetime.timedelta=datetime.timedelta(0)) -> None:
template_name = template_prefix.split('/')[-1]
email_fields = {'template_prefix': template_prefix, 'to_user_id': to_user_id, 'to_email': to_email,
'from_name': from_name, 'from_address': from_address, 'context': context}
if settings.DEVELOPMENT and not settings.TEST_SUITE:
send_email(template_prefix, to_user_id=to_user_id, to_email=to_email, from_name=from_name,
from_address=from_address, context=context)
# For logging the email
assert (to_user_id is None) ^ (to_email is None)
if to_user_id is not None:
# The realm is redundant if we have a to_user_id; this assert just
# expresses that fact
assert(UserProfile.objects.filter(id=to_user_id, realm=realm).exists())
to_field = {'user_id': to_user_id} # type: Dict[str, Any]
else:
to_field = {'address': parseaddr(to_email)[1]}
ScheduledEmail.objects.create(
type=EMAIL_TYPES[template_name],
scheduled_timestamp=timezone_now() + delay,
realm=realm,
data=ujson.dumps(email_fields),
**to_field)
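# Illustrative sketch (not part of the original module): a typical call. The
# template prefix and context fields are hypothetical; the .subject, .txt, and
# .html templates are resolved relative to the prefix as shown in build_email.
def _example_notify(user_id: int) -> None:
    send_email('zerver/emails/example_notification', to_user_id=user_id,
               from_address=FromAddress.NOREPLY,
               context={'example_link': '/#narrow/stream/announce'})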
| [
"str",
"str",
"Mapping[str, Any]",
"str",
"Realm"
] | [
1185,
3857,
4583,
4686,
4698
] | [
1188,
3860,
4600,
4689,
4703
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/sessions.py |
import logging
from django.conf import settings
from django.contrib.auth import SESSION_KEY, get_user_model
from django.contrib.sessions.models import Session
from django.utils.timezone import now as timezone_now
from importlib import import_module
from typing import List, Mapping, Optional
from zerver.models import Realm, UserProfile, get_user_profile_by_id
session_engine = import_module(settings.SESSION_ENGINE)
def get_session_dict_user(session_dict: Mapping[str, int]) -> Optional[int]:
# Compare django.contrib.auth._get_user_session_key
try:
return get_user_model()._meta.pk.to_python(session_dict[SESSION_KEY])
except KeyError:
return None
def get_session_user(session: Session) -> Optional[int]:
return get_session_dict_user(session.get_decoded())
def user_sessions(user_profile: UserProfile) -> List[Session]:
return [s for s in Session.objects.all()
if get_session_user(s) == user_profile.id]
def delete_session(session: Session) -> None:
session_engine.SessionStore(session.session_key).delete() # type: ignore # import_module
def delete_user_sessions(user_profile: UserProfile) -> None:
for session in Session.objects.all():
if get_session_user(session) == user_profile.id:
delete_session(session)
def delete_realm_user_sessions(realm: Realm) -> None:
realm_user_ids = [user_profile.id for user_profile in
UserProfile.objects.filter(realm=realm)]
for session in Session.objects.filter(expire_date__gte=timezone_now()):
if get_session_user(session) in realm_user_ids:
delete_session(session)
def delete_all_user_sessions() -> None:
for session in Session.objects.all():
delete_session(session)
def delete_all_deactivated_user_sessions() -> None:
for session in Session.objects.all():
user_profile_id = get_session_user(session)
if user_profile_id is None:
continue # nocoverage # to debug
user_profile = get_user_profile_by_id(user_profile_id)
if not user_profile.is_active or user_profile.realm.deactivated:
logging.info("Deactivating session for deactivated user %s" % (user_profile.email,))
delete_session(session)
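# Illustrative sketch (not part of the original module): forcing a logout
# everywhere (e.g. after a password change) is just deleting the user's
# sessions.
def _example_logout_everywhere(user_profile: UserProfile) -> None:
    active_sessions = user_sessions(user_profile)
    logging.info("Deleting %d session(s) for %s" %
                 (len(active_sessions), user_profile.email))
    delete_user_sessions(user_profile)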
| [
"Mapping[str, int]",
"Session",
"UserProfile",
"Session",
"UserProfile",
"Realm"
] | [
462,
714,
830,
990,
1142,
1338
] | [
479,
721,
841,
997,
1153,
1343
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/soft_deactivation.py |
from zerver.lib.logging_util import log_to_file
from collections import defaultdict
import logging
from django.db import transaction
from django.db.models import Max
from django.conf import settings
from django.utils.timezone import now as timezone_now
from typing import DefaultDict, List, Union, Any
from zerver.models import UserProfile, UserMessage, RealmAuditLog, \
Subscription, Message, Recipient, UserActivity, Realm
logger = logging.getLogger("zulip.soft_deactivation")
log_to_file(logger, settings.SOFT_DEACTIVATION_LOG_PATH)
def filter_by_subscription_history(user_profile: UserProfile,
all_stream_messages: DefaultDict[int, List[Message]],
all_stream_subscription_logs: DefaultDict[int, List[RealmAuditLog]],
) -> List[UserMessage]:
user_messages_to_insert = [] # type: List[UserMessage]
def store_user_message_to_insert(message: Message) -> None:
message = UserMessage(user_profile=user_profile,
message_id=message['id'], flags=0)
user_messages_to_insert.append(message)
for (stream_id, stream_messages) in all_stream_messages.items():
stream_subscription_logs = all_stream_subscription_logs[stream_id]
for log_entry in stream_subscription_logs:
if len(stream_messages) == 0:
continue
if log_entry.event_type == RealmAuditLog.SUBSCRIPTION_DEACTIVATED:
for stream_message in stream_messages:
if stream_message['id'] <= log_entry.event_last_message_id:
store_user_message_to_insert(stream_message)
else:
break
elif log_entry.event_type in (RealmAuditLog.SUBSCRIPTION_ACTIVATED,
RealmAuditLog.SUBSCRIPTION_CREATED):
initial_msg_count = len(stream_messages)
for i, stream_message in enumerate(stream_messages):
if stream_message['id'] > log_entry.event_last_message_id:
stream_messages = stream_messages[i:]
break
final_msg_count = len(stream_messages)
if initial_msg_count == final_msg_count:
if stream_messages[-1]['id'] <= log_entry.event_last_message_id:
stream_messages = []
else:
raise AssertionError('%s is not a Subscription Event.' % (log_entry.event_type))
if len(stream_messages) > 0:
            # Check the last event specially: if it was a
            # subscription_deactivated, we don't want to create UserMessage
            # rows for any of the remaining messages.
if stream_subscription_logs[-1].event_type in (
RealmAuditLog.SUBSCRIPTION_ACTIVATED,
RealmAuditLog.SUBSCRIPTION_CREATED):
for stream_message in stream_messages:
store_user_message_to_insert(stream_message)
return user_messages_to_insert
def add_missing_messages(user_profile: UserProfile) -> None:
"""This function takes a soft-deactivated user, and computes and adds
to the database any UserMessage rows that were not created while
the user was soft-deactivated. The end result is that from the
perspective of the message database, it should be impossible to
tell that the user was soft-deactivated at all.
At a high level, the algorithm is as follows:
* Find all the streams that the user was at any time a subscriber
of when or after they were soft-deactivated (`recipient_ids`
below).
* Find all the messages sent to those streams since the user was
soft-deactivated. This will be a superset of the target
UserMessages we need to create in two ways: (1) some UserMessage
rows will have already been created in do_send_messages because
the user had a nonzero set of flags (the fact that we do so in
do_send_messages simplifies things considerably, since it means
we don't need to inspect message content to look for things like
mentions here), and (2) the user might not have been subscribed
to all of the streams in recipient_ids for the entire time
window.
    * Correct the list from the previous step by excluding those with
      existing UserMessage rows.
    * Correct the list from the previous step by excluding those
where the user wasn't subscribed at the time, using the
RealmAuditLog data to determine exactly when the user was
subscribed/unsubscribed.
* Create the UserMessage rows.
"""
assert user_profile.last_active_message_id is not None
all_stream_subs = list(Subscription.objects.select_related('recipient').filter(
user_profile=user_profile,
recipient__type=Recipient.STREAM).values('recipient', 'recipient__type_id'))
# For Stream messages we need to check messages against data from
# RealmAuditLog for visibility to user. So we fetch the subscription logs.
stream_ids = [sub['recipient__type_id'] for sub in all_stream_subs]
events = [RealmAuditLog.SUBSCRIPTION_CREATED, RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
RealmAuditLog.SUBSCRIPTION_ACTIVATED]
subscription_logs = list(RealmAuditLog.objects.select_related(
'modified_stream').filter(
modified_user=user_profile,
modified_stream__id__in=stream_ids,
event_type__in=events).order_by('event_last_message_id'))
all_stream_subscription_logs = defaultdict(list) # type: DefaultDict[int, List[RealmAuditLog]]
for log in subscription_logs:
all_stream_subscription_logs[log.modified_stream.id].append(log)
recipient_ids = []
for sub in all_stream_subs:
stream_subscription_logs = all_stream_subscription_logs[sub['recipient__type_id']]
if stream_subscription_logs[-1].event_type == RealmAuditLog.SUBSCRIPTION_DEACTIVATED:
assert stream_subscription_logs[-1].event_last_message_id is not None
if stream_subscription_logs[-1].event_last_message_id <= user_profile.last_active_message_id:
                # Short-circuit this iteration: it's no use continuing, since
                # the user unsubscribed before they were soft-deactivated.
continue
recipient_ids.append(sub['recipient'])
all_stream_msgs = list(Message.objects.select_related(
'recipient').filter(
recipient__id__in=recipient_ids,
id__gt=user_profile.last_active_message_id).order_by('id').values(
'id', 'recipient__type_id'))
already_created_um_objs = list(UserMessage.objects.select_related(
'message').filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
message__id__gt=user_profile.last_active_message_id).values(
'message__id'))
already_created_ums = set([obj['message__id'] for obj in already_created_um_objs])
# Filter those messages for which UserMessage rows have been already created
all_stream_msgs = [msg for msg in all_stream_msgs
if msg['id'] not in already_created_ums]
stream_messages = defaultdict(list) # type: DefaultDict[int, List[Message]]
for msg in all_stream_msgs:
stream_messages[msg['recipient__type_id']].append(msg)
# Calling this function to filter out stream messages based upon
# subscription logs and then store all UserMessage objects for bulk insert
# This function does not perform any SQL related task and gets all the data
# required for its operation in its params.
user_messages_to_insert = filter_by_subscription_history(
user_profile, stream_messages, all_stream_subscription_logs)
# Doing a bulk create for all the UserMessage objects stored for creation.
if len(user_messages_to_insert) > 0:
UserMessage.objects.bulk_create(user_messages_to_insert)
def do_soft_deactivate_user(user_profile: UserProfile) -> None:
user_profile.last_active_message_id = UserMessage.objects.filter(
user_profile=user_profile).order_by(
'-message__id')[0].message_id
user_profile.long_term_idle = True
user_profile.save(update_fields=[
'long_term_idle',
'last_active_message_id'])
logger.info('Soft Deactivated user %s (%s)' %
(user_profile.id, user_profile.email))
def do_soft_deactivate_users(users: List[UserProfile]) -> List[UserProfile]:
users_soft_deactivated = []
with transaction.atomic():
realm_logs = []
for user in users:
do_soft_deactivate_user(user)
event_time = timezone_now()
log = RealmAuditLog(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.USER_SOFT_DEACTIVATED,
event_time=event_time
)
realm_logs.append(log)
users_soft_deactivated.append(user)
RealmAuditLog.objects.bulk_create(realm_logs)
return users_soft_deactivated
def maybe_catch_up_soft_deactivated_user(user_profile: UserProfile) -> Union[UserProfile, None]:
if user_profile.long_term_idle:
add_missing_messages(user_profile)
user_profile.long_term_idle = False
user_profile.save(update_fields=['long_term_idle'])
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
event_type=RealmAuditLog.USER_SOFT_ACTIVATED,
event_time=timezone_now()
)
logger.info('Soft Reactivated user %s (%s)' %
(user_profile.id, user_profile.email))
return user_profile
return None
def get_users_for_soft_deactivation(inactive_for_days: int, filter_kwargs: Any) -> List[UserProfile]:
users_activity = list(UserActivity.objects.filter(
user_profile__is_active=True,
user_profile__is_bot=False,
user_profile__long_term_idle=False,
**filter_kwargs).values('user_profile_id').annotate(
last_visit=Max('last_visit')))
user_ids_to_deactivate = []
today = timezone_now()
for user_activity in users_activity:
if (today - user_activity['last_visit']).days > inactive_for_days:
user_ids_to_deactivate.append(user_activity['user_profile_id'])
users_to_deactivate = list(UserProfile.objects.filter(
id__in=user_ids_to_deactivate))
return users_to_deactivate
def do_soft_activate_users(users: List[UserProfile]) -> List[UserProfile]:
users_soft_activated = []
for user_profile in users:
user_activated = maybe_catch_up_soft_deactivated_user(user_profile)
if user_activated:
users_soft_activated.append(user_activated)
return users_soft_activated
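# Illustrative sketch (not part of the original module): a cron-style pass
# tying the helpers above together; the 90-day threshold is hypothetical.
def _example_soft_deactivation_pass() -> None:
    candidates = get_users_for_soft_deactivation(90, {})
    do_soft_deactivate_users(candidates)
    # When such a user next makes a request, the request path is expected to
    # call maybe_catch_up_soft_deactivated_user(user_profile), which backfills
    # the missing UserMessage rows via add_missing_messages.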
| [
"UserProfile",
"DefaultDict[int, List[Message]]",
"DefaultDict[int, List[RealmAuditLog]]",
"Message",
"UserProfile",
"UserProfile",
"List[UserProfile]",
"UserProfile",
"int",
"Any",
"List[UserProfile]"
] | [
593,
662,
760,
965,
3200,
8121,
8576,
9259,
9919,
9939,
10655
] | [
604,
693,
797,
972,
3211,
8132,
8593,
9270,
9922,
9942,
10672
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/sqlalchemy_utils.py | from typing import Optional, Any
from django.db import connection
from zerver.lib.db import TimeTrackingConnection
import sqlalchemy
# This is a Pool that doesn't close connections. Therefore it can be used with
# existing Django database connections.
class NonClosingPool(sqlalchemy.pool.NullPool):
def status(self) -> str:
return "NonClosingPool"
def _do_return_conn(self, conn: sqlalchemy.engine.base.Connection) -> None:
pass
def recreate(self) -> 'NonClosingPool':
return self.__class__(creator=self._creator,
recycle=self._recycle,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch)
sqlalchemy_engine = None # type: Optional[Any]
def get_sqlalchemy_connection() -> sqlalchemy.engine.base.Connection:
global sqlalchemy_engine
if sqlalchemy_engine is None:
def get_dj_conn() -> TimeTrackingConnection:
connection.ensure_connection()
return connection.connection
sqlalchemy_engine = sqlalchemy.create_engine('postgresql://',
creator=get_dj_conn,
poolclass=NonClosingPool,
pool_reset_on_return=False)
sa_connection = sqlalchemy_engine.connect()
sa_connection.execution_options(autocommit=False)
return sa_connection
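# Illustrative sketch (not part of the original module): because the engine
# reuses Django's underlying connection, textual SQLAlchemy queries run in the
# same database session (the table queried here is just an example).
def _example_message_count() -> int:
    sa_conn = get_sqlalchemy_connection()
    result = sa_conn.execute('SELECT count(*) FROM zerver_message')
    return result.scalar()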
| [
"sqlalchemy.engine.base.Connection"
] | [
402
] | [
435
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/statistics.py | # -*- coding: utf-8 -*-
from zerver.models import UserProfile, UserActivity, UserActivityInterval, Message
from django.utils.timezone import utc
from typing import Any, Dict, List, Sequence, Set
from datetime import datetime, timedelta
# Return the amount of Zulip usage for this user between the two
# given dates
def seconds_usage_between(user_profile: UserProfile, begin: datetime, end: datetime) -> timedelta:
intervals = UserActivityInterval.objects.filter(user_profile=user_profile,
end__gte=begin,
start__lte=end)
duration = timedelta(0)
for interval in intervals:
start = max(begin, interval.start)
finish = min(end, interval.end)
duration += finish-start
return duration
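# Illustrative sketch (not part of the original module): usage over the last
# week for a single user.
def _example_week_usage(user_profile: UserProfile) -> timedelta:
    end = datetime.utcnow().replace(tzinfo=utc)
    begin = end - timedelta(days=7)
    return seconds_usage_between(user_profile, begin, end)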
| [
"UserProfile",
"datetime",
"datetime"
] | [
359,
379,
394
] | [
370,
387,
402
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/storage.py | # Useful reading is https://zulip.readthedocs.io/en/latest/subsystems/front-end-build-process.html
import os
import shutil
from typing import Any, Dict, List, Optional, Tuple
from django.conf import settings
from django.contrib.staticfiles.storage import ManifestStaticFilesStorage
from pipeline.storage import PipelineMixin
from zerver.lib.str_utils import force_str
class AddHeaderMixin:
def post_process(self, paths: Dict[str, Tuple['ZulipStorage', str]], dry_run: bool=False,
**kwargs: Any) -> List[Tuple[str, str, bool]]:
if dry_run:
return []
with open(settings.STATIC_HEADER_FILE, 'rb') as header_file:
header = header_file.read().decode(settings.FILE_CHARSET)
# A dictionary of path to tuples of (old_path, new_path,
# processed). The return value of this method is the values
# of this dictionary
ret_dict = {}
for name in paths:
storage, path = paths[name]
if not path.startswith('min/') or not path.endswith('.css'):
ret_dict[path] = (path, path, False)
continue
# Prepend the header
with storage.open(path, 'rb') as orig_file:
orig_contents = orig_file.read().decode(settings.FILE_CHARSET)
storage.delete(path)
with storage.open(path, 'w') as new_file:
new_file.write(force_str(header + orig_contents, encoding=settings.FILE_CHARSET))
ret_dict[path] = (path, path, True)
super_class = super()
if hasattr(super_class, 'post_process'):
super_ret = super_class.post_process(paths, dry_run, **kwargs) # type: ignore # https://github.com/python/mypy/issues/2956
else:
super_ret = []
# Merge super class's return value with ours
for val in super_ret:
old_path, new_path, processed = val
if processed:
ret_dict[old_path] = val
return list(ret_dict.values())
class RemoveUnminifiedFilesMixin:
def post_process(self, paths: Dict[str, Tuple['ZulipStorage', str]], dry_run: bool=False,
**kwargs: Any) -> List[Tuple[str, str, bool]]:
if dry_run:
return []
root = settings.STATIC_ROOT
to_remove = ['js']
for tree in to_remove:
shutil.rmtree(os.path.join(root, tree))
is_valid = lambda p: all([not p.startswith(k) for k in to_remove])
paths = {k: v for k, v in paths.items() if is_valid(k)}
super_class = super()
if hasattr(super_class, 'post_process'):
return super_class.post_process(paths, dry_run, **kwargs) # type: ignore # https://github.com/python/mypy/issues/2956
return []
class IgnoreBundlesManifestStaticFilesStorage(ManifestStaticFilesStorage):
def hashed_name(self, name: str, content: Optional[str]=None, filename: Optional[str]=None) -> str:
ext = os.path.splitext(name)[1]
if (name.startswith("webpack-bundles") and
ext in ['.js', '.css', '.map']):
# Hack to avoid renaming already-hashnamed webpack bundles
# when minifying; this was causing every bundle to have
# two hashes appended to its name, one by webpack and one
# here. We can't just skip processing of these bundles,
# since we do need the Django storage to add these to the
# manifest for django_webpack_loader to work. So, we just
# use a no-op hash function for these already-hashed
# assets.
return name
if ext in ['.png', '.gif', '.jpg', '.svg']:
# Similarly, don't hash-rename image files; we only serve
# the original file paths (not the hashed file paths), and
# so the only effect of hash-renaming these is to increase
            # the size of release tarballs with duplicate copies of these files.
#
# One could imagine a future world in which we instead
# used the hashed paths for these; in that case, though,
# we should instead be removing the non-hashed paths.
return name
        if ext in ['.json', '.po', '.mo', '.mp3', '.ogg', '.html']:
# And same story for translation files, sound files, etc.
return name
return super().hashed_name(name, content, filename)
if settings.PRODUCTION:
# This is a hack to use staticfiles.json from within the
# deployment, rather than a directory under STATIC_ROOT. By doing
# so, we can use a different copy of staticfiles.json for each
# deployment, which ensures that we always use the correct static
# assets for each deployment.
ManifestStaticFilesStorage.manifest_name = os.path.join(settings.DEPLOY_ROOT,
"staticfiles.json")
orig_path = ManifestStaticFilesStorage.path
def path(self: ManifestStaticFilesStorage, name: str) -> str:
if name == ManifestStaticFilesStorage.manifest_name:
return name
return orig_path(self, name)
ManifestStaticFilesStorage.path = path
class ZulipStorage(PipelineMixin,
AddHeaderMixin, RemoveUnminifiedFilesMixin,
IgnoreBundlesManifestStaticFilesStorage):
pass
| [
"Dict[str, Tuple['ZulipStorage', str]]",
"Any",
"Dict[str, Tuple['ZulipStorage', str]]",
"Any",
"str",
"ManifestStaticFilesStorage",
"str"
] | [
428,
519,
2115,
2206,
2911,
5007,
5041
] | [
465,
522,
2152,
2209,
2914,
5033,
5044
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/str_utils.py | """
String Utilities:
This module helps in converting strings from one type to another.
Currently we have strings of 3 semantic types:
1. text strings: These strings are used to represent all textual data,
like people's names, stream names, content of messages, etc.
   These strings can contain non-ASCII characters, so their type should be
typing.str (which is `str` in python 3 and `unicode` in python 2).
2. binary strings: These strings are used to represent binary data.
This should be of type `bytes`
3. native strings: These strings are for internal use only. Strings of
this type are not meant to be stored in database, displayed to end
users, etc. Things like exception names, parameter names, attribute
names, etc should be native strings. These strings should only
contain ASCII characters and they should have type `str`.
There are 3 utility functions provided for converting strings from one type
to another - force_text, force_bytes, force_str
Interconversion between text strings and binary strings can be done by
using encode and decode appropriately or by using the utility functions
force_text and force_bytes.
It is recommended to use the utility functions for other string conversions.
"""
from typing import Any, Dict, Mapping, Union, TypeVar
NonBinaryStr = TypeVar('NonBinaryStr', bound=str)
# This is used to represent text or native strings
def force_text(s: Union[str, bytes], encoding: str='utf-8') -> str:
"""converts a string to a text string"""
if isinstance(s, str):
return s
elif isinstance(s, bytes):
return s.decode(encoding)
else:
raise TypeError("force_text expects a string type")
def force_str(s: Union[str, bytes], encoding: str='utf-8') -> str:
    """converts a string to a native string"""
    if isinstance(s, str):
        return s
    elif isinstance(s, bytes):
        return s.decode(encoding)
    else:
        raise TypeError("force_str expects a string type")
| [
"Union[str, bytes]",
"Union[str, bytes]"
] | [
1427,
1719
] | [
1444,
1736
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/stream_recipient.py |
from typing import (Dict, List)
from django.db import connection
from zerver.models import Recipient
class StreamRecipientMap:
'''
This class maps stream_id -> recipient_id and vice versa.
It is useful for bulk operations. Call the populate_* methods
to initialize the data structures. You should try to avoid
excessive queries by finding ids up front, but you can call
this repeatedly, and it will only look up new ids.
You should ONLY use this class for READ operations.
Note that this class uses raw SQL, because we want to highly
optimize page loads.
'''
def __init__(self) -> None:
self.recip_to_stream = dict() # type: Dict[int, int]
self.stream_to_recip = dict() # type: Dict[int, int]
def populate_for_stream_ids(self, stream_ids: List[int]) -> None:
stream_ids = sorted([
stream_id for stream_id in stream_ids
if stream_id not in self.stream_to_recip
])
if not stream_ids:
return
# see comment at the top of the class
id_list = ', '.join(str(stream_id) for stream_id in stream_ids)
query = '''
SELECT
zerver_recipient.id as recipient_id,
zerver_stream.id as stream_id
FROM
zerver_stream
INNER JOIN zerver_recipient ON
zerver_stream.id = zerver_recipient.type_id
WHERE
zerver_recipient.type = %d
AND
zerver_stream.id in (%s)
''' % (Recipient.STREAM, id_list)
self._process_query(query)
def populate_for_recipient_ids(self, recipient_ids: List[int]) -> None:
recipient_ids = sorted([
recip_id for recip_id in recipient_ids
if recip_id not in self.recip_to_stream
])
if not recipient_ids:
return
# see comment at the top of the class
id_list = ', '.join(str(recip_id) for recip_id in recipient_ids)
query = '''
SELECT
zerver_recipient.id as recipient_id,
zerver_stream.id as stream_id
FROM
zerver_recipient
INNER JOIN zerver_stream ON
zerver_stream.id = zerver_recipient.type_id
WHERE
zerver_recipient.type = %d
AND
zerver_recipient.id in (%s)
''' % (Recipient.STREAM, id_list)
self._process_query(query)
def _process_query(self, query: str) -> None:
cursor = connection.cursor()
cursor.execute(query)
rows = cursor.fetchall()
cursor.close()
for recip_id, stream_id in rows:
self.recip_to_stream[recip_id] = stream_id
self.stream_to_recip[stream_id] = recip_id
def recipient_id_for(self, stream_id: int) -> int:
return self.stream_to_recip[stream_id]
def stream_id_for(self, recip_id: int) -> int:
return self.recip_to_stream[recip_id]
def recipient_to_stream_id_dict(self) -> Dict[int, int]:
return self.recip_to_stream
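# Illustrative sketch (not part of the original module): the intended two-step
# usage; populate once up front, then do cheap dict lookups.
def _example_recipient_ids(stream_ids: List[int]) -> Dict[int, int]:
    recip_map = StreamRecipientMap()
    recip_map.populate_for_stream_ids(stream_ids)
    return {stream_id: recip_map.recipient_id_for(stream_id)
            for stream_id in stream_ids}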
| [
"List[int]",
"List[int]",
"str",
"int",
"int"
] | [
813,
1687,
2552,
2883,
2982
] | [
822,
1696,
2555,
2886,
2985
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/stream_subscription.py | from typing import Dict, List, Tuple
from mypy_extensions import TypedDict
from django.db.models.query import QuerySet
from zerver.models import (
Recipient,
Stream,
Subscription,
UserProfile,
)
def get_active_subscriptions_for_stream_id(stream_id: int) -> QuerySet:
# TODO: Change return type to QuerySet[Subscription]
return Subscription.objects.filter(
recipient__type=Recipient.STREAM,
recipient__type_id=stream_id,
active=True,
)
def get_active_subscriptions_for_stream_ids(stream_ids: List[int]) -> QuerySet:
# TODO: Change return type to QuerySet[Subscription]
return Subscription.objects.filter(
recipient__type=Recipient.STREAM,
recipient__type_id__in=stream_ids,
active=True
)
def get_stream_subscriptions_for_user(user_profile: UserProfile) -> QuerySet:
# TODO: Change return type to QuerySet[Subscription]
return Subscription.objects.filter(
user_profile=user_profile,
recipient__type=Recipient.STREAM,
)
def get_stream_subscriptions_for_users(user_profiles: List[UserProfile]) -> QuerySet:
# TODO: Change return type to QuerySet[Subscription]
return Subscription.objects.filter(
user_profile__in=user_profiles,
recipient__type=Recipient.STREAM,
)
SubInfo = TypedDict('SubInfo', {
'sub': Subscription,
'stream': Stream,
})
def get_bulk_stream_subscriber_info(
user_profiles: List[UserProfile],
stream_dict: Dict[int, Stream]) -> Dict[int, List[Tuple[Subscription, Stream]]]:
stream_ids = stream_dict.keys()
result = {
user_profile.id: []
for user_profile in user_profiles
} # type: Dict[int, List[Tuple[Subscription, Stream]]]
subs = Subscription.objects.filter(
user_profile__in=user_profiles,
recipient__type=Recipient.STREAM,
recipient__type_id__in=stream_ids,
active=True,
).select_related('user_profile', 'recipient')
for sub in subs:
user_profile_id = sub.user_profile_id
stream_id = sub.recipient.type_id
stream = stream_dict[stream_id]
result[user_profile_id].append((sub, stream))
return result
def num_subscribers_for_stream_id(stream_id: int) -> int:
return get_active_subscriptions_for_stream_id(stream_id).filter(
user_profile__is_active=True,
).count()
| [
"int",
"List[int]",
"UserProfile",
"List[UserProfile]",
"List[UserProfile]",
"Dict[int, Stream]",
"int"
] | [
267,
546,
831,
1092,
1454,
1494,
2251
] | [
270,
555,
842,
1109,
1471,
1511,
2254
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/stream_topic.py | from typing import (Dict, List, Set)
from django.db.models.query import QuerySet
from zerver.lib.stream_subscription import (
get_active_subscriptions_for_stream_id,
)
from zerver.models import (
MutedTopic,
)
class StreamTopicTarget:
'''
This class is designed to help us move to a
StreamTopic table or something similar. It isolates
places where we are are still using `subject` or
`topic_name` as a key into tables.
'''
def __init__(self, stream_id: int, topic_name: str) -> None:
self.stream_id = stream_id
self.topic_name = topic_name
def user_ids_muting_topic(self) -> Set[int]:
query = MutedTopic.objects.filter(
stream_id=self.stream_id,
topic_name__iexact=self.topic_name,
).values(
'user_profile_id',
)
return {
row['user_profile_id']
for row in query
}
def get_active_subscriptions(self) -> QuerySet:
return get_active_subscriptions_for_stream_id(self.stream_id)
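# Illustrative sketch (not part of the original module): finding who has muted
# one stream/topic pair (the ids used here are hypothetical).
def _example_muting_user_ids() -> Set[int]:
    target = StreamTopicTarget(stream_id=7, topic_name='lunch')
    return target.user_ids_muting_topic()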
| [
"int",
"str"
] | [
493,
510
] | [
496,
513
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/streams.py |
from typing import Any, Iterable, List, Mapping, Set, Tuple, Optional
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_stream_name, create_streams_if_needed
from zerver.lib.request import JsonableError
from zerver.models import UserProfile, Stream, Subscription, \
Realm, Recipient, bulk_get_recipients, get_stream_recipient, get_stream, \
bulk_get_streams, get_realm_stream, DefaultStreamGroup
def access_stream_for_delete_or_update(user_profile: UserProfile, stream_id: int) -> Stream:
# We should only ever use this for realm admins, who are allowed
# to delete or update all streams on their realm, even private streams
# to which they are not subscribed. We do an assert here, because
# all callers should have the require_realm_admin decorator.
assert(user_profile.is_realm_admin)
error = _("Invalid stream id")
try:
stream = Stream.objects.get(id=stream_id)
except Stream.DoesNotExist:
raise JsonableError(error)
if stream.realm_id != user_profile.realm_id:
raise JsonableError(error)
return stream
# Only set allow_realm_admin flag to True when you want to allow realm admin to
# access unsubscribed private stream content.
def access_stream_common(user_profile: UserProfile, stream: Stream,
error: str,
require_active: bool=True,
allow_realm_admin: bool=False) -> Tuple[Recipient, Optional[Subscription]]:
"""Common function for backend code where the target use attempts to
access the target stream, returning all the data fetched along the
way. If that user does not have permission to access that stream,
we throw an exception. A design goal is that the error message is
the same for streams you can't access and streams that don't exist."""
# First, we don't allow any access to streams in other realms.
if stream.realm_id != user_profile.realm_id:
raise JsonableError(error)
recipient = get_stream_recipient(stream.id)
try:
sub = Subscription.objects.get(user_profile=user_profile,
recipient=recipient,
active=require_active)
except Subscription.DoesNotExist:
sub = None
# If the stream is in your realm and public, you can access it.
if stream.is_public() and not user_profile.is_guest:
return (recipient, sub)
# Or if you are subscribed to the stream, you can access it.
if sub is not None:
return (recipient, sub)
# For some specific callers (e.g. getting list of subscribers,
# removing other users from a stream, and updating stream name and
# description), we allow realm admins to access stream even if
# they are not subscribed to a private stream.
if user_profile.is_realm_admin and allow_realm_admin:
return (recipient, sub)
# Otherwise it is a private stream and you're not on it, so throw
# an error.
raise JsonableError(error)
def access_stream_by_id(user_profile: UserProfile,
stream_id: int,
require_active: bool=True,
allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]:
stream = get_stream_by_id(stream_id)
error = _("Invalid stream id")
(recipient, sub) = access_stream_common(user_profile, stream, error,
require_active=require_active,
allow_realm_admin=allow_realm_admin)
return (stream, recipient, sub)
def get_stream_by_id(stream_id: int) -> Stream:
error = _("Invalid stream id")
try:
stream = Stream.objects.get(id=stream_id)
except Stream.DoesNotExist:
raise JsonableError(error)
return stream
def check_stream_name_available(realm: Realm, name: str) -> None:
check_stream_name(name)
try:
get_stream(name, realm)
raise JsonableError(_("Stream name '%s' is already taken.") % (name,))
except Stream.DoesNotExist:
pass
def access_stream_by_name(user_profile: UserProfile,
stream_name: str,
allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]:
error = _("Invalid stream name '%s'" % (stream_name,))
try:
stream = get_realm_stream(stream_name, user_profile.realm_id)
except Stream.DoesNotExist:
raise JsonableError(error)
(recipient, sub) = access_stream_common(user_profile, stream, error,
allow_realm_admin=allow_realm_admin)
return (stream, recipient, sub)
def access_stream_for_unmute_topic(user_profile: UserProfile, stream_name: str, error: str) -> Stream:
"""
It may seem a little silly to have this helper function for unmuting
topics, but it gets around a linter warning, and it helps to be able
to review all security-related stuff in one place.
Our policy for accessing streams when you unmute a topic is that you
don't necessarily need to have an active subscription or even "legal"
access to the stream. Instead, we just verify the stream_id has been
muted in the past (not here, but in the caller).
    Long term, we'll probably have folks just pass us the id of the
MutedTopic row to unmute topics.
"""
try:
stream = get_stream(stream_name, user_profile.realm)
except Stream.DoesNotExist:
raise JsonableError(error)
return stream
def can_access_stream_history_by_name(user_profile: UserProfile, stream_name: str) -> bool:
"""Determine whether the provided user is allowed to access the
history of the target stream. The stream is specified by name.
This is used by the caller to determine whether this user can get
historical messages before they joined for a narrowing search.
Because of the way our search is currently structured,
we may be passed an invalid stream here. We return
False in that situation, and subsequent code will do
validation and raise the appropriate JsonableError.
Note that this function should only be used in contexts where
access_stream is being called elsewhere to confirm that the user
can actually see this stream.
"""
try:
stream = get_stream(stream_name, user_profile.realm)
except Stream.DoesNotExist:
return False
if stream.is_history_realm_public() and not user_profile.is_guest:
return True
if stream.is_history_public_to_subscribers():
# In this case, we check if the user is subscribed.
error = _("Invalid stream name '%s'" % (stream_name,))
try:
(recipient, sub) = access_stream_common(user_profile, stream, error)
except JsonableError:
return False
return True
return False
def filter_stream_authorization(user_profile: UserProfile,
streams: Iterable[Stream]) -> Tuple[List[Stream], List[Stream]]:
streams_subscribed = set() # type: Set[int]
recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])
subs = Subscription.objects.filter(user_profile=user_profile,
recipient__in=list(recipients_map.values()),
active=True)
for sub in subs:
streams_subscribed.add(sub.recipient.type_id)
unauthorized_streams = [] # type: List[Stream]
for stream in streams:
# The user is authorized for their own streams
if stream.id in streams_subscribed:
continue
# Users are not authorized for invite_only streams, and guest
# users are not authorized for any streams
if stream.invite_only or user_profile.is_guest:
unauthorized_streams.append(stream)
authorized_streams = [stream for stream in streams if
stream.id not in set(stream.id for stream in unauthorized_streams)]
return authorized_streams, unauthorized_streams
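# A hedged sketch of how a subscribe endpoint might consume the helper
# above; `_example_subscribe_authorized_only` and its error message are
# illustrative, not part of the original module.
def _example_subscribe_authorized_only(user_profile: UserProfile,
                                       streams: List[Stream]) -> List[Stream]:
    authorized_streams, unauthorized_streams = filter_stream_authorization(
        user_profile, streams)
    if unauthorized_streams:
        raise JsonableError(_("Unable to access stream (%s).")
                            % (unauthorized_streams[0].name,))
    return authorized_streams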
def list_to_streams(streams_raw: Iterable[Mapping[str, Any]],
user_profile: UserProfile,
autocreate: bool=False) -> Tuple[List[Stream], List[Stream]]:
"""Converts list of dicts to a list of Streams, validating input in the process
For each stream name, we validate it to ensure it meets our
requirements for a proper stream name using check_stream_name.
This function in autocreate mode should be atomic: either an exception will be raised
during a precheck, or all the streams specified will have been created if applicable.
@param streams_raw The list of stream dictionaries to process;
names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
@param autocreate Whether we should create streams if they don't already exist
"""
# Validate all streams, getting extant ones, then get-or-creating the rest.
stream_set = set(stream_dict["name"] for stream_dict in streams_raw)
for stream_name in stream_set:
# Stream names should already have been stripped by the
# caller, but it makes sense to verify anyway.
assert stream_name == stream_name.strip()
check_stream_name(stream_name)
existing_streams = [] # type: List[Stream]
missing_stream_dicts = [] # type: List[Mapping[str, Any]]
existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)
for stream_dict in streams_raw:
stream_name = stream_dict["name"]
stream = existing_stream_map.get(stream_name.lower())
if stream is None:
missing_stream_dicts.append(stream_dict)
else:
existing_streams.append(stream)
if len(missing_stream_dicts) == 0:
# This is the happy path for callers who expected all of these
# streams to exist already.
created_streams = [] # type: List[Stream]
else:
# autocreate=True path starts here
if not user_profile.can_create_streams():
raise JsonableError(_('User cannot create streams.'))
elif not autocreate:
raise JsonableError(_("Stream(s) (%s) do not exist") % ", ".join(
stream_dict["name"] for stream_dict in missing_stream_dicts))
# We already filtered out existing streams, so dup_streams
# will normally be an empty list below, but we protect against somebody
# else racing to create the same stream. (This is not an entirely
# paranoid approach, since often on Zulip two people will discuss
# creating a new stream, and both people eagerly do it.)
created_streams, dup_streams = create_streams_if_needed(realm=user_profile.realm,
stream_dicts=missing_stream_dicts)
existing_streams += dup_streams
return existing_streams, created_streams
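# A minimal sketch of calling list_to_streams in autocreate mode, assuming
# the caller has already stripped whitespace from the names; the stream
# names here are illustrative.
def _example_get_or_create_streams(user_profile: UserProfile) -> List[Stream]:
    streams_raw = [{"name": "errors"}, {"name": "social"}]
    existing_streams, created_streams = list_to_streams(
        streams_raw, user_profile, autocreate=True)
    return existing_streams + created_streams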
def access_default_stream_group_by_id(realm: Realm, group_id: int) -> DefaultStreamGroup:
try:
return DefaultStreamGroup.objects.get(realm=realm, id=group_id)
except DefaultStreamGroup.DoesNotExist:
raise JsonableError(_("Default stream group with id '%s' does not exist." % (group_id,)))
| [
"UserProfile",
"int",
"UserProfile",
"Stream",
"str",
"UserProfile",
"int",
"int",
"Realm",
"str",
"UserProfile",
"str",
"UserProfile",
"str",
"str",
"UserProfile",
"str",
"UserProfile",
"Iterable[Stream]",
"Iterable[Mapping[str, Any]]",
"UserProfile",
"Realm",
"int"
] | [
549,
573,
1342,
1363,
1403,
3162,
3210,
3749,
3984,
3997,
4245,
4297,
4858,
4884,
4896,
5721,
5747,
7064,
7118,
8260,
8323,
11213,
11230
] | [
560,
576,
1353,
1369,
1406,
3173,
3213,
3752,
3989,
4000,
4256,
4300,
4869,
4887,
4899,
5732,
5750,
7075,
7134,
8287,
8334,
11218,
11233
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/subdomains.py | # -*- coding: utf-8 -*-
from django.conf import settings
from django.http import HttpRequest
import re
from typing import Optional
from zerver.models import get_realm, Realm, UserProfile
def get_subdomain(request: HttpRequest) -> str:
# The HTTP spec allows, but doesn't require, a client to omit the
# port in the `Host` header if it's "the default port for the
# service requested", i.e. typically either 443 or 80; and
# whatever Django gets there, or from proxies reporting that via
# X-Forwarded-Host, it passes right through the same way. So our
# logic is a bit complicated to allow for that variation.
#
# For both EXTERNAL_HOST and REALM_HOSTS, we take a missing port
# to mean that any port should be accepted in Host. It's not
# totally clear that's the right behavior, but it keeps
# compatibility with older versions of Zulip, so that's a start.
host = request.get_host().lower()
m = re.search(r'\.%s(:\d+)?$' % (settings.EXTERNAL_HOST,),
host)
if m:
subdomain = host[:m.start()]
if subdomain in settings.ROOT_SUBDOMAIN_ALIASES:
return Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
return subdomain
for subdomain, realm_host in settings.REALM_HOSTS.items():
if re.search(r'^%s(:\d+)?$' % (realm_host,),
host):
return subdomain
return Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
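# An illustrative sketch of the mapping implemented above, assuming
# EXTERNAL_HOST is "zulip.example.com" and REALM_HOSTS maps "zephyr" to
# "zephyr.example.com"; FakeRequest exists only for this example.
def _example_get_subdomain() -> None:
    class FakeRequest:
        def __init__(self, host: str) -> None:
            self._host = host

        def get_host(self) -> str:
            return self._host

    # A host under EXTERNAL_HOST yields its subdomain, with or without a port.
    assert get_subdomain(FakeRequest('lear.zulip.example.com')) == 'lear'  # type: ignore # duck-typed request
    # A REALM_HOSTS entry maps the whole host to its configured subdomain.
    assert get_subdomain(FakeRequest('zephyr.example.com:443')) == 'zephyr'  # type: ignore # duck-typed request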
def is_subdomain_root_or_alias(request: HttpRequest) -> bool:
return get_subdomain(request) == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
def user_matches_subdomain(realm_subdomain: Optional[str], user_profile: UserProfile) -> bool:
if realm_subdomain is None:
return True # nocoverage # This state may no longer be possible.
return user_profile.realm.subdomain == realm_subdomain
def is_root_domain_available() -> bool:
if settings.ROOT_DOMAIN_LANDING_PAGE:
return False
return get_realm(Realm.SUBDOMAIN_FOR_ROOT_DOMAIN) is None
| [
"HttpRequest",
"HttpRequest",
"Optional[str]",
"UserProfile"
] | [
217,
1475,
1611,
1640
] | [
228,
1486,
1624,
1651
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/test_classes.py | from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union)
from django.apps import apps
from django.db.migrations.state import StateApps
from django.urls import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.migrations.executor import MigrationExecutor
from django.db import connection
from django.db.utils import IntegrityError
from django.http import HttpRequest
from two_factor.models import PhoneDevice
from zerver.lib.initial_password import initial_password
from zerver.lib.utils import is_remote_server
from zerver.lib.users import get_api_key
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions, do_create_user,
check_send_stream_message, gather_subscriptions,
get_default_value_for_history_public_to_subscribers,
)
from zerver.lib.stream_subscription import (
get_stream_subscriptions_for_user,
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_stream,
get_client,
get_user,
get_realm,
Client,
Message,
Realm,
Recipient,
Service,
Stream,
Subscription,
UserProfile,
)
from zilencer.models import get_remote_server_by_uuid
from zerver.decorator import do_two_factor_login
from zerver.tornado.event_queue import clear_client_event_queues_for_testing
import base64
import mock
import os
import re
import ujson
import urllib
API_KEYS = {} # type: Dict[str, str]
def flush_caches_for_testing() -> None:
global API_KEYS
API_KEYS = {}
class UploadSerializeMixin(SerializeMixin):
"""
    We cannot use override_settings to change the upload directory, because
    settings.LOCAL_UPLOADS_DIR is used in the URL patterns and URLs
    are compiled only once. Otherwise, using a different upload directory
    for conflicting test cases would have provided better performance
    while still providing the required isolation.
"""
lockfile = 'var/upload_lock'
@classmethod
def setUpClass(cls: Any, *args: Any, **kwargs: Any) -> None:
if not os.path.exists(cls.lockfile):
with open(cls.lockfile, 'w'): # nocoverage - rare locking case
pass
super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff = None # type: Optional[int]
def tearDown(self) -> None:
super().tearDown()
# Important: we need to clear event queues to avoid leaking data to future tests.
clear_client_event_queues_for_testing()
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
    django_client to fool the regex.
'''
DEFAULT_SUBDOMAIN = "zulip"
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
TOKENIZED_NOREPLY_REGEX = settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS.format(token="[a-z0-9_]{24}")
def set_http_host(self, kwargs: Dict[str, Any]) -> None:
if 'subdomain' in kwargs:
kwargs['HTTP_HOST'] = Realm.host_for_subdomain(kwargs['subdomain'])
del kwargs['subdomain']
elif 'HTTP_HOST' not in kwargs:
kwargs['HTTP_HOST'] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)
@instrument_url
def client_patch(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_delete(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_options(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.options(url, encoded, **kwargs)
@instrument_url
def client_head(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.head(url, encoded, **kwargs)
@instrument_url
def client_post(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url: str, req: Any) -> HttpResponse:
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url: str, info: Dict[str, Any]={}, **kwargs: Any) -> HttpResponse:
django_client = self.client # see WRAPPER_COMMENT
self.set_http_host(kwargs)
return django_client.get(url, info, **kwargs)
example_user_map = dict(
hamlet='hamlet@zulip.com',
cordelia='cordelia@zulip.com',
iago='iago@zulip.com',
prospero='prospero@zulip.com',
othello='othello@zulip.com',
AARON='AARON@zulip.com',
aaron='aaron@zulip.com',
ZOE='ZOE@zulip.com',
polonius='polonius@zulip.com',
webhook_bot='webhook-bot@zulip.com',
welcome_bot='welcome-bot@zulip.com',
outgoing_webhook_bot='outgoing-webhook@zulip.com'
)
mit_user_map = dict(
sipbtest="sipbtest@mit.edu",
starnine="starnine@mit.edu",
espuser="espuser@mit.edu",
)
lear_user_map = dict(
cordelia="cordelia@zulip.com",
king="king@lear.org"
)
# Non-registered test users
nonreg_user_map = dict(
test='test@zulip.com',
test1='test1@zulip.com',
alice='alice@zulip.com',
newuser='newuser@zulip.com',
bob='bob@zulip.com',
cordelia='cordelia@zulip.com',
newguy='newguy@zulip.com',
me='me@zulip.com',
)
def nonreg_user(self, name: str) -> UserProfile:
email = self.nonreg_user_map[name]
return get_user(email, get_realm("zulip"))
def example_user(self, name: str) -> UserProfile:
email = self.example_user_map[name]
return get_user(email, get_realm('zulip'))
def mit_user(self, name: str) -> UserProfile:
email = self.mit_user_map[name]
return get_user(email, get_realm('zephyr'))
def lear_user(self, name: str) -> UserProfile:
email = self.lear_user_map[name]
return get_user(email, get_realm('lear'))
def nonreg_email(self, name: str) -> str:
return self.nonreg_user_map[name]
def example_email(self, name: str) -> str:
return self.example_user_map[name]
def mit_email(self, name: str) -> str:
return self.mit_user_map[name]
def notification_bot(self) -> UserProfile:
return get_user('notification-bot@zulip.com', get_realm('zulip'))
def create_test_bot(self, short_name: str, user_profile: UserProfile,
                        assert_json_error_msg: Optional[str]=None, **extras: Any) -> Optional[UserProfile]:
self.login(user_profile.email)
bot_info = {
'short_name': short_name,
'full_name': 'Foo Bot',
}
bot_info.update(extras)
result = self.client_post("/json/bots", bot_info)
if assert_json_error_msg is not None:
self.assert_json_error(result, assert_json_error_msg)
return None
else:
self.assert_json_success(result)
bot_email = '{}-bot@zulip.testserver'.format(short_name)
bot_profile = get_user(bot_email, user_profile.realm)
return bot_profile
def login_with_return(self, email: str, password: Optional[str]=None,
**kwargs: Any) -> HttpResponse:
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password},
**kwargs)
def login(self, email: str, password: Optional[str]=None, fails: bool=False,
realm: Optional[Realm]=None) -> HttpResponse:
if realm is None:
realm = get_realm("zulip")
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password,
realm=realm))
else:
self.assertFalse(self.client.login(username=email, password=password,
realm=realm))
def login_2fa(self, user_profile: UserProfile) -> None:
"""
We need this function to call request.session.save().
do_two_factor_login doesn't save session; in normal request-response
cycle this doesn't matter because middleware will save the session
        when it finds it dirty; however, in tests we will have to do that
explicitly.
"""
request = HttpRequest()
request.session = self.client.session
request.user = user_profile
do_two_factor_login(request, user_profile)
request.session.save()
def logout(self) -> None:
self.client.logout()
def register(self, email: str, password: str, **kwargs: Any) -> HttpResponse:
self.client_post('/accounts/home/', {'email': email},
**kwargs)
return self.submit_reg_form_for_user(email, password, **kwargs)
def submit_reg_form_for_user(
self, email: str, password: str,
realm_name: Optional[str]="Zulip Test",
realm_subdomain: Optional[str]="zuliptest",
from_confirmation: Optional[str]='', full_name: Optional[str]=None,
timezone: Optional[str]='', realm_in_root_domain: Optional[str]=None,
default_stream_groups: Optional[List[str]]=[],
source_realm: Optional[str]='', **kwargs: Any) -> HttpResponse:
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
if full_name is None:
full_name = email.replace("@", "_")
payload = {
'full_name': full_name,
'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(email),
'timezone': timezone,
'terms': True,
'from_confirmation': from_confirmation,
'default_stream_group': default_stream_groups,
'source_realm': source_realm,
}
if realm_in_root_domain is not None:
payload['realm_in_root_domain'] = realm_in_root_domain
return self.client_post('/accounts/register/', payload, **kwargs)
def get_confirmation_url_from_outbox(self, email_address: str, *,
                                         url_pattern: Optional[str]=None) -> str:
from django.core.mail import outbox
if url_pattern is None:
# This is a bit of a crude heuristic, but good enough for most tests.
url_pattern = settings.EXTERNAL_HOST + r"(\S+)>"
for message in reversed(outbox):
if email_address in message.to:
return re.search(url_pattern, message.body).groups()[0]
else:
raise AssertionError("Couldn't find a confirmation email.")
def encode_credentials(self, identifier: str, realm: str="zulip") -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
if identifier in API_KEYS:
api_key = API_KEYS[identifier]
else:
if is_remote_server(identifier):
api_key = get_remote_server_by_uuid(identifier).api_key
else:
user = get_user(identifier, get_realm(realm))
api_key = get_api_key(user)
API_KEYS[identifier] = api_key
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
def api_get(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
return self.client_get(*args, **kwargs)
def api_post(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(identifier, kwargs.get('realm', 'zulip'))
return self.client_post(*args, **kwargs)
def api_patch(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
return self.client_patch(*args, **kwargs)
def api_put(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
return self.client_put(*args, **kwargs)
def api_delete(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
return self.client_delete(*args, **kwargs)
def get_streams(self, email: str, realm: Realm) -> List[str]:
"""
Helper function to get the stream names for a user
"""
user_profile = get_user(email, realm)
subs = get_stream_subscriptions_for_user(user_profile).filter(
active=True,
)
return [cast(str, get_display_recipient(sub.recipient)) for sub in subs]
def send_personal_message(self, from_email: str, to_email: str, content: str="test content",
sender_realm: str="zulip") -> int:
sender = get_user(from_email, get_realm(sender_realm))
recipient_list = [to_email]
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, 'private', recipient_list, None,
content
)
def send_huddle_message(self, from_email: str, to_emails: List[str], content: str="test content",
sender_realm: str="zulip") -> int:
sender = get_user(from_email, get_realm(sender_realm))
assert(len(to_emails) >= 2)
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, 'private', to_emails, None,
content
)
def send_stream_message(self, sender_email: str, stream_name: str, content: str="test content",
topic_name: str="test", sender_realm: str="zulip") -> int:
sender = get_user(sender_email, get_realm(sender_realm))
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_stream_message(
sender=sender,
client=sending_client,
stream_name=stream_name,
topic=topic_name,
body=content,
)
def get_messages_response(self, anchor: int=1, num_before: int=100, num_after: int=100,
use_first_unread_anchor: bool=False) -> Dict[str, List[Dict[str, Any]]]:
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after,
"use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
result = self.client_get("/json/messages", dict(post_params))
data = result.json()
return data
def get_messages(self, anchor: int=1, num_before: int=100, num_after: int=100,
use_first_unread_anchor: bool=False) -> List[Dict[str, Any]]:
data = self.get_messages_response(anchor, num_before, num_after, use_first_unread_anchor)
return data['messages']
def users_subscribed_to_stream(self, stream_name: str, realm: Realm) -> List[UserProfile]:
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url: str, result: bytes) -> None:
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEqual(result, data)
def assert_json_success(self, result: HttpResponse) -> Dict[str, Any]:
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
try:
json = ujson.loads(result.content)
except Exception: # nocoverage
json = {'msg': "Error parsing JSON in response!"}
self.assertEqual(result.status_code, 200, json['msg'])
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
self.assertNotEqual(json["msg"], "Error parsing JSON in response!")
return json
def get_json_error(self, result: HttpResponse, status_code: int=400) -> Dict[str, Any]:
try:
json = ujson.loads(result.content)
except Exception: # nocoverage
json = {'msg': "Error parsing JSON in response!"}
self.assertEqual(result.status_code, status_code, msg=json.get('msg'))
self.assertEqual(json.get("result"), "error")
return json['msg']
def assert_json_error(self, result: HttpResponse, msg: str, status_code: int=400) -> None:
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, items: List[Any], count: int) -> None:
actual_count = len(items)
if actual_count != count: # nocoverage
print('ITEMS:\n')
for item in items:
print(item)
print("\nexpected length: %s\nactual length: %s" % (count, actual_count))
raise AssertionError('List is unexpected size!')
def assert_json_error_contains(self, result: HttpResponse, msg_substring: str,
status_code: int=400) -> None:
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_in_response(self, substring: str, response: HttpResponse) -> None:
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings: List[str],
response: HttpResponse) -> None:
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def assert_not_in_success_response(self, substrings: List[str],
response: HttpResponse) -> None:
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertNotIn(substring, decoded)
def webhook_fixture_data(self, type: str, action: str, file_type: str='json') -> str:
fn = os.path.join(
os.path.dirname(__file__),
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type)
)
return open(fn).read()
def fixture_data(self, file_name: str, type: str='') -> str:
fn = os.path.join(
os.path.dirname(__file__),
"../tests/fixtures/%s/%s" % (type, file_name)
)
return open(fn).read()
def make_stream(self, stream_name: str, realm: Optional[Realm]=None,
invite_only: Optional[bool]=False,
history_public_to_subscribers: Optional[bool]=None) -> Stream:
if realm is None:
realm = self.DEFAULT_REALM
history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
realm, invite_only, history_public_to_subscribers)
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
history_public_to_subscribers=history_public_to_subscribers,
)
except IntegrityError: # nocoverage -- this is for bugs in the tests
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe(self, user_profile: UserProfile, stream_name: str) -> Stream:
try:
stream = get_stream(stream_name, user_profile.realm)
from_stream_creation = False
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(user_profile.realm, stream_name)
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
return stream
def unsubscribe(self, user_profile: UserProfile, stream_name: str) -> None:
client = get_client("website")
stream = get_stream(stream_name, user_profile.realm)
bulk_remove_subscriptions([user_profile], [stream], client)
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email: str, streams: Iterable[str],
extra_post_data: Dict[str, Any]={}, invite_only: bool=False,
**kwargs: Any) -> HttpResponse:
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
kwargs['realm'] = kwargs.get('subdomain', 'zulip')
result = self.api_post(email, "/api/v1/users/me/subscriptions", post_data, **kwargs)
return result
def check_user_subscribed_only_to_streams(self, user_name: str,
streams: List[Stream]) -> None:
streams = sorted(streams, key=lambda x: x.name)
subscribed_streams = gather_subscriptions(self.nonreg_user(user_name))[0]
self.assertEqual(len(subscribed_streams), len(streams))
for x, y in zip(subscribed_streams, streams):
self.assertEqual(x["name"], y.name)
def send_json_payload(self, user_profile: UserProfile, url: str,
payload: Union[str, Dict[str, Any]],
stream_name: Optional[str]=None, **post_params: Any) -> Message:
if stream_name is not None:
self.subscribe(user_profile, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, user_profile.email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self) -> Message:
return Message.objects.latest('id')
def get_second_to_last_message(self) -> Message:
return Message.objects.all().order_by('-id')[1]
@contextmanager
def simulated_markdown_failure(self) -> Iterator[None]:
'''
This raises a failure inside of the try/except block of
bugdown.__init__.do_convert.
'''
with \
self.settings(ERROR_BOT=None), \
mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
mock.patch('zerver.lib.bugdown.bugdown_logger'):
yield
def create_default_device(self, user_profile: UserProfile,
number: str="+12223334444") -> None:
phone_device = PhoneDevice(user=user_profile, name='default',
confirmed=True, number=number,
key='abcd', method='sms')
phone_device.save()
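# A hedged sketch of a test built on the helpers above; the stream name and
# message content are illustrative and assume the standard populated test
# database.  This example class is not part of the original file.
class _ExampleStreamMessageTest(ZulipTestCase):
    def test_send_to_stream(self) -> None:
        email = self.example_email('hamlet')
        self.login(email)
        self.subscribe(self.example_user('hamlet'), 'Denmark')
        message_id = self.send_stream_message(email, 'Denmark', content='hello world')
        self.assertEqual(self.get_last_message().id, message_id)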
class WebhookTestCase(ZulipTestCase):
"""
    Common base class for all webhook tests.
    Override the class attributes below and call send_and_test_stream_message.
    If you create your URL in an uncommon way, you can override the
    build_webhook_url method. If you need to modify the body or create it
    without using a fixture, you can also override the get_body method.
"""
STREAM_NAME = None # type: Optional[str]
TEST_USER_EMAIL = 'webhook-bot@zulip.com'
URL_TEMPLATE = None # type: Optional[str]
FIXTURE_DIR_NAME = None # type: Optional[str]
@property
def test_user(self) -> UserProfile:
return get_user(self.TEST_USER_EMAIL, get_realm("zulip"))
def setUp(self) -> None:
self.url = self.build_webhook_url()
def api_stream_message(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
return self.send_and_test_stream_message(*args, **kwargs)
def send_and_test_stream_message(self, fixture_name: str, expected_topic: Optional[str]=None,
expected_message: Optional[str]=None,
content_type: Optional[str]="application/json", **kwargs: Any) -> Message:
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.test_user, self.url, payload,
self.STREAM_NAME, **kwargs)
self.do_test_topic(msg, expected_topic)
self.do_test_message(msg, expected_message)
return msg
    def send_and_test_private_message(self, fixture_name: str,
                                      expected_topic: Optional[str]=None,
                                      expected_message: Optional[str]=None,
                                      content_type: Optional[str]="application/json",
                                      **kwargs: Any) -> Message:
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
sender = kwargs.get('sender', self.test_user)
msg = self.send_json_payload(sender, self.url, payload,
stream_name=None, **kwargs)
self.do_test_message(msg, expected_message)
return msg
def build_webhook_url(self, *args: Any, **kwargs: Any) -> str:
url = self.URL_TEMPLATE
if url.find("api_key") >= 0:
api_key = get_api_key(self.test_user)
url = self.URL_TEMPLATE.format(api_key=api_key,
stream=self.STREAM_NAME)
else:
url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
has_arguments = kwargs or args
        if has_arguments and url.find('?') == -1:
            url = "{}?".format(url)  # nocoverage
        elif has_arguments:
            url = "{}&".format(url)
for key, value in kwargs.items():
url = "{}{}={}&".format(url, key, value)
for arg in args:
url = "{}{}&".format(url, arg)
return url[:-1] if has_arguments else url
def get_body(self, fixture_name: str) -> Union[str, Dict[str, str]]:
"""Can be implemented either as returning a dictionary containing the
post parameters or as string containing the body of the request."""
return ujson.dumps(ujson.loads(self.webhook_fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))
def do_test_topic(self, msg: Message, expected_topic: Optional[str]) -> None:
if expected_topic is not None:
self.assertEqual(msg.topic_name(), expected_topic)
def do_test_message(self, msg: Message, expected_message: Optional[str]) -> None:
if expected_message is not None:
self.assertEqual(msg.content, expected_message)
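# An illustrative sketch of the conventions described in the WebhookTestCase
# docstring; the integration name, URL template, and fixture are invented
# for this example and do not correspond to a real webhook.
class _ExampleWebhookTests(WebhookTestCase):
    STREAM_NAME = 'example'
    URL_TEMPLATE = "/api/v1/external/example?api_key={api_key}&stream={stream}"
    FIXTURE_DIR_NAME = 'example'

    def test_ping_event(self) -> None:
        self.send_and_test_stream_message('ping',
                                          expected_topic="ping",
                                          expected_message="pong")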
class MigrationsTestCase(ZulipTestCase):
"""
Test class for database migrations inspired by this blog post:
https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/
Documented at https://zulip.readthedocs.io/en/latest/subsystems/schema-migrations.html
"""
@property
def app(self) -> str:
return apps.get_containing_app_config(type(self).__module__).name
migrate_from = None # type: Optional[str]
migrate_to = None # type: Optional[str]
def setUp(self) -> None:
assert self.migrate_from and self.migrate_to, \
"TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
migrate_from = [(self.app, self.migrate_from)] # type: List[Tuple[str, str]]
migrate_to = [(self.app, self.migrate_to)] # type: List[Tuple[str, str]]
executor = MigrationExecutor(connection)
old_apps = executor.loader.project_state(migrate_from).apps
# Reverse to the original migration
executor.migrate(migrate_from)
self.setUpBeforeMigration(old_apps)
# Run the migration to test
executor = MigrationExecutor(connection)
executor.loader.build_graph() # reload.
executor.migrate(migrate_to)
self.apps = executor.loader.project_state(migrate_to).apps
def setUpBeforeMigration(self, apps: StateApps) -> None:
pass # nocoverage
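# A minimal sketch of a migrations test following the pattern above; the
# migration names are illustrative and the assertions are left as comments.
class _ExampleMigrationTests(MigrationsTestCase):
    migrate_from = '0001_initial'
    migrate_to = '0002_illustrative_change'

    def setUpBeforeMigration(self, apps: StateApps) -> None:
        # Create rows with the pre-migration model classes from `apps` here.
        pass

    def test_migrated_state(self) -> None:
        # Query self.apps (the post-migration state) and assert on the result.
        pass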
| [
"Any",
"Any",
"Any",
"Dict[str, Any]",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"Any",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"UserProfile",
"Any",
"str",
"Any",
"str",
"UserProfile",
"str",
"str",
"Any",
"str",
"str",
"Any",
"str",
"str",
"str",
"Any",
"Any",
"str",
"Any",
"Any",
"str",
"Any",
"Any",
"str",
"Any",
"Any",
"str",
"Any",
"Any",
"str",
"Realm",
"str",
"str",
"str",
"List[str]",
"str",
"str",
"str",
"Realm",
"str",
"bytes",
"HttpResponse",
"HttpResponse",
"HttpResponse",
"str",
"List[Any]",
"int",
"HttpResponse",
"str",
"str",
"HttpResponse",
"List[str]",
"HttpResponse",
"List[str]",
"HttpResponse",
"str",
"str",
"str",
"str",
"UserProfile",
"str",
"UserProfile",
"str",
"str",
"Iterable[str]",
"Any",
"str",
"List[Stream]",
"UserProfile",
"str",
"Union[str, Dict[str, Any]]",
"Any",
"UserProfile",
"str",
"Any",
"Any",
"str",
"Any",
"str",
"Any",
"Any",
"Any",
"str",
"Message",
"Optional[str]",
"Message",
"Optional[str]",
"StateApps"
] | [
2375,
2387,
2402,
3693,
4044,
4084,
4467,
4507,
5246,
5286,
5560,
5600,
5878,
5918,
6194,
6234,
6507,
6547,
6778,
6788,
7270,
7310,
8585,
8734,
8880,
9024,
9170,
9260,
9347,
9564,
9583,
9663,
10329,
10400,
10695,
11309,
11949,
11964,
11979,
12230,
12245,
12633,
13684,
14272,
14939,
14951,
14966,
15142,
15154,
15169,
15377,
15389,
15404,
15576,
15588,
15603,
15776,
15788,
15803,
15980,
15992,
16378,
16393,
16852,
16868,
17329,
17347,
18689,
18701,
19100,
19113,
19303,
20015,
20433,
20452,
20749,
20767,
21149,
21178,
21385,
21400,
21544,
21600,
21867,
21927,
22181,
22194,
22448,
22680,
23770,
23796,
24230,
24256,
24541,
24555,
24713,
25170,
25230,
25606,
25624,
25664,
25766,
27020,
28103,
28115,
28130,
28346,
28557,
29023,
29206,
29651,
29666,
30452,
30780,
30805,
30967,
30994,
32520
] | [
2378,
2390,
2405,
3707,
4047,
4087,
4470,
4510,
5249,
5289,
5563,
5603,
5881,
5921,
6197,
6237,
6510,
6550,
6781,
6791,
7273,
7313,
8588,
8737,
8883,
9027,
9173,
9263,
9350,
9567,
9594,
9666,
10332,
10403,
10698,
11320,
11952,
11967,
11982,
12233,
12248,
12636,
13687,
14275,
14942,
14954,
14969,
15145,
15157,
15172,
15380,
15392,
15407,
15579,
15591,
15606,
15779,
15791,
15806,
15983,
15997,
16381,
16396,
16855,
16877,
17332,
17350,
18692,
18706,
19103,
19118,
19315,
20027,
20445,
20455,
20758,
20770,
21161,
21181,
21388,
21412,
21553,
21612,
21876,
21939,
22184,
22197,
22451,
22683,
23781,
23799,
24241,
24259,
24544,
24568,
24716,
25173,
25242,
25617,
25627,
25690,
25769,
27031,
28106,
28118,
28133,
28349,
28560,
29026,
29209,
29654,
29669,
30455,
30787,
30818,
30974,
31007,
32529
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/test_fixtures.py | # -*- coding: utf-8 -*-
import json
import os
import re
import hashlib
import subprocess
import sys
from typing import Any, List, Optional
from importlib import import_module
from io import StringIO
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.utils import OperationalError
from django.apps import apps
from django.conf import settings
from django.core.management import call_command
from django.utils.module_loading import module_has_submodule
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from scripts.lib.zulip_tools import get_dev_uuid_var_path, run
UUID_VAR_DIR = get_dev_uuid_var_path()
FILENAME_SPLITTER = re.compile(r'[\W\-_]')
def run_db_migrations(platform: str) -> None:
if platform == 'dev':
migration_status_file = 'migration_status_dev'
settings = 'zproject.settings'
db_name = 'ZULIP_DB_NAME=zulip'
elif platform == 'test':
migration_status_file = 'migration_status_test'
settings = 'zproject.test_settings'
db_name = 'ZULIP_DB_NAME=zulip_test_template'
# We shell out to `manage.py` and pass `DJANGO_SETTINGS_MODULE` on
# the command line rather than just calling the migration
# functions, because Django doesn't support changing settings like
    # which database to use at runtime.
    # We also export ZULIP_DB_NAME, which is ignored by the dev platform but
    # recognised by the test platform and used to migrate the correct db.
run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
'./manage.py', 'migrate', '--no-input'])
run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
'./manage.py', 'get_migration_status',
'--output=%s' % (migration_status_file)])
def run_generate_fixtures_if_required(use_force: bool=False) -> None:
generate_fixtures_command = ['tools/setup/generate-fixtures']
test_template_db_status = template_database_status()
if use_force or test_template_db_status == 'needs_rebuild':
generate_fixtures_command.append('--force')
elif test_template_db_status == 'run_migrations':
run_db_migrations('test')
subprocess.check_call(generate_fixtures_command)
def database_exists(database_name: str, **options: Any) -> bool:
db = options.get('database', DEFAULT_DB_ALIAS)
try:
connection = connections[db]
with connection.cursor() as cursor:
cursor.execute("SELECT 1 from pg_database WHERE datname='{}';".format(database_name))
return_value = bool(cursor.fetchone())
connections.close_all()
return return_value
except OperationalError:
return False
def get_migration_status(**options: Any) -> str:
verbosity = options.get('verbosity', 1)
for app_config in apps.get_app_configs():
if module_has_submodule(app_config.module, "management"):
import_module('.management', app_config.name)
app_label = options['app_label'] if options.get('app_label') else None
db = options.get('database', DEFAULT_DB_ALIAS)
out = StringIO()
call_command(
'showmigrations',
'--list',
app_label=app_label,
database=db,
no_color=options.get('no_color', False),
settings=options.get('settings', os.environ['DJANGO_SETTINGS_MODULE']),
stdout=out,
traceback=options.get('traceback', True),
verbosity=verbosity,
)
connections.close_all()
out.seek(0)
output = out.read()
return re.sub(r'\x1b\[(1|0)m', '', output)
def extract_migrations_as_list(migration_status: str) -> List[str]:
MIGRATIONS_RE = re.compile(r'\[[X| ]\] (\d+_.+)\n')
return MIGRATIONS_RE.findall(migration_status)
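# A small, self-contained sketch of the input this parses: the bracketed
# lines printed by `manage.py showmigrations --list`.
def _example_extract_migrations() -> List[str]:
    migration_status = " [X] 0001_initial\n [ ] 0002_auto\n"
    # Returns ['0001_initial', '0002_auto'], applied or not.
    return extract_migrations_as_list(migration_status)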
def what_to_do_with_migrations(migration_file: str, **options: Any) -> str:
if not os.path.exists(migration_file):
return 'scrap'
with open(migration_file) as f:
previous_migration_status = f.read()
current_migration_status = get_migration_status(**options)
all_curr_migrations = extract_migrations_as_list(current_migration_status)
all_prev_migrations = extract_migrations_as_list(previous_migration_status)
if len(all_curr_migrations) < len(all_prev_migrations):
return 'scrap'
for migration in all_prev_migrations:
if migration not in all_curr_migrations:
return 'scrap'
if len(all_curr_migrations) == len(all_prev_migrations):
return 'migrations_are_latest'
return 'migrate'
def _get_hash_file_path(source_file_path: str, status_dir: str) -> str:
basename = os.path.basename(source_file_path)
filename = '_'.join(FILENAME_SPLITTER.split(basename)).lower()
return os.path.join(status_dir, filename)
def _check_hash(source_hash_file: str, target_content: str) -> bool:
"""
This function has a side effect of creating a new hash file or
updating the old hash file.
"""
target_hash_content = hashlib.sha1(target_content.encode('utf8')).hexdigest()
if not os.path.exists(source_hash_file):
source_hash_content = None
else:
with open(source_hash_file) as f:
source_hash_content = f.read().strip()
with open(source_hash_file, 'w') as f:
f.write(target_hash_content)
return source_hash_content == target_hash_content
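# A hedged sketch of the hashing helpers, assuming `status_dir` is an
# existing directory (e.g. from tempfile.mkdtemp()); the path and contents
# are illustrative.
def _example_check_hash(status_dir: str) -> None:
    source_hash_file = _get_hash_file_path('tools/setup/example-script', status_dir)
    assert not _check_hash(source_hash_file, 'contents v1')  # no prior hash recorded
    assert _check_hash(source_hash_file, 'contents v1')      # hash now matches
    assert not _check_hash(source_hash_file, 'contents v2')  # contents changed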
def check_file_hash(target_file_path: str, status_dir: str) -> bool:
source_hash_file = _get_hash_file_path(target_file_path, status_dir)
with open(target_file_path) as f:
target_content = f.read()
return _check_hash(source_hash_file, target_content)
def check_setting_hash(setting_name: str, status_dir: str) -> bool:
hash_filename = '_'.join(['settings', setting_name])
source_hash_file = os.path.join(status_dir, hash_filename)
target_content = json.dumps(getattr(settings, setting_name), sort_keys=True)
return _check_hash(source_hash_file, target_content)
def template_database_status(
database_name: str='zulip_test_template',
migration_status: Optional[str]=None,
settings: str='zproject.test_settings',
status_dir: Optional[str]=None,
check_files: Optional[List[str]]=None,
check_settings: Optional[List[str]]=None) -> str:
# This function returns a status string specifying the type of
# state the template db is in and thus the kind of action required.
if check_files is None:
check_files = [
'zilencer/management/commands/populate_db.py',
'zerver/lib/bulk_create.py',
'zerver/lib/generate_test_data.py',
'tools/setup/postgres-init-test-db',
'tools/setup/postgres-init-dev-db',
]
if check_settings is None:
check_settings = [
'REALM_INTERNAL_BOTS',
]
if status_dir is None:
status_dir = os.path.join(UUID_VAR_DIR, 'test_db_status')
if migration_status is None:
migration_status = os.path.join(UUID_VAR_DIR, 'migration_status_test')
if not os.path.exists(status_dir):
os.mkdir(status_dir)
if database_exists(database_name):
# To ensure Python evaluates all the hash tests (and thus creates the
        # hash files recording the current state), we evaluate them in a
        # list and then process the result.
files_hash_status = all([check_file_hash(fn, status_dir) for fn in check_files])
settings_hash_status = all([check_setting_hash(setting_name, status_dir)
for setting_name in check_settings])
hash_status = files_hash_status and settings_hash_status
if not hash_status:
return 'needs_rebuild'
migration_op = what_to_do_with_migrations(migration_status, settings=settings)
if migration_op == 'scrap':
return 'needs_rebuild'
if migration_op == 'migrate':
return 'run_migrations'
return 'current'
return 'needs_rebuild'
| [
"str",
"str",
"Any",
"Any",
"str",
"str",
"Any",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str"
] | [
725,
2228,
2244,
2696,
3583,
3757,
3773,
4525,
4542,
4753,
4774,
5344,
5361,
5617,
5634
] | [
728,
2231,
2247,
2699,
3586,
3760,
3776,
4528,
4545,
4756,
4777,
5347,
5364,
5620,
5637
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/test_helpers.py | from contextlib import contextmanager
from typing import (
cast, Any, Callable, Dict, Generator, Iterable, Iterator, List, Mapping,
Optional, Set, Sized, Tuple, Union, IO, TypeVar
)
from django.core import signing
from django.urls.resolvers import LocaleRegexURLResolver
from django.conf import settings
from django.test import TestCase, override_settings
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.http import HttpResponse, HttpResponseRedirect
from django.db.utils import IntegrityError
from django.db.migrations.state import StateApps
import zerver.lib.upload
from zerver.lib.upload import S3UploadBackend, LocalUploadBackend
from zerver.lib.avatar import avatar_url
from zerver.lib.cache import get_cache_backend
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib import cache
from zerver.tornado import event_queue
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, get_stream_recipient,
)
from zerver.models import (
get_recipient,
get_stream,
get_user,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
if False:
from zerver.lib.test_classes import ZulipTestCase, MigrationsTestCase
import collections
import base64
import mock
import os
import re
import sys
import time
import ujson
import unittest
import urllib
from zerver.lib.str_utils import NonBinaryStr
from moto import mock_s3_deprecated
import fakeldap
import ldap
class MockLDAP(fakeldap.MockLDAP):
class LDAPError(ldap.LDAPError):
pass
class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
pass
class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
pass
class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
pass
@contextmanager
def stub_event_queue_user_events(event_queue_return: Any, user_events_return: Any) -> Iterator[None]:
with mock.patch('zerver.lib.events.request_event_queue',
return_value=event_queue_return):
with mock.patch('zerver.lib.events.get_user_events',
return_value=user_events_return):
yield
@contextmanager
def simulated_queue_client(client: Callable[..., Any]) -> Iterator[None]:
real_SimpleQueueClient = queue_processors.SimpleQueueClient
queue_processors.SimpleQueueClient = client # type: ignore # https://github.com/JukkaL/mypy/issues/1152
yield
queue_processors.SimpleQueueClient = real_SimpleQueueClient # type: ignore # https://github.com/JukkaL/mypy/issues/1152
@contextmanager
def tornado_redirected_to_list(lst: List[Mapping[str, Any]]) -> Iterator[None]:
real_event_queue_process_notification = event_queue.process_notification
event_queue.process_notification = lambda notice: lst.append(notice)
# process_notification takes a single parameter called 'notice'.
# lst.append takes a single argument called 'object'.
# Some code might call process_notification using keyword arguments,
    # so mypy doesn't allow assigning lst.append to process_notification.
    # So we explicitly change the parameter name to 'notice' to work around this problem.
yield
event_queue.process_notification = real_event_queue_process_notification
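# A hedged usage sketch for the context manager above; `self` is an
# illustrative ZulipTestCase instance with the usual messaging helpers.
def _example_tornado_redirected_to_list(self: 'ZulipTestCase') -> None:
    events = []  # type: List[Mapping[str, Any]]
    with tornado_redirected_to_list(events):
        self.send_stream_message(self.example_email('hamlet'), 'Denmark')
    assert len(events) > 0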
@contextmanager
def simulated_empty_cache() -> Generator[
List[Tuple[str, Union[str, List[str]], str]], None, None]:
cache_queries = [] # type: List[Tuple[str, Union[str, List[str]], str]]
def my_cache_get(key: str, cache_name: Optional[str]=None) -> Optional[Dict[str, Any]]:
cache_queries.append(('get', key, cache_name))
return None
def my_cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]: # nocoverage -- simulated code doesn't use this
cache_queries.append(('getmany', keys, cache_name))
return {}
old_get = cache.cache_get
old_get_many = cache.cache_get_many
cache.cache_get = my_cache_get
cache.cache_get_many = my_cache_get_many
yield cache_queries
cache.cache_get = old_get
cache.cache_get_many = old_get_many
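# An illustrative sketch of asserting on cache traffic with the context
# manager above; the cache key is invented for the example.
def _example_simulated_empty_cache() -> None:
    with simulated_empty_cache() as cache_queries:
        cache.cache_get('example_key')
    assert cache_queries == [('get', 'example_key', None)]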
@contextmanager
def queries_captured(include_savepoints: Optional[bool]=False) -> Generator[
List[Dict[str, Union[str, bytes]]], None, None]:
'''
Allow a user to capture just the queries executed during
the with statement.
'''
queries = [] # type: List[Dict[str, Union[str, bytes]]]
def wrapper_execute(self: TimeTrackingCursor,
action: Callable[[NonBinaryStr, Iterable[Any]], None],
sql: NonBinaryStr,
params: Iterable[Any]=()) -> None:
cache = get_cache_backend(None)
cache.clear()
start = time.time()
try:
return action(sql, params)
finally:
stop = time.time()
duration = stop - start
if include_savepoints or ('SAVEPOINT' not in sql):
queries.append({
'sql': self.mogrify(sql, params).decode('utf-8'),
'time': "%.3f" % duration,
})
old_execute = TimeTrackingCursor.execute
old_executemany = TimeTrackingCursor.executemany
def cursor_execute(self: TimeTrackingCursor, sql: NonBinaryStr,
params: Iterable[Any]=()) -> None:
return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params) # type: ignore # https://github.com/JukkaL/mypy/issues/1167
TimeTrackingCursor.execute = cursor_execute # type: ignore # https://github.com/JukkaL/mypy/issues/1167
def cursor_executemany(self: TimeTrackingCursor, sql: NonBinaryStr,
params: Iterable[Any]=()) -> None:
return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params) # type: ignore # https://github.com/JukkaL/mypy/issues/1167 # nocoverage -- doesn't actually get used in tests
TimeTrackingCursor.executemany = cursor_executemany # type: ignore # https://github.com/JukkaL/mypy/issues/1167
yield queries
TimeTrackingCursor.execute = old_execute # type: ignore # https://github.com/JukkaL/mypy/issues/1167
TimeTrackingCursor.executemany = old_executemany # type: ignore # https://github.com/JukkaL/mypy/issues/1167
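# A hedged usage sketch for queries_captured, assuming it runs inside a
# test with a working database connection; the query is illustrative.
def _example_queries_captured() -> None:
    with queries_captured() as queries:
        list(UserProfile.objects.filter(is_active=True)[:1])
    for query in queries:
        print(query['time'], query['sql'])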
@contextmanager
def stdout_suppressed() -> Iterator[IO[str]]:
"""Redirect stdout to /dev/null."""
with open(os.devnull, 'a') as devnull:
stdout, sys.stdout = sys.stdout, devnull
yield stdout
sys.stdout = stdout
def get_test_image_file(filename: str) -> IO[Any]:
test_avatar_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../tests/images'))
return open(os.path.join(test_avatar_dir, filename), 'rb')
def avatar_disk_path(user_profile: UserProfile, medium: bool=False, original: bool=False) -> str:
avatar_url_path = avatar_url(user_profile, medium)
avatar_disk_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
avatar_url_path.split("/")[-2],
avatar_url_path.split("/")[-1].split("?")[0])
if original:
avatar_disk_path.replace(".png", ".original")
return avatar_disk_path
def make_client(name: str) -> Client:
client, _ = Client.objects.get_or_create(name=name)
return client
def find_key_by_email(address: str) -> Optional[str]:
from django.core.mail import outbox
key_regex = re.compile("accounts/do_confirm/([a-z0-9]{24})>")
for message in reversed(outbox):
if address in message.to:
return key_regex.search(message.body).groups()[0]
return None # nocoverage -- in theory a test might want this case, but none do
def message_stream_count(user_profile: UserProfile) -> int:
return UserMessage.objects. \
select_related("message"). \
filter(user_profile=user_profile). \
count()
def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
query = UserMessage.objects. \
select_related("message"). \
filter(user_profile=user_profile). \
order_by('-message')
return query[0] # Django does LIMIT here
def most_recent_message(user_profile: UserProfile) -> Message:
usermessage = most_recent_usermessage(user_profile)
return usermessage.message
def get_subscription(stream_name: str, user_profile: UserProfile) -> Subscription:
stream = get_stream(stream_name, user_profile.realm)
recipient = get_stream_recipient(stream.id)
return Subscription.objects.get(user_profile=user_profile,
recipient=recipient, active=True)
def get_user_messages(user_profile: UserProfile) -> List[Message]:
query = UserMessage.objects. \
select_related("message"). \
filter(user_profile=user_profile). \
order_by('message')
return [um.message for um in query]
class DummyHandler:
def __init__(self) -> None:
allocate_handler_id(self) # type: ignore # this is a testing mock
class POSTRequestMock:
method = "POST"
def __init__(self, post_data: Dict[str, Any], user_profile: Optional[UserProfile]) -> None:
self.GET = {} # type: Dict[str, Any]
self.POST = post_data
self.user = user_profile
self._tornado_handler = DummyHandler()
self._log_data = {} # type: Dict[str, Any]
self.META = {'PATH_INFO': 'test'}
self.path = ''
class HostRequestMock:
"""A mock request object where get_host() works. Useful for testing
    routes that use Zulip's subdomains feature."""
    def __init__(self, user_profile: Optional[UserProfile]=None, host: str=settings.EXTERNAL_HOST) -> None:
self.host = host
self.GET = {} # type: Dict[str, Any]
self.POST = {} # type: Dict[str, Any]
self.META = {'PATH_INFO': 'test'}
self.path = ''
self.user = user_profile
self.method = ''
self.body = ''
self.content_type = ''
self._email = ''
def get_host(self) -> str:
return self.host
class MockPythonResponse:
def __init__(self, text: str, status_code: int) -> None:
self.text = text
self.status_code = status_code
@property
def ok(self) -> bool:
return self.status_code == 200
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
INSTRUMENTED_CALLS = [] # type: List[Dict[str, Any]]
UrlFuncT = Callable[..., HttpResponse] # TODO: make more specific
def append_instrumentation_data(data: Dict[str, Any]) -> None:
INSTRUMENTED_CALLS.append(data)
def instrument_url(f: UrlFuncT) -> UrlFuncT:
if not INSTRUMENTING: # nocoverage -- option is always enabled; should we remove?
return f
else:
def wrapper(self: 'ZulipTestCase', url: str, info: Dict[str, Any]={},
**kwargs: Any) -> HttpResponse:
start = time.time()
result = f(self, url, info, **kwargs)
delay = time.time() - start
test_name = self.id()
if '?' in url:
url, extra_info = url.split('?', 1)
else:
extra_info = ''
append_instrumentation_data(dict(
url=url,
status_code=result.status_code,
method=f.__name__,
delay=delay,
extra_info=extra_info,
info=info,
test_name=test_name,
kwargs=kwargs))
return result
return wrapper
def write_instrumentation_reports(full_suite: bool) -> None:
if INSTRUMENTING:
calls = INSTRUMENTED_CALLS
from zproject.urls import urlpatterns, v1_api_and_json_patterns
# Find our untested urls.
pattern_cnt = collections.defaultdict(int) # type: Dict[str, int]
def re_strip(r: Any) -> str:
return str(r).lstrip('^').rstrip('$')
def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
for pattern in patterns:
find_pattern(pattern, prefixes)
def cleanup_url(url: str) -> str:
if url.startswith('/'):
url = url[1:]
if url.startswith('http://testserver/'):
url = url[len('http://testserver/'):]
if url.startswith('http://zulip.testserver/'):
url = url[len('http://zulip.testserver/'):]
if url.startswith('http://testserver:9080/'):
url = url[len('http://testserver:9080/'):]
return url
def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            if isinstance(pattern, LocaleRegexURLResolver):
return # nocoverage -- shouldn't actually happen
if hasattr(pattern, 'url_patterns'):
return
canon_pattern = prefixes[0] + re_strip(pattern.regex.pattern)
cnt = 0
for call in calls:
if 'pattern' in call:
continue
url = cleanup_url(call['url'])
for prefix in prefixes:
if url.startswith(prefix):
match_url = url[len(prefix):]
if pattern.regex.match(match_url):
if call['status_code'] in [200, 204, 301, 302]:
cnt += 1
call['pattern'] = canon_pattern
pattern_cnt[canon_pattern] += cnt
find_patterns(urlpatterns, ['', 'en/', 'de/'])
find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])
assert len(pattern_cnt) > 100
untested_patterns = set([p for p in pattern_cnt if pattern_cnt[p] == 0])
exempt_patterns = set([
# We exempt some patterns that are called via Tornado.
'api/v1/events',
'api/v1/events/internal',
'api/v1/register',
# We also exempt some development environment debugging
# static content URLs, since the content they point to may
# or may not exist.
'coverage/(?P<path>.*)',
'node-coverage/(?P<path>.*)',
'docs/(?P<path>.*)',
'casper/(?P<path>.*)',
])
untested_patterns -= exempt_patterns
var_dir = 'var' # TODO make sure path is robust here
fn = os.path.join(var_dir, 'url_coverage.txt')
with open(fn, 'w') as f:
for call in calls:
try:
line = ujson.dumps(call)
f.write(line + '\n')
except OverflowError: # nocoverage -- test suite error handling
print('''
A JSON overflow error was encountered while
producing the URL coverage report. Sometimes
this indicates that a test is passing objects
into methods like client_post(), which is
unnecessary and leads to false positives.
''')
print(call)
if full_suite:
print('INFO: URL coverage report is in %s' % (fn,))
print('INFO: Try running: ./tools/create-test-api-docs')
if full_suite and len(untested_patterns): # nocoverage -- test suite error handling
print("\nERROR: Some URLs are untested! Here's the list of untested URLs:")
for untested_pattern in sorted(untested_patterns):
print(" %s" % (untested_pattern,))
sys.exit(1)
def get_all_templates() -> List[str]:
templates = []
relpath = os.path.relpath
isfile = os.path.isfile
path_exists = os.path.exists
def is_valid_template(p: str, n: str) -> bool:
return 'webhooks' not in p \
and not n.startswith('.') \
and not n.startswith('__init__') \
and not n.endswith('.md') \
and not n.endswith('.source.html') \
and isfile(p)
def process(template_dir: str, dirname: str, fnames: Iterable[str]) -> None:
for name in fnames:
path = os.path.join(dirname, name)
if is_valid_template(path, name):
templates.append(relpath(path, template_dir))
for engine in loader.engines.all():
template_dirs = [d for d in engine.template_dirs if path_exists(d)]
for template_dir in template_dirs:
template_dir = os.path.normpath(template_dir)
for dirpath, dirnames, fnames in os.walk(template_dir):
process(template_dir, dirpath, fnames)
return templates
def load_subdomain_token(response: HttpResponse) -> Dict[str, Any]:
assert isinstance(response, HttpResponseRedirect)
token = response.url.rsplit('/', 1)[1]
return signing.loads(token, salt='zerver.views.auth.log_into_subdomain')
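# Hedged sketch (not part of the original helpers): the token verified by
# load_subdomain_token() can be produced with the matching signing.dumps()
# call, using the same salt; the payload below is illustrative.
def _example_subdomain_token_roundtrip() -> Dict[str, Any]:
    data = {'email': 'user@example.com'}
    token = signing.dumps(data, salt='zerver.views.auth.log_into_subdomain')
    return signing.loads(token, salt='zerver.views.auth.log_into_subdomain')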
FuncT = TypeVar('FuncT', bound=Callable[..., None])
def use_s3_backend(method: FuncT) -> FuncT:
@mock_s3_deprecated
@override_settings(LOCAL_UPLOADS_DIR=None)
def new_method(*args: Any, **kwargs: Any) -> Any:
zerver.lib.upload.upload_backend = S3UploadBackend()
try:
return method(*args, **kwargs)
finally:
zerver.lib.upload.upload_backend = LocalUploadBackend()
return new_method
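# Hedged usage sketch (illustrative only): decorating a ZulipTestCase method
# swaps zerver.lib.upload.upload_backend to the mocked S3 backend for the
# duration of that test.
#
#     @use_s3_backend
#     def test_s3_upload(self) -> None:
#         ...  # upload_backend is an S3UploadBackend here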
def use_db_models(method: Callable[..., None]) -> Callable[..., None]:
def method_patched_with_mock(self: 'MigrationsTestCase', apps: StateApps) -> None:
ArchivedAttachment = apps.get_model('zerver', 'ArchivedAttachment')
ArchivedMessage = apps.get_model('zerver', 'ArchivedMessage')
ArchivedUserMessage = apps.get_model('zerver', 'ArchivedUserMessage')
Attachment = apps.get_model('zerver', 'Attachment')
BotConfigData = apps.get_model('zerver', 'BotConfigData')
BotStorageData = apps.get_model('zerver', 'BotStorageData')
Client = apps.get_model('zerver', 'Client')
CustomProfileField = apps.get_model('zerver', 'CustomProfileField')
CustomProfileFieldValue = apps.get_model('zerver', 'CustomProfileFieldValue')
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStreamGroup = apps.get_model('zerver', 'DefaultStreamGroup')
EmailChangeStatus = apps.get_model('zerver', 'EmailChangeStatus')
Huddle = apps.get_model('zerver', 'Huddle')
Message = apps.get_model('zerver', 'Message')
MultiuseInvite = apps.get_model('zerver', 'MultiuseInvite')
MutedTopic = apps.get_model('zerver', 'MutedTopic')
PreregistrationUser = apps.get_model('zerver', 'PreregistrationUser')
PushDeviceToken = apps.get_model('zerver', 'PushDeviceToken')
Reaction = apps.get_model('zerver', 'Reaction')
Realm = apps.get_model('zerver', 'Realm')
RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
RealmDomain = apps.get_model('zerver', 'RealmDomain')
RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
RealmFilter = apps.get_model('zerver', 'RealmFilter')
Recipient = apps.get_model('zerver', 'Recipient')
ScheduledEmail = apps.get_model('zerver', 'ScheduledEmail')
ScheduledMessage = apps.get_model('zerver', 'ScheduledMessage')
Service = apps.get_model('zerver', 'Service')
Stream = apps.get_model('zerver', 'Stream')
Subscription = apps.get_model('zerver', 'Subscription')
UserActivity = apps.get_model('zerver', 'UserActivity')
UserActivityInterval = apps.get_model('zerver', 'UserActivityInterval')
UserGroup = apps.get_model('zerver', 'UserGroup')
UserGroupMembership = apps.get_model('zerver', 'UserGroupMembership')
UserHotspot = apps.get_model('zerver', 'UserHotspot')
UserMessage = apps.get_model('zerver', 'UserMessage')
UserPresence = apps.get_model('zerver', 'UserPresence')
UserProfile = apps.get_model('zerver', 'UserProfile')
zerver_models_patch = mock.patch.multiple(
'zerver.models',
ArchivedAttachment=ArchivedAttachment,
ArchivedMessage=ArchivedMessage,
ArchivedUserMessage=ArchivedUserMessage,
Attachment=Attachment,
BotConfigData=BotConfigData,
BotStorageData=BotStorageData,
Client=Client,
CustomProfileField=CustomProfileField,
CustomProfileFieldValue=CustomProfileFieldValue,
DefaultStream=DefaultStream,
DefaultStreamGroup=DefaultStreamGroup,
EmailChangeStatus=EmailChangeStatus,
Huddle=Huddle,
Message=Message,
MultiuseInvite=MultiuseInvite,
MutedTopic=MutedTopic,
PreregistrationUser=PreregistrationUser,
PushDeviceToken=PushDeviceToken,
Reaction=Reaction,
Realm=Realm,
RealmAuditLog=RealmAuditLog,
RealmDomain=RealmDomain,
RealmEmoji=RealmEmoji,
RealmFilter=RealmFilter,
Recipient=Recipient,
ScheduledEmail=ScheduledEmail,
ScheduledMessage=ScheduledMessage,
Service=Service,
Stream=Stream,
Subscription=Subscription,
UserActivity=UserActivity,
UserActivityInterval=UserActivityInterval,
UserGroup=UserGroup,
UserGroupMembership=UserGroupMembership,
UserHotspot=UserHotspot,
UserMessage=UserMessage,
UserPresence=UserPresence,
UserProfile=UserProfile
)
zerver_test_helpers_patch = mock.patch.multiple(
'zerver.lib.test_helpers',
Client=Client,
Message=Message,
Realm=Realm,
Recipient=Recipient,
Stream=Stream,
Subscription=Subscription,
UserMessage=UserMessage,
UserProfile=UserProfile,
)
zerver_test_classes_patch = mock.patch.multiple(
'zerver.lib.test_classes',
Client=Client,
Message=Message,
Realm=Realm,
Recipient=Recipient,
Service=Service,
Stream=Stream,
Subscription=Subscription,
UserProfile=UserProfile,
)
with zerver_models_patch,\
zerver_test_helpers_patch,\
zerver_test_classes_patch:
method(self, apps)
return method_patched_with_mock
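# Hedged usage sketch (illustrative only): inside a MigrationsTestCase, the
# decorator rebinds the model names above to their migration-state versions
# for the duration of the test body.
#
#     @use_db_models
#     def test_some_migration(self, apps: StateApps) -> None:
#         ...  # zerver.models.UserProfile etc. now refer to apps.get_model versions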
| [
"Any",
"Any",
"Callable[..., Any]",
"List[Mapping[str, Any]]",
"str",
"List[str]",
"TimeTrackingCursor",
"Callable[[NonBinaryStr, Iterable[Any]], None]",
"NonBinaryStr",
"TimeTrackingCursor",
"NonBinaryStr",
"TimeTrackingCursor",
"NonBinaryStr",
"str",
"UserProfile",
"str",
"str",
"UserProfile",
"UserProfile",
"UserProfile",
"str",
"UserProfile",
"UserProfile",
"Dict[str, Any]",
"Optional[UserProfile]",
"str",
"int",
"Dict[str, Any]",
"UrlFuncT",
"'ZulipTestCase'",
"str",
"Any",
"bool",
"Any",
"List[Any]",
"List[str]",
"str",
"Any",
"List[str]",
"str",
"str",
"str",
"str",
"Iterable[str]",
"HttpResponse",
"FuncT",
"Any",
"Any",
"Callable[..., None]",
"'MigrationsTestCase'",
"StateApps"
] | [
2162,
2187,
2515,
2915,
3781,
3955,
4734,
4786,
4862,
5522,
5547,
5914,
5939,
6859,
7073,
7538,
7660,
8046,
8242,
8502,
8649,
8668,
8973,
9396,
9426,
10410,
10428,
10825,
10909,
11072,
11094,
11154,
11876,
12156,
12256,
12277,
12412,
12892,
12907,
16078,
16086,
16385,
16399,
16412,
17018,
17306,
17420,
17435,
17699,
17783,
17811
] | [
2165,
2190,
2533,
2938,
3784,
3964,
4752,
4831,
4874,
5540,
5559,
5932,
5951,
6862,
7084,
7541,
7663,
8057,
8253,
8513,
8652,
8679,
8984,
9410,
9447,
10413,
10431,
10839,
10917,
11087,
11097,
11157,
11880,
12159,
12265,
12286,
12415,
12895,
12916,
16081,
16089,
16388,
16402,
16425,
17030,
17311,
17423,
17438,
17718,
17803,
17820
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/test_runner.py |
from functools import partial
import random
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, \
Type, cast, Union, TypeVar
from unittest import loader, runner # type: ignore # Mypy cannot pick these up.
from unittest.result import TestResult
from django.conf import settings
from django.db import connections, ProgrammingError
from django.urls.resolvers import RegexURLPattern
from django.test import TestCase
from django.test import runner as django_runner
from django.test.runner import DiscoverRunner
from django.test.signals import template_rendered
from zerver.lib import test_classes, test_helpers
from zerver.lib.cache import bounce_key_prefix_for_testing
from zerver.lib.rate_limiter import bounce_redis_key_prefix_for_testing
from zerver.lib.test_classes import flush_caches_for_testing
from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
from zerver.lib.test_helpers import (
get_all_templates, write_instrumentation_reports,
append_instrumentation_data
)
import os
import subprocess
import sys
import time
import traceback
import unittest
from multiprocessing.sharedctypes import Synchronized
_worker_id = 0 # Used to identify the worker process.
ReturnT = TypeVar('ReturnT') # Constrain return type to match
def slow(slowness_reason: str) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
'''
    This is a decorator that annotates a test as being "known
    to be slow."  The decorator sets slowness_reason as an
    attribute of the function.  Other code can use this annotation
as needed, e.g. to exclude these tests in "fast" mode.
'''
    def decorator(f: Any) -> Callable[..., ReturnT]:
f.slowness_reason = slowness_reason
return f
return decorator
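# Hedged usage sketch (illustrative only; real uses live in the test suite):
@slow('does a full end-to-end round trip')
def _example_known_slow_test() -> None:
    pass  # a real test body would go here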
def is_known_slow_test(test_method: Any) -> bool:
return hasattr(test_method, 'slowness_reason')
def full_test_name(test: TestCase) -> str:
test_module = test.__module__
test_class = test.__class__.__name__
test_method = test._testMethodName
return '%s.%s.%s' % (test_module, test_class, test_method)
def get_test_method(test: TestCase) -> Callable[[], None]:
return getattr(test, test._testMethodName)
# Each tuple is delay, test_name, slowness_reason
TEST_TIMINGS = [] # type: List[Tuple[float, str, str]]
def report_slow_tests() -> None:
timings = sorted(TEST_TIMINGS, reverse=True)
print('SLOWNESS REPORT')
print(' delay test')
print(' ---- ----')
for delay, test_name, slowness_reason in timings[:15]:
if not slowness_reason:
slowness_reason = 'UNKNOWN WHY SLOW, please investigate'
print(' %0.3f %s\n %s\n' % (delay, test_name, slowness_reason))
print('...')
for delay, test_name, slowness_reason in timings[100:]:
if slowness_reason:
print(' %.3f %s is not that slow' % (delay, test_name))
print(' consider removing @slow decorator')
print(' This may no longer be true: %s' % (slowness_reason,))
def enforce_timely_test_completion(test_method: Any, test_name: str,
delay: float, result: TestResult) -> None:
if hasattr(test_method, 'slowness_reason'):
max_delay = 2.0 # seconds
else:
max_delay = 0.4 # seconds
if delay > max_delay:
msg = '** Test is TOO slow: %s (%.3f s)\n' % (test_name, delay)
result.addInfo(test_method, msg)
def fast_tests_only() -> bool:
return "FAST_TESTS_ONLY" in os.environ
def run_test(test: TestCase, result: TestResult) -> bool:
failed = False
test_method = get_test_method(test)
if fast_tests_only() and is_known_slow_test(test_method):
return failed
test_name = full_test_name(test)
bounce_key_prefix_for_testing(test_name)
bounce_redis_key_prefix_for_testing(test_name)
flush_caches_for_testing()
if not hasattr(test, "_pre_setup"):
msg = "Test doesn't have _pre_setup; something is wrong."
error_pre_setup = (Exception, Exception(msg), None) # type: Tuple[Any, Any, Any]
result.addError(test, error_pre_setup)
return True
test._pre_setup()
start_time = time.time()
test(result) # unittest will handle skipping, error, failure and success.
delay = time.time() - start_time
enforce_timely_test_completion(test_method, test_name, delay, result)
slowness_reason = getattr(test_method, 'slowness_reason', '')
TEST_TIMINGS.append((delay, test_name, slowness_reason))
test._post_teardown()
return failed
class TextTestResult(runner.TextTestResult):
"""
    This class has unpythonic function names because the base class follows
this style.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.failed_tests = [] # type: List[str]
def addInfo(self, test: TestCase, msg: str) -> None:
self.stream.write(msg)
self.stream.flush()
def addInstrumentation(self, test: TestCase, data: Dict[str, Any]) -> None:
append_instrumentation_data(data)
def startTest(self, test: TestCase) -> None:
TestResult.startTest(self, test)
self.stream.writeln("Running {}".format(full_test_name(test)))
self.stream.flush()
def addSuccess(self, *args: Any, **kwargs: Any) -> None:
TestResult.addSuccess(self, *args, **kwargs)
def addError(self, *args: Any, **kwargs: Any) -> None:
TestResult.addError(self, *args, **kwargs)
test_name = full_test_name(args[0])
self.failed_tests.append(test_name)
def addFailure(self, *args: Any, **kwargs: Any) -> None:
TestResult.addFailure(self, *args, **kwargs)
test_name = full_test_name(args[0])
self.failed_tests.append(test_name)
def addSkip(self, test: TestCase, reason: str) -> None:
TestResult.addSkip(self, test, reason)
self.stream.writeln("** Skipping {}: {}".format(full_test_name(test),
reason))
self.stream.flush()
class RemoteTestResult(django_runner.RemoteTestResult):
"""
    This class follows the unpythonic method-naming style of its
    base class.
"""
def addInfo(self, test: TestCase, msg: str) -> None:
self.events.append(('addInfo', self.test_index, msg))
def addInstrumentation(self, test: TestCase, data: Dict[str, Any]) -> None:
# Some elements of data['info'] cannot be serialized.
if 'info' in data:
del data['info']
self.events.append(('addInstrumentation', self.test_index, data))
def process_instrumented_calls(func: Callable[[Dict[str, Any]], None]) -> None:
for call in test_helpers.INSTRUMENTED_CALLS:
func(call)
SerializedSubsuite = Tuple[Type['TestSuite'], List[str]]
SubsuiteArgs = Tuple[Type['RemoteTestRunner'], int, SerializedSubsuite, bool]
def run_subsuite(args: SubsuiteArgs) -> Tuple[int, Any]:
# Reset the accumulated INSTRUMENTED_CALLS before running this subsuite.
test_helpers.INSTRUMENTED_CALLS = []
# The first argument is the test runner class but we don't need it
# because we run our own version of the runner class.
_, subsuite_index, subsuite, failfast = args
runner = RemoteTestRunner(failfast=failfast)
result = runner.run(deserialize_suite(subsuite))
    # Now we send instrumentation-related events. This data will be
    # appended to the data structure in the main thread. For mypy,
    # the type of partial is different from Callable. All the methods of
    # TestResult are passed a TestCase as the first argument, but
    # addInstrumentation does not need it.
process_instrumented_calls(partial(result.addInstrumentation, None))
return subsuite_index, result.events
# Monkey-patch database creation to fix unnecessary sleep(1)
from django.db.backends.postgresql.creation import DatabaseCreation
def _replacement_destroy_test_db(self: DatabaseCreation,
test_database_name: str,
verbosity: Any) -> None:
"""Replacement for Django's _destroy_test_db that removes the
unnecessary sleep(1)."""
with self.connection._nodb_connection.cursor() as cursor:
cursor.execute("DROP DATABASE %s"
% self.connection.ops.quote_name(test_database_name))
DatabaseCreation._destroy_test_db = _replacement_destroy_test_db
def destroy_test_databases(database_id: Optional[int]=None) -> None:
"""
    When database_id is None, the names of the databases are picked up
    from the database settings.
"""
for alias in connections:
connection = connections[alias]
try:
connection.creation.destroy_test_db(number=database_id)
except ProgrammingError:
# DB doesn't exist. No need to do anything.
pass
def create_test_databases(database_id: int) -> None:
for alias in connections:
connection = connections[alias]
connection.creation.clone_test_db(
number=database_id,
keepdb=True,
)
settings_dict = connection.creation.get_test_db_clone_settings(database_id)
# connection.settings_dict must be updated in place for changes to be
# reflected in django.db.connections. If the following line assigned
# connection.settings_dict = settings_dict, new threads would connect
# to the default database instead of the appropriate clone.
connection.settings_dict.update(settings_dict)
connection.close()
def init_worker(counter: Synchronized) -> None:
"""
This function runs only under parallel mode. It initializes the
individual processes which are also called workers.
"""
global _worker_id
with counter.get_lock():
counter.value += 1
_worker_id = counter.value
"""
You can now use _worker_id.
"""
test_classes.API_KEYS = {}
# Clear the cache
from zerver.lib.cache import get_cache_backend
cache = get_cache_backend(None)
cache.clear()
# Close all connections
connections.close_all()
destroy_test_databases(_worker_id)
create_test_databases(_worker_id)
# Every process should upload to a separate directory so that
# race conditions can be avoided.
settings.LOCAL_UPLOADS_DIR = '{}_{}'.format(settings.LOCAL_UPLOADS_DIR,
_worker_id)
def is_upload_avatar_url(url: RegexURLPattern) -> bool:
if url.regex.pattern == r'^user_avatars/(?P<path>.*)$':
return True
return False
# We manually update the upload directory path in the url regex.
from zproject import dev_urls
found = False
for url in dev_urls.urls:
if is_upload_avatar_url(url):
found = True
new_root = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")
url.default_args['document_root'] = new_root
if not found:
print("*** Upload directory not found.")
class TestSuite(unittest.TestSuite):
def run(self, result: TestResult, debug: Optional[bool]=False) -> TestResult:
"""
This function mostly contains the code from
unittest.TestSuite.run. The need to override this function
occurred because we use run_test to run the testcase.
"""
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
for test in self:
            # Stop early if requested; this check is taken from unittest.
if result.shouldStop:
break
if isinstance(test, TestSuite):
test.run(result, debug=debug)
else:
self._tearDownPreviousClass(test, result) # type: ignore
self._handleModuleFixture(test, result) # type: ignore
self._handleClassSetUp(test, result) # type: ignore
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
failed = run_test(test, result)
if failed or result.shouldStop:
result.shouldStop = True
break
if topLevel:
self._tearDownPreviousClass(None, result) # type: ignore
self._handleModuleTearDown(result) # type: ignore
result._testRunEntered = False
return result
class TestLoader(loader.TestLoader):
suiteClass = TestSuite
class ParallelTestSuite(django_runner.ParallelTestSuite):
run_subsuite = run_subsuite
init_worker = init_worker
def __init__(self, suite: TestSuite, processes: int, failfast: bool) -> None:
super().__init__(suite, processes, failfast)
# We can't specify a consistent type for self.subsuites, since
# the whole idea here is to monkey-patch that so we can use
# most of django_runner.ParallelTestSuite with our own suite
# definitions.
self.subsuites = SubSuiteList(self.subsuites) # type: ignore # Type of self.subsuites changes.
def check_import_error(test_name: str) -> None:
try:
        # Directly using __import__ is not recommended, but here it gives
        # a clearer traceback than importlib.import_module.
__import__(test_name)
except ImportError as exc:
raise exc from exc # Disable exception chaining in Python 3.
class Runner(DiscoverRunner):
test_suite = TestSuite
test_loader = TestLoader()
parallel_test_suite = ParallelTestSuite
def __init__(self, *args: Any, **kwargs: Any) -> None:
DiscoverRunner.__init__(self, *args, **kwargs)
# `templates_rendered` holds templates which were rendered
# in proper logical tests.
self.templates_rendered = set() # type: Set[str]
# `shallow_tested_templates` holds templates which were rendered
# in `zerver.tests.test_templates`.
self.shallow_tested_templates = set() # type: Set[str]
template_rendered.connect(self.on_template_rendered)
self.database_id = random.randint(1, 10000)
def get_resultclass(self) -> Type[TestResult]:
return TextTestResult
def on_template_rendered(self, sender: Any, context: Dict[str, Any], **kwargs: Any) -> None:
if hasattr(sender, 'template'):
template_name = sender.template.name
if template_name not in self.templates_rendered:
if context.get('shallow_tested') and template_name not in self.templates_rendered:
self.shallow_tested_templates.add(template_name)
else:
self.templates_rendered.add(template_name)
self.shallow_tested_templates.discard(template_name)
def get_shallow_tested_templates(self) -> Set[str]:
return self.shallow_tested_templates
def setup_test_environment(self, *args: Any, **kwargs: Any) -> Any:
settings.DATABASES['default']['NAME'] = settings.BACKEND_DATABASE_TEMPLATE
# We create/destroy the test databases in run_tests to avoid
# duplicate work when running in parallel mode.
return super().setup_test_environment(*args, **kwargs)
def teardown_test_environment(self, *args: Any, **kwargs: Any) -> Any:
# No need to pass the database id now. It will be picked up
# automatically through settings.
if self.parallel == 1:
# In parallel mode (parallel > 1), destroy_test_databases will
# destroy settings.BACKEND_DATABASE_TEMPLATE; we don't want that.
# So run this only in serial mode.
destroy_test_databases()
return super().teardown_test_environment(*args, **kwargs)
def test_imports(self, test_labels: List[str], suite: unittest.TestSuite) -> None:
prefix_old = 'unittest.loader.ModuleImportFailure.' # Python <= 3.4
prefix_new = 'unittest.loader._FailedTest.' # Python > 3.4
error_prefixes = [prefix_old, prefix_new]
for test_name in get_test_names(suite):
for prefix in error_prefixes:
if test_name.startswith(prefix):
test_name = test_name[len(prefix):]
for label in test_labels:
# This code block is for Python 3.5 when test label is
# directly provided, for example:
# ./tools/test-backend zerver.tests.test_alert_words.py
#
# In this case, the test name is of this form:
# 'unittest.loader._FailedTest.test_alert_words'
#
# Whereas check_import_error requires test names of
# this form:
# 'unittest.loader._FailedTest.zerver.tests.test_alert_words'.
if test_name in label:
test_name = label
break
check_import_error(test_name)
def run_tests(self, test_labels: List[str],
extra_tests: Optional[List[TestCase]]=None,
full_suite: bool=False,
**kwargs: Any) -> Tuple[bool, List[str]]:
self.setup_test_environment()
try:
suite = self.build_suite(test_labels, extra_tests)
except AttributeError:
# We are likely to get here only when running tests in serial
# mode on Python 3.4 or lower.
# test_labels are always normalized to include the correct prefix.
# If we run the command with ./tools/test-backend test_alert_words,
# test_labels will be equal to ['zerver.tests.test_alert_words'].
for test_label in test_labels:
check_import_error(test_label)
# I think we won't reach this line under normal circumstances, but
# for some unforeseen scenario in which the AttributeError was not
# caused by an import error, let's re-raise the exception for
# debugging purposes.
raise
self.test_imports(test_labels, suite)
if self.parallel == 1:
# We are running in serial mode so create the databases here.
# For parallel mode, the databases are created in init_worker.
# We don't want to create and destroy DB in setup_test_environment
# because it will be called for both serial and parallel modes.
# However, at this point we know in which mode we would be running
# since that decision has already been made in build_suite().
destroy_test_databases(self.database_id)
create_test_databases(self.database_id)
# We have to do the next line to avoid flaky scenarios where we
# run a single test and getting an SA connection causes data from
# a Django connection to be rolled back mid-test.
get_sqlalchemy_connection()
result = self.run_suite(suite)
self.teardown_test_environment()
failed = self.suite_result(suite, result)
if not failed:
write_instrumentation_reports(full_suite=full_suite)
return failed, result.failed_tests
def get_test_names(suite: unittest.TestSuite) -> List[str]:
if isinstance(suite, ParallelTestSuite):
# suite is ParallelTestSuite. It will have a subsuites parameter of
# type SubSuiteList. Each element of a SubsuiteList is a tuple whose
# first element is the type of TestSuite and the second element is a
# list of test names in that test suite. See serialize_suite() for the
# implementation details.
return [name for subsuite in suite.subsuites for name in subsuite[1]]
else:
suite = cast(TestSuite, suite)
return [full_test_name(t) for t in get_tests_from_suite(suite)]
def get_tests_from_suite(suite: TestSuite) -> Iterable[TestCase]:
for test in suite:
if isinstance(test, TestSuite):
for child in get_tests_from_suite(test):
yield child
else:
yield test
def serialize_suite(suite: TestSuite) -> Tuple[Type[TestSuite], List[str]]:
return type(suite), get_test_names(suite)
def deserialize_suite(args: Tuple[Type[TestSuite], List[str]]) -> TestSuite:
suite_class, test_names = args
suite = suite_class()
tests = TestLoader().loadTestsFromNames(test_names)
for test in get_tests_from_suite(tests):
suite.addTest(test)
return suite
class RemoteTestRunner(django_runner.RemoteTestRunner):
resultclass = RemoteTestResult
class SubSuiteList(List[Tuple[Type[TestSuite], List[str]]]):
"""
This class allows us to avoid changing the main logic of
ParallelTestSuite and still make it serializable.
"""
def __init__(self, suites: List[TestSuite]) -> None:
serialized_suites = [serialize_suite(s) for s in suites]
super().__init__(serialized_suites)
def __getitem__(self, index: Any) -> Any:
suite = super().__getitem__(index)
return deserialize_suite(suite)
| [
"str",
"Any",
"Any",
"TestCase",
"TestCase",
"Any",
"str",
"float",
"TestResult",
"TestCase",
"TestResult",
"Any",
"Any",
"TestCase",
"str",
"TestCase",
"Dict[str, Any]",
"TestCase",
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"TestCase",
"str",
"TestCase",
"str",
"TestCase",
"Dict[str, Any]",
"Callable[[Dict[str, Any]], None]",
"SubsuiteArgs",
"DatabaseCreation",
"str",
"Any",
"int",
"Synchronized",
"RegexURLPattern",
"TestResult",
"TestSuite",
"int",
"bool",
"str",
"Any",
"Any",
"Any",
"Dict[str, Any]",
"Any",
"Any",
"Any",
"Any",
"Any",
"List[str]",
"unittest.TestSuite",
"List[str]",
"Any",
"unittest.TestSuite",
"TestSuite",
"TestSuite",
"Tuple[Type[TestSuite], List[str]]",
"List[TestSuite]",
"Any"
] | [
1313,
1688,
1825,
1916,
2138,
3089,
3105,
3152,
3167,
3551,
3569,
4761,
4776,
4911,
4926,
5039,
5055,
5153,
5345,
5360,
5458,
5473,
5659,
5674,
5858,
5876,
6293,
6308,
6424,
6440,
6696,
6967,
7992,
8063,
8112,
8949,
9637,
10534,
11148,
12852,
12874,
12889,
13327,
13786,
13801,
14451,
14465,
14491,
15128,
15143,
15475,
15490,
15988,
16006,
17299,
17442,
19517,
20171,
20404,
20528,
21100,
21269
] | [
1316,
1691,
1828,
1924,
2146,
3092,
3108,
3157,
3177,
3559,
3579,
4764,
4779,
4919,
4929,
5047,
5069,
5161,
5348,
5363,
5461,
5476,
5662,
5677,
5866,
5879,
6301,
6311,
6432,
6454,
6728,
6979,
8008,
8066,
8115,
8952,
9649,
10549,
11158,
12861,
12877,
12893,
13330,
13789,
13804,
14454,
14479,
14494,
15131,
15146,
15478,
15493,
15997,
16024,
17308,
17445,
19535,
20180,
20413,
20561,
21115,
21272
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/tex.py |
import logging
import os
import subprocess
from django.conf import settings
from typing import Optional
def render_tex(tex: str, is_inline: bool=True) -> Optional[str]:
r"""Render a TeX string into HTML using KaTeX
Returns the HTML string, or None if there was some error in the TeX syntax
Keyword arguments:
tex -- Text string with the TeX to render
Don't include delimiters ('$$', '\[ \]', etc.)
is_inline -- Boolean setting that indicates whether the render should be
inline (i.e. for embedding it in text) or not. The latter
will show the content centered, and in the "expanded" form
(default True)
"""
katex_path = os.path.join(settings.STATIC_ROOT, 'third/katex/cli.js')
if not os.path.isfile(katex_path):
logging.error("Cannot find KaTeX for latex rendering!")
return None
command = ['node', katex_path]
if not is_inline:
command.extend(['--', '--display-mode'])
katex = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = katex.communicate(input=tex.encode())[0]
if katex.returncode == 0:
# stdout contains a newline at the end
assert stdout is not None
return stdout.decode('utf-8').strip()
else:
return None
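# Hedged usage sketch (not part of the original module): render a small
# expression, falling back when KaTeX is unavailable or the TeX is invalid
# (render_tex returns None in both cases).
def _example_render() -> str:
    html = render_tex(r'\sqrt{x^2 + y^2}', is_inline=True)
    return html if html is not None else 'TeX rendering failed'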
| [
"str"
] | [
126
] | [
129
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/thumbnail.py | # -*- coding: utf-8 -*-
# See https://zulip.readthedocs.io/en/latest/subsystems/thumbnailing.html
import base64
import os
import sys
import urllib
from django.conf import settings
from libthumbor import CryptoURL
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from zthumbor.loaders.helpers import (
THUMBOR_S3_TYPE, THUMBOR_LOCAL_FILE_TYPE, THUMBOR_EXTERNAL_TYPE
)
from zerver.lib.camo import get_camo_url
def is_thumbor_enabled() -> bool:
return settings.THUMBOR_URL != ''
def user_uploads_or_external(url: str) -> bool:
return url.startswith('http') or url.lstrip('/').startswith('user_uploads/')
def get_source_type(url: str) -> str:
if not url.startswith('/user_uploads/'):
return THUMBOR_EXTERNAL_TYPE
local_uploads_dir = settings.LOCAL_UPLOADS_DIR
if local_uploads_dir:
return THUMBOR_LOCAL_FILE_TYPE
return THUMBOR_S3_TYPE
def generate_thumbnail_url(path: str, size: str='0x0') -> str:
if not (path.startswith('https://') or path.startswith('http://')):
path = '/' + path
if not is_thumbor_enabled():
if path.startswith('http://'):
return get_camo_url(path)
return path
if not user_uploads_or_external(path):
return path
source_type = get_source_type(path)
safe_url = base64.urlsafe_b64encode(path.encode()).decode('utf-8')
image_url = '%s/source_type/%s' % (safe_url, source_type)
width, height = map(int, size.split('x'))
crypto = CryptoURL(key=settings.THUMBOR_KEY)
encrypted_url = crypto.generate(
width=width,
height=height,
smart=True,
filters=['no_upscale()', 'sharpen(0.5,0.2,true)'],
image_url=image_url
)
if settings.THUMBOR_URL == 'http://127.0.0.1:9995':
        # If THUMBOR_URL is the default, then thumbor is hosted on the same
        # machine as the Zulip server and we should serve a relative URL.
        # We add /thumbor in front of the relative URL because Nginx uses a
        # proxy pass to redirect such requests internally to port 9995,
        # where thumbor is running.
thumbnail_url = '/thumbor' + encrypted_url
else:
thumbnail_url = urllib.parse.urljoin(settings.THUMBOR_URL, encrypted_url)
return thumbnail_url
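# Hedged usage sketch (path and size are illustrative): with thumbor disabled
# this returns the path unchanged; otherwise it returns a signed thumbor URL.
def _example_thumbnail_url() -> str:
    return generate_thumbnail_url('/user_uploads/1/ab/example.png', size='300x300')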
| [
"str",
"str",
"str"
] | [
593,
714,
987
] | [
596,
717,
990
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/timeout.py | from types import TracebackType
from typing import Any, Callable, Optional, Tuple, Type, TypeVar
import six
import sys
import time
import ctypes
import threading
# Based on http://code.activestate.com/recipes/483752/
class TimeoutExpired(Exception):
'''Exception raised when a function times out.'''
def __str__(self) -> str:
return 'Function call timed out.'
ResultT = TypeVar('ResultT')
def timeout(timeout: float, func: Callable[..., ResultT], *args: Any, **kwargs: Any) -> ResultT:
'''Call the function in a separate thread.
Return its return value, or raise an exception,
within approximately 'timeout' seconds.
The function may receive a TimeoutExpired exception
anywhere in its code, which could have arbitrary
unsafe effects (resources not released, etc.).
It might also fail to receive the exception and
keep running in the background even though
timeout() has returned.
This may also fail to interrupt functions which are
stuck in a long-running primitive interpreter
operation.'''
class TimeoutThread(threading.Thread):
def __init__(self) -> None:
threading.Thread.__init__(self)
self.result = None # type: Optional[ResultT]
self.exc_info = None # type: Optional[Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]]]
# Don't block the whole program from exiting
# if this is the only thread left.
self.daemon = True
def run(self) -> None:
try:
self.result = func(*args, **kwargs)
except BaseException:
self.exc_info = sys.exc_info()
def raise_async_timeout(self) -> None:
# Called from another thread.
# Attempt to raise a TimeoutExpired in the thread represented by 'self'.
assert self.ident is not None # Thread should be running; c_long expects int
tid = ctypes.c_long(self.ident)
result = ctypes.pythonapi.PyThreadState_SetAsyncExc(
tid, ctypes.py_object(TimeoutExpired))
if result > 1:
# "if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect"
#
# I was unable to find the actual source of this quote, but it
# appears in the many projects across the Internet that have
# copy-pasted this recipe.
ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None)
thread = TimeoutThread()
thread.start()
thread.join(timeout)
if thread.is_alive():
# Gamely try to kill the thread, following the dodgy approach from
# http://stackoverflow.com/a/325528/90777
#
# We need to retry, because an async exception received while the
# thread is in a system call is simply ignored.
for i in range(10):
thread.raise_async_timeout()
time.sleep(0.1)
if not thread.is_alive():
break
raise TimeoutExpired
if thread.exc_info:
# Raise the original stack trace so our error messages are more useful.
# from http://stackoverflow.com/a/4785766/90777
six.reraise(thread.exc_info[0], thread.exc_info[1], thread.exc_info[2])
assert thread.result is not None # assured if above did not reraise
return thread.result
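# Hedged usage sketch (not part of the original module): bound a computation
# to roughly five seconds and fall back when TimeoutExpired is raised.
def _example_timeout_usage() -> int:
    def work(n: int) -> int:
        return sum(range(n))
    try:
        return timeout(5.0, work, 10 ** 6)
    except TimeoutExpired:
        return -1  # the call exceeded the time budget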
| [
"float",
"Callable[..., ResultT]",
"Any",
"Any"
] | [
432,
445,
476,
491
] | [
437,
467,
479,
494
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/timestamp.py | import datetime
import calendar
from django.utils.timezone import utc as timezone_utc
class TimezoneNotUTCException(Exception):
pass
def verify_UTC(dt: datetime.datetime) -> None:
if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != timezone_utc.utcoffset(dt):
raise TimezoneNotUTCException("Datetime %s does not have a UTC timezone." % (dt,))
def convert_to_UTC(dt: datetime.datetime) -> datetime.datetime:
if dt.tzinfo is None:
return dt.replace(tzinfo=timezone_utc)
return dt.astimezone(timezone_utc)
def floor_to_hour(dt: datetime.datetime) -> datetime.datetime:
verify_UTC(dt)
return datetime.datetime(*dt.timetuple()[:4]) \
.replace(tzinfo=timezone_utc)
def floor_to_day(dt: datetime.datetime) -> datetime.datetime:
verify_UTC(dt)
return datetime.datetime(*dt.timetuple()[:3]) \
.replace(tzinfo=timezone_utc)
def ceiling_to_hour(dt: datetime.datetime) -> datetime.datetime:
floor = floor_to_hour(dt)
if floor == dt:
return floor
return floor + datetime.timedelta(hours=1)
def ceiling_to_day(dt: datetime.datetime) -> datetime.datetime:
floor = floor_to_day(dt)
if floor == dt:
return floor
return floor + datetime.timedelta(days=1)
def timestamp_to_datetime(timestamp: float) -> datetime.datetime:
return datetime.datetime.fromtimestamp(float(timestamp), tz=timezone_utc)
def datetime_to_timestamp(dt: datetime.datetime) -> int:
verify_UTC(dt)
return calendar.timegm(dt.timetuple())
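# Worked example (hedged; values checked by hand): flooring drops the
# sub-hour remainder, so the round-tripped timestamp moves back 2800 seconds.
def _example_round_trip() -> None:
    dt = timestamp_to_datetime(1540000000)   # 2018-10-20 01:46:40 UTC
    floored = floor_to_hour(dt)              # 2018-10-20 01:00:00 UTC
    assert datetime_to_timestamp(floored) == 1539997200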
| [
"datetime.datetime",
"datetime.datetime",
"datetime.datetime",
"datetime.datetime",
"datetime.datetime",
"datetime.datetime",
"float",
"datetime.datetime"
] | [
158,
384,
560,
743,
929,
1112,
1307,
1445
] | [
175,
401,
577,
760,
946,
1129,
1312,
1462
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/timezone.py |
from typing import List
import pytz
def get_all_timezones() -> List[str]:
return sorted(pytz.all_timezones)
def get_timezone(tz: str) -> pytz.datetime.tzinfo:
return pytz.timezone(tz)
| [
"str"
] | [
137
] | [
140
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/topic.py | import datetime
from django.db import connection
from django.db.models.query import QuerySet, Q
from django.utils.timezone import now as timezone_now
from sqlalchemy.sql import (
column,
literal,
func,
)
from zerver.lib.request import REQ
from zerver.models import (
Message,
Recipient,
UserMessage,
UserProfile,
)
from typing import Any, Dict, List, Optional, Tuple
# Only use these constants for events.
ORIG_TOPIC = "orig_subject"
TOPIC_NAME = "subject"
TOPIC_LINKS = "subject_links"
MATCH_TOPIC = "match_subject"
# This constant is actually embedded into
# the JSON data for message edit history,
# so we'll always need to handle legacy data
# unless we do a pretty tricky migration.
LEGACY_PREV_TOPIC = "prev_subject"
# This constant is pretty closely coupled to the
# database, but it's the JSON field.
EXPORT_TOPIC_NAME = "subject"
'''
The following functions are for user-facing APIs
where we'll want to support "subject" for a while.
'''
def get_topic_from_message_info(message_info: Dict[str, Any]) -> str:
'''
Use this where you are getting dicts that are based off of messages
that may come from the outside world, especially from third party
APIs and bots.
We prefer 'topic' to 'subject' here. We expect at least one field
to be present (or the caller must know how to handle KeyError).
'''
if 'topic' in message_info:
return message_info['topic']
return message_info['subject']
def REQ_topic() -> Optional[str]:
# REQ handlers really return a REQ, but we
# lie to make the rest of the type matching work.
return REQ(
whence='topic',
aliases=['subject'],
converter=lambda x: x.strip(),
default=None,
) # type: ignore # see comment above
'''
TRY TO KEEP THIS DIVIDING LINE.
Below this line we want to make it so that functions are only
using "subject" in the DB sense, and nothing customer facing.
'''
# This is used in low-level message functions in
# zerver/lib/message.py, and it's not user facing.
DB_TOPIC_NAME = "subject"
MESSAGE__TOPIC = 'message__subject'
def topic_match_sa(topic_name: str) -> Any:
    # _sa is short for SQLAlchemy, which we use mostly for
    # queries that search messages
topic_cond = func.upper(column("subject")) == func.upper(literal(topic_name))
return topic_cond
def topic_column_sa() -> Any:
return column("subject")
def filter_by_exact_message_topic(query: QuerySet, message: Message) -> QuerySet:
topic_name = message.topic_name()
return query.filter(subject=topic_name)
def filter_by_topic_name_via_message(query: QuerySet, topic_name: str) -> QuerySet:
return query.filter(message__subject__iexact=topic_name)
def messages_for_topic(stream_id: int, topic_name: str) -> QuerySet:
# It might be the case that we really want subject__contains
# here. This code is used for the archive.
return Message.objects.filter(
recipient__type_id=stream_id,
subject=topic_name,
)
def save_message_for_edit_use_case(message: Message) -> None:
message.save(update_fields=["subject", "content", "rendered_content",
"rendered_content_version", "last_edit_time",
"edit_history"])
def user_message_exists_for_topic(user_profile: UserProfile,
recipient: Recipient,
topic_name: str) -> bool:
return UserMessage.objects.filter(
user_profile=user_profile,
message__recipient=recipient,
message__subject__iexact=topic_name,
).exists()
def update_messages_for_topic_edit(message: Message,
propagate_mode: str,
orig_topic_name: str,
topic_name: str) -> List[Message]:
propagate_query = Q(recipient = message.recipient, subject = orig_topic_name)
# We only change messages up to 2 days in the past, to avoid hammering our
# DB by changing an unbounded amount of messages
if propagate_mode == 'change_all':
before_bound = timezone_now() - datetime.timedelta(days=2)
propagate_query = (propagate_query & ~Q(id = message.id) &
Q(pub_date__range=(before_bound, timezone_now())))
if propagate_mode == 'change_later':
propagate_query = propagate_query & Q(id__gt = message.id)
messages = Message.objects.filter(propagate_query).select_related()
# Evaluate the query before running the update
messages_list = list(messages)
messages.update(subject=topic_name)
for m in messages_list:
# The cached ORM object is not changed by messages.update()
# and the remote cache update requires the new value
m.set_topic_name(topic_name)
return messages_list
def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dict[str, Any]]:
canonical_topic_names = {} # type: Dict[str, Tuple[int, str]]
# Sort rows by max_message_id so that if a topic
# has many different casings, we use the most
# recent row.
rows = sorted(rows, key=lambda tup: tup[1])
for (topic_name, max_message_id) in rows:
canonical_name = topic_name.lower()
canonical_topic_names[canonical_name] = (max_message_id, topic_name)
history = []
for canonical_topic, (max_message_id, topic_name) in canonical_topic_names.items():
history.append(dict(
name=topic_name,
max_id=max_message_id)
)
return sorted(history, key=lambda x: -x['max_id'])
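# Worked example (hedged, illustrative): the most recent casing of a topic
# wins, and the result is sorted by max_id descending.
def _example_topic_history() -> List[Dict[str, Any]]:
    rows = [('HI', 5), ('hi', 7), ('other', 6)]
    # -> [{'name': 'hi', 'max_id': 7}, {'name': 'other', 'max_id': 6}]
    return generate_topic_history_from_db_rows(rows)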
def get_topic_history_for_stream(user_profile: UserProfile,
recipient: Recipient,
public_history: bool) -> List[Dict[str, Any]]:
cursor = connection.cursor()
if public_history:
query = '''
SELECT
"zerver_message"."subject" as topic,
max("zerver_message".id) as max_message_id
FROM "zerver_message"
WHERE (
"zerver_message"."recipient_id" = %s
)
GROUP BY (
"zerver_message"."subject"
)
ORDER BY max("zerver_message".id) DESC
'''
cursor.execute(query, [recipient.id])
else:
query = '''
SELECT
"zerver_message"."subject" as topic,
max("zerver_message".id) as max_message_id
FROM "zerver_message"
INNER JOIN "zerver_usermessage" ON (
"zerver_usermessage"."message_id" = "zerver_message"."id"
)
WHERE (
"zerver_usermessage"."user_profile_id" = %s AND
"zerver_message"."recipient_id" = %s
)
GROUP BY (
"zerver_message"."subject"
)
ORDER BY max("zerver_message".id) DESC
'''
cursor.execute(query, [user_profile.id, recipient.id])
rows = cursor.fetchall()
cursor.close()
return generate_topic_history_from_db_rows(rows)
def get_topic_history_for_web_public_stream(recipient: Recipient) -> List[Dict[str, Any]]:
cursor = connection.cursor()
query = '''
SELECT
"zerver_message"."subject" as topic,
max("zerver_message".id) as max_message_id
FROM "zerver_message"
WHERE (
"zerver_message"."recipient_id" = %s
)
GROUP BY (
"zerver_message"."subject"
)
ORDER BY max("zerver_message".id) DESC
'''
cursor.execute(query, [recipient.id])
rows = cursor.fetchall()
cursor.close()
return generate_topic_history_from_db_rows(rows)
def get_turtle_message(message_ids: List[int]) -> Message:
# This is used for onboarding, and it's only extracted
# here to make subject -> topic sweeping easier.
turtle_message = Message.objects.get( # nolint
id__in=message_ids,
subject='topic demonstration',
content__icontains='cute/turtle.png')
return turtle_message
| [
"Dict[str, Any]",
"str",
"QuerySet",
"Message",
"QuerySet",
"str",
"int",
"str",
"Message",
"UserProfile",
"Recipient",
"str",
"Message",
"str",
"str",
"str",
"List[Tuple[str, int]]",
"UserProfile",
"Recipient",
"bool",
"Recipient",
"List[int]"
] | [
1030,
2146,
2460,
2479,
2628,
2650,
2764,
2781,
3064,
3332,
3390,
3447,
3678,
3738,
3795,
3847,
4912,
5677,
5734,
5794,
7085,
7654
] | [
1044,
2149,
2468,
2486,
2636,
2653,
2767,
2784,
3071,
3343,
3399,
3450,
3685,
3741,
3798,
3850,
4933,
5688,
5743,
5798,
7094,
7663
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/topic_mutes.py | from typing import Any, Callable, Dict, List, Optional
from zerver.lib.topic import (
topic_match_sa,
)
from zerver.models import (
get_stream_recipient,
get_stream,
MutedTopic,
UserProfile
)
from sqlalchemy.sql import (
and_,
column,
not_,
or_,
Selectable
)
def get_topic_mutes(user_profile: UserProfile) -> List[List[str]]:
rows = MutedTopic.objects.filter(
user_profile=user_profile,
).values(
'stream__name',
'topic_name'
)
return [
[row['stream__name'], row['topic_name']]
for row in rows
]
def set_topic_mutes(user_profile: UserProfile, muted_topics: List[List[str]]) -> None:
'''
This is only used in tests.
'''
MutedTopic.objects.filter(
user_profile=user_profile,
).delete()
for stream_name, topic_name in muted_topics:
stream = get_stream(stream_name, user_profile.realm)
recipient = get_stream_recipient(stream.id)
add_topic_mute(
user_profile=user_profile,
stream_id=stream.id,
recipient_id=recipient.id,
topic_name=topic_name,
)
def add_topic_mute(user_profile: UserProfile, stream_id: int, recipient_id: int, topic_name: str) -> None:
MutedTopic.objects.create(
user_profile=user_profile,
stream_id=stream_id,
recipient_id=recipient_id,
topic_name=topic_name,
)
def remove_topic_mute(user_profile: UserProfile, stream_id: int, topic_name: str) -> None:
row = MutedTopic.objects.get(
user_profile=user_profile,
stream_id=stream_id,
topic_name__iexact=topic_name
)
row.delete()
def topic_is_muted(user_profile: UserProfile, stream_id: int, topic_name: str) -> bool:
is_muted = MutedTopic.objects.filter(
user_profile=user_profile,
stream_id=stream_id,
topic_name__iexact=topic_name,
).exists()
return is_muted
def exclude_topic_mutes(conditions: List[Selectable],
user_profile: UserProfile,
stream_id: Optional[int]) -> List[Selectable]:
query = MutedTopic.objects.filter(
user_profile=user_profile,
)
if stream_id is not None:
# If we are narrowed to a stream, we can optimize the query
# by not considering topic mutes outside the stream.
query = query.filter(stream_id=stream_id)
query = query.values(
'recipient_id',
'topic_name'
)
rows = list(query)
if not rows:
return conditions
def mute_cond(row: Dict[str, Any]) -> Selectable:
recipient_id = row['recipient_id']
topic_name = row['topic_name']
stream_cond = column("recipient_id") == recipient_id
topic_cond = topic_match_sa(topic_name)
return and_(stream_cond, topic_cond)
condition = not_(or_(*list(map(mute_cond, rows))))
return conditions + [condition]
def build_topic_mute_checker(user_profile: UserProfile) -> Callable[[int, str], bool]:
rows = MutedTopic.objects.filter(
user_profile=user_profile,
).values(
'recipient_id',
'topic_name'
)
rows = list(rows)
tups = set()
for row in rows:
recipient_id = row['recipient_id']
topic_name = row['topic_name']
tups.add((recipient_id, topic_name.lower()))
def is_muted(recipient_id: int, topic: str) -> bool:
return (recipient_id, topic.lower()) in tups
return is_muted
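# Hedged usage sketch (illustrative only): build the checker once, then test
# (recipient_id, topic) pairs cheaply; the comparison is case-insensitive.
#
#     is_muted = build_topic_mute_checker(user_profile)
#     is_muted(recipient.id, 'Some Topic')  # -> True if that topic is muted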
| [
"UserProfile",
"UserProfile",
"List[List[str]]",
"UserProfile",
"int",
"int",
"str",
"UserProfile",
"int",
"str",
"UserProfile",
"int",
"str",
"List[Selectable]",
"UserProfile",
"Optional[int]",
"Dict[str, Any]",
"UserProfile",
"int",
"str"
] | [
335,
633,
660,
1195,
1219,
1238,
1255,
1473,
1497,
1514,
1721,
1745,
1762,
1993,
2049,
2097,
2592,
2995,
3405,
3417
] | [
346,
644,
675,
1206,
1222,
1241,
1258,
1484,
1500,
1517,
1732,
1748,
1765,
2009,
2060,
2110,
2606,
3006,
3408,
3420
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/type_debug.py |
import sys
import functools
from typing import Any, Callable, IO, Mapping, Sequence, TypeVar
def get_mapping_type_str(x: Mapping[Any, Any]) -> str:
container_type = type(x).__name__
if not x:
if container_type == 'dict':
return '{}'
else:
return container_type + '([])'
key = next(iter(x))
key_type = get_type_str(key)
value_type = get_type_str(x[key])
if container_type == 'dict':
if len(x) == 1:
return '{%s: %s}' % (key_type, value_type)
else:
return '{%s: %s, ...}' % (key_type, value_type)
else:
if len(x) == 1:
return '%s([(%s, %s)])' % (container_type, key_type, value_type)
else:
return '%s([(%s, %s), ...])' % (container_type, key_type, value_type)
def get_sequence_type_str(x: Sequence[Any]) -> str:
container_type = type(x).__name__
if not x:
if container_type == 'list':
return '[]'
else:
return container_type + '([])'
elem_type = get_type_str(x[0])
if container_type == 'list':
if len(x) == 1:
return '[' + elem_type + ']'
else:
return '[' + elem_type + ', ...]'
else:
if len(x) == 1:
return '%s([%s])' % (container_type, elem_type)
else:
return '%s([%s, ...])' % (container_type, elem_type)
expansion_blacklist = [str, bytes]
def get_type_str(x: Any) -> str:
if x is None:
return 'None'
elif isinstance(x, tuple):
types = []
for v in x:
types.append(get_type_str(v))
if len(x) == 1:
return '(' + types[0] + ',)'
else:
return '(' + ', '.join(types) + ')'
elif isinstance(x, Mapping):
return get_mapping_type_str(x)
elif isinstance(x, Sequence) and not any(isinstance(x, t) for t in expansion_blacklist):
return get_sequence_type_str(x)
else:
return type(x).__name__
FuncT = TypeVar('FuncT', bound=Callable[..., Any])
def print_types_to(file_obj: IO[str]) -> Callable[[FuncT], FuncT]:
def decorator(func: FuncT) -> FuncT:
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> Any:
arg_types = [get_type_str(arg) for arg in args]
kwarg_types = [key + "=" + get_type_str(value) for key, value in kwargs.items()]
ret_val = func(*args, **kwargs)
output = "%s(%s) -> %s" % (func.__name__,
", ".join(arg_types + kwarg_types),
get_type_str(ret_val))
print(output, file=file_obj)
return ret_val
return wrapper # type: ignore # https://github.com/python/mypy/issues/1927
return decorator
def print_types(func: FuncT) -> FuncT:
return print_types_to(sys.stdout)(func)
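# Hedged usage sketch (illustrative only): each call of the decorated
# function prints one line, e.g. "_example_fn(int) -> {str: int}".
@print_types
def _example_fn(x: Any) -> Any:
    return {'x': x}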
| [
"Mapping[Any, Any]",
"Sequence[Any]",
"Any",
"IO[str]",
"FuncT",
"Any",
"Any",
"FuncT"
] | [
124,
839,
1455,
2076,
2138,
2213,
2228,
2825
] | [
141,
852,
1458,
2083,
2143,
2216,
2231,
2830
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/types.py | from typing import TypeVar, Callable, Optional, List, Dict, Union, Tuple, Any
from django.http import HttpResponse
ViewFuncT = TypeVar('ViewFuncT', bound=Callable[..., HttpResponse])
# See zerver/lib/validator.py for more details of Validators,
# including many examples
Validator = Callable[[str, object], Optional[str]]
ExtendedValidator = Callable[[str, str, object], Optional[str]]
RealmUserValidator = Callable[[int, List[int], bool], Optional[str]]
ProfileDataElement = Dict[str, Union[int, float, Optional[str]]]
ProfileData = List[ProfileDataElement]
FieldElement = Tuple[int, str, Validator, Callable[[Any], Any], str]
ExtendedFieldElement = Tuple[int, str, ExtendedValidator, Callable[[Any], Any], str]
UserFieldElement = Tuple[int, str, RealmUserValidator, Callable[[Any], Any], str]
FieldTypeData = List[Union[FieldElement, ExtendedFieldElement, UserFieldElement]]
ProfileFieldData = Dict[str, Dict[str, str]]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/unminify.py |
import re
import os
import sourcemap
from typing import Dict, List
class SourceMap:
'''Map (line, column) pairs from generated to source file.'''
def __init__(self, sourcemap_dirs: List[str]) -> None:
self._dirs = sourcemap_dirs
self._indices = {} # type: Dict[str, sourcemap.SourceMapDecoder]
def _index_for(self, minified_src: str) -> sourcemap.SourceMapDecoder:
'''Return the source map index for minified_src, loading it if not
already loaded.'''
if minified_src not in self._indices:
for source_dir in self._dirs:
filename = os.path.join(source_dir, minified_src + '.map')
if os.path.isfile(filename):
with open(filename) as fp:
self._indices[minified_src] = sourcemap.load(fp)
break
return self._indices[minified_src]
def annotate_stacktrace(self, stacktrace: str) -> str:
out = '' # type: str
for ln in stacktrace.splitlines():
out += ln + '\n'
match = re.search(r'/static/(?:webpack-bundles|min)/(.+)(\.[\.0-9a-f]+\.js):(\d+):(\d+)', ln)
if match:
# Get the appropriate source map for the minified file.
minified_src = match.groups()[0] + match.groups()[1]
index = self._index_for(minified_src)
gen_line, gen_col = list(map(int, match.groups()[2:4]))
# The sourcemap lib is 0-based, so subtract 1 from line and col.
try:
result = index.lookup(line=gen_line-1, column=gen_col-1)
display_src = result.src
webpack_prefix = "webpack:///"
if display_src.startswith(webpack_prefix):
display_src = display_src[len(webpack_prefix):]
out += (' = %s line %d column %d\n' %
(display_src, result.src_line+1, result.src_col+1))
except IndexError:
out += ' [Unable to look up in source map]\n'
if ln.startswith(' at'):
out += '\n'
return out
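# Hedged usage sketch (the directory below is illustrative):
#
#     sm = SourceMap(['/home/zulip/deployments/current/prod-static/source-map'])
#     print(sm.annotate_stacktrace(stacktrace_text))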
| [
"List[str]",
"str",
"str"
] | [
194,
364,
954
] | [
203,
367,
957
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/upload.py | from typing import Any, Dict, Mapping, Optional, Tuple
from django.utils.translation import ugettext as _
from django.conf import settings
from django.template.defaultfilters import slugify
from django.core.files import File
from django.http import HttpRequest
from django.db.models import Sum
from jinja2 import Markup as mark_safe
import unicodedata
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.exceptions import JsonableError, ErrorCode
from zerver.lib.str_utils import NonBinaryStr
from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from mimetypes import guess_type, guess_extension
from zerver.models import get_user_profile_by_id, RealmEmoji
from zerver.models import Attachment
from zerver.models import Realm, RealmEmoji, UserProfile, Message
import urllib
import base64
import os
import re
from PIL import Image, ImageOps, ExifTags
from PIL.GifImagePlugin import GifImageFile
import io
import random
import logging
DEFAULT_AVATAR_SIZE = 100
MEDIUM_AVATAR_SIZE = 500
DEFAULT_EMOJI_SIZE = 64
# Performance Note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.
# To come up with a s3 key we randomly generate a "directory". The
# "file name" is the original filename provided by the user run
# through a sanitization function.
class RealmUploadQuotaError(JsonableError):
code = ErrorCode.REALM_UPLOAD_QUOTA
attachment_url_re = re.compile(r'[/\-]user[\-_]uploads[/\.-].*?(?=[ )]|\Z)')
def attachment_url_to_path_id(attachment_url: str) -> str:
path_id_raw = re.sub(r'[/\-]user[\-_]uploads[/\.-]', '', attachment_url)
# Remove any extra '.' after file extension. These are probably added by the user
    return re.sub('[.]+$', '', path_id_raw, flags=re.M)
def sanitize_name(value: NonBinaryStr) -> str:
"""
Sanitizes a value to be safe to store in a Linux filesystem, in
S3, and in a URL. So unicode is allowed, but not special
characters other than ".", "-", and "_".
This implementation is based on django.utils.text.slugify; it is
modified by:
* adding '.' and '_' to the list of allowed characters.
* preserving the case of the value.
"""
value = unicodedata.normalize('NFKC', value)
value = re.sub(r'[^\w\s._-]', '', value, flags=re.U).strip()
return mark_safe(re.sub(r'[-\s]+', '-', value, flags=re.U))
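# Illustrative example (hedged; traced by hand through the code above):
#     sanitize_name('My résumé (final).pdf')  ->  'My-résumé-final.pdf'
# Note that case and unicode word characters are preserved.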
def random_name(bytes: int=60) -> str:
return base64.urlsafe_b64encode(os.urandom(bytes)).decode('utf-8')
class BadImageError(JsonableError):
code = ErrorCode.BAD_IMAGE
name_to_tag_num = dict((name, num) for num, name in ExifTags.TAGS.items())
# https://stackoverflow.com/a/6218425
def exif_rotate(image: Image) -> Image:
if not hasattr(image, '_getexif'):
return image
exif_data = image._getexif()
if exif_data is None:
return image
exif_dict = dict(exif_data.items())
orientation = exif_dict.get(name_to_tag_num['Orientation'])
if orientation == 3:
return image.rotate(180, expand=True)
elif orientation == 6:
return image.rotate(270, expand=True)
elif orientation == 8:
return image.rotate(90, expand=True)
return image
def resize_avatar(image_data: bytes, size: int=DEFAULT_AVATAR_SIZE) -> bytes:
try:
im = Image.open(io.BytesIO(image_data))
im = exif_rotate(im)
im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
except IOError:
raise BadImageError("Could not decode image; did you upload an image file?")
out = io.BytesIO()
if im.mode == 'CMYK':
im = im.convert('RGB')
im.save(out, format='png')
return out.getvalue()
def resize_gif(im: GifImageFile, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
frames = []
duration_info = []
    # If 'loop' info is not set, then loop indefinitely.
loop = im.info.get("loop", 0)
for frame_num in range(0, im.n_frames):
im.seek(frame_num)
new_frame = Image.new("RGBA", im.size)
new_frame.paste(im, (0, 0), im.convert("RGBA"))
new_frame = ImageOps.fit(new_frame, (size, size), Image.ANTIALIAS)
frames.append(new_frame)
duration_info.append(im.info['duration'])
out = io.BytesIO()
frames[0].save(out, save_all=True, optimize=True,
format="GIF", append_images=frames[1:],
duration=duration_info,
loop=loop)
return out.getvalue()
def resize_emoji(image_data: bytes, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
try:
im = Image.open(io.BytesIO(image_data))
image_format = im.format
if image_format == "GIF":
return resize_gif(im, size)
else:
im = exif_rotate(im)
im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
out = io.BytesIO()
im.save(out, format=image_format)
return out.getvalue()
except IOError:
raise BadImageError("Could not decode image; did you upload an image file?")
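# Hedged usage sketch (the path is hypothetical, not part of this module):
#     with open('/tmp/custom_emoji.gif', 'rb') as f:
#         data = resize_emoji(f.read())  # 64x64 frames, still an animated GIF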
### Common
class ZulipUploadBackend:
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile,
target_realm: Optional[Realm]=None) -> str:
raise NotImplementedError()
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile) -> None:
raise NotImplementedError()
def delete_avatar_image(self, user: UserProfile) -> None:
raise NotImplementedError()
def delete_message_image(self, path_id: str) -> bool:
raise NotImplementedError()
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
raise NotImplementedError()
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
raise NotImplementedError()
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
raise NotImplementedError()
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
raise NotImplementedError()
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
raise NotImplementedError()
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
raise NotImplementedError()
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
raise NotImplementedError()
### S3
def get_bucket(conn: S3Connection, bucket_name: str) -> Bucket:
# Calling get_bucket() with validate=True can apparently lead
# to expensive S3 bills:
# http://www.appneta.com/blog/s3-list-get-bucket-default/
# The benefits of validation aren't completely clear to us, and
# we want to save on our bills, so we set the validate flag to False.
# (We think setting validate to True would cause us to fail faster
# in situations where buckets don't exist, but that shouldn't be
# an issue for us.)
bucket = conn.get_bucket(bucket_name, validate=False)
return bucket
def upload_image_to_s3(
bucket_name: NonBinaryStr,
file_name: str,
content_type: Optional[str],
user_profile: UserProfile,
contents: bytes) -> None:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = get_bucket(conn, bucket_name)
key = Key(bucket)
key.key = file_name
key.set_metadata("user_profile_id", str(user_profile.id))
key.set_metadata("realm_id", str(user_profile.realm_id))
if content_type is not None:
headers = {'Content-Type': content_type} # type: Optional[Dict[str, str]]
else:
headers = None
key.set_contents_from_string(contents, headers=headers) # type: ignore # https://github.com/python/typeshed/issues/1552
def currently_used_upload_space(realm: Realm) -> int:
used_space = Attachment.objects.filter(realm=realm).aggregate(Sum('size'))['size__sum']
if used_space is None:
return 0
return used_space
def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
upload_quota = realm.upload_quota_bytes()
if upload_quota is None:
return
used_space = currently_used_upload_space(realm)
if (used_space + uploaded_file_size) > upload_quota:
raise RealmUploadQuotaError(_("Upload would exceed your organization's upload quota."))
def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Optional[str]]:
uploaded_file_name = user_file.name
assert isinstance(uploaded_file_name, str)
content_type = request.GET.get('mimetype')
if content_type is None:
guessed_type = guess_type(uploaded_file_name)[0]
if guessed_type is not None:
content_type = guessed_type
else:
extension = guess_extension(content_type)
if extension is not None:
uploaded_file_name = uploaded_file_name + extension
uploaded_file_name = urllib.parse.unquote(uploaded_file_name)
uploaded_file_size = user_file.size
return uploaded_file_name, uploaded_file_size, content_type
def get_signed_upload_url(path: str) -> str:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
return conn.generate_url(15, 'GET', bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)
def get_realm_for_filename(path: str) -> Optional[int]:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
key = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
if key is None:
# This happens if the key does not exist.
return None
return get_user_profile_by_id(key.metadata["user_profile_id"]).realm_id
class S3UploadBackend(ZulipUploadBackend):
def delete_file_from_s3(self, path_id: str, bucket_name: str) -> bool:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = get_bucket(conn, bucket_name)
# check if file exists
key = bucket.get_key(path_id)
if key is not None:
bucket.delete_key(key)
return True
file_name = path_id.split("/")[-1]
logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
return False
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
if target_realm is None:
target_realm = user_profile.realm
s3_file_name = "/".join([
str(target_realm.id),
random_name(18),
sanitize_name(uploaded_file_name)
])
url = "/user_uploads/%s" % (s3_file_name,)
upload_image_to_s3(
bucket_name,
s3_file_name,
content_type,
user_profile,
file_data
)
create_attachment(uploaded_file_name, s3_file_name, user_profile, uploaded_file_size)
return url
def delete_message_image(self, path_id: str) -> bool:
return self.delete_file_from_s3(path_id, settings.S3_AUTH_UPLOADS_BUCKET)
def write_avatar_images(self, s3_file_name: str, target_user_profile: UserProfile,
image_data: bytes, content_type: Optional[str]) -> None:
bucket_name = settings.S3_AVATAR_BUCKET
upload_image_to_s3(
bucket_name,
s3_file_name + ".original",
content_type,
target_user_profile,
image_data,
)
# custom 500px wide version
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
upload_image_to_s3(
bucket_name,
s3_file_name + "-medium.png",
"image/png",
target_user_profile,
resized_medium
)
resized_data = resize_avatar(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name,
'image/png',
target_user_profile,
resized_data,
)
# See avatar_url in avatar.py for URL. (That code also handles the case
# that users use gravatar.)
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile) -> None:
content_type = guess_type(user_file.name)[0]
s3_file_name = user_avatar_path(target_user_profile)
image_data = user_file.read()
self.write_avatar_images(s3_file_name, target_user_profile,
image_data, content_type)
def delete_avatar_image(self, user: UserProfile) -> None:
path_id = user_avatar_path(user)
bucket_name = settings.S3_AVATAR_BUCKET
self.delete_file_from_s3(path_id + ".original", bucket_name)
self.delete_file_from_s3(path_id + "-medium.png", bucket_name)
self.delete_file_from_s3(path_id, bucket_name)
def get_avatar_key(self, file_name: str) -> Key:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket_name = settings.S3_AVATAR_BUCKET
bucket = get_bucket(conn, bucket_name)
key = bucket.get_key(file_name)
return key
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
s3_source_file_name = user_avatar_path(source_profile)
s3_target_file_name = user_avatar_path(target_profile)
key = self.get_avatar_key(s3_source_file_name + ".original")
image_data = key.get_contents_as_string() # type: ignore # https://github.com/python/typeshed/issues/1552
content_type = key.content_type
self.write_avatar_images(s3_target_file_name, target_profile, image_data, content_type) # type: ignore # image_data is `bytes`, boto subs are wrong
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
bucket = settings.S3_AVATAR_BUCKET
medium_suffix = "-medium.png" if medium else ""
# ?x=x allows templates to append additional parameters with &s
return "https://%s.s3.amazonaws.com/%s%s?x=x" % (bucket, hash_key, medium_suffix)
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
content_type = guess_type(icon_file.name)[0]
bucket_name = settings.S3_AVATAR_BUCKET
s3_file_name = os.path.join(str(user_profile.realm.id), 'realm', 'icon')
image_data = icon_file.read()
upload_image_to_s3(
bucket_name,
s3_file_name + ".original",
content_type,
user_profile,
image_data,
)
resized_data = resize_avatar(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name + ".png",
'image/png',
user_profile,
resized_data,
)
# See avatar_url in avatar.py for URL. (That code also handles the case
# that users use gravatar.)
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
bucket = settings.S3_AVATAR_BUCKET
# ?x=x allows templates to append additional parameters with &s
return "https://%s.s3.amazonaws.com/%s/realm/icon.png?version=%s" % (bucket, realm_id, version)
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
file_path = user_avatar_path(user_profile)
s3_file_name = file_path
bucket_name = settings.S3_AVATAR_BUCKET
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = get_bucket(conn, bucket_name)
key = bucket.get_key(file_path)
image_data = key.get_contents_as_string()
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE) # type: ignore # image_data is `bytes`, boto subs are wrong
upload_image_to_s3(
bucket_name,
s3_file_name + "-medium.png",
"image/png",
user_profile,
resized_medium
)
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
user_profile: UserProfile) -> None:
content_type = guess_type(emoji_file.name)[0]
bucket_name = settings.S3_AVATAR_BUCKET
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
realm_id=user_profile.realm_id,
emoji_file_name=emoji_file_name
)
image_data = emoji_file.read()
resized_image_data = resize_emoji(image_data)
upload_image_to_s3(
bucket_name,
".".join((emoji_path, "original")),
content_type,
user_profile,
image_data,
)
upload_image_to_s3(
bucket_name,
emoji_path,
content_type,
user_profile,
resized_image_data,
)
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
bucket = settings.S3_AVATAR_BUCKET
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id,
emoji_file_name=emoji_file_name)
return "https://%s.s3.amazonaws.com/%s" % (bucket, emoji_path)
### Local
def write_local_file(type: str, path: str, file_data: bytes) -> None:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb') as f:
f.write(file_data)
def read_local_file(type: str, path: str) -> bytes:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
with open(file_path, 'rb') as f:
return f.read()
def delete_local_file(type: str, path: str) -> bool:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
if os.path.isfile(file_path):
        # This removes the file, but any now-empty parent folders remain.
os.remove(file_path)
return True
file_name = path.split("/")[-1]
logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
return False
def get_local_file_path(path_id: str) -> Optional[str]:
local_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
if os.path.isfile(local_path):
return local_path
else:
return None
class LocalUploadBackend(ZulipUploadBackend):
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
# Split into 256 subdirectories to prevent directories from getting too big
path = "/".join([
str(user_profile.realm_id),
format(random.randint(0, 255), 'x'),
random_name(18),
sanitize_name(uploaded_file_name)
])
write_local_file('files', path, file_data)
create_attachment(uploaded_file_name, path, user_profile, uploaded_file_size)
return '/user_uploads/' + path
def delete_message_image(self, path_id: str) -> bool:
return delete_local_file('files', path_id)
def write_avatar_images(self, file_path: str, image_data: bytes) -> None:
write_local_file('avatars', file_path + '.original', image_data)
resized_data = resize_avatar(image_data)
write_local_file('avatars', file_path + '.png', resized_data)
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
write_local_file('avatars', file_path + '-medium.png', resized_medium)
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile) -> None:
file_path = user_avatar_path(target_user_profile)
image_data = user_file.read()
self.write_avatar_images(file_path, image_data)
def delete_avatar_image(self, user: UserProfile) -> None:
path_id = user_avatar_path(user)
delete_local_file("avatars", path_id + ".original")
delete_local_file("avatars", path_id + ".png")
delete_local_file("avatars", path_id + "-medium.png")
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
# ?x=x allows templates to append additional parameters with &s
medium_suffix = "-medium" if medium else ""
return "/user_avatars/%s%s.png?x=x" % (hash_key, medium_suffix)
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
source_file_path = user_avatar_path(source_profile)
target_file_path = user_avatar_path(target_profile)
image_data = read_local_file('avatars', source_file_path + '.original')
self.write_avatar_images(target_file_path, image_data)
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
upload_path = os.path.join('avatars', str(user_profile.realm.id), 'realm')
image_data = icon_file.read()
write_local_file(
upload_path,
'icon.original',
image_data)
resized_data = resize_avatar(image_data)
write_local_file(upload_path, 'icon.png', resized_data)
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
# ?x=x allows templates to append additional parameters with &s
return "/user_avatars/%s/realm/icon.png?version=%s" % (realm_id, version)
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
file_path = user_avatar_path(user_profile)
output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + "-medium.png")
if os.path.isfile(output_path):
return
image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
        with open(image_path, "rb") as f:
            image_data = f.read()
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
write_local_file('avatars', file_path + '-medium.png', resized_medium)
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
user_profile: UserProfile) -> None:
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
emoji_file_name=emoji_file_name
)
image_data = emoji_file.read()
resized_image_data = resize_emoji(image_data)
write_local_file(
'avatars',
".".join((emoji_path, "original")),
image_data)
write_local_file(
'avatars',
emoji_path,
resized_image_data)
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
return os.path.join(
"/user_avatars",
RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id, emoji_file_name=emoji_file_name))
# Common and wrappers
if settings.LOCAL_UPLOADS_DIR is not None:
upload_backend = LocalUploadBackend() # type: ZulipUploadBackend
else:
upload_backend = S3UploadBackend() # nocoverage
def delete_message_image(path_id: str) -> bool:
return upload_backend.delete_message_image(path_id)
def upload_avatar_image(user_file: File, acting_user_profile: UserProfile,
target_user_profile: UserProfile) -> None:
upload_backend.upload_avatar_image(user_file, acting_user_profile, target_user_profile)
def delete_avatar_image(user_profile: UserProfile) -> None:
upload_backend.delete_avatar_image(user_profile)
def copy_avatar(source_profile: UserProfile, target_profile: UserProfile) -> None:
upload_backend.copy_avatar(source_profile, target_profile)
def upload_icon_image(user_file: File, user_profile: UserProfile) -> None:
upload_backend.upload_realm_icon_image(user_file, user_profile)
def upload_emoji_image(emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
upload_backend.upload_emoji_image(emoji_file, emoji_file_name, user_profile)
def upload_message_file(uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
return upload_backend.upload_message_file(uploaded_file_name, uploaded_file_size,
content_type, file_data, user_profile,
target_realm=target_realm)
def claim_attachment(user_profile: UserProfile,
path_id: str,
message: Message,
is_message_realm_public: bool) -> Attachment:
attachment = Attachment.objects.get(path_id=path_id)
attachment.messages.add(message)
attachment.is_realm_public = attachment.is_realm_public or is_message_realm_public
attachment.save()
return attachment
def create_attachment(file_name: str, path_id: str, user_profile: UserProfile,
file_size: int) -> bool:
attachment = Attachment.objects.create(file_name=file_name, path_id=path_id, owner=user_profile,
realm=user_profile.realm, size=file_size)
from zerver.lib.actions import notify_attachment_update
notify_attachment_update(user_profile, 'add', attachment.to_dict())
return True
def upload_message_image_from_request(request: HttpRequest, user_file: File,
user_profile: UserProfile) -> str:
uploaded_file_name, uploaded_file_size, content_type = get_file_info(request, user_file)
return upload_message_file(uploaded_file_name, uploaded_file_size,
content_type, user_file.read(), user_profile)
| [
"str",
"NonBinaryStr",
"Image",
"bytes",
"GifImageFile",
"bytes",
"str",
"int",
"Optional[str]",
"bytes",
"UserProfile",
"File",
"UserProfile",
"UserProfile",
"UserProfile",
"str",
"str",
"UserProfile",
"UserProfile",
"UserProfile",
"File",
"UserProfile",
"int",
"int",
"File",
"str",
"UserProfile",
"str",
"int",
"S3Connection",
"str",
"NonBinaryStr",
"str",
"Optional[str]",
"UserProfile",
"bytes",
"Realm",
"Realm",
"int",
"HttpRequest",
"File",
"str",
"str",
"str",
"str",
"str",
"int",
"Optional[str]",
"bytes",
"UserProfile",
"str",
"str",
"UserProfile",
"bytes",
"Optional[str]",
"File",
"UserProfile",
"UserProfile",
"UserProfile",
"str",
"UserProfile",
"UserProfile",
"str",
"File",
"UserProfile",
"int",
"int",
"UserProfile",
"File",
"str",
"UserProfile",
"str",
"int",
"str",
"str",
"bytes",
"str",
"str",
"str",
"str",
"str",
"str",
"int",
"Optional[str]",
"bytes",
"UserProfile",
"str",
"str",
"bytes",
"File",
"UserProfile",
"UserProfile",
"UserProfile",
"str",
"UserProfile",
"UserProfile",
"File",
"UserProfile",
"int",
"int",
"UserProfile",
"File",
"str",
"UserProfile",
"str",
"int",
"str",
"File",
"UserProfile",
"UserProfile",
"UserProfile",
"UserProfile",
"UserProfile",
"File",
"UserProfile",
"File",
"str",
"UserProfile",
"str",
"int",
"Optional[str]",
"bytes",
"UserProfile",
"UserProfile",
"str",
"Message",
"bool",
"str",
"str",
"UserProfile",
"int",
"HttpRequest",
"File",
"UserProfile"
] | [
1830,
2082,
2977,
3505,
3963,
4759,
5392,
5417,
5464,
5490,
5539,
5706,
5761,
5823,
5922,
6025,
6115,
6227,
6256,
6370,
6478,
6498,
6600,
6614,
6709,
6732,
6751,
6855,
6870,
6950,
6977,
7583,
7616,
7643,
7680,
7711,
8321,
8532,
8559,
8896,
8920,
9624,
9826,
10241,
10259,
10770,
10795,
10842,
10868,
10917,
11635,
11780,
11806,
11859,
11880,
12823,
12878,
12940,
13283,
13631,
13911,
13940,
14511,
14855,
14875,
15683,
15697,
15985,
16708,
16731,
16777,
17554,
17569,
17900,
17911,
17927,
18162,
18173,
18347,
18358,
18779,
19070,
19095,
19142,
19168,
19217,
19781,
19892,
19909,
20315,
20370,
20432,
20648,
20929,
21201,
21230,
21566,
21586,
21992,
22006,
22229,
22809,
22832,
22878,
23422,
23437,
23838,
23944,
23971,
24029,
24182,
24290,
24319,
24438,
24458,
24584,
24607,
24626,
24774,
24799,
24842,
24868,
24913,
25250,
25293,
25328,
25383,
25663,
25677,
25696,
25742,
26138,
26162,
26220
] | [
1833,
2094,
2982,
3510,
3975,
4764,
5395,
5420,
5477,
5495,
5550,
5710,
5772,
5834,
5933,
6028,
6118,
6238,
6267,
6381,
6482,
6509,
6603,
6617,
6713,
6735,
6762,
6858,
6873,
6962,
6980,
7595,
7619,
7656,
7691,
7716,
8326,
8537,
8562,
8907,
8924,
9627,
9829,
10244,
10262,
10773,
10798,
10855,
10873,
10928,
11638,
11783,
11817,
11864,
11893,
12827,
12889,
12951,
13294,
13634,
13922,
13951,
14514,
14859,
14886,
15686,
15700,
15996,
16712,
16734,
16788,
17557,
17572,
17903,
17914,
17932,
18165,
18176,
18350,
18361,
18782,
19073,
19098,
19155,
19173,
19228,
19784,
19895,
19914,
20319,
20381,
20443,
20659,
20932,
21212,
21241,
21570,
21597,
21995,
22009,
22240,
22813,
22835,
22889,
23425,
23440,
23841,
23948,
23982,
24040,
24193,
24301,
24330,
24442,
24469,
24588,
24610,
24637,
24777,
24802,
24855,
24873,
24924,
25261,
25296,
25335,
25387,
25666,
25680,
25707,
25745,
26149,
26166,
26231
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_encoding.py | import urllib
from typing import Any, Dict, List
from zerver.lib.topic import get_topic_from_message_info
from zerver.models import Realm, Stream, UserProfile
def hash_util_encode(string: str) -> str:
# Do the same encoding operation as hash_util.encodeHashComponent on the
# frontend.
    # `safe` has a default value of "/", but we want slashes encoded, too.
return urllib.parse.quote(
string.encode("utf-8"), safe=b"").replace(".", "%2E").replace("%", ".")
def encode_stream(stream_id: int, stream_name: str) -> str:
# We encode streams for urls as something like 99-Verona.
stream_name = stream_name.replace(' ', '-')
return str(stream_id) + '-' + hash_util_encode(stream_name)
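# Illustrative encodings (hedged; traced by hand through the code above):
#     hash_util_encode('all.topics')    -> 'all.2Etopics'
#     encode_stream(9, 'Verona rocks')  -> '9-Verona-rocks'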
def personal_narrow_url(realm: Realm, sender: UserProfile) -> str:
base_url = "%s/#narrow/pm-with/" % (realm.uri,)
email_user = sender.email.split('@')[0].lower()
pm_slug = str(sender.id) + '-' + hash_util_encode(email_user)
return base_url + pm_slug
def huddle_narrow_url(realm: Realm, other_user_ids: List[int]) -> str:
pm_slug = ','.join(str(user_id) for user_id in sorted(other_user_ids)) + '-group'
base_url = "%s/#narrow/pm-with/" % (realm.uri,)
return base_url + pm_slug
def stream_narrow_url(realm: Realm, stream: Stream) -> str:
base_url = "%s/#narrow/stream/" % (realm.uri,)
return base_url + encode_stream(stream.id, stream.name)
def topic_narrow_url(realm: Realm, stream: Stream, topic: str) -> str:
base_url = "%s/#narrow/stream/" % (realm.uri,)
return "%s%s/topic/%s" % (base_url,
encode_stream(stream.id, stream.name),
hash_util_encode(topic))
def near_message_url(realm: Realm,
message: Dict[str, Any]) -> str:
if message['type'] == 'stream':
url = near_stream_message_url(
realm=realm,
message=message,
)
return url
url = near_pm_message_url(
realm=realm,
message=message,
)
return url
def near_stream_message_url(realm: Realm,
message: Dict[str, Any]) -> str:
message_id = str(message['id'])
stream_id = message['stream_id']
stream_name = message['display_recipient']
topic_name = get_topic_from_message_info(message)
encoded_topic = hash_util_encode(topic_name)
encoded_stream = encode_stream(stream_id=stream_id, stream_name=stream_name)
parts = [
realm.uri,
'#narrow',
'stream',
encoded_stream,
'topic',
encoded_topic,
'near',
message_id,
]
full_url = '/'.join(parts)
return full_url
def near_pm_message_url(realm: Realm,
message: Dict[str, Any]) -> str:
message_id = str(message['id'])
str_user_ids = [
str(recipient['id'])
for recipient in message['display_recipient']
]
# Use the "perma-link" format here that includes the sender's
# user_id, so they're easier to share between people.
pm_str = ','.join(str_user_ids) + '-pm'
parts = [
realm.uri,
'#narrow',
'pm-with',
pm_str,
'near',
message_id,
]
full_url = '/'.join(parts)
return full_url
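# Hedged example of the resulting URL shape (realm URI and ids made up):
#     https://chat.example.com/#narrow/pm-with/3,7-pm/near/1234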
| [
"str",
"int",
"str",
"Realm",
"UserProfile",
"Realm",
"List[int]",
"Realm",
"Stream",
"Realm",
"Stream",
"str",
"Realm",
"Dict[str, Any]",
"Realm",
"Dict[str, Any]",
"Realm",
"Dict[str, Any]"
] | [
190,
510,
528,
747,
762,
1013,
1036,
1253,
1268,
1424,
1439,
1454,
1711,
1748,
2066,
2110,
2698,
2738
] | [
193,
513,
531,
752,
773,
1018,
1045,
1258,
1274,
1429,
1445,
1457,
1716,
1762,
2071,
2124,
2703,
2752
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/oembed/__init__.py | from typing import Optional, Dict, Any
from pyoembed import oEmbed, PyOembedException
def get_oembed_data(url: str,
maxwidth: Optional[int]=640,
maxheight: Optional[int]=480) -> Optional[Dict[str, Any]]:
try:
data = oEmbed(url, maxwidth=maxwidth, maxheight=maxheight)
except PyOembedException:
return None
data['image'] = data.get('thumbnail_url')
return data
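# Hedged usage sketch (the URL is hypothetical; pyoembed does the fetching):
#     data = get_oembed_data('https://www.youtube.com/watch?v=abc123')
# On success, data is the provider's oEmbed dict with data['image'] aliased
# to its 'thumbnail_url'; on a pyoembed failure, it is None.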
| [
"str"
] | [
113
] | [
116
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/parsers/__init__.py | from zerver.lib.url_preview.parsers.open_graph import OpenGraphParser
from zerver.lib.url_preview.parsers.generic import GenericParser
__all__ = ['OpenGraphParser', 'GenericParser']
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/parsers/base.py | from typing import Any
class BaseParser:
def __init__(self, html_source: str) -> None:
# We import BeautifulSoup here, because it's not used by most
# processes in production, and bs4 is big enough that
        # importing it adds tens of milliseconds to manage.py startup.
from bs4 import BeautifulSoup
self._soup = BeautifulSoup(html_source, "lxml")
def extract_data(self) -> Any:
raise NotImplementedError()
| [
"str"
] | [
78
] | [
81
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/parsers/generic.py | from typing import Dict, Optional
from zerver.lib.url_preview.parsers.base import BaseParser
class GenericParser(BaseParser):
def extract_data(self) -> Dict[str, Optional[str]]:
return {
'title': self._get_title(),
'description': self._get_description(),
'image': self._get_image()}
def _get_title(self) -> Optional[str]:
soup = self._soup
if (soup.title and soup.title.text != ''):
return soup.title.text
if (soup.h1 and soup.h1.text != ''):
return soup.h1.text
return None
def _get_description(self) -> Optional[str]:
soup = self._soup
meta_description = soup.find('meta', attrs={'name': 'description'})
if (meta_description and meta_description.get('content', '') != ''):
return meta_description['content']
first_h1 = soup.find('h1')
if first_h1:
first_p = first_h1.find_next('p')
if (first_p and first_p.string != ''):
return first_p.text
first_p = soup.find('p')
if (first_p and first_p.string != ''):
return first_p.string
return None
def _get_image(self) -> Optional[str]:
"""
        Find the first image after the h1 header.
        Presumably it will be the main image.
"""
soup = self._soup
first_h1 = soup.find('h1')
if first_h1:
first_image = first_h1.find_next_sibling('img')
if first_image and first_image['src'] != '':
return first_image['src']
return None
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/parsers/open_graph.py | import re
from typing import Dict
from .base import BaseParser
class OpenGraphParser(BaseParser):
def extract_data(self) -> Dict[str, str]:
meta = self._soup.findAll('meta')
content = {}
for tag in meta:
if tag.has_attr('property') and 'og:' in tag['property']:
content[re.sub('og:', '', tag['property'])] = tag['content']
return content
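# Illustrative extraction (hedged; HTML made up):
#     <meta property="og:title" content="Zulip" />
#     <meta property="og:description" content="Chat for teams" />
# yields {'title': 'Zulip', 'description': 'Chat for teams'}.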
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/url_preview/preview.py | import re
import logging
import traceback
from typing import Any, Optional, Dict
from typing.re import Match
import requests
from zerver.lib.cache import cache_with_key, get_cache_with_key, preview_url_cache_key
from zerver.lib.url_preview.oembed import get_oembed_data
from zerver.lib.url_preview.parsers import OpenGraphParser, GenericParser
from django.utils.encoding import smart_text
CACHE_NAME = "database"
# Based on django.core.validators.URLValidator, with ftp support removed.
link_regex = re.compile(
r'^(?:http)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def is_link(url: str) -> Optional[Match[str]]:
return link_regex.match(smart_text(url))
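# Illustrative matches (hedged; follows link_regex above):
#     is_link('https://zulip.com/help')  -> a truthy Match object
#     is_link('ftp://example.com/file')  -> None (ftp support was removed)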
@cache_with_key(preview_url_cache_key, cache_name=CACHE_NAME, with_statsd_key="urlpreview_data")
def get_link_embed_data(url: str,
maxwidth: Optional[int]=640,
maxheight: Optional[int]=480) -> Optional[Dict[str, Any]]:
if not is_link(url):
return None
# Fetch information from URL.
    # We try three sources, in the following order:
# 1. OEmbed
# 2. Open Graph
# 3. Meta tags
try:
data = get_oembed_data(url, maxwidth=maxwidth, maxheight=maxheight)
except requests.exceptions.RequestException:
msg = 'Unable to fetch information from url {0}, traceback: {1}'
logging.error(msg.format(url, traceback.format_exc()))
return None
data = data or {}
response = requests.get(url)
if response.ok:
og_data = OpenGraphParser(response.text).extract_data()
if og_data:
data.update(og_data)
generic_data = GenericParser(response.text).extract_data() or {}
for key in ['title', 'description', 'image']:
if not data.get(key) and generic_data.get(key):
data[key] = generic_data[key]
return data
@get_cache_with_key(preview_url_cache_key, cache_name=CACHE_NAME)
def link_embed_data_from_cache(url: str, maxwidth: Optional[int]=640, maxheight: Optional[int]=480) -> Any:
return
| [
"str",
"str",
"str"
] | [
808,
1001,
2159
] | [
811,
1004,
2162
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/user_agent.py | import re
from typing import Dict
# Warning: If you change this parsing, please test using
# zerver/tests/test_decorators.py
# And extend zerver/tests/fixtures/user_agents_unique with any new test cases
def parse_user_agent(user_agent: str) -> Dict[str, str]:
match = re.match("^(?P<name>[^/ ]*[^0-9/(]*)(/(?P<version>[^/ ]*))?([ /].*)?$", user_agent)
assert match is not None
return match.groupdict()
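# Illustrative parse (hedged; the user agent string is made up):
#     parse_user_agent('ZulipMobile/23.2.111 (Android 10)')
#         -> {'name': 'ZulipMobile', 'version': '23.2.111'}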
| [
"str"
] | [
239
] | [
242
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/user_groups.py | from __future__ import absolute_import
from collections import defaultdict
from django.db import transaction
from django.utils.translation import ugettext as _
from zerver.lib.exceptions import JsonableError
from zerver.models import UserProfile, Realm, UserGroupMembership, UserGroup
from typing import Dict, Iterable, List, Tuple, Any
def access_user_group_by_id(user_group_id: int, user_profile: UserProfile) -> UserGroup:
try:
user_group = UserGroup.objects.get(id=user_group_id, realm=user_profile.realm)
group_member_ids = get_user_group_members(user_group)
msg = _("Only group members and organization administrators can administer this group.")
if (not user_profile.is_realm_admin and user_profile.id not in group_member_ids):
raise JsonableError(msg)
except UserGroup.DoesNotExist:
raise JsonableError(_("Invalid user group"))
return user_group
def user_groups_in_realm(realm: Realm) -> List[UserGroup]:
user_groups = UserGroup.objects.filter(realm=realm)
return list(user_groups)
def user_groups_in_realm_serialized(realm: Realm) -> List[Dict[str, Any]]:
"""This function is used in do_events_register code path so this code
should be performant. We need to do 2 database queries because
Django's ORM doesn't properly support the left join between
UserGroup and UserGroupMembership that we need.
"""
realm_groups = UserGroup.objects.filter(realm=realm)
group_dicts = {} # type: Dict[str, Any]
for user_group in realm_groups:
group_dicts[user_group.id] = dict(
id=user_group.id,
name=user_group.name,
description=user_group.description,
members=[],
)
membership = UserGroupMembership.objects.filter(user_group__realm=realm).values_list(
'user_group_id', 'user_profile_id')
for (user_group_id, user_profile_id) in membership:
group_dicts[user_group_id]['members'].append(user_profile_id)
for group_dict in group_dicts.values():
group_dict['members'] = sorted(group_dict['members'])
return sorted(group_dicts.values(), key=lambda group_dict: group_dict['id'])
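# For illustration, the serialized shape (hedged; ids and names made up):
#     [{'id': 1, 'name': 'backend', 'description': '', 'members': [3, 7]}]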
def get_user_groups(user_profile: UserProfile) -> List[UserGroup]:
return list(user_profile.usergroup_set.all())
def check_add_user_to_user_group(user_profile: UserProfile, user_group: UserGroup) -> bool:
member_obj, created = UserGroupMembership.objects.get_or_create(
user_group=user_group, user_profile=user_profile)
return created
def remove_user_from_user_group(user_profile: UserProfile, user_group: UserGroup) -> int:
num_deleted, _ = UserGroupMembership.objects.filter(
user_profile=user_profile, user_group=user_group).delete()
return num_deleted
def check_remove_user_from_user_group(user_profile: UserProfile, user_group: UserGroup) -> bool:
try:
num_deleted = remove_user_from_user_group(user_profile, user_group)
return bool(num_deleted)
except Exception:
return False
def create_user_group(name: str, members: List[UserProfile], realm: Realm,
description: str='') -> UserGroup:
with transaction.atomic():
user_group = UserGroup.objects.create(name=name, realm=realm,
description=description)
UserGroupMembership.objects.bulk_create([
UserGroupMembership(user_profile=member, user_group=user_group)
for member in members
])
return user_group
def get_user_group_members(user_group: UserGroup) -> List[UserProfile]:
members = UserGroupMembership.objects.filter(user_group=user_group)
return [member.user_profile.id for member in members]
def get_memberships_of_users(user_group: UserGroup, members: List[UserProfile]) -> List[int]:
return list(UserGroupMembership.objects.filter(
user_group=user_group,
user_profile__in=members).values_list('user_profile_id', flat=True))
| [
"int",
"UserProfile",
"Realm",
"Realm",
"UserProfile",
"UserProfile",
"UserGroup",
"UserProfile",
"UserGroup",
"UserProfile",
"UserGroup",
"str",
"List[UserProfile]",
"Realm",
"UserGroup",
"UserGroup",
"List[UserProfile]"
] | [
382,
401,
953,
1109,
2218,
2349,
2374,
2587,
2612,
2831,
2856,
3066,
3080,
3106,
3579,
3784,
3804
] | [
385,
412,
958,
1114,
2229,
2360,
2383,
2598,
2621,
2842,
2865,
3069,
3097,
3111,
3588,
3793,
3821
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/users.py | from typing import Dict, List, Optional, Union, cast
from django.db.models.query import QuerySet
from django.utils.translation import ugettext as _
from django.conf import settings
from zerver.lib.cache import generic_bulk_cached_fetch, user_profile_cache_key_id, \
user_profile_by_id_cache_key
from zerver.lib.request import JsonableError
from zerver.lib.avatar import avatar_url
from zerver.models import UserProfile, Service, Realm, \
get_user_profile_by_id, query_for_ids, get_user_profile_by_id_in_realm, \
CustomProfileField
from zulip_bots.custom_exceptions import ConfigValidationError
def check_full_name(full_name_raw: str) -> str:
full_name = full_name_raw.strip()
if len(full_name) > UserProfile.MAX_NAME_LENGTH:
raise JsonableError(_("Name too long!"))
if len(full_name) < UserProfile.MIN_NAME_LENGTH:
raise JsonableError(_("Name too short!"))
if list(set(full_name).intersection(UserProfile.NAME_INVALID_CHARS)):
raise JsonableError(_("Invalid characters in name!"))
return full_name
# NOTE: We don't try to absolutely prevent 2 bots from having the same
# name (e.g. you can get there by reactivating a deactivated bot after
# making a new bot with the same name). This is just a check designed
# to make it unlikely to happen by accident.
def check_bot_name_available(realm_id: int, full_name: str) -> None:
dup_exists = UserProfile.objects.filter(
realm_id=realm_id,
full_name=full_name.strip(),
is_active=True,
).exists()
if dup_exists:
raise JsonableError(_("Name is already in use!"))
def check_short_name(short_name_raw: str) -> str:
short_name = short_name_raw.strip()
if len(short_name) == 0:
raise JsonableError(_("Bad name or username"))
return short_name
def check_valid_bot_config(service_name: str, config_data: Dict[str, str]) -> None:
try:
from zerver.lib.bot_lib import get_bot_handler
bot_handler = get_bot_handler(service_name)
if hasattr(bot_handler, 'validate_config'):
bot_handler.validate_config(config_data)
except ConfigValidationError:
# The exception provides a specific error message, but that
# message is not tagged translatable, because it is
# triggered in the external zulip_bots package.
# TODO: Think of some clever way to provide a more specific
# error message.
raise JsonableError(_("Invalid configuration data!"))
# Adds an outgoing webhook or embedded bot service.
def add_service(name: str, user_profile: UserProfile, base_url: Optional[str]=None,
interface: Optional[int]=None, token: Optional[str]=None) -> None:
Service.objects.create(name=name,
user_profile=user_profile,
base_url=base_url,
interface=interface,
token=token)
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
# Realm administrators can always add bot
if user_profile.is_realm_admin:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_EVERYONE:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_ADMINS_ONLY:
raise JsonableError(_("Must be an organization administrator"))
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS and \
bot_type == UserProfile.DEFAULT_BOT:
raise JsonableError(_("Must be an organization administrator"))
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
if bot_type not in user_profile.allowed_bot_types:
raise JsonableError(_('Invalid bot type'))
def check_valid_interface_type(interface_type: Optional[int]) -> None:
if interface_type not in Service.ALLOWED_INTERFACE_TYPES:
raise JsonableError(_('Invalid interface type'))
def bulk_get_users(emails: List[str], realm: Optional[Realm],
base_query: 'QuerySet[UserProfile]'=None) -> Dict[str, UserProfile]:
if base_query is None:
assert realm is not None
query = UserProfile.objects.filter(realm=realm, is_active=True)
realm_id = realm.id
else:
# WARNING: Currently, this code path only really supports one
# version of `base_query` being used (because otherwise,
# they'll share the cache, which can screw up the filtering).
# If you're using this flow, you'll need to re-do any filters
# in base_query in the code itself; base_query is just a perf
# optimization.
query = base_query
realm_id = 0
def fetch_users_by_email(emails: List[str]) -> List[UserProfile]:
# This should be just
#
# UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
# realm=realm)
#
# But chaining __in and __iexact doesn't work with Django's
# ORM, so we have the following hack to construct the relevant where clause
if len(emails) == 0:
return []
upper_list = ", ".join(["UPPER(%s)"] * len(emails))
where_clause = "UPPER(zerver_userprofile.email::text) IN (%s)" % (upper_list,)
return query.select_related("realm").extra(
where=[where_clause],
params=emails)
return generic_bulk_cached_fetch(
# Use a separate cache key to protect us from conflicts with
# the get_user cache.
lambda email: 'bulk_get_users:' + user_profile_cache_key_id(email, realm_id),
fetch_users_by_email,
[email.lower() for email in emails],
id_fetcher=lambda user_profile: user_profile.email.lower()
)
def user_ids_to_users(user_ids: List[int], realm: Realm) -> List[UserProfile]:
# TODO: Consider adding a flag to control whether deactivated
# users should be included.
def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]:
if len(user_ids) == 0:
return []
return list(UserProfile.objects.filter(id__in=user_ids).select_related())
user_profiles_by_id = generic_bulk_cached_fetch(
cache_key_function=user_profile_by_id_cache_key,
query_function=fetch_users_by_id,
object_ids=user_ids
) # type: Dict[int, UserProfile]
found_user_ids = user_profiles_by_id.keys()
missed_user_ids = [user_id for user_id in user_ids if user_id not in found_user_ids]
if missed_user_ids:
raise JsonableError(_("Invalid user ID: %s" % (missed_user_ids[0])))
user_profiles = list(user_profiles_by_id.values())
for user_profile in user_profiles:
if user_profile.realm != realm:
raise JsonableError(_("Invalid user ID: %s" % (user_profile.id,)))
return user_profiles
def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
try:
target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("No such bot"))
if not target.is_bot:
raise JsonableError(_("No such bot"))
if not user_profile.can_admin_user(target):
raise JsonableError(_("Insufficient permission"))
return target
def access_user_by_id(user_profile: UserProfile, user_id: int,
allow_deactivated: bool=False, allow_bots: bool=False) -> UserProfile:
try:
target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("No such user"))
if target.is_bot and not allow_bots:
raise JsonableError(_("No such user"))
if not target.is_active and not allow_deactivated:
raise JsonableError(_("User is deactivated"))
if not user_profile.can_admin_user(target):
raise JsonableError(_("Insufficient permission"))
return target
def get_accounts_for_email(email: str) -> List[Dict[str, Optional[str]]]:
if settings.PRODUCTION: # nocoverage
return []
profiles = UserProfile.objects.select_related('realm').filter(email__iexact=email.strip(),
is_active=True,
is_bot=False,
realm__deactivated=False)
return [{"realm_name": profile.realm.name,
"string_id": profile.realm.string_id,
"full_name": profile.full_name,
"avatar": avatar_url(profile)}
for profile in profiles]
def get_api_key(user_profile: UserProfile) -> str:
return user_profile.api_key
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
# Users can only have one API key for now
return [user_profile.api_key]
def validate_user_custom_profile_data(realm_id: int,
profile_data: List[Dict[str, Union[int, str, List[int]]]]) -> None:
    # This function validates all custom field values according to their field type.
for item in profile_data:
field_id = item['id']
try:
field = CustomProfileField.objects.get(id=field_id)
except CustomProfileField.DoesNotExist:
raise JsonableError(_('Field id {id} not found.').format(id=field_id))
validators = CustomProfileField.FIELD_VALIDATORS
field_type = field.field_type
var_name = '{}'.format(field.name)
value = item['value']
if field_type in validators:
validator = validators[field_type]
result = validator(var_name, value)
elif field_type == CustomProfileField.CHOICE:
choice_field_validator = CustomProfileField.CHOICE_FIELD_VALIDATORS[field_type]
field_data = field.field_data
result = choice_field_validator(var_name, field_data, value)
elif field_type == CustomProfileField.USER:
user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
result = user_field_validator(realm_id, cast(List[int], value),
False)
else:
raise AssertionError("Invalid field type")
if result is not None:
raise JsonableError(result)
| [
"str",
"int",
"str",
"str",
"str",
"Dict[str, str]",
"str",
"UserProfile",
"UserProfile",
"int",
"UserProfile",
"int",
"Optional[int]",
"List[str]",
"Optional[Realm]",
"List[str]",
"List[int]",
"Realm",
"List[int]",
"UserProfile",
"int",
"UserProfile",
"int",
"str",
"UserProfile",
"UserProfile",
"int",
"List[Dict[str, Union[int, str, List[int]]]]"
] | [
645,
1356,
1372,
1650,
1851,
1869,
2563,
2582,
2979,
3002,
3614,
3637,
3805,
3976,
3994,
4724,
5837,
5855,
6019,
6921,
6943,
7367,
7389,
8014,
8718,
8807,
8963,
9020
] | [
648,
1359,
1375,
1653,
1854,
1883,
2566,
2593,
2990,
3005,
3625,
3640,
3818,
3985,
4009,
4733,
5846,
5860,
6028,
6932,
6946,
7378,
7392,
8017,
8729,
8818,
8966,
9063
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/utils.py | # -*- coding: utf-8 -*-
from typing import Any, Callable, List, Optional, Sequence, TypeVar, Iterable, Set, Tuple
import base64
import errno
import hashlib
import heapq
import itertools
import os
import string
import sys
from time import sleep
from itertools import zip_longest
from django.conf import settings
T = TypeVar('T')
def statsd_key(val: Any, clean_periods: bool=False) -> str:
if not isinstance(val, str):
val = str(val)
if ':' in val:
val = val.split(':')[0]
val = val.replace('-', "_")
if clean_periods:
val = val.replace('.', '_')
return val
class StatsDWrapper:
"""Transparently either submit metrics to statsd
or do nothing without erroring out"""
    # Backported support for gauge deltas,
    # as our statsd server supports them, but a pystatsd
    # release with that support has not shipped yet
def _our_gauge(self, stat: str, value: float, rate: float=1, delta: bool=False) -> None:
"""Set a gauge value."""
from django_statsd.clients import statsd
if delta:
value_str = '%+g|g' % (value,)
else:
value_str = '%g|g' % (value,)
statsd._send(stat, value_str, rate)
def __getattr__(self, name: str) -> Any:
# Hand off to statsd if we have it enabled
# otherwise do nothing
if name in ['timer', 'timing', 'incr', 'decr', 'gauge']:
if settings.STATSD_HOST != '':
from django_statsd.clients import statsd
if name == 'gauge':
return self._our_gauge
else:
return getattr(statsd, name)
else:
return lambda *args, **kwargs: None
raise AttributeError
statsd = StatsDWrapper()
# Runs the callback with slices of all_list of a given batch_size
def run_in_batches(all_list: Sequence[T],
batch_size: int,
callback: Callable[[Sequence[T]], None],
sleep_time: int=0,
logger: Optional[Callable[[str], None]]=None) -> None:
if len(all_list) == 0:
return
    limit = (len(all_list) + batch_size - 1) // batch_size  # ceiling division, to avoid a trailing empty batch
for i in range(limit):
start = i*batch_size
end = (i+1) * batch_size
if end >= len(all_list):
end = len(all_list)
batch = all_list[start:end]
if logger:
logger("Executing %s in batch %s of %s" % (end-start, i+1, limit))
callback(batch)
if i != limit - 1:
sleep(sleep_time)
def make_safe_digest(string: str,
hash_func: Callable[[bytes], Any]=hashlib.sha1) -> str:
"""
return a hex digest of `string`.
"""
# hashlib.sha1, md5, etc. expect bytes, so non-ASCII strings must
# be encoded.
return hash_func(string.encode('utf-8')).hexdigest()
def log_statsd_event(name: str) -> None:
"""
Sends a single event to statsd with the desired name and the current timestamp
This can be used to provide vertical lines in generated graphs,
for example when doing a prod deploy, bankruptcy request, or
other one-off events
Note that to draw this event as a vertical line in graphite
you can use the drawAsInfinite() command
"""
event_name = "events.%s" % (name,)
statsd.incr(event_name)
def generate_random_token(length: int) -> str:
return str(base64.b16encode(os.urandom(length // 2)).decode('utf-8').lower())
def generate_api_key() -> str:
choices = string.ascii_letters + string.digits
altchars = ''.join([choices[ord(os.urandom(1)) % 62] for _ in range(2)]).encode("utf-8")
api_key = base64.b64encode(os.urandom(24), altchars=altchars).decode("utf-8")
return api_key
def query_chunker(queries: List[Any],
id_collector: Optional[Set[int]]=None,
chunk_size: int=1000,
db_chunk_size: Optional[int]=None) -> Iterable[Any]:
'''
This merges one or more Django ascending-id queries into
a generator that returns chunks of chunk_size row objects
    during each yield, preserving id order across all results.
Queries should satisfy these conditions:
- They should be Django filters.
- They should return Django objects with "id" attributes.
- They should be disjoint.
The generator also populates id_collector, which we use
internally to enforce unique ids, but which the caller
can pass in to us if they want the side effect of collecting
all ids.
'''
if db_chunk_size is None:
db_chunk_size = chunk_size // len(queries)
assert db_chunk_size >= 2
assert chunk_size >= 2
if id_collector is not None:
        assert len(id_collector) == 0
else:
id_collector = set()
def chunkify(q: Any, i: int) -> Iterable[Tuple[int, int, Any]]:
q = q.order_by('id')
min_id = -1
while True:
assert db_chunk_size is not None # Hint for mypy, but also workaround for mypy bug #3442.
rows = list(q.filter(id__gt=min_id)[0:db_chunk_size])
if len(rows) == 0:
break
for row in rows:
yield (row.id, i, row)
min_id = rows[-1].id
iterators = [chunkify(q, i) for i, q in enumerate(queries)]
merged_query = heapq.merge(*iterators)
while True:
tup_chunk = list(itertools.islice(merged_query, 0, chunk_size))
if len(tup_chunk) == 0:
break
# Do duplicate-id management here.
tup_ids = set([tup[0] for tup in tup_chunk])
assert len(tup_ids) == len(tup_chunk)
assert len(tup_ids.intersection(id_collector)) == 0
id_collector.update(tup_ids)
yield [row for row_id, i, row in tup_chunk]
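# Hedged usage sketch (model and filters are illustrative, not part of
# this module; the two queries must be disjoint):
#     all_ids = set()  # type: Set[int]
#     bots = UserProfile.objects.filter(is_bot=True)
#     humans = UserProfile.objects.filter(is_bot=False)
#     for chunk in query_chunker([bots, humans], id_collector=all_ids):
#         ...  # each chunk is a list of rows in ascending id order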
def process_list_in_batches(lst: List[Any],
chunk_size: int,
process_batch: Callable[[List[Any]], None]) -> None:
offset = 0
while True:
items = lst[offset:offset+chunk_size]
if not items:
break
process_batch(items)
offset += chunk_size
def split_by(array: List[Any], group_size: int, filler: Any) -> List[List[Any]]:
"""
Group elements into list of size `group_size` and fill empty cells with
`filler`. Recipe from https://docs.python.org/3/library/itertools.html
"""
args = [iter(array)] * group_size
return list(map(list, zip_longest(*args, fillvalue=filler)))
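# Illustrative call (hedged; traced by hand through the recipe above):
#     split_by([1, 2, 3, 4, 5], 2, None)  ->  [[1, 2], [3, 4], [5, None]]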
def is_remote_server(identifier: str) -> bool:
"""
    This function can be used to identify the source of an API auth
    request. There are two types of sources: remote Zulip servers
    and UserProfiles.
"""
return "@" not in identifier
| [
"Any",
"str",
"float",
"str",
"Sequence[T]",
"int",
"Callable[[Sequence[T]], None]",
"str",
"str",
"int",
"List[Any]",
"Any",
"int",
"List[Any]",
"int",
"Callable[[List[Any]], None]",
"List[Any]",
"int",
"Any",
"str"
] | [
352,
888,
900,
1254,
1886,
1930,
1964,
2598,
2907,
3391,
3791,
4831,
4839,
5845,
5896,
5944,
6179,
6202,
6215,
6544
] | [
355,
891,
905,
1257,
1897,
1933,
1993,
2601,
2910,
3394,
3800,
4834,
4842,
5854,
5899,
5971,
6188,
6205,
6218,
6547
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/validator.py | '''
This module sets up a scheme for validating that arbitrary Python
objects are correctly typed. It is totally decoupled from Django,
composable, easily wrapped, and easily extended.
A validator takes two parameters--var_name and val--and returns an
error if val is not the correct type. The var_name parameter is used
to format error messages. Validators return None when there are no errors.
Example primitive validators are check_string, check_int, and check_bool.
Compound validators are created by check_list and check_dict. Note that
those functions aren't directly called for validation; instead, those
functions are called to return other functions that adhere to the validator
contract. This is similar to how Python decorators are often parameterized.
The contract for check_list and check_dict is that they get passed in other
validators to apply to their items. This allows you to build up validators
for arbitrarily complex validators. See ValidatorTestCase for example usage.
A simple example of composition is this:
check_list(check_string)('my_list', ['a', 'b', 'c']) is None
To extend this concept, it's simply a matter of writing your own validator
for any particular type of object.
'''
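# A slightly fuller illustration of composition (hedged sketch):
#     validator = check_dict([('name', check_string), ('age', check_int)])
#     validator('person', {'name': 'Hamlet', 'age': 30})  # -> None
#     validator('person', {'name': 'Hamlet'})  # -> 'age key is missing from person'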
import ujson
from django.utils.translation import ugettext as _
from django.core.exceptions import ValidationError
from django.core.validators import validate_email, URLValidator
from typing import Callable, Iterable, Optional, Tuple, TypeVar, cast, \
Dict
from datetime import datetime
from zerver.lib.request import JsonableError
from zerver.lib.types import Validator, ProfileFieldData
def check_string(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, str):
return _('%s is not a string') % (var_name,)
return None
def check_required_string(var_name: str, val: object) -> Optional[str]:
error = check_string(var_name, val)
if error:
return error
val = cast(str, val)
if not val.strip():
return _("{item} cannot be blank.").format(item=var_name)
return None
def check_short_string(var_name: str, val: object) -> Optional[str]:
return check_capped_string(50)(var_name, val)
def check_capped_string(max_length: int) -> Validator:
def validator(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, str):
return _('%s is not a string') % (var_name,)
if len(val) > max_length:
return _("{var_name} is too long (limit: {max_length} characters)".format(
var_name=var_name, max_length=max_length))
return None
return validator
def check_string_fixed_length(length: int) -> Validator:
def validator(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, str):
return _('%s is not a string') % (var_name,)
if len(val) != length:
return _("{var_name} has incorrect length {length}; should be {target_length}".format(
var_name=var_name, target_length=length, length=len(val)))
return None
return validator
def check_long_string(var_name: str, val: object) -> Optional[str]:
return check_capped_string(500)(var_name, val)
def check_date(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, str):
return _('%s is not a string') % (var_name,)
try:
datetime.strptime(val, '%Y-%m-%d')
except ValueError:
return _('%s is not a date') % (var_name,)
return None
def check_int(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, int):
return _('%s is not an integer') % (var_name,)
return None
def check_float(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, float):
return _('%s is not a float') % (var_name,)
return None
def check_bool(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, bool):
return _('%s is not a boolean') % (var_name,)
return None
def check_none_or(sub_validator: Validator) -> Validator:
def f(var_name: str, val: object) -> Optional[str]:
if val is None:
return None
else:
return sub_validator(var_name, val)
return f
def check_list(sub_validator: Optional[Validator], length: Optional[int]=None) -> Validator:
def f(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, list):
return _('%s is not a list') % (var_name,)
if length is not None and length != len(val):
return (_('%(container)s should have exactly %(length)s items') %
{'container': var_name, 'length': length})
if sub_validator:
for i, item in enumerate(val):
vname = '%s[%d]' % (var_name, i)
error = sub_validator(vname, item)
if error:
return error
return None
return f
def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[],
value_validator: Optional[Validator]=None,
_allow_only_listed_keys: bool=False) -> Validator:
def f(var_name: str, val: object) -> Optional[str]:
if not isinstance(val, dict):
return _('%s is not a dict') % (var_name,)
for k, sub_validator in required_keys:
if k not in val:
return (_('%(key_name)s key is missing from %(var_name)s') %
{'key_name': k, 'var_name': var_name})
vname = '%s["%s"]' % (var_name, k)
error = sub_validator(vname, val[k])
if error:
return error
if value_validator:
for key in val:
vname = '%s contains a value that' % (var_name,)
error = value_validator(vname, val[key])
if error:
return error
if _allow_only_listed_keys:
delta_keys = set(val.keys()) - set(x[0] for x in required_keys)
if len(delta_keys) != 0:
return _("Unexpected arguments: %s" % (", ".join(list(delta_keys))))
return None
return f
def check_dict_only(required_keys: Iterable[Tuple[str, Validator]]) -> Validator:
return check_dict(required_keys, _allow_only_listed_keys=True)
def check_variable_type(allowed_type_funcs: Iterable[Validator]) -> Validator:
"""
Use this validator if an argument is of a variable type (e.g. processing
properties that might be strings or booleans).
`allowed_type_funcs`: the check_* validator functions for the possible data
types for this variable.
"""
def enumerated_type_check(var_name: str, val: object) -> Optional[str]:
for func in allowed_type_funcs:
if not func(var_name, val):
return None
return _('%s is not an allowed_type') % (var_name,)
return enumerated_type_check
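# Illustrative sketch, not part of the original module: these validators
# compose, and each returns None on success or an error string on failure.
# The schema below is hypothetical.
def _example_team_validator(val: object) -> Optional[str]:
    person_validator = check_dict([
        ('name', check_required_string),
        ('age', check_int),
    ])
    team_validator = check_dict_only([
        ('team', check_string),
        ('members', check_list(person_validator)),
    ])
    return team_validator('team', val)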
def equals(expected_val: object) -> Validator:
def f(var_name: str, val: object) -> Optional[str]:
if val != expected_val:
return (_('%(variable)s != %(expected_value)s (%(value)s is wrong)') %
{'variable': var_name,
'expected_value': expected_val,
'value': val})
return None
return f
def validate_login_email(email: str) -> None:
try:
validate_email(email)
except ValidationError as err:
raise JsonableError(str(err.message))
def check_url(var_name: str, val: object) -> Optional[str]:
# First, ensure val is a string
string_msg = check_string(var_name, val)
if string_msg is not None:
return string_msg
# Now, validate as URL
validate = URLValidator()
try:
validate(val)
return None
except ValidationError:
return _('%s is not a URL') % (var_name,)
def validate_field_data(field_data: ProfileFieldData) -> Optional[str]:
"""
This function is used to validate the data sent to the server while
creating/editing choices of the choice field in Organization settings.
"""
validator = check_dict_only([
('text', check_required_string),
('order', check_required_string),
])
for key, value in field_data.items():
if not key.strip():
return _("'{item}' cannot be blank.").format(item='value')
error = validator('field_data', value)
if error:
return error
return None
def validate_choice_field(var_name: str, field_data: str, value: object) -> Optional[str]:
"""
This function is used to validate the value selected by the user against a
choice field. This is not used to validate admin data.
"""
field_data_dict = ujson.loads(field_data)
if value not in field_data_dict:
msg = _("'{value}' is not a valid choice for '{field_name}'.")
return msg.format(value=value, field_name=var_name)
def check_widget_content(widget_content: object) -> Optional[str]:
if not isinstance(widget_content, dict):
return 'widget_content is not a dict'
if 'widget_type' not in widget_content:
return 'widget_type is not in widget_content'
if 'extra_data' not in widget_content:
return 'extra_data is not in widget_content'
widget_type = widget_content['widget_type']
extra_data = widget_content['extra_data']
if not isinstance(extra_data, dict):
return 'extra_data is not a dict'
if widget_type == 'zform':
if 'type' not in extra_data:
return 'zform is missing type field'
if extra_data['type'] == 'choices':
check_choices = check_list(
check_dict([
('short_name', check_string),
('long_name', check_string),
('reply', check_string),
]),
)
checker = check_dict([
('heading', check_string),
('choices', check_choices),
])
msg = checker('extra_data', extra_data)
if msg:
return msg
return None
return 'unknown zform type: ' + extra_data['type']
return 'unknown widget type: ' + widget_type
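# Illustrative sketch, not part of the original module: a minimal payload that
# check_widget_content() accepts (the values are hypothetical).
def _example_widget_content() -> Optional[str]:
    widget_content = {
        'widget_type': 'zform',
        'extra_data': {
            'type': 'choices',
            'heading': 'Survey',
            'choices': [
                {'short_name': 'y', 'long_name': 'Yes', 'reply': 'yes'},
            ],
        },
    }
    return check_widget_content(widget_content)  # None means the payload is valid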
| [
"str",
"object",
"str",
"object",
"str",
"object",
"int",
"str",
"object",
"int",
"str",
"object",
"str",
"object",
"str",
"object",
"str",
"object",
"str",
"object",
"str",
"object",
"Validator",
"str",
"object",
"Optional[Validator]",
"str",
"object",
"str",
"object",
"Iterable[Tuple[str, Validator]]",
"Iterable[Validator]",
"str",
"object",
"object",
"str",
"object",
"str",
"str",
"object",
"ProfileFieldData",
"str",
"str",
"object",
"object"
] | [
1647,
1657,
1822,
1832,
2100,
2110,
2223,
2270,
2280,
2660,
2707,
2717,
3116,
3126,
3229,
3239,
3518,
3528,
3685,
3695,
3850,
3860,
4024,
4069,
4079,
4259,
4342,
4352,
5141,
5151,
6178,
6337,
6666,
6676,
6929,
6971,
6981,
7320,
7479,
7489,
7876,
8486,
8503,
8515,
8942
] | [
1650,
1663,
1825,
1838,
2103,
2116,
2226,
2273,
2286,
2663,
2710,
2723,
3119,
3132,
3232,
3245,
3521,
3534,
3688,
3701,
3853,
3866,
4033,
4072,
4085,
4278,
4345,
4358,
5144,
5157,
6209,
6356,
6669,
6682,
6935,
6974,
6987,
7323,
7482,
7495,
7892,
8489,
8506,
8521,
8948
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/webhooks/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/webhooks/common.py | from urllib.parse import unquote
from django.conf import settings
from django.http import HttpRequest
from django.utils.translation import ugettext as _
from typing import Optional
from zerver.lib.actions import check_send_stream_message, \
check_send_private_message, send_rate_limited_pm_notification_to_bot_owner
from zerver.lib.exceptions import StreamDoesNotExistError, JsonableError, \
ErrorCode
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.send_email import FromAddress
from zerver.models import UserProfile, get_system_bot
MISSING_EVENT_HEADER_MESSAGE = """
Hi there! Your bot {bot_name} just sent an HTTP request to {request_path} that
is missing the HTTP {header_name} header. Because this header is how
{integration_name} indicates the event type, this usually indicates a configuration
issue, where you either entered the URL for a different integration, or are running
an older version of the third-party service that doesn't provide that header.
Contact {support_email} if you need help debugging!
"""
INVALID_JSON_MESSAGE = """
Hi there! It looks like you tried to set up the Zulip {webhook_name} integration,
but didn't correctly configure the webhook to send data in the JSON format
that this integration expects!
"""
# Django prefixes all custom HTTP headers with `HTTP_`
DJANGO_HTTP_PREFIX = "HTTP_"
def notify_bot_owner_about_invalid_json(user_profile: UserProfile,
webhook_client_name: str) -> None:
send_rate_limited_pm_notification_to_bot_owner(
user_profile, user_profile.realm,
INVALID_JSON_MESSAGE.format(webhook_name=webhook_client_name).strip()
)
class UnexpectedWebhookEventType(JsonableError):
code = ErrorCode.UNEXPECTED_WEBHOOK_EVENT_TYPE
data_fields = ['webhook_name', 'event_type']
def __init__(self, webhook_name: str, event_type: Optional[str]) -> None:
self.webhook_name = webhook_name
self.event_type = event_type
@staticmethod
def msg_format() -> str:
return _("The '{event_type}' event isn't currently supported by the {webhook_name} webhook")
class MissingHTTPEventHeader(JsonableError):
code = ErrorCode.MISSING_HTTP_EVENT_HEADER
data_fields = ['header']
def __init__(self, header: str) -> None:
self.header = header
@staticmethod
def msg_format() -> str:
return _("Missing the HTTP event header '{header}'")
@has_request_variables
def check_send_webhook_message(
request: HttpRequest, user_profile: UserProfile,
topic: str, body: str, stream: Optional[str]=REQ(default=None),
user_specified_topic: Optional[str]=REQ("topic", default=None),
unquote_stream: Optional[bool]=False
) -> None:
if stream is None:
assert user_profile.bot_owner is not None
check_send_private_message(user_profile, request.client,
user_profile.bot_owner, body)
else:
        # Some third-party websites (such as Atlassian's JIRA) double-escape
        # their URLs, so that escaped space characters (%20) are never
        # properly decoded. We work around that by making sure the stream
        # name is decoded on our end.
if unquote_stream:
stream = unquote(stream)
if user_specified_topic is not None:
topic = user_specified_topic
try:
check_send_stream_message(user_profile, request.client,
stream, topic, body)
except StreamDoesNotExistError:
# A PM will be sent to the bot_owner by check_message, notifying
# that the webhook bot just tried to send a message to a non-existent
# stream, so we don't need to re-raise it since it clutters up
# webhook-errors.log
pass
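# Illustrative sketch, not part of this module: a typical incoming-webhook view
# delegates message delivery to check_send_webhook_message(). The view name,
# decorator argument, and payload fields below are hypothetical.
#
# @api_key_only_webhook_view('Example')
# @has_request_variables
# def api_example_webhook(request: HttpRequest, user_profile: UserProfile,
#                         payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
#     topic = payload['repository']['name']
#     body = payload['description']
#     check_send_webhook_message(request, user_profile, topic, body)
#     return json_success()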
def validate_extract_webhook_http_header(request: HttpRequest, header: str,
integration_name: str) -> str:
extracted_header = request.META.get(DJANGO_HTTP_PREFIX + header)
if extracted_header is None:
message_body = MISSING_EVENT_HEADER_MESSAGE.format(
bot_name=request.user.full_name,
request_path=request.path,
header_name=header,
integration_name=integration_name,
support_email=FromAddress.SUPPORT,
)
send_rate_limited_pm_notification_to_bot_owner(
request.user, request.user.realm, message_body)
raise MissingHTTPEventHeader(header)
return extracted_header
| [
"UserProfile",
"str",
"str",
"Optional[str]",
"str",
"HttpRequest",
"UserProfile",
"str",
"str",
"HttpRequest",
"str",
"str"
] | [
1418,
1492,
1872,
1889,
2294,
2519,
2546,
2574,
2585,
3922,
3943,
4007
] | [
1429,
1495,
1875,
1902,
2297,
2530,
2557,
2577,
2588,
3933,
3946,
4010
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/webhooks/git.py | from typing import Optional, Any, Dict, List, Tuple
from collections import defaultdict
TOPIC_WITH_BRANCH_TEMPLATE = '{repo} / {branch}'
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE = '{repo} / {type} #{id} {title}'
EMPTY_SHA = '0000000000000000000000000000000000000000'
COMMITS_LIMIT = 20
COMMIT_ROW_TEMPLATE = '* {commit_msg} ([{commit_short_sha}]({commit_url}))\n'
COMMITS_MORE_THAN_LIMIT_TEMPLATE = "[and {commits_number} more commit(s)]"
COMMIT_OR_COMMITS = "commit{}"
PUSH_PUSHED_TEXT_WITH_URL = "[pushed]({compare_url}) {number_of_commits} {commit_or_commits}"
PUSH_PUSHED_TEXT_WITHOUT_URL = "pushed {number_of_commits} {commit_or_commits}"
PUSH_COMMITS_BASE = '{user_name} {pushed_text} to branch {branch_name}.'
PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS = PUSH_COMMITS_BASE + """ {committers_details}.
{commits_data}
"""
PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS = PUSH_COMMITS_BASE + """
{commits_data}
"""
PUSH_DELETE_BRANCH_MESSAGE_TEMPLATE = "{user_name} [deleted]({compare_url}) the branch {branch_name}."
PUSH_LOCAL_BRANCH_WITHOUT_COMMITS_MESSAGE_TEMPLATE = ("{user_name} [pushed]({compare_url}) "
"the branch {branch_name}.")
PUSH_COMMITS_MESSAGE_EXTENSION = "Commits by {}"
PUSH_COMMITTERS_LIMIT_INFO = 3
FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE = ("{user_name} [force pushed]({url}) "
"to branch {branch_name}. Head is now {head}")
CREATE_BRANCH_MESSAGE_TEMPLATE = "{user_name} created [{branch_name}]({url}) branch"
REMOVE_BRANCH_MESSAGE_TEMPLATE = "{user_name} deleted branch {branch_name}"
PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE = "{user_name} {action} [{type}{id}]({url})"
PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE_WITH_TITLE = "{user_name} {action} [{type}{id} {title}]({url})"
PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE = "(assigned to {assignee})"
PULL_REQUEST_BRANCH_INFO_TEMPLATE = "\nfrom `{target}` to `{base}`"
SETUP_MESSAGE_TEMPLATE = "{integration} webhook has been successfully configured"
SETUP_MESSAGE_USER_PART = " by {user_name}"
CONTENT_MESSAGE_TEMPLATE = "\n~~~ quote\n{message}\n~~~"
COMMITS_COMMENT_MESSAGE_TEMPLATE = "{user_name} {action} on [{sha}]({url})"
PUSH_TAGS_MESSAGE_TEMPLATE = """{user_name} {action} tag {tag}"""
TAG_WITH_URL_TEMPLATE = "[{tag_name}]({tag_url})"
TAG_WITHOUT_URL_TEMPLATE = "{tag_name}"
def get_push_commits_event_message(user_name: str, compare_url: Optional[str],
branch_name: str, commits_data: List[Dict[str, Any]],
is_truncated: Optional[bool]=False,
deleted: Optional[bool]=False) -> str:
if not commits_data and deleted:
return PUSH_DELETE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
compare_url=compare_url,
branch_name=branch_name
)
if not commits_data and not deleted:
return PUSH_LOCAL_BRANCH_WITHOUT_COMMITS_MESSAGE_TEMPLATE.format(
user_name=user_name,
compare_url=compare_url,
branch_name=branch_name
)
pushed_message_template = PUSH_PUSHED_TEXT_WITH_URL if compare_url else PUSH_PUSHED_TEXT_WITHOUT_URL
pushed_text_message = pushed_message_template.format(
compare_url=compare_url,
number_of_commits=len(commits_data),
commit_or_commits=COMMIT_OR_COMMITS.format('s' if len(commits_data) > 1 else ''))
committers_items = get_all_committers(commits_data) # type: List[Tuple[str, int]]
if len(committers_items) == 1 and user_name == committers_items[0][0]:
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
else:
committers_details = "{} ({})".format(*committers_items[0])
for name, number_of_commits in committers_items[1:-1]:
committers_details = "{}, {} ({})".format(committers_details, name, number_of_commits)
if len(committers_items) > 1:
committers_details = "{} and {} ({})".format(committers_details, *committers_items[-1])
return PUSH_COMMITS_MESSAGE_TEMPLATE_WITH_COMMITTERS.format(
user_name=user_name,
pushed_text=pushed_text_message,
branch_name=branch_name,
committers_details=PUSH_COMMITS_MESSAGE_EXTENSION.format(committers_details),
commits_data=get_commits_content(commits_data, is_truncated),
).rstrip()
def get_force_push_commits_event_message(user_name: str, url: str, branch_name: str, head: str) -> str:
return FORCE_PUSH_COMMITS_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
head=head
)
def get_create_branch_event_message(user_name: str, url: str, branch_name: str) -> str:
return CREATE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
url=url,
branch_name=branch_name,
)
def get_remove_branch_event_message(user_name: str, branch_name: str) -> str:
return REMOVE_BRANCH_MESSAGE_TEMPLATE.format(
user_name=user_name,
branch_name=branch_name,
)
def get_pull_request_event_message(user_name: str, action: str, url: str, number: Optional[int]=None,
target_branch: Optional[str]=None, base_branch: Optional[str]=None,
message: Optional[str]=None, assignee: Optional[str]=None,
type: Optional[str]='PR', title: Optional[str]=None) -> str:
kwargs = {
'user_name': user_name,
'action': action,
'type': type,
'url': url,
'id': ' #{}'.format(number) if number is not None else '',
'title': title,
}
if title is not None:
main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE_WITH_TITLE.format(**kwargs)
else:
main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE.format(**kwargs)
if assignee:
main_message += PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE.format(assignee=assignee)
if target_branch and base_branch:
main_message += PULL_REQUEST_BRANCH_INFO_TEMPLATE.format(
target=target_branch,
base=base_branch
)
if message:
main_message += '\n' + CONTENT_MESSAGE_TEMPLATE.format(message=message)
return main_message.rstrip()
def get_setup_webhook_message(integration: str, user_name: Optional[str]=None) -> str:
content = SETUP_MESSAGE_TEMPLATE.format(integration=integration)
if user_name:
content += SETUP_MESSAGE_USER_PART.format(user_name=user_name)
return content
def get_issue_event_message(user_name: str,
action: str,
url: str,
number: Optional[int]=None,
message: Optional[str]=None,
assignee: Optional[str]=None,
title: Optional[str]=None) -> str:
return get_pull_request_event_message(
user_name,
action,
url,
number,
message=message,
assignee=assignee,
type='Issue',
title=title,
)
def get_push_tag_event_message(user_name: str,
tag_name: str,
tag_url: Optional[str]=None,
action: Optional[str]='pushed') -> str:
if tag_url:
tag_part = TAG_WITH_URL_TEMPLATE.format(tag_name=tag_name, tag_url=tag_url)
else:
tag_part = TAG_WITHOUT_URL_TEMPLATE.format(tag_name=tag_name)
return PUSH_TAGS_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
tag=tag_part
)
def get_commits_comment_action_message(user_name: str,
action: str,
commit_url: str,
sha: str,
message: Optional[str]=None) -> str:
content = COMMITS_COMMENT_MESSAGE_TEMPLATE.format(
user_name=user_name,
action=action,
sha=get_short_sha(sha),
url=commit_url
)
if message is not None:
content += CONTENT_MESSAGE_TEMPLATE.format(
message=message
)
return content
def get_commits_content(commits_data: List[Dict[str, Any]], is_truncated: Optional[bool]=False) -> str:
commits_content = ''
for commit in commits_data[:COMMITS_LIMIT]:
commits_content += COMMIT_ROW_TEMPLATE.format(
commit_short_sha=get_short_sha(commit['sha']),
commit_url=commit.get('url'),
commit_msg=commit['message'].partition('\n')[0]
)
if len(commits_data) > COMMITS_LIMIT:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=len(commits_data) - COMMITS_LIMIT
)
elif is_truncated:
commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
commits_number=''
        ).replace('  ', ' ')
return commits_content.rstrip()
def get_short_sha(sha: str) -> str:
return sha[:7]
def get_all_committers(commits_data: List[Dict[str, Any]]) -> List[Tuple[str, int]]:
committers = defaultdict(int) # type: Dict[str, int]
for commit in commits_data:
committers[commit['name']] += 1
# Sort by commit count, breaking ties alphabetically.
committers_items = sorted(list(committers.items()),
key=lambda item: (-item[1], item[0])) # type: List[Tuple[str, int]]
committers_values = [c_i[1] for c_i in committers_items] # type: List[int]
if len(committers) > PUSH_COMMITTERS_LIMIT_INFO:
others_number_of_commits = sum(committers_values[PUSH_COMMITTERS_LIMIT_INFO:])
committers_items = committers_items[:PUSH_COMMITTERS_LIMIT_INFO]
committers_items.append(('others', others_number_of_commits))
return committers_items
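# Illustrative sketch, not part of the original module: rendering a push event.
# The commit dicts are hypothetical but follow the shape this module expects
# ('name', 'sha', 'url', 'message').
def _example_push_message() -> str:
    commits = [
        {'name': 'hamlet', 'sha': 40 * 'a',
         'url': 'https://example.com/commit/1', 'message': 'Fix a bug.\n'},
        {'name': 'hamlet', 'sha': 40 * 'b',
         'url': 'https://example.com/commit/2', 'message': 'Add a test.\n'},
    ]
    # -> 'hamlet [pushed](https://example.com/compare/a...b) 2 commits to branch master.' ...
    return get_push_commits_event_message(
        'hamlet', 'https://example.com/compare/a...b', 'master', commits)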
| [
"str",
"Optional[str]",
"str",
"List[Dict[str, Any]]",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"List[Dict[str, Any]]",
"str",
"List[Dict[str, Any]]"
] | [
2406,
2424,
2487,
2506,
4694,
4704,
4722,
4733,
4952,
4962,
4980,
5176,
5194,
5372,
5385,
5395,
6603,
6864,
6905,
6943,
7433,
7479,
7971,
8023,
8079,
8128,
8553,
9310,
9380
] | [
2409,
2437,
2490,
2526,
4697,
4707,
4725,
4736,
4955,
4965,
4983,
5179,
5197,
5375,
5388,
5398,
6606,
6867,
6908,
6946,
7436,
7482,
7974,
8026,
8082,
8131,
8573,
9313,
9400
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/widget.py | from typing import MutableMapping, Any, Optional, List, Tuple
from django.conf import settings
import re
import json
from zerver.models import SubMessage
def get_widget_data(content: str) -> Tuple[Optional[str], Optional[str]]:
valid_widget_types = ['tictactoe', 'poll', 'todo']
tokens = content.split(' ')
# tokens[0] will always exist
if tokens[0].startswith('/'):
widget_type = tokens[0][1:]
if widget_type in valid_widget_types:
extra_data = get_extra_data_from_widget_type(tokens, widget_type)
return widget_type, extra_data
return None, None
def get_extra_data_from_widget_type(tokens: List[str],
widget_type: Optional[str]) -> Any:
if widget_type == 'poll':
# This is used to extract the question from the poll command.
# The command '/poll question' will pre-set the question in the poll
question = ' '.join(tokens[1:])
if not question:
question = ''
extra_data = {'question': question}
return extra_data
return None
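# Illustrative sketch, not part of the original module: how message content
# maps to widget data.
def _example_widget_data() -> None:
    assert get_widget_data('/poll Lunch today?') == \
        ('poll', {'question': 'Lunch today?'})
    assert get_widget_data('hello world') == (None, None)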
def do_widget_post_save_actions(message: MutableMapping[str, Any]) -> None:
'''
This is experimental code that only works with the
webapp for now.
'''
if not settings.ALLOW_SUB_MESSAGES:
return
content = message['message'].content
sender_id = message['message'].sender_id
message_id = message['message'].id
widget_type = None
extra_data = None
widget_type, extra_data = get_widget_data(content)
widget_content = message.get('widget_content')
if widget_content is not None:
# Note that we validate this data in check_message,
# so we can trust it here.
widget_type = widget_content['widget_type']
extra_data = widget_content['extra_data']
if widget_type:
content = dict(
widget_type=widget_type,
extra_data=extra_data
)
submessage = SubMessage(
sender_id=sender_id,
message_id=message_id,
msg_type='widget',
content=json.dumps(content),
)
submessage.save()
message['submessages'] = SubMessage.get_raw_db_rows([message_id])
| [
"str",
"List[str]",
"Optional[str]",
"MutableMapping[str, Any]"
] | [
187,
659,
719,
1138
] | [
190,
668,
732,
1162
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/zcommand.py | from typing import Any, Dict
from django.utils.translation import ugettext as _
from zerver.models import UserProfile
from zerver.lib.actions import do_set_user_display_setting
from zerver.lib.exceptions import JsonableError
def process_zcommands(content: str, user_profile: UserProfile) -> Dict[str, Any]:
if not content.startswith('/'):
raise JsonableError(_('There should be a leading slash in the zcommand.'))
command = content[1:]
if command == 'ping':
ret = dict() # type: Dict[str, Any]
return ret
night_commands = ['night', 'dark']
day_commands = ['day', 'light']
if command in night_commands:
if user_profile.night_mode:
msg = 'You are still in night mode.'
else:
switch_command = day_commands[night_commands.index(command)]
msg = 'Changed to night mode! To revert night mode, type `/%s`.' % (switch_command,)
do_set_user_display_setting(user_profile, 'night_mode', True)
ret = dict(msg=msg)
return ret
if command in day_commands:
if user_profile.night_mode:
switch_command = night_commands[day_commands.index(command)]
msg = 'Changed to day mode! To revert day mode, type `/%s`.' % (switch_command,)
do_set_user_display_setting(user_profile, 'night_mode', False)
else:
msg = 'You are still in day mode.'
ret = dict(msg=msg)
return ret
raise JsonableError(_('No such command: %s') % (command,))
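# Illustrative sketch, not part of the original module: expected results for a
# hypothetical user_profile (these calls need a real UserProfile object).
#
# process_zcommands('/ping', user_profile)   # -> {}
# process_zcommands('/night', user_profile)  # -> {'msg': 'Changed to night mode! ...'}
# process_zcommands('ping', user_profile)    # raises JsonableError (missing leading slash)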
| [
"str",
"UserProfile"
] | [
258,
277
] | [
261,
288
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/lib/zephyr.py | import re
import traceback
import DNS
from zerver.lib.str_utils import NonBinaryStr, force_str
def compute_mit_user_fullname(email: NonBinaryStr) -> NonBinaryStr:
try:
# Input is either e.g. username@mit.edu or user|CROSSREALM.INVALID@mit.edu
match_user = re.match(r'^([a-zA-Z0-9_.-]+)(\|.+)?@mit\.edu$', email.lower())
if match_user and match_user.group(2) is None:
answer = DNS.dnslookup(
"%s.passwd.ns.athena.mit.edu" % (match_user.group(1),),
DNS.Type.TXT)
hesiod_name = force_str(answer[0][0]).split(':')[4].split(',')[0].strip()
if hesiod_name != "":
return hesiod_name
elif match_user:
return match_user.group(1).lower() + "@" + match_user.group(2).upper()[1:]
except DNS.Base.ServerError:
pass
except Exception:
print("Error getting fullname for %s:" % (email,))
traceback.print_exc()
return email.lower()
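# Illustrative sketch, not part of the original module: expected outcomes (the
# Hesiod path needs MIT's DNS, so only the non-lookup branches are shown).
#
# compute_mit_user_fullname('user|CROSSREALM.INVALID@mit.edu')
#     -> 'user@CROSSREALM.INVALID'
# compute_mit_user_fullname('someone@example.com')
#     -> 'someone@example.com'  (no @mit.edu match; falls through to email.lower())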
| [
"NonBinaryStr"
] | [
134
] | [
146
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/logging_handlers.py | # System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html
import logging
import platform
import os
import subprocess
import traceback
from typing import Any, Dict, Optional
from django.conf import settings
from django.core import mail
from django.http import HttpRequest
from django.utils.log import AdminEmailHandler
from django.views.debug import ExceptionReporter, get_exception_reporter_filter
from zerver.lib.logging_util import find_log_caller_module
from zerver.lib.queue import queue_json_publish
from version import ZULIP_VERSION
def try_git_describe() -> Optional[str]:
try: # nocoverage
return subprocess.check_output(
['git',
'--git-dir', os.path.join(os.path.dirname(__file__), '../.git'),
'describe', '--tags', '--always', '--dirty', '--long'],
stderr=subprocess.PIPE,
).strip().decode('utf-8')
except Exception: # nocoverage
return None
def add_deployment_metadata(report: Dict[str, Any]) -> None:
report['git_described'] = try_git_describe()
report['zulip_version_const'] = ZULIP_VERSION
version_path = os.path.join(os.path.dirname(__file__), '../version')
if os.path.exists(version_path):
report['zulip_version_file'] = open(version_path).read().strip() # nocoverage
def add_request_metadata(report: Dict[str, Any], request: HttpRequest) -> None:
report['has_request'] = True
report['path'] = request.path
report['method'] = request.method
    report['remote_addr'] = request.META.get('REMOTE_ADDR', None)
    report['query_string'] = request.META.get('QUERY_STRING', None)
    report['server_name'] = request.META.get('SERVER_NAME', None)
try:
from django.contrib.auth.models import AnonymousUser
user_profile = request.user
if isinstance(user_profile, AnonymousUser):
user_full_name = None
user_email = None
else:
user_full_name = user_profile.full_name
user_email = user_profile.email
except Exception:
# Unexpected exceptions here should be handled gracefully
traceback.print_exc()
user_full_name = None
user_email = None
report['user_email'] = user_email
report['user_full_name'] = user_full_name
exception_filter = get_exception_reporter_filter(request)
try:
report['data'] = request.GET if request.method == 'GET' else \
exception_filter.get_post_parameters(request)
except Exception:
# exception_filter.get_post_parameters will throw
# RequestDataTooBig if there's a really big file uploaded
report['data'] = {}
try:
report['host'] = request.get_host().split(':')[0]
except Exception:
# request.get_host() will throw a DisallowedHost
# exception if the host is invalid
report['host'] = platform.node()
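# Illustrative sketch, not part of the original module: how the helpers above
# are combined (the request object is hypothetical).
#
# report = {}  # type: Dict[str, Any]
# add_deployment_metadata(report)
# add_request_metadata(report, request)
# # report now carries keys like 'git_described', 'zulip_version_const',
# # 'has_request', 'path', 'method', 'user_email', and 'host'.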
class AdminNotifyHandler(logging.Handler):
"""An logging handler that sends the log/exception to the queue to be
turned into an email and/or a Zulip message for the server admins.
"""
# adapted in part from django/utils/log.py
def __init__(self) -> None:
logging.Handler.__init__(self)
def emit(self, record: logging.LogRecord) -> None:
report = {} # type: Dict[str, Any]
# This parameter determines whether Zulip should attempt to
# send Zulip messages containing the error report. If there's
# syntax that makes the markdown processor throw an exception,
# we really don't want to send that syntax into a new Zulip
# message in exception handler (that's the stuff of which
# recursive exception loops are made).
#
# We initialize is_bugdown_rendering_exception to `True` to
# prevent the infinite loop of zulip messages by ERROR_BOT if
# the outer try block here throws an exception before we have
# a chance to check the exception for whether it comes from
# bugdown.
is_bugdown_rendering_exception = True
try:
report['node'] = platform.node()
report['host'] = platform.node()
add_deployment_metadata(report)
if record.exc_info:
stack_trace = ''.join(traceback.format_exception(*record.exc_info))
message = str(record.exc_info[1])
from zerver.lib.exceptions import BugdownRenderingException
is_bugdown_rendering_exception = record.msg.startswith('Exception in Markdown parser')
else:
stack_trace = 'No stack trace available'
message = record.getMessage()
if '\n' in message:
# Some exception code paths in queue processors
# seem to result in super-long messages
stack_trace = message
message = message.split('\n')[0]
is_bugdown_rendering_exception = False
report['stack_trace'] = stack_trace
report['message'] = message
report['logger_name'] = record.name
report['log_module'] = find_log_caller_module(record)
report['log_lineno'] = record.lineno
if hasattr(record, "request"):
add_request_metadata(report, record.request) # type: ignore # record.request is added dynamically
except Exception:
report['message'] = "Exception in preparing exception report!"
logging.warning(report['message'], exc_info=True)
report['stack_trace'] = "See /var/log/zulip/errors.log"
if settings.DEBUG_ERROR_REPORTING: # nocoverage
logging.warning("Reporting an error to admins...")
logging.warning("Reporting an error to admins: {} {} {} {} {}" .format(
record.levelname, report['logger_name'], report['log_module'],
report['message'], report['stack_trace']))
try:
if settings.STAGING_ERROR_NOTIFICATIONS:
# On staging, process the report directly so it can happen inside this
# try/except to prevent looping
from zerver.lib.error_notify import notify_server_error
notify_server_error(report, is_bugdown_rendering_exception)
else:
queue_json_publish('error_reports', dict(
type = "server",
report = report,
))
except Exception:
# If this breaks, complain loudly but don't pass the traceback up the stream
# However, we *don't* want to use logging.exception since that could trigger a loop.
logging.warning("Reporting an exception triggered an exception!", exc_info=True)
| [
"Dict[str, Any]",
"Dict[str, Any]",
"HttpRequest",
"logging.LogRecord"
] | [
1005,
1361,
1386,
3261
] | [
1019,
1375,
1397,
3278
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/add_users_to_mailing_list.py |
import argparse
from datetime import datetime
from typing import Any
import requests
import ujson
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from zerver.models import UserProfile
class Command(BaseCommand):
help = """Add users to a MailChimp mailing list."""
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument('--api-key',
dest='api_key',
type=str,
help='MailChimp API key.')
parser.add_argument('--list-id',
dest='list_id',
type=str,
help='List ID of the MailChimp mailing list.')
parser.add_argument('--optin-time',
dest='optin_time',
type=str,
default=datetime.isoformat(timezone_now().replace(microsecond=0)),
help='Opt-in time of the users.')
def handle(self, *args: Any, **options: str) -> None:
if options['api_key'] is None:
try:
if settings.MAILCHIMP_API_KEY is None:
print('MAILCHIMP_API_KEY is None. Check your server settings file.')
exit(1)
options['api_key'] = settings.MAILCHIMP_API_KEY
except AttributeError:
print('Please supply a MailChimp API key to --api-key, or add a '
'MAILCHIMP_API_KEY to your server settings file.')
exit(1)
if options['list_id'] is None:
try:
if settings.ZULIP_FRIENDS_LIST_ID is None:
print('ZULIP_FRIENDS_LIST_ID is None. Check your server settings file.')
exit(1)
options['list_id'] = settings.ZULIP_FRIENDS_LIST_ID
except AttributeError:
print('Please supply a MailChimp List ID to --list-id, or add a '
'ZULIP_FRIENDS_LIST_ID to your server settings file.')
exit(1)
endpoint = "https://%s.api.mailchimp.com/3.0/lists/%s/members" % \
(options['api_key'].split('-')[1], options['list_id'])
for user in UserProfile.objects.filter(is_bot=False, is_active=True) \
.values('email', 'full_name', 'realm_id'):
data = {
'email_address': user['email'],
'list_id': options['list_id'],
'status': 'subscribed',
'merge_fields': {
'NAME': user['full_name'],
'REALM_ID': user['realm_id'],
'OPTIN_TIME': options['optin_time'],
},
}
r = requests.post(endpoint, auth=('apikey', options['api_key']), json=data, timeout=10)
if r.status_code == 400 and ujson.loads(r.text)['title'] == 'Member Exists':
print("%s is already a part of the list." % (data['email_address'],))
elif r.status_code >= 400:
print(r.text)
| [
"argparse.ArgumentParser",
"Any",
"str"
] | [
400,
1125,
1141
] | [
423,
1128,
1144
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/add_users_to_streams.py |
from typing import Any
from django.core.management.base import CommandParser
from zerver.lib.actions import bulk_add_subscriptions, ensure_stream
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Add some or all users in a realm to a set of streams."""
def add_arguments(self, parser: CommandParser) -> None:
self.add_realm_args(parser, True)
self.add_user_list_args(parser, all_users_help="Add all users in realm to these streams.")
parser.add_argument(
'-s', '--streams',
dest='streams',
type=str,
required=True,
help='A comma-separated list of stream names.')
def handle(self, **options: Any) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
user_profiles = self.get_users(options, realm)
stream_names = set([stream.strip() for stream in options["streams"].split(",")])
for stream_name in set(stream_names):
for user_profile in user_profiles:
stream = ensure_stream(realm, stream_name)
_ignore, already_subscribed = bulk_add_subscriptions([stream], [user_profile])
was_there_already = user_profile.id in {tup[0].id for tup in already_subscribed}
print("%s %s to %s" % (
"Already subscribed" if was_there_already else "Subscribed",
user_profile.email, stream_name))
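# Illustrative sketch, not part of the original module: a typical invocation,
# assuming ZulipBaseCommand's usual -r/-u flags (values are hypothetical).
#
#     ./manage.py add_users_to_streams -r zulip -u iago@zulip.com -s social,errors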
| [
"CommandParser",
"Any"
] | [
342,
738
] | [
355,
741
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/bankrupt_users.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import CommandError
from zerver.lib.actions import do_mark_all_as_read
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Message
class Command(ZulipBaseCommand):
help = """Bankrupt one or many users."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('emails', metavar='<email>', type=str, nargs='+',
help='email address to bankrupt')
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
for email in options['emails']:
try:
user_profile = self.get_user(email, realm)
except CommandError:
print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm))
continue
do_mark_all_as_read(user_profile, self.get_client())
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save(update_fields=["pointer"])
print("%s: %d => %d" % (email, old_pointer, new_pointer))
else:
print("%s has no messages, can't bankrupt!" % (email,))
| [
"ArgumentParser",
"Any",
"str"
] | [
367,
603,
619
] | [
381,
606,
622
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/bulk_change_user_name.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import CommandError
from zerver.lib.actions import do_change_full_name
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Change the names for many users."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('data_file', metavar='<data file>', type=str,
help="file containing rows of the form <email>,<desired name>")
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: str) -> None:
data_file = options['data_file']
realm = self.get_realm(options)
with open(data_file, "r") as f:
for line in f:
email, new_name = line.strip().split(",", 1)
try:
user_profile = self.get_user(email, realm)
old_name = user_profile.full_name
print("%s: %s -> %s" % (email, old_name, new_name))
do_change_full_name(user_profile, new_name, None)
except CommandError:
print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm))
| [
"ArgumentParser",
"Any",
"str"
] | [
338,
600,
616
] | [
352,
603,
619
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/change_user_email.py |
from argparse import ArgumentParser
from typing import Any
from zerver.lib.actions import do_change_user_email
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Change the email address for a user."""
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser)
parser.add_argument('old_email', metavar='<old email>', type=str,
help='email address to change')
parser.add_argument('new_email', metavar='<new email>', type=str,
help='new email address')
def handle(self, *args: Any, **options: str) -> None:
old_email = options['old_email']
new_email = options['new_email']
realm = self.get_realm(options)
user_profile = self.get_user(old_email, realm)
do_change_user_email(user_profile, new_email)
| [
"ArgumentParser",
"Any",
"str"
] | [
289,
641,
657
] | [
303,
644,
660
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/check_redis.py |
import logging
import time
from typing import Any, Callable, Optional
from django.conf import settings
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.rate_limiter import RateLimitedUser, \
client, max_api_calls, max_api_window
from zerver.models import get_user_profile_by_id
class Command(BaseCommand):
help = """Checks redis to make sure our rate limiting system hasn't grown a bug
and left redis with a bunch of data
Usage: ./manage.py [--trim] check_redis"""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('-t', '--trim',
dest='trim',
default=False,
action='store_true',
help="Actually trim excess")
def _check_within_range(self, key: str, count_func: Callable[[], int],
trim_func: Optional[Callable[[str, int], None]]=None) -> None:
user_id = int(key.split(':')[1])
user = get_user_profile_by_id(user_id)
entity = RateLimitedUser(user)
max_calls = max_api_calls(entity)
age = int(client.ttl(key))
if age < 0:
logging.error("Found key with age of %s, will never expire: %s" % (age, key,))
count = count_func()
if count > max_calls:
logging.error("Redis health check found key with more elements \
than max_api_calls! (trying to trim) %s %s" % (key, count))
if trim_func is not None:
client.expire(key, max_api_window(entity))
trim_func(key, max_calls)
def handle(self, *args: Any, **options: Any) -> None:
if not settings.RATE_LIMITING:
print("This machine is not using redis or rate limiting, aborting")
exit(1)
# Find all keys, and make sure they're all within size constraints
wildcard_list = "ratelimit:*:*:list"
wildcard_zset = "ratelimit:*:*:zset"
trim_func = lambda key, max_calls: client.ltrim(key, 0, max_calls - 1) # type: Optional[Callable[[str, int], None]]
if not options['trim']:
trim_func = None
lists = client.keys(wildcard_list)
for list_name in lists:
self._check_within_range(list_name,
lambda: client.llen(list_name),
trim_func)
zsets = client.keys(wildcard_zset)
for zset in zsets:
now = time.time()
# We can warn on our zset being too large, but we don't know what
# elements to trim. We'd have to go through every list item and take
# the intersection. The best we can do is expire it
self._check_within_range(zset,
lambda: client.zcount(zset, 0, now),
lambda key, max_calls: None)
| [
"CommandParser",
"str",
"Callable[[], int]",
"Any",
"Any"
] | [
557,
855,
872,
1663,
1679
] | [
570,
858,
889,
1666,
1682
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/checkconfig.py |
import sys
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from zerver.lib.management import check_config
class Command(BaseCommand):
help = """Checks your Zulip Voyager Django configuration for issues."""
def handle(self, *args: Any, **options: Any) -> None:
check_config()
| [
"Any",
"Any"
] | [
303,
319
] | [
306,
322
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/compilemessages.py |
import json
import os
import polib
import re
import ujson
from subprocess import CalledProcessError, check_output
from typing import Any, Dict, List
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.core.management.base import CommandParser
from django.core.management.commands import compilemessages
from django.utils.translation.trans_real import to_language
from zerver.lib.i18n import with_language
class Command(compilemessages.Command):
def add_arguments(self, parser: CommandParser) -> None:
super().add_arguments(parser)
parser.add_argument(
'--strict', '-s',
action='store_true',
default=False,
help='Stop execution in case of errors.')
def handle(self, *args: Any, **options: Any) -> None:
if settings.PRODUCTION:
# HACK: When using upgrade-zulip-from-git, we're in a
# production environment where STATIC_ROOT will include
# past versions; this ensures we only process the current
# version
settings.STATIC_ROOT = os.path.join(settings.DEPLOY_ROOT, "static")
settings.LOCALE_PATHS = (os.path.join(settings.DEPLOY_ROOT, 'static/locale'),)
super().handle(*args, **options)
self.strict = options['strict']
self.extract_language_options()
self.create_language_name_map()
def create_language_name_map(self) -> None:
join = os.path.join
static_root = settings.STATIC_ROOT
path = join(static_root, 'locale', 'language_options.json')
output_path = join(static_root, 'locale', 'language_name_map.json')
with open(path, 'r') as reader:
languages = ujson.load(reader)
lang_list = []
for lang_info in languages['languages']:
lang_info['name'] = lang_info['name_local']
del lang_info['name_local']
lang_list.append(lang_info)
lang_list.sort(key=lambda lang: lang['name'])
with open(output_path, 'w') as output_file:
ujson.dump({'name_map': lang_list}, output_file, indent=4, sort_keys=True)
output_file.write('\n')
def get_po_filename(self, locale_path: str, locale: str) -> str:
po_template = '{}/{}/LC_MESSAGES/django.po'
return po_template.format(locale_path, locale)
def get_json_filename(self, locale_path: str, locale: str) -> str:
return "{}/{}/translations.json".format(locale_path, locale)
def get_name_from_po_file(self, po_filename: str, locale: str) -> str:
lang_name_re = re.compile(r'"Language-Team: (.*?) \(')
with open(po_filename, 'r') as reader:
result = lang_name_re.search(reader.read())
if result:
try:
return result.group(1)
except Exception:
print("Problem in parsing {}".format(po_filename))
raise
else:
raise Exception("Unknown language %s" % (locale,))
def get_locales(self) -> List[str]:
tracked_files = check_output(['git', 'ls-files', 'static/locale'])
tracked_files = tracked_files.decode().split()
regex = re.compile(r'static/locale/(\w+)/LC_MESSAGES/django.po')
locales = ['en']
for tracked_file in tracked_files:
matched = regex.search(tracked_file)
if matched:
locales.append(matched.group(1))
return locales
def extract_language_options(self) -> None:
locale_path = "{}/locale".format(settings.STATIC_ROOT)
output_path = "{}/language_options.json".format(locale_path)
data = {'languages': []} # type: Dict[str, List[Dict[str, Any]]]
try:
locales = self.get_locales()
except CalledProcessError:
# In case we are not under a Git repo, fallback to getting the
# locales using listdir().
locales = os.listdir(locale_path)
locales.append('en')
locales = list(set(locales))
for locale in locales:
if locale == 'en':
data['languages'].append({
'name': 'English',
'name_local': 'English',
'code': 'en',
'locale': 'en',
})
continue
lc_messages_path = os.path.join(locale_path, locale, 'LC_MESSAGES')
if not os.path.exists(lc_messages_path):
# Not a locale.
continue
info = {} # type: Dict[str, Any]
code = to_language(locale)
percentage = self.get_translation_percentage(locale_path, locale)
try:
name = LANG_INFO[code]['name']
name_local = LANG_INFO[code]['name_local']
except KeyError:
# Fallback to getting the name from PO file.
filename = self.get_po_filename(locale_path, locale)
name = self.get_name_from_po_file(filename, locale)
name_local = with_language(name, code)
info['name'] = name
info['name_local'] = name_local
info['code'] = code
info['locale'] = locale
info['percent_translated'] = percentage
data['languages'].append(info)
with open(output_path, 'w') as writer:
json.dump(data, writer, indent=2, sort_keys=True)
writer.write('\n')
def get_translation_percentage(self, locale_path: str, locale: str) -> int:
# backend stats
po = polib.pofile(self.get_po_filename(locale_path, locale))
not_translated = len(po.untranslated_entries())
total = len(po.translated_entries()) + not_translated
# frontend stats
with open(self.get_json_filename(locale_path, locale)) as reader:
for key, value in ujson.load(reader).items():
total += 1
if value == '':
not_translated += 1
# mobile stats
with open(os.path.join(locale_path, 'mobile_info.json')) as mob:
mobile_info = ujson.load(mob)
try:
info = mobile_info[locale]
except KeyError:
if self.strict:
raise
info = {'total': 0, 'not_translated': 0}
total += info['total']
not_translated += info['not_translated']
return (total - not_translated) * 100 // total
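# Worked example: 500 strings total with 20 untranslated
# -> (500 - 20) * 100 // 500 = 96 percent translated.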
| [
"CommandParser",
"Any",
"Any",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str"
] | [
520,
785,
801,
2260,
2273,
2439,
2452,
2584,
2597,
5617,
5630
] | [
533,
788,
804,
2263,
2276,
2442,
2455,
2587,
2600,
5620,
5633
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/convert_gitter_data.py |
import argparse
import os
import subprocess
import tempfile
import shutil
from typing import Any
from django.core.management.base import BaseCommand, CommandParser, CommandError
from zerver.data_import.gitter import do_convert_data
class Command(BaseCommand):
help = """Convert the Gitter data into Zulip data format."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('gitter_data', nargs='+',
metavar='<gitter data>',
help="Gitter data in json format")
parser.add_argument('--output', dest='output_dir',
action="store", default=None,
help='Directory to write exported data to.')
parser.add_argument('--threads',
dest='threads',
action="store",
default=6,
help='Threads to download avatars and attachments faster')
parser.formatter_class = argparse.RawTextHelpFormatter
def handle(self, *args: Any, **options: Any) -> None:
output_dir = options["output_dir"]
if output_dir is None:
output_dir = tempfile.mkdtemp(prefix="/tmp/converted-gitter-data-")
else:
output_dir = os.path.realpath(output_dir)
num_threads = int(options['threads'])
if num_threads < 1:
raise CommandError('You must have at least one thread.')
for path in options['gitter_data']:
if not os.path.exists(path):
print("Gitter data file not found: '%s'" % (path,))
exit(1)
# TODO add json check
print("Converting Data ...")
do_convert_data(path, output_dir, num_threads)
| [
"CommandParser",
"Any",
"Any"
] | [
366,
1100,
1116
] | [
379,
1103,
1119
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/convert_hipchat_data.py | import argparse
import os
import subprocess
import tempfile
import shutil
from typing import Any
'''
Example usage for testing purposes:
Move the data:
rm -Rf /tmp/hipchat*
mkdir /tmp/hipchat
./manage.py convert_hipchat_data ~/hipchat-31028-2018-08-08_23-23-22.tar --output /tmp/hipchat
./manage.py import --destroy-rebuild-database hipchat /tmp/hipchat
Test out the realm:
./tools/run-dev.py
go to browser and use your dev url
spec:
https://confluence.atlassian.com/hipchatkb/
exporting-from-hipchat-server-or-data-center-for-data-portability-950821555.html
'''
from django.core.management.base import BaseCommand, CommandParser, CommandError
from zerver.data_import.hipchat import do_convert_data
class Command(BaseCommand):
help = """Convert the Hipchat data into Zulip data format."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('hipchat_tar', nargs='+',
metavar='<hipchat data tarfile>',
help="tar of Hipchat data")
parser.add_argument('--output', dest='output_dir',
action="store",
help='Directory to write exported data to.')
parser.add_argument('--mask', dest='masking_content',
action="store_true",
help='Mask the content for privacy during QA.')
parser.formatter_class = argparse.RawTextHelpFormatter
def handle(self, *args: Any, **options: Any) -> None:
output_dir = options["output_dir"]
if output_dir is None:
print("You need to specify --output <output directory>")
exit(1)
if os.path.exists(output_dir) and not os.path.isdir(output_dir):
print(output_dir + " is not a directory")
exit(1)
os.makedirs(output_dir, exist_ok=True)
if os.listdir(output_dir):
print('Output directory should be empty!')
exit(1)
output_dir = os.path.realpath(output_dir)
for path in options['hipchat_tar']:
if not os.path.exists(path):
print("Tar file not found: '%s'" % (path,))
exit(1)
print("Converting Data ...")
do_convert_data(
input_tar_file=path,
output_dir=output_dir,
masking_content=options.get('masking_content', False),
)
| [
"CommandParser",
"Any",
"Any"
] | [
870,
1524,
1540
] | [
883,
1527,
1543
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/convert_slack_data.py |
import argparse
import os
import subprocess
import tempfile
import shutil
from typing import Any
from django.core.management.base import BaseCommand, CommandParser, CommandError
from zerver.data_import.slack import do_convert_data
class Command(BaseCommand):
help = """Convert the Slack data into Zulip data format."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('slack_data_zip', nargs='+',
metavar='<slack data zip>',
help="Zipped slack data")
parser.add_argument('--token', metavar='<slack_token>',
                            type=str, help='Slack legacy token of the organisation')
parser.add_argument('--output', dest='output_dir',
action="store", default=None,
help='Directory to write exported data to.')
parser.add_argument('--threads',
dest='threads',
action="store",
default=6,
help='Threads to use in exporting UserMessage objects in parallel')
parser.formatter_class = argparse.RawTextHelpFormatter
def handle(self, *args: Any, **options: Any) -> None:
output_dir = options["output_dir"]
if output_dir is None:
output_dir = tempfile.mkdtemp(prefix="/tmp/converted-slack-data-")
else:
output_dir = os.path.realpath(output_dir)
token = options['token']
if token is None:
print("Enter slack legacy token!")
exit(1)
num_threads = int(options['threads'])
if num_threads < 1:
raise CommandError('You must have at least one thread.')
for path in options['slack_data_zip']:
if not os.path.exists(path):
print("Slack data directory not found: '%s'" % (path,))
exit(1)
print("Converting Data ...")
do_convert_data(path, output_dir, token, threads=num_threads)
| [
"CommandParser",
"Any",
"Any"
] | [
364,
1253,
1269
] | [
377,
1256,
1272
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/create_default_stream_groups.py |
from argparse import ArgumentParser
from typing import Any
from zerver.lib.actions import ensure_stream
from zerver.lib.management import ZulipBaseCommand
from zerver.models import DefaultStreamGroup
class Command(ZulipBaseCommand):
help = """
Create default stream groups which the users can choose during sign up.
./manage.py create_default_stream_groups -s gsoc-1,gsoc-2,gsoc-3 -d "Google summer of code" -r zulip
"""
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser, True)
parser.add_argument(
'-n', '--name',
dest='name',
type=str,
required=True,
help='Name of the group you want to create.'
)
parser.add_argument(
'-d', '--description',
dest='description',
type=str,
required=True,
help='Description of the group.'
)
parser.add_argument(
'-s', '--streams',
dest='streams',
type=str,
required=True,
help='A comma-separated list of stream names.')
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
streams = []
stream_names = set([stream.strip() for stream in options["streams"].split(",")])
for stream_name in set(stream_names):
stream = ensure_stream(realm, stream_name)
streams.append(stream)
try:
default_stream_group = DefaultStreamGroup.objects.get(
name=options["name"], realm=realm, description=options["description"])
except DefaultStreamGroup.DoesNotExist:
default_stream_group = DefaultStreamGroup.objects.create(
name=options["name"], realm=realm, description=options["description"])
default_stream_group.streams.set(streams)
default_stream_groups = DefaultStreamGroup.objects.all()
for default_stream_group in default_stream_groups:
print(default_stream_group.name)
print(default_stream_group.description)
for stream in default_stream_group.streams.all():
print(stream.name)
print("")
| [
"ArgumentParser",
"Any",
"Any"
] | [
467,
1161,
1177
] | [
481,
1164,
1180
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/create_large_indexes.py |
from typing import Any, Callable, Dict, List, Set
from django.db import connection
from zerver.lib.management import ZulipBaseCommand
def create_index_if_not_exist(index_name: str, table_name: str,
column_string: str, where_clause: str) -> None:
#
# This function is somewhat similar to
# zerver.lib.migrate.create_index_if_not_exist.
#
# The other function gets used as part of Django migrations; this function
# uses SQL that is not supported by Django migrations.
#
# Creating concurrent indexes is kind of a pain with current versions
# of Django/postgres, because you will get this error with seemingly
# reasonable code:
#
# CREATE INDEX CONCURRENTLY cannot be executed from a function or multi-command string
#
# For a lot more detail on this process, refer to the commit message
# that added this file to the repo.
with connection.cursor() as cursor:
sql = '''
SELECT 1
FROM pg_class
where relname = %s
'''
cursor.execute(sql, [index_name])
rows = cursor.fetchall()
if len(rows) > 0:
print('Index %s already exists.' % (index_name,))
return
print("Creating index %s." % (index_name,))
sql = '''
CREATE INDEX CONCURRENTLY
%s
ON %s (%s)
%s;
''' % (index_name, table_name, column_string, where_clause)
cursor.execute(sql)
print('Finished creating %s.' % (index_name,))
def create_indexes() -> None:
# copied from 0082
create_index_if_not_exist(
index_name='zerver_usermessage_starred_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 2) != 0',
)
# copied from 0083
create_index_if_not_exist(
index_name='zerver_usermessage_mentioned_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 8) != 0',
)
# copied from 0095
create_index_if_not_exist(
index_name='zerver_usermessage_unread_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 1) = 0',
)
# copied from 0098
create_index_if_not_exist(
index_name='zerver_usermessage_has_alert_word_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 512) != 0',
)
# copied from 0099
create_index_if_not_exist(
index_name='zerver_usermessage_wildcard_mentioned_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 8) != 0 OR (flags & 16) != 0',
)
# copied from 0177
create_index_if_not_exist(
index_name='zerver_usermessage_is_private_message_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 2048) != 0',
)
# copied from 0180
create_index_if_not_exist(
index_name='zerver_usermessage_active_mobile_push_notification_id',
table_name='zerver_usermessage',
column_string='user_profile_id, message_id',
where_clause='WHERE (flags & 4096) != 0',
)
class Command(ZulipBaseCommand):
help = """Create concurrent indexes for large tables."""
def handle(self, *args: Any, **options: str) -> None:
create_indexes()
| [
"str",
"str",
"str",
"str",
"Any",
"str"
] | [
180,
197,
247,
266,
3636,
3652
] | [
183,
200,
250,
269,
3639,
3655
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/create_realm_internal_bots.py |
from typing import Any, Iterable, Tuple
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models import Count
from zerver.lib.onboarding import create_if_missing_realm_internal_bots
from zerver.models import Realm, UserProfile
class Command(BaseCommand):
help = """\
Create realm internal bots if absent, in all realms.
These are normally created when the realm is, so this should be a no-op
except when upgrading to a version that adds a new realm internal bot.
"""
def handle(self, *args: Any, **options: Any) -> None:
create_if_missing_realm_internal_bots()
# create_users is idempotent -- it's a no-op when a given email
# already has a user in a given realm.
| [
"Any",
"Any"
] | [
555,
571
] | [
558,
574
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/create_stream.py |
import sys
from argparse import ArgumentParser
from typing import Any
from zerver.lib.actions import create_stream_if_needed
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.str_utils import force_text
class Command(ZulipBaseCommand):
help = """Create a stream, and subscribe all active users (excluding bots).
This should be used for TESTING only, unless you understand the limitations of
the command."""
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser, True, "realm in which to create the stream")
parser.add_argument('stream_name', metavar='<stream name>', type=str,
help='name of stream to create')
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
encoding = sys.getfilesystemencoding()
stream_name = options['stream_name']
create_stream_if_needed(realm, force_text(stream_name, encoding))
| [
"ArgumentParser",
"Any",
"str"
] | [
469,
743,
759
] | [
483,
746,
762
] |
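Side note on the force_text(stream_name, encoding) call in create_stream.py above: zerver.lib.str_utils.force_text is assumed here to behave like Django's helper of the same name, decoding bytes with the supplied encoding and passing str through unchanged. A toy stand-in (hypothetical, for illustration only):

from typing import Union

def force_text_sketch(s: Union[bytes, str], encoding: str) -> str:
    # Hypothetical stand-in for zerver.lib.str_utils.force_text.
    return s.decode(encoding) if isinstance(s, bytes) else s

print(force_text_sketch(b'caf\xc3\xa9', 'utf-8'))  # café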
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/create_user.py |
import argparse
import sys
from typing import Any
from django.core import validators
from django.core.exceptions import ValidationError
from django.core.management.base import CommandError
from django.db.utils import IntegrityError
from zerver.lib.actions import do_create_user, notify_new_user
from zerver.lib.initial_password import initial_password
from zerver.lib.management import ZulipBaseCommand
from zerver.models import email_to_username
class Command(ZulipBaseCommand):
help = """Create the specified user with a default initial password.
A user MUST have ALREADY accepted the Terms of Service before creating their
account this way.
Omit both <email> and <full name> for interactive user creation.
"""
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument('--this-user-has-accepted-the-tos',
dest='tos',
action="store_true",
default=False,
help='Acknowledgement that the user has already accepted the ToS.')
parser.add_argument('--password',
dest='password',
type=str,
default='',
                            help='password of new user. For development only. '
'Note that we recommend against setting '
'passwords this way, since they can be snooped by any user account '
                                 'on the server via `ps -ef` or by any superuser with '
'read access to the user\'s bash history.')
parser.add_argument('--password-file',
dest='password_file',
type=str,
default='',
help='The file containing the password of the new user.')
parser.add_argument('email', metavar='<email>', type=str, nargs='?', default=argparse.SUPPRESS,
help='email address of new user')
parser.add_argument('full_name', metavar='<full name>', type=str, nargs='?',
default=argparse.SUPPRESS,
help='full name of new user')
self.add_realm_args(parser, True, "The name of the existing realm to which to add the user.")
def handle(self, *args: Any, **options: Any) -> None:
if not options["tos"]:
raise CommandError("""You must confirm that this user has accepted the
Terms of Service by passing --this-user-has-accepted-the-tos.""")
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
try:
email = options['email']
full_name = options['full_name']
try:
validators.validate_email(email)
except ValidationError:
raise CommandError("Invalid email address.")
except KeyError:
if 'email' in options or 'full_name' in options:
raise CommandError("""Either specify an email and full name as two
parameters, or specify no parameters for interactive user creation.""")
else:
while True:
email = input("Email: ")
try:
validators.validate_email(email)
break
except ValidationError:
print("Invalid email address.", file=sys.stderr)
full_name = input("Full name: ")
try:
            # The argparse defaults are '', so both keys are always present;
            # test the values instead of membership, or the fallback below
            # would never run.
            if options['password_file']:
                pw = open(options['password_file'], 'r').read()
            elif options['password']:
                pw = options['password']
            else:
user_initial_password = initial_password(email)
if user_initial_password is None:
raise CommandError("Password is unusable.")
pw = user_initial_password.encode()
notify_new_user(do_create_user(email, pw,
realm, full_name, email_to_username(email)),
internal=True)
except IntegrityError:
raise CommandError("User already exists.")
| [
"argparse.ArgumentParser",
"Any",
"Any"
] | [
760,
2427,
2443
] | [
783,
2430,
2446
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/deactivate_realm.py |
from argparse import ArgumentParser
from typing import Any
from zerver.lib.actions import do_deactivate_realm
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Script to deactivate a realm."""
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
if realm.deactivated:
print("The realm", options["realm_id"], "is already deactivated.")
exit(0)
print("Deactivating", options["realm_id"])
do_deactivate_realm(realm)
print("Done!")
| [
"ArgumentParser",
"Any",
"str"
] | [
281,
377,
393
] | [
295,
380,
396
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/deactivate_user.py |
from argparse import ArgumentParser
from typing import Any
from zerver.lib.actions import do_deactivate_user
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.sessions import user_sessions
from zerver.models import UserProfile
class Command(ZulipBaseCommand):
help = "Deactivate a user, including forcibly logging them out."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('-f', '--for-real',
dest='for_real',
action='store_true',
default=False,
help="Actually deactivate the user. Default is a dry run.")
parser.add_argument('email', metavar='<email>', type=str,
help='email of user to deactivate')
self.add_realm_args(parser)
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
user_profile = self.get_user(options['email'], realm)
print("Deactivating %s (%s) - %s" % (user_profile.full_name,
user_profile.email,
user_profile.realm.string_id))
print("%s has the following active sessions:" % (user_profile.email,))
for session in user_sessions(user_profile):
print(session.expire_date, session.get_decoded())
print("")
print("%s has %s active bots that will also be deactivated." % (
user_profile.email,
UserProfile.objects.filter(
is_bot=True, is_active=True, bot_owner=user_profile
).count()
))
if not options["for_real"]:
print("This was a dry run. Pass -f to actually deactivate.")
exit(1)
do_deactivate_user(user_profile)
print("Sessions deleted, user deactivated.")
| [
"ArgumentParser",
"Any",
"Any"
] | [
386,
879,
895
] | [
400,
882,
898
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/delete_old_unclaimed_attachments.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand
from zerver.lib.actions import do_delete_old_unclaimed_attachments
from zerver.models import get_old_unclaimed_attachments
class Command(BaseCommand):
help = """Remove unclaimed attachments from storage older than a supplied
numerical value indicating the limit of how old the attachment can be.
One week is taken as the default value."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('-w', '--weeks',
dest='delta_weeks',
default=1,
help="Limiting value of how old the file can be.")
parser.add_argument('-f', '--for-real',
dest='for_real',
action='store_true',
default=False,
help="Actually remove the files from the storage.")
def handle(self, *args: Any, **options: Any) -> None:
delta_weeks = options['delta_weeks']
print("Deleting unclaimed attached files older than %s" % (delta_weeks,))
print("")
# print the list of files that are going to be removed
old_attachments = get_old_unclaimed_attachments(delta_weeks)
for old_attachment in old_attachments:
print("%s created at %s" % (old_attachment.file_name, old_attachment.create_time))
print("")
if not options["for_real"]:
print("This was a dry run. Pass -f to actually delete.")
exit(1)
do_delete_old_unclaimed_attachments(delta_weeks)
print("")
print("Unclaimed Files deleted.")
| [
"ArgumentParser",
"Any",
"Any"
] | [
523,
1054,
1070
] | [
537,
1057,
1073
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/deliver_email.py |
"""\
Deliver email messages that have been queued by various things
(at this time invitation reminders and day1/day2 followup emails).
This management command is run via supervisor. Do not run on multiple
machines, as you may encounter multiple sends in a specific race
condition. (Alternatively, you can set `EMAIL_DELIVERER_DISABLED=True`
on all but one machine to make the command have no effect.)
"""
import logging
import time
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from ujson import loads
from zerver.lib.context_managers import lockfile
from zerver.lib.logging_util import log_to_file
from zerver.lib.management import sleep_forever
from zerver.lib.send_email import EmailNotDeliveredException, send_email
from zerver.models import ScheduledEmail
## Setup ##
logger = logging.getLogger(__name__)
log_to_file(logger, settings.EMAIL_DELIVERER_LOG_PATH)
class Command(BaseCommand):
help = """Deliver emails queued by various parts of Zulip
(either for immediate sending or sending at a specified time).
Run this command under supervisor. This is for SMTP email delivery.
Usage: ./manage.py deliver_email
"""
def handle(self, *args: Any, **options: Any) -> None:
if settings.EMAIL_DELIVERER_DISABLED:
sleep_forever()
with lockfile("/tmp/zulip_email_deliver.lockfile"):
while True:
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now())
if email_jobs_to_deliver:
for job in email_jobs_to_deliver:
try:
send_email(**loads(job.data))
job.delete()
except EmailNotDeliveredException:
logger.warning("%r not delivered" % (job,))
time.sleep(10)
else:
# Less load on the db during times of activity,
# and more responsiveness when the load is low
time.sleep(2)
| [
"Any",
"Any"
] | [
1279,
1295
] | [
1282,
1298
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/deliver_scheduled_messages.py | import logging
import time
from typing import Any, Dict
from datetime import timedelta
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import transaction
from django.utils.timezone import now as timezone_now
from zerver.lib.context_managers import lockfile
from zerver.lib.logging_util import log_to_file
from zerver.lib.management import sleep_forever
from zerver.models import ScheduledMessage, Message, get_user
from zerver.lib.actions import do_send_messages
from zerver.lib.addressee import Addressee
## Setup ##
logger = logging.getLogger(__name__)
log_to_file(logger, settings.SCHEDULED_MESSAGE_DELIVERER_LOG_PATH)
class Command(BaseCommand):
help = """Deliver scheduled messages from the ScheduledMessage table.
Run this command under supervisor.
This management command is run via supervisor. Do not run on multiple
machines, as you may encounter multiple sends in a specific race
condition. (Alternatively, you can set `EMAIL_DELIVERER_DISABLED=True`
on all but one machine to make the command have no effect.)
Usage: ./manage.py deliver_scheduled_messages
"""
def construct_message(self, scheduled_message: ScheduledMessage) -> Dict[str, Any]:
message = Message()
original_sender = scheduled_message.sender
message.content = scheduled_message.content
message.recipient = scheduled_message.recipient
message.subject = scheduled_message.subject
message.pub_date = timezone_now()
message.sending_client = scheduled_message.sending_client
delivery_type = scheduled_message.delivery_type
if delivery_type == ScheduledMessage.SEND_LATER:
message.sender = original_sender
elif delivery_type == ScheduledMessage.REMIND:
message.sender = get_user(settings.REMINDER_BOT, original_sender.realm)
return {'message': message, 'stream': scheduled_message.stream,
'realm': scheduled_message.realm}
def handle(self, *args: Any, **options: Any) -> None:
if settings.EMAIL_DELIVERER_DISABLED:
# Here doing a check and sleeping indefinitely on this setting might
# not sound right. Actually we do this check to avoid running this
# process on every server that might be in service to a realm. See
# the comment in zproject/settings.py file about renaming this setting.
sleep_forever()
with lockfile("/tmp/zulip_scheduled_message_deliverer.lockfile"):
while True:
messages_to_deliver = ScheduledMessage.objects.filter(
scheduled_timestamp__lte=timezone_now(),
delivered=False)
if messages_to_deliver:
for message in messages_to_deliver:
with transaction.atomic():
do_send_messages([self.construct_message(message)])
message.delivered = True
message.save(update_fields=['delivered'])
cur_time = timezone_now()
time_next_min = (cur_time + timedelta(minutes=1)).replace(second=0, microsecond=0)
sleep_time = (time_next_min - cur_time).total_seconds()
time.sleep(sleep_time)
| [
"ScheduledMessage",
"Any",
"Any"
] | [
1187,
2021,
2037
] | [
1203,
2024,
2040
] |
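The delivery loop in deliver_scheduled_messages.py above sleeps until the top of the next minute rather than polling in a tight loop. A worked example of that computation with a made-up clock value:

from datetime import datetime, timedelta

cur_time = datetime(2018, 11, 2, 10, 0, 42, 500000)  # hypothetical "now"
time_next_min = (cur_time + timedelta(minutes=1)).replace(second=0, microsecond=0)
sleep_time = (time_next_min - cur_time).total_seconds()
print(sleep_time)  # 17.5 -> the next scan starts at 10:01:00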
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/dump_messages.py |
import datetime
import time
from typing import Any
from django.core.management.base import CommandParser
from django.utils.timezone import utc as timezone_utc
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Message, Recipient, Stream
class Command(ZulipBaseCommand):
help = "Dump messages from public streams of a realm"
def add_arguments(self, parser: CommandParser) -> None:
default_cutoff = time.time() - 60 * 60 * 24 * 30 # 30 days.
self.add_realm_args(parser, True)
parser.add_argument('--since',
dest='since',
type=int,
default=default_cutoff,
help='The time in epoch since from which to start the dump.')
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
streams = Stream.objects.filter(realm=realm, invite_only=False)
recipients = Recipient.objects.filter(
type=Recipient.STREAM, type_id__in=[stream.id for stream in streams])
cutoff = datetime.datetime.fromtimestamp(options["since"], tz=timezone_utc)
messages = Message.objects.filter(pub_date__gt=cutoff, recipient__in=recipients)
for message in messages:
print(message.to_dict(False))
| [
"CommandParser",
"Any",
"Any"
] | [
395,
820,
836
] | [
408,
823,
839
] |
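The default --since cutoff in dump_messages.py above is plain epoch arithmetic: thirty days of seconds subtracted from the current time. With an assumed clock value:

import datetime

now = 1541152800.0  # assumed epoch for 2018-11-02 10:00:00 UTC
cutoff = now - 60 * 60 * 24 * 30  # 30 days, in seconds
print(datetime.datetime.utcfromtimestamp(cutoff))  # 2018-10-03 10:00:00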
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/email_mirror.py |
"""
Forward messages sent to the configured email gateway to Zulip.
For zulip.com, messages to that address go to the Inbox of emailgateway@zulip.com.
Zulip voyager configurations will differ.
Messages meant for Zulip have a special recipient form of
<stream name>+<regenerable stream token>@streams.zulip.com
This pattern is configurable via the EMAIL_GATEWAY_PATTERN settings.py
variable.
Run this in a cronjob every N minutes if you have configured Zulip to poll
an external IMAP mailbox for messages. The script will then connect to
your IMAP server and batch-process all messages.
We extract and validate the target stream from information in the
recipient address and retrieve, forward, and archive the message.
"""
import email
import logging
from email.message import Message
from imaplib import IMAP4_SSL
from typing import Any, Generator, List
from django.conf import settings
from django.core.management.base import BaseCommand
from zerver.lib.email_mirror import logger, process_message
## Setup ##
log_format = "%(asctime)s: %(message)s"
logging.basicConfig(format=log_format)
formatter = logging.Formatter(log_format)
file_handler = logging.FileHandler(settings.EMAIL_MIRROR_LOG_PATH)
file_handler.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
def get_imap_messages() -> Generator[Message, None, None]:
mbox = IMAP4_SSL(settings.EMAIL_GATEWAY_IMAP_SERVER, settings.EMAIL_GATEWAY_IMAP_PORT)
mbox.login(settings.EMAIL_GATEWAY_LOGIN, settings.EMAIL_GATEWAY_PASSWORD)
try:
mbox.select(settings.EMAIL_GATEWAY_IMAP_FOLDER)
try:
status, num_ids_data = mbox.search(None, 'ALL') # type: ignore # https://github.com/python/typeshed/pull/1762
for msgid in num_ids_data[0].split():
status, msg_data = mbox.fetch(msgid, '(RFC822)')
msg_as_bytes = msg_data[0][1]
message = email.message_from_bytes(msg_as_bytes)
yield message
mbox.store(msgid, '+FLAGS', '\\Deleted')
mbox.expunge()
finally:
mbox.close()
finally:
mbox.logout()
class Command(BaseCommand):
help = __doc__
def handle(self, *args: Any, **options: str) -> None:
# We're probably running from cron, try to batch-process mail
if (not settings.EMAIL_GATEWAY_BOT or not settings.EMAIL_GATEWAY_LOGIN or
not settings.EMAIL_GATEWAY_PASSWORD or not settings.EMAIL_GATEWAY_IMAP_SERVER or
not settings.EMAIL_GATEWAY_IMAP_PORT or not settings.EMAIL_GATEWAY_IMAP_FOLDER):
print("Please configure the Email Mirror Gateway in /etc/zulip/, "
"or specify $ORIGINAL_RECIPIENT if piping a single mail.")
exit(1)
for message in get_imap_messages():
process_message(message)
| [
"Any",
"str"
] | [
2244,
2260
] | [
2247,
2263
] |
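To make the addressing scheme from the email_mirror docstring concrete: an address of the form <stream name>+<token>@<gateway host> carries both the target stream and its token. The sketch below is illustrative only; the real parsing lives in zerver.lib.email_mirror and is driven by the EMAIL_GATEWAY_PATTERN setting, not this hard-coded regex:

import re
from typing import Tuple

def split_gateway_address(address: str) -> Tuple[str, str]:
    # Hypothetical helper: "<stream>+<token>@host" -> (stream, token).
    match = re.match(r'^(?P<stream>[^+@]+)\+(?P<token>[^@]+)@', address)
    if match is None:
        raise ValueError('Not a stream gateway address: %s' % (address,))
    return match.group('stream'), match.group('token')

print(split_gateway_address('errors+abc123@streams.zulip.com'))  # ('errors', 'abc123')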
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/enqueue_digest_emails.py | import datetime
import logging
from typing import Any, List
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from zerver.lib.digest import DIGEST_CUTOFF, enqueue_emails
from zerver.lib.logging_util import log_to_file
## Logging setup ##
logger = logging.getLogger(__name__)
log_to_file(logger, settings.DIGEST_LOG_PATH)
class Command(BaseCommand):
help = """Enqueue digest emails for users that haven't checked the app
in a while.
"""
def handle(self, *args: Any, **options: Any) -> None:
cutoff = timezone_now() - datetime.timedelta(days=DIGEST_CUTOFF)
enqueue_emails(cutoff)
| [
"Any",
"Any"
] | [
562,
578
] | [
565,
581
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/enqueue_file.py |
import sys
from argparse import ArgumentParser
from typing import IO, Any
import ujson
from django.core.management.base import BaseCommand
from zerver.lib.queue import queue_json_publish
def error(*args: Any) -> None:
raise Exception('We cannot enqueue because settings.USING_RABBITMQ is False.')
class Command(BaseCommand):
help = """Read JSON lines from a file and enqueue them to a worker queue.
Each line in the file should either be a JSON payload or two tab-separated
fields, the second of which is a JSON payload. (The latter is to accommodate
the format of error files written by queue workers that catch exceptions--their
first field is a timestamp that we ignore.)
You can use "-" to represent stdin.
"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('queue_name', metavar='<queue>', type=str,
help="name of worker queue to enqueue to")
parser.add_argument('file_name', metavar='<file>', type=str,
help="name of file containing JSON lines")
def handle(self, *args: Any, **options: str) -> None:
queue_name = options['queue_name']
file_name = options['file_name']
if file_name == '-':
f = sys.stdin # type: IO[str]
else:
f = open(file_name)
while True:
line = f.readline()
if not line:
break
line = line.strip()
try:
payload = line.split('\t')[1]
except IndexError:
payload = line
print('Queueing to queue %s: %s' % (queue_name, payload))
# Verify that payload is valid json.
data = ujson.loads(payload)
# This is designed to use the `error` method rather than
# the call_consume_in_tests flow.
queue_json_publish(queue_name, data, error)
| [
"Any",
"ArgumentParser",
"Any",
"str"
] | [
208,
768,
1104,
1120
] | [
211,
782,
1107,
1123
] |
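Concretely, enqueue_file accepts either of these two line shapes (the payloads here are invented for the example); both reduce to the same JSON extraction the command performs:

import json

lines = [
    '{"queue": "demo", "id": 1}',                        # bare JSON payload
    '2018-11-02T10:00:00\t{"queue": "demo", "id": 2}',   # timestamp <tab> payload
]
for line in lines:
    try:
        payload = line.split('\t')[1]  # same split the command uses
    except IndexError:
        payload = line
    print(json.loads(payload))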
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/export.py |
import os
import shutil
import subprocess
import tempfile
from argparse import ArgumentParser, RawTextHelpFormatter
from typing import Any
from django.core.management.base import CommandError
from zerver.lib.export import do_export_realm, \
do_write_stats_file_for_realm_export
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Exports all data from a Zulip realm
This command exports all significant data from a Zulip realm. The
result can be imported using the `./manage.py import` command.
Things that are exported:
* All user-accessible data in the Zulip database (Messages,
Streams, UserMessages, RealmEmoji, etc.)
* Copies of all uploaded files and avatar images along with
      metadata needed to restore them even in the absence of this server.
Things that are not exported:
* Confirmation and PreregistrationUser (transient tables)
* Sessions (everyone will need to login again post-export)
* Users' passwords and API keys (users will need to use SSO or reset password)
* Mobile tokens for APNS/GCM (users will need to reconnect their mobile devices)
* ScheduledEmail (Not relevant on a new server)
* RemoteZulipServer (Unlikely to be migrated)
* third_party_api_results cache (this means rerending all old
messages could be expensive)
Things that will break as a result of the export:
* Passwords will not be transferred. They will all need to go
through the password reset flow to obtain a new password (unless
they intend to only use e.g. Google Auth).
* Users will need to logout and re-login to the Zulip desktop and
mobile apps. The apps now all have an option on the login page
where you can specify which Zulip server to use; your users
should enter <domain name>.
* All bots will stop working since they will be pointing to the
wrong server URL, and all users' API keys have been rotated as
part of the migration. So to re-enable your integrations, you
will need to direct your integrations at the new server.
Usually this means updating the URL and the bots' API keys. You
can see a list of all the bots that have been configured for
your realm on the `/#organization` page, and use that list to
make sure you migrate them all.
The proper procedure for using this to export a realm is as follows:
* Use `./manage.py deactivate_realm` to deactivate the realm, so
nothing happens in the realm being exported during the export
process.
* Use `./manage.py export` to export the realm, producing a data
tarball.
* Transfer the tarball to the new server and unpack it.
* Use `./manage.py import` to import the realm
* Use `./manage.py reactivate_realm` to reactivate the realm, so
users can login again.
* Inform the users about the things broken above.
We recommend testing by exporting without having deactivated the
realm first, to make sure you have the procedure right and
minimize downtime.
Performance: In one test, the tool exported a realm with hundreds
of users and ~1M messages of history with --threads=1 in about 3
hours of serial runtime (goes down to ~50m with --threads=6 on a
machine with 8 CPUs). Importing that same data set took about 30
minutes. But this will vary a lot depending on the average number
of recipients of messages in the realm, hardware, etc."""
# Fix support for multi-line usage
def create_parser(self, *args: Any, **kwargs: Any) -> ArgumentParser:
parser = super().create_parser(*args, **kwargs)
parser.formatter_class = RawTextHelpFormatter
return parser
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--output',
dest='output_dir',
action="store",
default=None,
help='Directory to write exported data to.')
parser.add_argument('--threads',
dest='threads',
action="store",
default=6,
help='Threads to use in exporting UserMessage objects in parallel')
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
output_dir = options["output_dir"]
if output_dir is None:
output_dir = tempfile.mkdtemp(prefix="/tmp/zulip-export-")
else:
output_dir = os.path.realpath(os.path.expanduser(output_dir))
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
print("Exporting realm %s" % (realm.string_id,))
num_threads = int(options['threads'])
if num_threads < 1:
raise CommandError('You must have at least one thread.')
do_export_realm(realm, output_dir, threads=num_threads)
print("Finished exporting to %s; tarring" % (output_dir,))
do_write_stats_file_for_realm_export(output_dir)
tarball_path = output_dir.rstrip('/') + '.tar.gz'
os.chdir(os.path.dirname(output_dir))
subprocess.check_call(["tar", "-czf", tarball_path, os.path.basename(output_dir)])
print("Tarball written to %s" % (tarball_path,))
| [
"Any",
"Any",
"ArgumentParser",
"Any",
"Any"
] | [
3566,
3581,
3774,
4380,
4396
] | [
3569,
3584,
3788,
4383,
4399
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/export_single_user.py |
import os
import shutil
import subprocess
import tempfile
from argparse import ArgumentParser
from typing import Any
from zerver.lib.export import do_export_user
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Exports message data from a Zulip user
This command exports the message history for a single Zulip user.
Note that this only exports the user's message history and
realm-public metadata needed to understand it; it does nothing
with (for example) any bots owned by the user."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('email', metavar='<email>', type=str,
help="email of user to export")
parser.add_argument('--output',
dest='output_dir',
action="store",
default=None,
help='Directory to write exported data to.')
self.add_realm_args(parser)
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
user_profile = self.get_user(options["email"], realm)
output_dir = options["output_dir"]
if output_dir is None:
output_dir = tempfile.mkdtemp(prefix="/tmp/zulip-export-")
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
print("Exporting user %s" % (user_profile.email,))
do_export_user(user_profile, output_dir)
print("Finished exporting to %s; tarring" % (output_dir,))
tarball_path = output_dir.rstrip('/') + '.tar.gz'
subprocess.check_call(["tar", "--strip-components=1", "-czf", tarball_path, output_dir])
print("Tarball written to %s" % (tarball_path,))
| [
"ArgumentParser",
"Any",
"Any"
] | [
595,
1057,
1073
] | [
609,
1060,
1076
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/export_usermessage_batch.py |
import glob
import logging
import os
import shutil
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand
from zerver.lib.export import export_usermessages_batch
class Command(BaseCommand):
help = """UserMessage fetching helper for export.py"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--path',
dest='path',
action="store",
default=None,
help='Path to find messages.json archives')
parser.add_argument('--thread',
dest='thread',
action="store",
default=None,
help='Thread ID')
def handle(self, *args: Any, **options: Any) -> None:
logging.info("Starting UserMessage batch thread %s" % (options['thread'],))
files = set(glob.glob(os.path.join(options['path'], 'messages-*.json.partial')))
for partial_path in files:
locked_path = partial_path.replace(".json.partial", ".json.locked")
output_path = partial_path.replace(".json.partial", ".json")
try:
shutil.move(partial_path, locked_path)
except Exception:
# Already claimed by another process
continue
logging.info("Thread %s processing %s" % (options['thread'], output_path))
try:
export_usermessages_batch(locked_path, output_path)
except Exception:
# Put the item back in the free pool when we fail
shutil.move(locked_path, partial_path)
raise
| [
"ArgumentParser",
"Any",
"Any"
] | [
346,
852,
868
] | [
360,
855,
871
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/fill_memcached_caches.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand
from zerver.lib.cache_helpers import cache_fillers, fill_remote_cache
class Command(BaseCommand):
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--cache', dest="cache", default=None,
help="Populate the memcached cache of messages.")
def handle(self, *args: Any, **options: str) -> None:
if options["cache"] is not None:
fill_remote_cache(options["cache"])
return
for cache in cache_fillers.keys():
fill_remote_cache(cache)
| [
"ArgumentParser",
"Any",
"str"
] | [
249,
448,
464
] | [
263,
451,
467
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/fix_unreads.py |
import logging
import sys
from argparse import ArgumentParser
from typing import Any, List, Optional
from django.core.management.base import CommandError
from django.db import connection
from zerver.lib.fix_unreads import fix
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Realm, UserProfile
logging.getLogger('zulip.fix_unreads').setLevel(logging.INFO)
class Command(ZulipBaseCommand):
help = """Fix problems related to unread counts."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('emails',
metavar='<emails>',
type=str,
nargs='*',
help='email address to spelunk')
parser.add_argument('--all',
action='store_true',
dest='all',
default=False,
help='fix all users in specified realm')
self.add_realm_args(parser)
def fix_all_users(self, realm: Realm) -> None:
user_profiles = list(UserProfile.objects.filter(
realm=realm,
is_bot=False
))
for user_profile in user_profiles:
fix(user_profile)
connection.commit()
def fix_emails(self, realm: Optional[Realm], emails: List[str]) -> None:
for email in emails:
try:
user_profile = self.get_user(email, realm)
except CommandError:
print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm))
                continue
fix(user_profile)
connection.commit()
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
if options['all']:
if realm is None:
print('You must specify a realm if you choose the --all option.')
sys.exit(1)
self.fix_all_users(realm)
return
self.fix_emails(realm, options['emails'])
| [
"ArgumentParser",
"Realm",
"Optional[Realm]",
"List[str]",
"Any",
"Any"
] | [
515,
1074,
1346,
1371,
1737,
1753
] | [
529,
1079,
1361,
1380,
1740,
1756
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/generate_invite_links.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import CommandError
from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.models import PreregistrationUser, email_allowed_for_realm, \
    DomainNotAllowedForRealmError
class Command(ZulipBaseCommand):
help = "Generate activation links for users and print them to stdout."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--force',
dest='force',
action="store_true",
default=False,
help='Override that the domain is restricted to external users.')
parser.add_argument('emails', metavar='<email>', type=str, nargs='*',
help='email of users to generate an activation link for')
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: Any) -> None:
duplicates = False
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
if not options['emails']:
self.print_help("./manage.py", "generate_invite_links")
exit(1)
for email in options['emails']:
try:
self.get_user(email, realm)
print(email + ": There is already a user registered with that address.")
duplicates = True
continue
except CommandError:
pass
if duplicates:
return
for email in options['emails']:
try:
email_allowed_for_realm(email, realm)
except DomainNotAllowedForRealmError:
if not options["force"]:
print("You've asked to add an external user '%s' to a closed realm '%s'." % (
email, realm.string_id))
print("Are you sure? To do this, pass --force.")
exit(1)
prereg_user = PreregistrationUser(email=email, realm=realm)
prereg_user.save()
print(email + ": " + create_confirmation_link(prereg_user, realm.host,
Confirmation.INVITATION))
| [
"ArgumentParser",
"Any",
"Any"
] | [
516,
1043,
1059
] | [
530,
1046,
1062
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/generate_multiuse_invite_link.py |
from argparse import ArgumentParser
from typing import Any, List
from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.actions import ensure_stream, do_create_multiuse_invite_link
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Stream
class Command(ZulipBaseCommand):
help = "Generates invite link that can be used for inviting multiple users"
def add_arguments(self, parser: ArgumentParser) -> None:
self.add_realm_args(parser, True)
parser.add_argument(
'-s', '--streams',
dest='streams',
type=str,
help='A comma-separated list of stream names.')
parser.add_argument(
'--referred-by',
dest='referred_by',
type=str,
help='Email of referrer',
required=True,
)
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
assert realm is not None # Should be ensured by parser
streams = [] # type: List[Stream]
if options["streams"]:
stream_names = set([stream.strip() for stream in options["streams"].split(",")])
for stream_name in set(stream_names):
stream = ensure_stream(realm, stream_name)
streams.append(stream)
referred_by = self.get_user(options['referred_by'], realm)
invite_link = do_create_multiuse_invite_link(referred_by, streams)
print("You can use %s to invite as many number of people to the organization." % (invite_link,))
| [
"ArgumentParser",
"Any",
"Any"
] | [
450,
905,
921
] | [
464,
908,
924
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/generate_realm_creation_link.py |
import sys
from argparse import ArgumentParser, RawTextHelpFormatter
from typing import Any
from django.core.management.base import BaseCommand
from django.db import ProgrammingError
from confirmation.models import generate_realm_creation_url
from zerver.models import Realm
class Command(BaseCommand):
help = """
Outputs a randomly generated, 1-time-use link for Organization creation.
Whoever visits the link can create a new organization on this server, regardless of whether
settings.OPEN_REALM_CREATION is enabled. The link would expire automatically after
settings.REALM_CREATION_LINK_VALIDITY_DAYS.
Usage: ./manage.py generate_realm_creation_link """
# Fix support for multi-line usage
def create_parser(self, *args: Any, **kwargs: Any) -> ArgumentParser:
parser = super().create_parser(*args, **kwargs)
parser.formatter_class = RawTextHelpFormatter
return parser
def handle(self, *args: Any, **options: Any) -> None:
try:
            # first check if the db has been initialized
Realm.objects.first()
except ProgrammingError:
print("The Zulip database does not appear to exist. Have you run initialize-database?")
sys.exit(1)
url = generate_realm_creation_url(by_admin=True)
self.stdout.write(self.style.SUCCESS("Please visit the following "
"secure single-use link to register your "))
self.stdout.write(self.style.SUCCESS("new Zulip organization:\033[0m"))
self.stdout.write("")
self.stdout.write(self.style.SUCCESS(" \033[1;92m%s\033[0m" % (url,)))
self.stdout.write("")
| [
"Any",
"Any",
"Any",
"Any"
] | [
762,
777,
962,
978
] | [
765,
780,
965,
981
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/get_migration_status.py | # -*- coding: utf-8 -*-
import argparse
import os
from typing import Any
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from scripts.lib.zulip_tools import get_dev_uuid_var_path
from zerver.lib.test_fixtures import get_migration_status
class Command(BaseCommand):
help = "Get status of migrations."
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument('app_label', nargs='?',
help='App label of an application to synchronize the state.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
'Defaults to the "default" database.')
parser.add_argument('--output', action='store',
help='Path to store the status to (default to stdout).')
def handle(self, *args: Any, **options: Any) -> None:
result = get_migration_status(**options)
if options['output'] is not None:
uuid_var_path = get_dev_uuid_var_path()
path = os.path.join(uuid_var_path, options['output'])
with open(path, 'w') as f:
f.write(result)
else:
self.stdout.write(result)
| [
"argparse.ArgumentParser",
"Any",
"Any"
] | [
387,
976,
992
] | [
410,
979,
995
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/import.py |
import argparse
import os
import subprocess
import tarfile
from typing import Any
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.import_realm import do_import_realm, do_import_system_bots
from zerver.forms import check_subdomain_available
class Command(BaseCommand):
help = """Import extracted Zulip database dump directories into a fresh Zulip instance.
This command should be used only on a newly created, empty Zulip instance to
import a database dump from one or more JSON files."""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('--destroy-rebuild-database',
dest='destroy_rebuild_database',
default=False,
action="store_true",
help='Destroys and rebuilds the databases prior to import.')
parser.add_argument('--import-into-nonempty',
dest='import_into_nonempty',
default=False,
action="store_true",
help='Import into an existing nonempty database.')
parser.add_argument('subdomain', metavar='<subdomain>',
type=str, help="Subdomain")
parser.add_argument('export_paths', nargs='+',
metavar='<export path>',
help="list of export directories to import")
parser.formatter_class = argparse.RawTextHelpFormatter
def do_destroy_and_rebuild_database(self, db_name: str) -> None:
call_command('flush', verbosity=0, interactive=False)
subprocess.check_call([os.path.join(settings.DEPLOY_ROOT, "scripts/setup/flush-memcached")])
def handle(self, *args: Any, **options: Any) -> None:
subdomain = options['subdomain']
if options["destroy_rebuild_database"]:
print("Rebuilding the database!")
db_name = settings.DATABASES['default']['NAME']
self.do_destroy_and_rebuild_database(db_name)
elif options["import_into_nonempty"]:
print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")
check_subdomain_available(subdomain, from_management_command=True)
paths = []
for path in options['export_paths']:
path = os.path.realpath(os.path.expanduser(path))
if not os.path.exists(path):
print("Directory not found: '%s'" % (path,))
exit(1)
if not os.path.isdir(path):
print("Export file should be folder; if it's a tarball, please unpack it first.")
exit(1)
paths.append(path)
for path in paths:
print("Processing dump: %s ..." % (path,))
realm = do_import_realm(path, subdomain)
print("Checking the system bots.")
do_import_system_bots(realm)
| [
"CommandParser",
"str",
"Any",
"Any"
] | [
650,
1679,
1885,
1901
] | [
663,
1682,
1888,
1904
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/initialize_voyager_db.py |
from argparse import ArgumentParser
from typing import Any, Iterable, Tuple, Optional
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from zerver.lib.bulk_create import bulk_create_users
from zerver.models import Realm, UserProfile, \
email_to_username, get_client, get_system_bot
settings.TORNADO_SERVER = None
def create_users(realm: Realm, name_list: Iterable[Tuple[str, str]], bot_type: Optional[int]=None) -> None:
user_set = set()
for full_name, email in name_list:
short_name = email_to_username(email)
user_set.add((email, full_name, short_name, True))
bulk_create_users(realm, user_set, bot_type)
class Command(BaseCommand):
help = "Populate an initial database for Zulip Voyager"
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--extra-users',
dest='extra_users',
type=int,
default=0,
help='The number of extra users to create')
def handle(self, *args: Any, **options: Any) -> None:
if Realm.objects.count() > 0:
print("Database already initialized; doing nothing.")
return
realm = Realm.objects.create(string_id=settings.INTERNAL_BOT_DOMAIN.split('.')[0])
names = [(settings.FEEDBACK_BOT_NAME, settings.FEEDBACK_BOT)]
create_users(realm, names, bot_type=UserProfile.DEFAULT_BOT)
get_client("website")
get_client("API")
internal_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.INTERNAL_BOTS]
create_users(realm, internal_bots, bot_type=UserProfile.DEFAULT_BOT)
# Set the owners for these bots to the bots themselves
bots = UserProfile.objects.filter(email__in=[bot_info[1] for bot_info in internal_bots])
for bot in bots:
bot.bot_owner = bot
bot.save()
# Initialize the email gateway bot as an API Super User
email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT)
email_gateway_bot.is_api_super_user = True
email_gateway_bot.save()
self.stdout.write("Successfully populated database with initial data.\n")
self.stdout.write("Please run ./manage.py generate_realm_creation_link "
"to generate link for creating organization")
site = Site.objects.get_current()
site.domain = settings.EXTERNAL_HOST
site.save()
| [
"Realm",
"Iterable[Tuple[str, str]]",
"ArgumentParser",
"Any",
"Any"
] | [
427,
445,
851,
1147,
1163
] | [
432,
470,
865,
1150,
1166
] |
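For clarity, a hypothetical direct call to the create_users helper defined in initialize_voyager_db.py above (the realm object and the name/email pair are invented for the example):

create_users(realm,
             [("Example Bot", "example-bot@zulip.example.com")],
             bot_type=UserProfile.DEFAULT_BOT)
# Each (full_name, email) pair becomes an (email, full_name, short_name, active)
# tuple that bulk_create_users inserts in one batch.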
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/knight.py |
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import CommandError
from zerver.lib.actions import do_change_is_admin
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
help = """Give an existing user administrative permissions over their (own) Realm.
ONLY perform this on customer request from an authorized person.
"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('-f', '--for-real',
dest='ack',
action="store_true",
default=False,
help='Acknowledgement that this is done according to policy.')
parser.add_argument('--revoke',
dest='grant',
action="store_false",
default=True,
help='Remove an administrator\'s rights.')
parser.add_argument('--permission',
dest='permission',
action="store",
default='administer',
choices=['administer', 'api_super_user', ],
help='Permission to grant/remove.')
parser.add_argument('email', metavar='<email>', type=str,
help="email of user to knight")
self.add_realm_args(parser, True)
def handle(self, *args: Any, **options: Any) -> None:
email = options['email']
realm = self.get_realm(options)
profile = self.get_user(email, realm)
if options['grant']:
if profile.has_perm(options['permission'], profile.realm):
raise CommandError("User already has permission for this realm.")
else:
if options['ack']:
do_change_is_admin(profile, True, permission=options['permission'])
print("Done!")
else:
print("Would have granted %s %s rights for %s" % (
email, options['permission'], profile.realm.string_id))
else:
if profile.has_perm(options['permission'], profile.realm):
if options['ack']:
do_change_is_admin(profile, False, permission=options['permission'])
print("Done!")
else:
print("Would have removed %s's %s rights on %s" % (email, options['permission'],
profile.realm.string_id))
else:
raise CommandError("User did not have permission for this realm!")
| [
"ArgumentParser",
"Any",
"Any"
] | [
444,
1503,
1519
] | [
458,
1506,
1522
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/list_realms.py |
import sys
from argparse import ArgumentParser
from typing import Any
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Realm
class Command(ZulipBaseCommand):
help = """List realms in the server and it's configuration settings(optional).
Usage examples:
./manage.py list_realms
./manage.py list_realms --all"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument("--all",
dest="all",
action="store_true",
default=False,
help="Print all the configuration settings of the realms.")
def handle(self, *args: Any, **options: Any) -> None:
realms = Realm.objects.all()
outer_format = "%-5s %-40s %-40s"
inner_format = "%-40s %s"
deactivated = False
if not options["all"]:
print(outer_format % ("id", "string_id", "name"))
print(outer_format % ("--", "---------", "----"))
for realm in realms:
if realm.deactivated:
print(self.style.ERROR(outer_format % (realm.id, realm.string_id, realm.name)))
deactivated = True
else:
print(outer_format % (realm.id, realm.string_id, realm.name))
if deactivated:
print(self.style.WARNING("\nRed rows represent deactivated realms."))
sys.exit(0)
# The remaining code path is the --all case.
identifier_attributes = ["id", "name", "string_id"]
for realm in realms:
# Start with just all the fields on the object, which is
# hacky but doesn't require any work to maintain.
realm_dict = realm.__dict__
# Remove a field that is confusingly useless
del realm_dict['_state']
# Fix the one bitfield to display useful data
realm_dict['authentication_methods'] = str(realm.authentication_methods_dict())
for key in identifier_attributes:
if realm.deactivated:
print(self.style.ERROR(inner_format % (key, realm_dict[key])))
deactivated = True
else:
print(inner_format % (key, realm_dict[key]))
            for key, value in sorted(realm_dict.items()):
if key not in identifier_attributes:
if realm.deactivated:
print(self.style.ERROR(inner_format % (key, value)))
else:
print(inner_format % (key, value))
print("-" * 80)
if deactivated:
print(self.style.WARNING("\nRed is used to highlight deactivated realms."))
| [
"ArgumentParser",
"Any",
"Any"
] | [
384,
695,
711
] | [
398,
698,
714
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | zerver/management/commands/logout_all_users.py |
from argparse import ArgumentParser
from typing import Any
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.sessions import delete_all_deactivated_user_sessions, \
delete_all_user_sessions, delete_realm_user_sessions
class Command(ZulipBaseCommand):
help = "Log out all users."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--deactivated-only',
action='store_true',
default=False,
help="Only logout all users who are deactivated")
self.add_realm_args(parser, help="Only logout all users in a particular realm")
def handle(self, *args: Any, **options: Any) -> None:
realm = self.get_realm(options)
if realm:
delete_realm_user_sessions(realm)
elif options["deactivated_only"]:
delete_all_deactivated_user_sessions()
else:
delete_all_user_sessions()
| [
"ArgumentParser",
"Any",
"Any"
] | [
344,
706,
722
] | [
358,
709,
725
] |