zip | filename | contents | type_annotations | type_annotation_starts | type_annotation_ends
---|---|---|---|---|---|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/client_activity.py | import datetime
from argparse import ArgumentParser
from typing import Any
from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserActivity
class Command(ZulipBaseCommand):
help = """Report rough client activity globally, for a realm, or for a user
Usage examples:
./manage.py client_activity --target server
./manage.py client_activity --target realm --realm zulip
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--target', dest='target', required=True, type=str,
help="'server' will calculate client activity of the entire server. "
"'realm' will calculate client activity of realm. "
"'user' will calculate client activity of the user.")
parser.add_argument('--user', dest='user', type=str,
help="The email address of the user you want to calculate activity.")
self.add_realm_args(parser)
def compute_activity(self, user_activity_objects: QuerySet) -> None:
# Report data from the past week.
#
# This is a rough report of client activity because we inconsistently
# register activity from various clients; think of it as telling you
# approximately how many people from a group have used a particular
# client recently. For example, this might be useful to get a sense of
# how popular different versions of a desktop client are.
#
# Importantly, this does NOT tell you anything about the relative
# volumes of requests from clients.
threshold = timezone_now() - datetime.timedelta(days=7)
client_counts = user_activity_objects.filter(
last_visit__gt=threshold).values("client__name").annotate(
count=Count('client__name'))
total = 0
counts = []
for client_type in client_counts:
count = client_type["count"]
client = client_type["client__name"]
total += count
counts.append((count, client))
counts.sort()
for count in counts:
print("%25s %15d" % (count[1], count[0]))
print("Total:", total)
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
if options["user"] is None:
if options["target"] == "server" and realm is None:
# Report global activity.
self.compute_activity(UserActivity.objects.all())
elif options["target"] == "realm" and realm is not None:
self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
else:
self.print_help("./manage.py", "client_activity")
elif options["target"] == "user":
user_profile = self.get_user(options["user"], realm)
self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
else:
self.print_help("./manage.py", "client_activity")
| [
"ArgumentParser",
"QuerySet",
"Any",
"str"
] | [
619,
1244,
2457,
2473
] | [
633,
1252,
2460,
2476
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/populate_analytics_db.py |
from datetime import datetime, timedelta
from typing import Any, Dict, List, Mapping, Optional, Type, Union
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from analytics.lib.counts import COUNT_STATS, \
CountStat, do_drop_all_analytics_tables
from analytics.lib.fixtures import generate_time_series_data
from analytics.lib.time_utils import time_range
from analytics.models import BaseCount, FillState, RealmCount, UserCount, \
StreamCount, InstallationCount
from zerver.lib.actions import do_change_is_admin
from zerver.lib.timestamp import floor_to_day
from zerver.models import Realm, UserProfile, Stream, Message, Client, \
RealmAuditLog, Recipient
class Command(BaseCommand):
help = """Populates analytics tables with randomly generated data."""
DAYS_OF_DATA = 100
random_seed = 26
def create_user(self, email: str,
full_name: str,
is_staff: bool,
date_joined: datetime,
realm: Realm) -> UserProfile:
user = UserProfile.objects.create(
email=email, full_name=full_name, is_staff=is_staff,
realm=realm, short_name=full_name, pointer=-1, last_pointer_updater='none',
api_key='42', date_joined=date_joined)
RealmAuditLog.objects.create(
realm=realm, modified_user=user, event_type=RealmAuditLog.USER_CREATED,
event_time=user.date_joined)
return user
def generate_fixture_data(self, stat: CountStat, business_hours_base: float,
non_business_hours_base: float, growth: float,
autocorrelation: float, spikiness: float,
holiday_rate: float=0, partial_sum: bool=False) -> List[int]:
self.random_seed += 1
return generate_time_series_data(
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
non_business_hours_base=non_business_hours_base, growth=growth,
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
def handle(self, *args: Any, **options: Any) -> None:
# TODO: This should arguably only delete the objects
# associated with the "analytics" realm.
do_drop_all_analytics_tables()
# This also deletes any objects with this realm as a foreign key
Realm.objects.filter(string_id='analytics').delete()
# Because we just deleted a bunch of objects in the database
# directly (rather than deleting individual objects in Django,
# in which case our post_save hooks would have flushed the
# individual objects from memcached for us), we need to flush
# memcached in order to ensure deleted objects aren't still
# present in the memcached cache.
from zerver.apps import flush_cache
flush_cache(None)
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
last_end_time = floor_to_day(timezone_now())
realm = Realm.objects.create(
string_id='analytics', name='Analytics', date_created=installation_time)
shylock = self.create_user('shylock@analytics.ds', 'Shylock', True, installation_time, realm)
do_change_is_admin(shylock, True)
stream = Stream.objects.create(
name='all', realm=realm, date_created=installation_time)
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
def insert_fixture_data(stat: CountStat,
fixture_data: Mapping[Optional[str], List[int]],
table: Type[BaseCount]) -> None:
end_times = time_range(last_end_time, last_end_time, stat.frequency,
len(list(fixture_data.values())[0]))
if table == InstallationCount:
id_args = {} # type: Dict[str, Any]
if table == RealmCount:
id_args = {'realm': realm}
if table == UserCount:
id_args = {'realm': realm, 'user': shylock}
if table == StreamCount:
id_args = {'stream': stream, 'realm': realm}
for subgroup, values in fixture_data.items():
table.objects.bulk_create([
table(property=stat.property, subgroup=subgroup, end_time=end_time,
value=value, **id_args)
for end_time, value in zip(end_times, values) if value != 0])
stat = COUNT_STATS['1day_actives::day']
realm_data = {
None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True),
} # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True),
} # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS['realm_active_humans::day']
realm_data = {
None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, 1, .3, 4, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS['active_users_audit:is_bot:day']
realm_data = {
'false': self.generate_fixture_data(stat, .1, .03, 3.5, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
'false': self.generate_fixture_data(stat, 1, .3, 6, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS['messages_sent:is_bot:hour']
user_data = {'false': self.generate_fixture_data(
stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)} # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, user_data, UserCount)
realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {'false': self.generate_fixture_data(stat, 350, 150, 6, .6, 4),
'true': self.generate_fixture_data(stat, 150, 150, 3, .4, 2)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS['messages_sent:message_type:day']
user_data = {
'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8),
'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4),
'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
'public_stream': self.generate_fixture_data(stat, 300, 80, 5, .6, 4),
'private_stream': self.generate_fixture_data(stat, 70, 70, 5, .6, 4),
'private_message': self.generate_fixture_data(stat, 130, 50, 5, .6, 4),
'huddle_message': self.generate_fixture_data(stat, 60, 30, 3, .6, 4)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
website, created = Client.objects.get_or_create(name='website')
old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7')
android, created = Client.objects.get_or_create(name='ZulipAndroid')
iOS, created = Client.objects.get_or_create(name='ZulipiOS')
react_native, created = Client.objects.get_or_create(name='ZulipMobile')
API, created = Client.objects.get_or_create(name='API: Python')
zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror')
unused, created = Client.objects.get_or_create(name='unused')
long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook')
stat = COUNT_STATS['messages_sent:client:day']
user_data = {
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
website.id: self.generate_fixture_data(stat, 300, 200, 5, .6, 3),
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, .6, 3),
android.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
API.id: self.generate_fixture_data(stat, 50, 50, 5, .6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, .6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS['messages_in_stream:is_bot:day']
realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4),
'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
stream_data = {'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4),
'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2)} # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, stream_data, StreamCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
| [
"str",
"str",
"bool",
"datetime",
"Realm",
"CountStat",
"float",
"float",
"float",
"float",
"float",
"Any",
"Any",
"CountStat",
"Mapping[Optional[str], List[int]]",
"Type[BaseCount]"
] | [
909,
945,
980,
1019,
1056,
1552,
1584,
1646,
1661,
1715,
1733,
2272,
2288,
3667,
3724,
3798
] | [
912,
948,
984,
1027,
1061,
1561,
1589,
1651,
1666,
1720,
1738,
2275,
2291,
3676,
3757,
3813
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/realm_stats.py | import datetime
from argparse import ArgumentParser
from typing import Any, List
import pytz
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils.timezone import now as timezone_now
from zerver.models import Message, Realm, Recipient, Stream, \
Subscription, UserActivity, UserMessage, UserProfile, get_realm
MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
class Command(BaseCommand):
help = "Generate statistics on realm activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def active_users(self, realm: Realm) -> List[UserProfile]:
# Has been active (on the website, for now) in the last 7 days.
activity_cutoff = timezone_now() - datetime.timedelta(days=7)
return [activity.user_profile for activity in (
UserActivity.objects.filter(user_profile__realm=realm,
user_profile__is_active=True,
last_visit__gt=activity_cutoff,
query="/json/users/me/pointer",
client__name="website"))]
def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()
def total_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
def human_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
def api_messages(self, realm: Realm, days_ago: int) -> int:
return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))
def stream_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
recipient__type=Recipient.STREAM).count()
def private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()
def group_private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()
def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
if not denominator:
fraction = 0.0
else:
fraction = numerator / float(denominator)
print("%.2f%% of" % (fraction * 100,), text)
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
active_users = self.active_users(realm)
num_active = len(active_users)
print("%d active users (%d total)" % (num_active, len(user_profiles)))
streams = Stream.objects.filter(realm=realm).extra(
tables=['zerver_subscription', 'zerver_recipient'],
where=['zerver_subscription.recipient_id = zerver_recipient.id',
'zerver_recipient.type = 2',
'zerver_recipient.type_id = zerver_stream.id',
'zerver_subscription.active = true']).annotate(count=Count("name"))
print("%d streams" % (streams.count(),))
for days_ago in (1, 7, 30):
print("In last %d days, users sent:" % (days_ago,))
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
for quantity in sorted(sender_quantities, reverse=True):
print(quantity, end=' ')
print("")
print("%d stream messages" % (self.stream_messages(realm, days_ago),))
print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
print("%d group private messages" % (self.group_private_messages(realm, days_ago),))
num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
self.report_percentage(num_notifications_enabled, num_active,
"active users have desktop notifications enabled")
num_enter_sends = len([x for x in active_users if x.enter_sends])
self.report_percentage(num_enter_sends, num_active,
"active users have enter-sends")
all_message_count = human_messages.filter(sender__realm=realm).count()
multi_paragraph_message_count = human_messages.filter(
sender__realm=realm, content__contains="\n\n").count()
self.report_percentage(multi_paragraph_message_count, all_message_count,
"all messages are multi-paragraph")
# Starred messages
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
flags=UserMessage.flags.starred).values(
"user_profile").annotate(count=Count("user_profile"))
print("%d users have starred %d messages" % (
len(starrers), sum([elt["count"] for elt in starrers])))
active_user_subs = Subscription.objects.filter(
user_profile__in=user_profiles, active=True)
# Streams not in home view
non_home_view = active_user_subs.filter(in_home_view=False).values(
"user_profile").annotate(count=Count("user_profile"))
print("%d users have %d streams not in home view" % (
len(non_home_view), sum([elt["count"] for elt in non_home_view])))
# Code block markup
markup_messages = human_messages.filter(
sender__realm=realm, content__contains="~~~").values(
"sender").annotate(count=Count("sender"))
print("%d users have used code block markup on %s messages" % (
len(markup_messages), sum([elt["count"] for elt in markup_messages])))
# Notifications for stream messages
notifications = active_user_subs.filter(desktop_notifications=True).values(
"user_profile").annotate(count=Count("user_profile"))
print("%d users receive desktop notifications for %d streams" % (
len(notifications), sum([elt["count"] for elt in notifications])))
print("")
| [
"ArgumentParser",
"Realm",
"UserProfile",
"int",
"Realm",
"int",
"Realm",
"int",
"Realm",
"int",
"Realm",
"int",
"Realm",
"int",
"Realm",
"int",
"float",
"float",
"str",
"Any",
"Any"
] | [
664,
871,
1483,
1506,
1723,
1740,
1966,
1983,
2206,
2223,
2367,
2384,
2682,
2699,
3029,
3046,
3377,
3397,
3410,
3629,
3645
] | [
678,
876,
1494,
1509,
1728,
1743,
1971,
1986,
2211,
2226,
2372,
2387,
2687,
2702,
3034,
3049,
3382,
3402,
3413,
3632,
3648
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/stream_stats.py | from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand
from django.db.models import Q
from zerver.models import Message, Realm, \
Recipient, Stream, Subscription, get_realm
class Command(BaseCommand):
help = "Generate statistics on the streams for a realm."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def handle(self, *args: Any, **options: str) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
print("------------")
print("%25s %15s %10s" % ("stream", "subscribers", "messages"))
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
invite_only_count = 0
for stream in streams:
if stream.invite_only:
invite_only_count += 1
continue
print("%25s" % (stream.name,), end=' ')
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
print("%10d" % (len(Subscription.objects.filter(recipient=recipient,
active=True)),), end=' ')
num_messages = len(Message.objects.filter(recipient=recipient))
print("%12d" % (num_messages,))
print("%d private streams" % (invite_only_count,))
print("")
| [
"ArgumentParser",
"Any",
"str"
] | [
362,
563,
579
] | [
376,
566,
582
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/update_analytics_counts.py | import os
import time
from argparse import ArgumentParser
from typing import Any, Dict
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.dateparse import parse_datetime
from django.utils.timezone import now as timezone_now
from django.utils.timezone import utc as timezone_utc
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
from scripts.lib.zulip_tools import ENDC, WARNING
from zerver.lib.timestamp import floor_to_hour
from zerver.models import Realm
class Command(BaseCommand):
help = """Fills Analytics tables.
Run as a cron job that runs every hour."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--time', '-t',
type=str,
help='Update stat tables from current state to'
'--time. Defaults to the current time.',
default=timezone_now().isoformat())
parser.add_argument('--utc',
action='store_true',
help="Interpret --time in UTC.",
default=False)
parser.add_argument('--stat', '-s',
type=str,
help="CountStat to process. If omitted, all stats are processed.")
parser.add_argument('--verbose',
action='store_true',
help="Print timing information to stdout.",
default=False)
def handle(self, *args: Any, **options: Any) -> None:
try:
os.mkdir(settings.ANALYTICS_LOCK_DIR)
except OSError:
print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
return
try:
self.run_update_analytics_counts(options)
finally:
os.rmdir(settings.ANALYTICS_LOCK_DIR)
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
# installation_epoch relies on there being at least one realm; we
# shouldn't run the analytics code if that condition isn't satisfied
if not Realm.objects.exists():
logger.info("No realms, stopping update_analytics_counts")
return
fill_to_time = parse_datetime(options['time'])
if options['utc']:
fill_to_time = fill_to_time.replace(tzinfo=timezone_utc)
if fill_to_time.tzinfo is None:
raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone_utc))
if options['stat'] is not None:
stats = [COUNT_STATS[options['stat']]]
else:
stats = list(COUNT_STATS.values())
logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
if options['verbose']:
start = time.time()
last = start
for stat in stats:
process_count_stat(stat, fill_to_time)
if options['verbose']:
print("Updated %s in %.3fs" % (stat.property, time.time() - last))
last = time.time()
if options['verbose']:
print("Finished updating analytics counts through %s in %.3fs" %
(fill_to_time, time.time() - start))
logger.info("Finished updating analytics counts through %s" % (fill_to_time,))
| [
"ArgumentParser",
"Any",
"Any",
"Dict[str, Any]"
] | [
686,
1608,
1624,
2015
] | [
700,
1611,
1627,
2029
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/management/commands/user_stats.py | import datetime
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from zerver.models import Message, Realm, Stream, UserProfile, get_realm
class Command(BaseCommand):
help = "Generate statistics on user activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def messages_sent_by(self, user: UserProfile, week: int) -> int:
start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
end = timezone_now() - datetime.timedelta(days=week*7)
return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
print("%d users" % (len(user_profiles),))
print("%d streams" % (len(Stream.objects.filter(realm=realm)),))
for user_profile in user_profiles:
print("%35s" % (user_profile.email,), end=' ')
for week in range(10):
print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
print("")
| [
"ArgumentParser",
"UserProfile",
"int",
"Any",
"Any"
] | [
373,
583,
602,
876,
892
] | [
387,
594,
605,
879,
895
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0001_initial.py | # -*- coding: utf-8 -*-
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import zerver.lib.str_utils
class Migration(migrations.Migration):
dependencies = [
('zerver', '0030_realm_org_type'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Anomaly',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('info', models.CharField(max_length=1000)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name='HuddleCount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name='InstallationCount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name='RealmCount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name='StreamCount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name='UserCount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='usercount',
unique_together=set([('user', 'property', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='streamcount',
unique_together=set([('stream', 'property', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='realmcount',
unique_together=set([('realm', 'property', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='installationcount',
unique_together=set([('property', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='huddlecount',
unique_together=set([('huddle', 'property', 'end_time', 'interval')]),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0002_remove_huddlecount.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0001_initial'),
]
operations = [
migrations.AlterUniqueTogether(
name='huddlecount',
unique_together=set([]),
),
migrations.RemoveField(
model_name='huddlecount',
name='anomaly',
),
migrations.RemoveField(
model_name='huddlecount',
name='huddle',
),
migrations.RemoveField(
model_name='huddlecount',
name='user',
),
migrations.DeleteModel(
name='HuddleCount',
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0003_fillstate.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
import zerver.lib.str_utils
class Migration(migrations.Migration):
dependencies = [
('analytics', '0002_remove_huddlecount'),
]
operations = [
migrations.CreateModel(
name='FillState',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('property', models.CharField(unique=True, max_length=40)),
('end_time', models.DateTimeField()),
('state', models.PositiveSmallIntegerField()),
('last_modified', models.DateTimeField(auto_now=True)),
],
bases=(models.Model,),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0004_add_subgroup.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0003_fillstate'),
]
operations = [
migrations.AddField(
model_name='installationcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name='realmcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name='streamcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name='usercount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0005_alter_field_size.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0004_add_subgroup'),
]
operations = [
migrations.AlterField(
model_name='installationcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name='installationcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name='realmcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name='realmcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name='streamcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name='streamcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name='usercount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name='usercount',
name='property',
field=models.CharField(max_length=32),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0006_add_subgroup_to_unique_constraints.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0005_alter_field_size'),
]
operations = [
migrations.AlterUniqueTogether(
name='installationcount',
unique_together=set([('property', 'subgroup', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='realmcount',
unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='streamcount',
unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]),
),
migrations.AlterUniqueTogether(
name='usercount',
unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0007_remove_interval.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-16 20:50
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('analytics', '0006_add_subgroup_to_unique_constraints'),
]
operations = [
migrations.AlterUniqueTogether(
name='installationcount',
unique_together=set([('property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='installationcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='realmcount',
unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='realmcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='streamcount',
unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='streamcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='usercount',
unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='usercount',
name='interval',
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0008_add_count_indexes.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-01 22:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0050_userprofile_avatar_version'),
('analytics', '0007_remove_interval'),
]
operations = [
migrations.AlterIndexTogether(
name='realmcount',
index_together=set([('property', 'end_time')]),
),
migrations.AlterIndexTogether(
name='streamcount',
index_together=set([('property', 'realm', 'end_time')]),
),
migrations.AlterIndexTogether(
name='usercount',
index_together=set([('property', 'realm', 'end_time')]),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0009_remove_messages_to_stream_stat.py | # -*- coding: utf-8 -*-
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
property = 'messages_sent_to_stream:is_bot'
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [
('analytics', '0008_add_count_indexes'),
]
operations = [
migrations.RunPython(delete_messages_sent_to_stream_stat),
]
| [
"StateApps",
"DatabaseSchemaEditor"
] | [
232,
258
] | [
241,
278
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0010_clear_messages_sent_values.py | # -*- coding: utf-8 -*-
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
property = 'messages_sent:message_type:day'
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]
operations = [
migrations.RunPython(clear_message_sent_by_message_type_values),
]
| [
"StateApps",
"DatabaseSchemaEditor"
] | [
238,
264
] | [
247,
284
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0011_clear_analytics_tables.py | # -*- coding: utf-8 -*-
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
UserCount.objects.all().delete()
StreamCount.objects.all().delete()
RealmCount.objects.all().delete()
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('analytics', '0010_clear_messages_sent_values'),
]
operations = [
migrations.RunPython(clear_analytics_tables),
]
| [
"StateApps",
"DatabaseSchemaEditor"
] | [
219,
245
] | [
228,
265
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/0012_add_on_delete.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-01-29 08:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('analytics', '0011_clear_analytics_tables'),
]
operations = [
migrations.AlterField(
model_name='installationcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name='realmcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name='streamcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name='usercount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/migrations/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/models.py | import datetime
from typing import Any, Dict, Optional, Tuple, Union
from django.db import models
from zerver.lib.timestamp import floor_to_day
from zerver.models import Realm, Recipient, Stream, UserProfile
class FillState(models.Model):
property = models.CharField(max_length=40, unique=True) # type: str
end_time = models.DateTimeField() # type: datetime.datetime
# Valid states are {DONE, STARTED}
DONE = 1
STARTED = 2
state = models.PositiveSmallIntegerField() # type: int
last_modified = models.DateTimeField(auto_now=True) # type: datetime.datetime
def __str__(self) -> str:
return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state)
# The earliest/starting end_time in FillState
# We assume there is at least one realm
def installation_epoch() -> datetime.datetime:
earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
return floor_to_day(earliest_realm_creation)
def last_successful_fill(property: str) -> Optional[datetime.datetime]:
fillstate = FillState.objects.filter(property=property).first()
if fillstate is None:
return None
if fillstate.state == FillState.DONE:
return fillstate.end_time
return fillstate.end_time - datetime.timedelta(hours=1)
# would only ever make entries here by hand
class Anomaly(models.Model):
info = models.CharField(max_length=1000) # type: str
def __str__(self) -> str:
return "<Anomaly: %s... %s>" % (self.info, self.id)
class BaseCount(models.Model):
# Note: When inheriting from BaseCount, you may want to rearrange
# the order of the columns in the migration to make sure they
# match how you'd like the table to be arranged.
property = models.CharField(max_length=32) # type: str
subgroup = models.CharField(max_length=16, null=True) # type: Optional[str]
end_time = models.DateTimeField() # type: datetime.datetime
value = models.BigIntegerField() # type: int
anomaly = models.ForeignKey(Anomaly, on_delete=models.SET_NULL, null=True) # type: Optional[Anomaly]
class Meta:
abstract = True
class InstallationCount(BaseCount):
class Meta:
unique_together = ("property", "subgroup", "end_time")
def __str__(self) -> str:
return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)
class RealmCount(BaseCount):
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
class Meta:
unique_together = ("realm", "property", "subgroup", "end_time")
index_together = ["property", "end_time"]
def __str__(self) -> str:
return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)
class UserCount(BaseCount):
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
class Meta:
unique_together = ("user", "property", "subgroup", "end_time")
# This index dramatically improves the performance of
# aggregating from users to realms
index_together = ["property", "realm", "end_time"]
def __str__(self) -> str:
return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)
class StreamCount(BaseCount):
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
class Meta:
unique_together = ("stream", "property", "subgroup", "end_time")
# This index dramatically improves the performance of
# aggregating from streams to realms
index_together = ["property", "realm", "end_time"]
def __str__(self) -> str:
return "<StreamCount: %s %s %s %s %s>" % (
self.stream, self.property, self.subgroup, self.value, self.id)
| [
"str"
] | [
1030
] | [
1033
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/tests/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/tests/test_counts.py |
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Tuple, Type, Union
import ujson
from django.apps import apps
from django.db import models
from django.db.models import Sum
from django.test import TestCase
from django.utils.timezone import now as timezone_now
from django.utils.timezone import utc as timezone_utc
from analytics.lib.counts import COUNT_STATS, CountStat, DataCollector, \
DependentCountStat, LoggingCountStat, do_aggregate_to_summary_table, \
do_drop_all_analytics_tables, do_drop_single_stat, \
do_fill_count_stat_at_hour, do_increment_logging_stat, \
process_count_stat, sql_data_collector
from analytics.models import Anomaly, BaseCount, \
FillState, InstallationCount, RealmCount, StreamCount, \
UserCount, installation_epoch, last_successful_fill
from zerver.lib.actions import do_activate_user, do_create_user, \
do_deactivate_user, do_reactivate_user, update_user_activity_interval, \
do_invite_users, do_revoke_user_invite, do_resend_user_invite_email, \
InvitationError
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day
from zerver.lib.topic import DB_TOPIC_NAME
from zerver.models import Client, Huddle, Message, Realm, \
RealmAuditLog, Recipient, Stream, UserActivityInterval, \
UserProfile, get_client, get_user, PreregistrationUser
class AnalyticsTestCase(TestCase):
MINUTE = timedelta(seconds = 60)
HOUR = MINUTE * 60
DAY = HOUR * 24
TIME_ZERO = datetime(1988, 3, 14).replace(tzinfo=timezone_utc)
TIME_LAST_HOUR = TIME_ZERO - HOUR
def setUp(self) -> None:
self.default_realm = Realm.objects.create(
string_id='realmtest', name='Realm Test', date_created=self.TIME_ZERO - 2*self.DAY)
# used to generate unique names in self.create_*
self.name_counter = 100
# used as defaults in self.assertCountEquals
self.current_property = None # type: Optional[str]
# Lightweight creation of users, streams, and messages
def create_user(self, **kwargs: Any) -> UserProfile:
self.name_counter += 1
defaults = {
'email': 'user%s@domain.tld' % (self.name_counter,),
'date_joined': self.TIME_LAST_HOUR,
'full_name': 'full_name',
'short_name': 'short_name',
'pointer': -1,
'last_pointer_updater': 'seems unused?',
'realm': self.default_realm,
'api_key': '42'}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
return UserProfile.objects.create(**kwargs)
def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
self.name_counter += 1
defaults = {'name': 'stream name %s' % (self.name_counter,),
'realm': self.default_realm,
'date_created': self.TIME_LAST_HOUR}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
stream = Stream.objects.create(**kwargs)
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream, recipient
def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[Huddle, Recipient]:
self.name_counter += 1
defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
huddle = Huddle.objects.create(**kwargs)
recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
return huddle, recipient
def create_message(self, sender: UserProfile, recipient: Recipient, **kwargs: Any) -> Message:
defaults = {
'sender': sender,
'recipient': recipient,
DB_TOPIC_NAME: 'subject',
'content': 'hi',
'pub_date': self.TIME_LAST_HOUR,
'sending_client': get_client("website")}
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
return Message.objects.create(**kwargs)
# kwargs should only ever be a UserProfile or Stream.
def assertCountEquals(self, table: Type[BaseCount], value: int, property: Optional[str]=None,
subgroup: Optional[str]=None, end_time: datetime=TIME_ZERO,
realm: Optional[Realm]=None, **kwargs: models.Model) -> None:
if property is None:
property = self.current_property
queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs)
if table is not InstallationCount:
if realm is None:
realm = self.default_realm
queryset = queryset.filter(realm=realm)
if subgroup is not None:
queryset = queryset.filter(subgroup=subgroup)
self.assertEqual(queryset.values_list('value', flat=True)[0], value)
def assertTableState(self, table: Type[BaseCount], arg_keys: List[str],
arg_values: List[List[object]]) -> None:
"""Assert that the state of a *Count table is what it should be.
Example usage:
self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
[['p1', 4], ['p2', 10, self.alt_realm]])
table -- A *Count table.
arg_keys -- List of columns of <table>.
arg_values -- List of "rows" of <table>.
Each entry of arg_values (e.g. ['p1', 4]) represents a row of <table>.
The i'th value of the entry corresponds to the i'th arg_key, so e.g.
the first arg_values entry here corresponds to a row of RealmCount
with property='p1' and subgroup=10.
Any columns not specified (in this case, every column of RealmCount
other than property and subgroup) are either set to default values,
or are ignored.
The function checks that every entry of arg_values matches exactly one
row of <table>, and that no additional rows exist. Note that this means
checking a table with duplicate rows is not supported.
"""
defaults = {
'property': self.current_property,
'subgroup': None,
'end_time': self.TIME_ZERO,
'value': 1}
for values in arg_values:
kwargs = {} # type: Dict[str, Any]
for i in range(len(values)):
kwargs[arg_keys[i]] = values[i]
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
if table is not InstallationCount:
if 'realm' not in kwargs:
if 'user' in kwargs:
kwargs['realm'] = kwargs['user'].realm
elif 'stream' in kwargs:
kwargs['realm'] = kwargs['stream'].realm
else:
kwargs['realm'] = self.default_realm
self.assertEqual(table.objects.filter(**kwargs).count(), 1)
self.assertEqual(table.objects.count(), len(arg_values))
class TestProcessCountStat(AnalyticsTestCase):
def make_dummy_count_stat(self, property: str) -> CountStat:
query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, property)
return CountStat(property, sql_data_collector(RealmCount, query, None), CountStat.HOUR)
def assertFillStateEquals(self, stat: CountStat, end_time: datetime,
state: int=FillState.DONE) -> None:
fill_state = FillState.objects.filter(property=stat.property).first()
self.assertEqual(fill_state.end_time, end_time)
self.assertEqual(fill_state.state, state)
def test_process_stat(self) -> None:
# process new stat
current_time = installation_epoch() + self.HOUR
stat = self.make_dummy_count_stat('test stat')
process_count_stat(stat, current_time)
self.assertFillStateEquals(stat, current_time)
self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)
# dirty stat
FillState.objects.filter(property=stat.property).update(state=FillState.STARTED)
process_count_stat(stat, current_time)
self.assertFillStateEquals(stat, current_time)
self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)
# clean stat, no update
process_count_stat(stat, current_time)
self.assertFillStateEquals(stat, current_time)
self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)
# clean stat, with update
current_time = current_time + self.HOUR
stat = self.make_dummy_count_stat('test stat')
process_count_stat(stat, current_time)
self.assertFillStateEquals(stat, current_time)
self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 2)
def test_bad_fill_to_time(self) -> None:
stat = self.make_dummy_count_stat('test stat')
with self.assertRaises(ValueError):
process_count_stat(stat, installation_epoch() + 65*self.MINUTE)
with self.assertRaises(TimezoneNotUTCException):
process_count_stat(stat, installation_epoch().replace(tzinfo=None))
# This tests the LoggingCountStat branch of the code in do_delete_counts_at_hour.
# It is important that do_delete_counts_at_hour not delete any of the collected
# logging data!
def test_process_logging_stat(self) -> None:
end_time = self.TIME_ZERO
user_stat = LoggingCountStat('user stat', UserCount, CountStat.DAY)
stream_stat = LoggingCountStat('stream stat', StreamCount, CountStat.DAY)
realm_stat = LoggingCountStat('realm stat', RealmCount, CountStat.DAY)
user = self.create_user()
stream = self.create_stream_with_recipient()[0]
realm = self.default_realm
UserCount.objects.create(
user=user, realm=realm, property=user_stat.property, end_time=end_time, value=5)
StreamCount.objects.create(
stream=stream, realm=realm, property=stream_stat.property, end_time=end_time, value=5)
RealmCount.objects.create(
realm=realm, property=realm_stat.property, end_time=end_time, value=5)
# Normal run of process_count_stat
for stat in [user_stat, stream_stat, realm_stat]:
process_count_stat(stat, end_time)
self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 5]])
self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 5]])
self.assertTableState(RealmCount, ['property', 'value'],
[[user_stat.property, 5],
[stream_stat.property, 5],
[realm_stat.property, 5]])
self.assertTableState(InstallationCount, ['property', 'value'],
[[user_stat.property, 5],
[stream_stat.property, 5],
[realm_stat.property, 5]])
# Change the logged data and mark FillState as dirty
UserCount.objects.update(value=6)
StreamCount.objects.update(value=6)
RealmCount.objects.filter(property=realm_stat.property).update(value=6)
FillState.objects.update(state=FillState.STARTED)
# Check that the change propagated (and the collected data wasn't deleted)
for stat in [user_stat, stream_stat, realm_stat]:
process_count_stat(stat, end_time)
self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 6]])
self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 6]])
self.assertTableState(RealmCount, ['property', 'value'],
[[user_stat.property, 6],
[stream_stat.property, 6],
[realm_stat.property, 6]])
self.assertTableState(InstallationCount, ['property', 'value'],
[[user_stat.property, 6],
[stream_stat.property, 6],
[realm_stat.property, 6]])
def test_process_dependent_stat(self) -> None:
stat1 = self.make_dummy_count_stat('stat1')
stat2 = self.make_dummy_count_stat('stat2')
query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, 'stat3')
stat3 = DependentCountStat('stat3', sql_data_collector(RealmCount, query, None),
CountStat.HOUR,
dependencies=['stat1', 'stat2'])
hour = [installation_epoch() + i*self.HOUR for i in range(5)]
# test when one dependency has been run, and the other hasn't
process_count_stat(stat1, hour[2])
process_count_stat(stat3, hour[1])
self.assertTableState(InstallationCount, ['property', 'end_time'],
[['stat1', hour[1]], ['stat1', hour[2]]])
self.assertFillStateEquals(stat3, hour[0])
# test that we don't fill past the fill_to_time argument, even if
# dependencies have later last_successful_fill
process_count_stat(stat2, hour[3])
process_count_stat(stat3, hour[1])
self.assertTableState(InstallationCount, ['property', 'end_time'],
[['stat1', hour[1]], ['stat1', hour[2]],
['stat2', hour[1]], ['stat2', hour[2]], ['stat2', hour[3]],
['stat3', hour[1]]])
self.assertFillStateEquals(stat3, hour[1])
# test that we don't fill past the dependency last_successful_fill times,
# even if fill_to_time is later
process_count_stat(stat3, hour[4])
self.assertTableState(InstallationCount, ['property', 'end_time'],
[['stat1', hour[1]], ['stat1', hour[2]],
['stat2', hour[1]], ['stat2', hour[2]], ['stat2', hour[3]],
['stat3', hour[1]], ['stat3', hour[2]]])
self.assertFillStateEquals(stat3, hour[2])
# test daily dependent stat with hourly dependencies
query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, 'stat4')
stat4 = DependentCountStat('stat4', sql_data_collector(RealmCount, query, None),
CountStat.DAY,
dependencies=['stat1', 'stat2'])
hour24 = installation_epoch() + 24*self.HOUR
hour25 = installation_epoch() + 25*self.HOUR
process_count_stat(stat1, hour25)
process_count_stat(stat2, hour25)
process_count_stat(stat4, hour25)
self.assertEqual(InstallationCount.objects.filter(property='stat4').count(), 1)
self.assertFillStateEquals(stat4, hour24)
class TestCountStats(AnalyticsTestCase):
def setUp(self) -> None:
super().setUp()
# This tests two things for each of the queries/CountStats: Handling
# more than 1 realm, and the time bounds (time_start and time_end in
# the queries).
self.second_realm = Realm.objects.create(
string_id='second-realm', name='Second Realm',
date_created=self.TIME_ZERO-2*self.DAY)
for minutes_ago in [0, 1, 61, 60*24+1]:
creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
user = self.create_user(email='user-%s@second.analytics' % (minutes_ago,),
realm=self.second_realm, date_joined=creation_time)
recipient = self.create_stream_with_recipient(
name='stream %s' % (minutes_ago,), realm=self.second_realm,
date_created=creation_time)[1]
self.create_message(user, recipient, pub_date=creation_time)
self.hourly_user = get_user('user-1@second.analytics', self.second_realm)
self.daily_user = get_user('user-61@second.analytics', self.second_realm)
# This realm should not show up in the *Count tables for any of the
# messages_* CountStats
self.no_message_realm = Realm.objects.create(
string_id='no-message-realm', name='No Message Realm',
date_created=self.TIME_ZERO-2*self.DAY)
self.create_user(realm=self.no_message_realm)
self.create_stream_with_recipient(realm=self.no_message_realm)
# This huddle should not show up anywhere
self.create_huddle_with_recipient()
def test_active_users_by_is_bot(self) -> None:
stat = COUNT_STATS['active_users:is_bot:day']
self.current_property = stat.property
# To be included
self.create_user(is_bot=True)
self.create_user(is_bot=True, date_joined=self.TIME_ZERO-25*self.HOUR)
self.create_user(is_bot=False)
# To be excluded
self.create_user(is_active=False)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, 'true'], [1, 'false'],
[3, 'false', self.second_realm],
[1, 'false', self.no_message_realm]])
self.assertTableState(InstallationCount,
['value', 'subgroup'],
[[2, 'true'], [5, 'false']])
self.assertTableState(UserCount, [], [])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_by_is_bot(self) -> None:
stat = COUNT_STATS['messages_sent:is_bot:hour']
self.current_property = stat.property
bot = self.create_user(is_bot=True)
human1 = self.create_user()
human2 = self.create_user()
recipient_human1 = Recipient.objects.create(type_id=human1.id,
type=Recipient.PERSONAL)
recipient_stream = self.create_stream_with_recipient()[1]
recipient_huddle = self.create_huddle_with_recipient()[1]
self.create_message(bot, recipient_human1)
self.create_message(bot, recipient_stream)
self.create_message(bot, recipient_huddle)
self.create_message(human1, recipient_human1)
self.create_message(human2, recipient_human1)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[1, 'false', human1], [1, 'false', human2], [3, 'true', bot],
[1, 'false', self.hourly_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, 'false'], [3, 'true'], [1, 'false', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[3, 'false'], [3, 'true']])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_by_message_type(self) -> None:
stat = COUNT_STATS['messages_sent:message_type:day']
self.current_property = stat.property
        # There is currently nothing bot-related in this stat, but so many of
        # the rest of our stats make the human/bot distinction that one can
        # imagine a later refactoring intentionally or unintentionally
        # changing this. So make one of our users a bot.
user1 = self.create_user(is_bot=True)
user2 = self.create_user()
user3 = self.create_user()
# private streams
recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
self.create_message(user1, recipient_stream1)
self.create_message(user2, recipient_stream1)
self.create_message(user2, recipient_stream2)
# public streams
recipient_stream3 = self.create_stream_with_recipient()[1]
recipient_stream4 = self.create_stream_with_recipient()[1]
self.create_message(user1, recipient_stream3)
self.create_message(user1, recipient_stream4)
self.create_message(user2, recipient_stream3)
# huddles
recipient_huddle1 = self.create_huddle_with_recipient()[1]
recipient_huddle2 = self.create_huddle_with_recipient()[1]
self.create_message(user1, recipient_huddle1)
self.create_message(user2, recipient_huddle2)
# private messages
recipient_user1 = Recipient.objects.create(type_id=user1.id, type=Recipient.PERSONAL)
recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
recipient_user3 = Recipient.objects.create(type_id=user3.id, type=Recipient.PERSONAL)
self.create_message(user1, recipient_user2)
self.create_message(user2, recipient_user1)
self.create_message(user3, recipient_user3)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[1, 'private_stream', user1],
[2, 'private_stream', user2],
[2, 'public_stream', user1],
[1, 'public_stream', user2],
[1, 'private_message', user1],
[1, 'private_message', user2],
[1, 'private_message', user3],
[1, 'huddle_message', user1],
[1, 'huddle_message', user2],
[1, 'public_stream', self.hourly_user],
[1, 'public_stream', self.daily_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[3, 'private_stream'], [3, 'public_stream'], [3, 'private_message'],
[2, 'huddle_message'], [2, 'public_stream', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'],
[[3, 'private_stream'], [5, 'public_stream'], [3, 'private_message'],
[2, 'huddle_message']])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_to_recipients_with_same_id(self) -> None:
stat = COUNT_STATS['messages_sent:message_type:day']
self.current_property = stat.property
user = self.create_user(id=1000)
user_recipient = Recipient.objects.create(type_id=user.id, type=Recipient.PERSONAL)
stream_recipient = self.create_stream_with_recipient(id=1000)[1]
huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]
self.create_message(user, user_recipient)
self.create_message(user, stream_recipient)
self.create_message(user, huddle_recipient)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertCountEquals(UserCount, 1, subgroup='private_message')
self.assertCountEquals(UserCount, 1, subgroup='huddle_message')
self.assertCountEquals(UserCount, 1, subgroup='public_stream')
def test_messages_sent_by_client(self) -> None:
stat = COUNT_STATS['messages_sent:client:day']
self.current_property = stat.property
user1 = self.create_user(is_bot=True)
user2 = self.create_user()
recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
recipient_stream = self.create_stream_with_recipient()[1]
recipient_huddle = self.create_huddle_with_recipient()[1]
client2 = Client.objects.create(name='client2')
self.create_message(user1, recipient_user2, sending_client=client2)
self.create_message(user1, recipient_stream)
self.create_message(user1, recipient_huddle)
self.create_message(user2, recipient_user2, sending_client=client2)
self.create_message(user2, recipient_user2, sending_client=client2)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
client2_id = str(client2.id)
website_client_id = str(get_client('website').id) # default for self.create_message
self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
[[2, website_client_id, user1],
[1, client2_id, user1], [2, client2_id, user2],
[1, website_client_id, self.hourly_user],
[1, website_client_id, self.daily_user]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, website_client_id], [3, client2_id],
[2, website_client_id, self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'],
[[4, website_client_id], [3, client2_id]])
self.assertTableState(StreamCount, [], [])
def test_messages_sent_to_stream_by_is_bot(self) -> None:
stat = COUNT_STATS['messages_in_stream:is_bot:day']
self.current_property = stat.property
bot = self.create_user(is_bot=True)
human1 = self.create_user()
human2 = self.create_user()
recipient_human1 = Recipient.objects.create(type_id=human1.id, type=Recipient.PERSONAL)
stream1, recipient_stream1 = self.create_stream_with_recipient()
stream2, recipient_stream2 = self.create_stream_with_recipient()
# To be included
self.create_message(human1, recipient_stream1)
self.create_message(human2, recipient_stream1)
self.create_message(human1, recipient_stream2)
self.create_message(bot, recipient_stream2)
self.create_message(bot, recipient_stream2)
# To be excluded
self.create_message(human2, recipient_human1)
self.create_message(bot, recipient_human1)
recipient_huddle = self.create_huddle_with_recipient()[1]
self.create_message(human1, recipient_huddle)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(StreamCount, ['value', 'subgroup', 'stream'],
[[2, 'false', stream1], [1, 'false', stream2], [2, 'true', stream2],
# "hourly" and "daily" stream, from TestCountStats.setUp
[1, 'false', Stream.objects.get(name='stream 1')],
[1, 'false', Stream.objects.get(name='stream 61')]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[3, 'false'], [2, 'true'], [2, 'false', self.second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[5, 'false'], [2, 'true']])
self.assertTableState(UserCount, [], [])
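    # Helper for the *_actives and minutes_active tests: records a
    # UserActivityInterval running from TIME_ZERO - start_offset to
    # TIME_ZERO - end_offset.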
def create_interval(self, user: UserProfile, start_offset: timedelta,
end_offset: timedelta) -> None:
UserActivityInterval.objects.create(
user_profile=user, start=self.TIME_ZERO-start_offset,
end=self.TIME_ZERO-end_offset)
def test_1day_actives(self) -> None:
stat = COUNT_STATS['1day_actives::day']
self.current_property = stat.property
_1day = 1*self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH
# Outside time range, should not appear. Also tests upper boundary.
user1 = self.create_user()
self.create_interval(user1, _1day + self.DAY, _1day + timedelta(seconds=1))
self.create_interval(user1, timedelta(0), -self.HOUR)
# On lower boundary, should appear
user2 = self.create_user()
self.create_interval(user2, _1day + self.DAY, _1day)
# Multiple intervals, including one outside boundary
user3 = self.create_user()
self.create_interval(user3, 2*self.DAY, 1*self.DAY)
self.create_interval(user3, 20*self.HOUR, 19*self.HOUR)
self.create_interval(user3, 20*self.MINUTE, 19*self.MINUTE)
# Intervals crossing boundary
user4 = self.create_user()
self.create_interval(user4, 1.5*self.DAY, 0.5*self.DAY)
user5 = self.create_user()
self.create_interval(user5, self.MINUTE, -self.MINUTE)
# Interval subsuming time range
user6 = self.create_user()
self.create_interval(user6, 2*self.DAY, -2*self.DAY)
# Second realm
user7 = self.create_user(realm=self.second_realm)
self.create_interval(user7, 20*self.MINUTE, 19*self.MINUTE)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'user'],
[[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]])
self.assertTableState(RealmCount, ['value', 'realm'],
[[5, self.default_realm], [1, self.second_realm]])
self.assertTableState(InstallationCount, ['value'], [[6]])
self.assertTableState(StreamCount, [], [])
def test_15day_actives(self) -> None:
stat = COUNT_STATS['15day_actives::day']
self.current_property = stat.property
_15day = 15*self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH
# Outside time range, should not appear. Also tests upper boundary.
user1 = self.create_user()
self.create_interval(user1, _15day + self.DAY, _15day + timedelta(seconds=1))
self.create_interval(user1, timedelta(0), -self.HOUR)
# On lower boundary, should appear
user2 = self.create_user()
self.create_interval(user2, _15day + self.DAY, _15day)
# Multiple intervals, including one outside boundary
user3 = self.create_user()
self.create_interval(user3, 20*self.DAY, 19*self.DAY)
self.create_interval(user3, 20*self.HOUR, 19*self.HOUR)
self.create_interval(user3, 20*self.MINUTE, 19*self.MINUTE)
# Intervals crossing boundary
user4 = self.create_user()
self.create_interval(user4, 20*self.DAY, 10*self.DAY)
user5 = self.create_user()
self.create_interval(user5, self.MINUTE, -self.MINUTE)
# Interval subsuming time range
user6 = self.create_user()
self.create_interval(user6, 20*self.DAY, -2*self.DAY)
# Second realm
user7 = self.create_user(realm=self.second_realm)
self.create_interval(user7, 20*self.MINUTE, 19*self.MINUTE)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'user'],
[[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]])
self.assertTableState(RealmCount, ['value', 'realm'],
[[5, self.default_realm], [1, self.second_realm]])
self.assertTableState(InstallationCount, ['value'], [[6]])
self.assertTableState(StreamCount, [], [])
def test_minutes_active(self) -> None:
stat = COUNT_STATS['minutes_active::day']
self.current_property = stat.property
# Outside time range, should not appear. Also testing for intervals
# starting and ending on boundary
user1 = self.create_user()
self.create_interval(user1, 25*self.HOUR, self.DAY)
self.create_interval(user1, timedelta(0), -self.HOUR)
# Multiple intervals, including one outside boundary
user2 = self.create_user()
self.create_interval(user2, 20*self.DAY, 19*self.DAY)
self.create_interval(user2, 20*self.HOUR, 19*self.HOUR)
self.create_interval(user2, 20*self.MINUTE, 19*self.MINUTE)
# Intervals crossing boundary
user3 = self.create_user()
self.create_interval(user3, 25*self.HOUR, 22*self.HOUR)
self.create_interval(user3, self.MINUTE, -self.MINUTE)
# Interval subsuming time range
user4 = self.create_user()
self.create_interval(user4, 2*self.DAY, -2*self.DAY)
# Less than 60 seconds, should not appear
user5 = self.create_user()
self.create_interval(user5, self.MINUTE, timedelta(seconds=30))
self.create_interval(user5, timedelta(seconds=20), timedelta(seconds=10))
# Second realm
user6 = self.create_user(realm=self.second_realm)
self.create_interval(user6, 20*self.MINUTE, 19*self.MINUTE)
do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['value', 'user'],
[[61, user2], [121, user3], [24*60, user4], [1, user6]])
self.assertTableState(RealmCount, ['value', 'realm'],
[[61 + 121 + 24*60, self.default_realm], [1, self.second_realm]])
self.assertTableState(InstallationCount, ['value'], [[61 + 121 + 24*60 + 1]])
self.assertTableState(StreamCount, [], [])
class TestDoAggregateToSummaryTable(AnalyticsTestCase):
    # do_aggregate_to_summary_table is mostly tested by the end-to-end
    # nature of the tests in TestCountStats. But we want to highlight one
    # feature that is important for keeping the analytics tables small:
    # if there is no relevant data in the table being aggregated, the
    # aggregation table doesn't get a row with value 0.
def test_no_aggregated_zeros(self) -> None:
stat = LoggingCountStat('test stat', UserCount, CountStat.HOUR)
do_aggregate_to_summary_table(stat, self.TIME_ZERO)
self.assertFalse(RealmCount.objects.exists())
self.assertFalse(InstallationCount.objects.exists())
class TestDoIncrementLoggingStat(AnalyticsTestCase):
def test_table_and_id_args(self) -> None:
        # For realms, streams, and users, this tests that the new rows go to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created.
self.current_property = 'test'
second_realm = Realm.objects.create(string_id='moo', name='moo')
stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
self.assertTableState(RealmCount, ['realm'], [[self.default_realm], [second_realm]])
user1 = self.create_user()
user2 = self.create_user()
stat = LoggingCountStat('test', UserCount, CountStat.DAY)
do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
self.assertTableState(UserCount, ['user'], [[user1], [user2]])
stream1 = self.create_stream_with_recipient()[0]
stream2 = self.create_stream_with_recipient()[0]
stat = LoggingCountStat('test', StreamCount, CountStat.DAY)
do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
self.assertTableState(StreamCount, ['stream'], [[stream1], [stream2]])
def test_frequency(self) -> None:
times = [self.TIME_ZERO - self.MINUTE*i for i in [0, 1, 61, 24*60+1]]
stat = LoggingCountStat('day test', RealmCount, CountStat.DAY)
for time_ in times:
do_increment_logging_stat(self.default_realm, stat, None, time_)
stat = LoggingCountStat('hour test', RealmCount, CountStat.HOUR)
for time_ in times:
do_increment_logging_stat(self.default_realm, stat, None, time_)
self.assertTableState(RealmCount, ['value', 'property', 'end_time'],
[[3, 'day test', self.TIME_ZERO],
[1, 'day test', self.TIME_ZERO - self.DAY],
[2, 'hour test', self.TIME_ZERO],
[1, 'hour test', self.TIME_LAST_HOUR],
[1, 'hour test', self.TIME_ZERO - self.DAY]])
def test_get_or_create(self) -> None:
stat = LoggingCountStat('test', RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in
        # test_table_and_id_args, so this only tests a new subgroup and end_time.
do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
do_increment_logging_stat(self.default_realm, stat, 'subgroup2', self.TIME_ZERO)
do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_LAST_HOUR)
self.current_property = 'test'
self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
[[1, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
[1, 'subgroup1', self.TIME_LAST_HOUR]])
# This should trigger the get part of get_or_create
do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
[[2, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
[1, 'subgroup1', self.TIME_LAST_HOUR]])
def test_increment(self) -> None:
stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
self.current_property = 'test'
do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
self.assertTableState(RealmCount, ['value'], [[-1]])
do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
self.assertTableState(RealmCount, ['value'], [[2]])
do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
self.assertTableState(RealmCount, ['value'], [[3]])
class TestLoggingCountStats(AnalyticsTestCase):
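    # Checks that rows written by do_increment_logging_stat get aggregated up
    # to RealmCount and InstallationCount by process_count_stat.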
def test_aggregation(self) -> None:
stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
process_count_stat(stat, self.TIME_ZERO)
user = self.create_user()
stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
process_count_stat(stat, self.TIME_ZERO)
stream = self.create_stream_with_recipient()[0]
stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
process_count_stat(stat, self.TIME_ZERO)
self.assertTableState(InstallationCount, ['property', 'value'],
[['realm test', 1], ['user test', 1], ['stream test', 1]])
self.assertTableState(RealmCount, ['property', 'value'],
[['realm test', 1], ['user test', 1], ['stream test', 1]])
self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])
def test_active_users_log_by_is_bot(self) -> None:
property = 'active_users_log:is_bot:day'
user = do_create_user('email', 'password', self.default_realm, 'full_name', 'short_name')
self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
.aggregate(Sum('value'))['value__sum'])
do_deactivate_user(user)
self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
.aggregate(Sum('value'))['value__sum'])
do_activate_user(user)
self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
.aggregate(Sum('value'))['value__sum'])
do_deactivate_user(user)
self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
.aggregate(Sum('value'))['value__sum'])
do_reactivate_user(user)
self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
.aggregate(Sum('value'))['value__sum'])
def test_invites_sent(self) -> None:
property = 'invites_sent::day'
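        # Local helper: asserts the summed 'invites_sent::day' RealmCount value.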
def assertInviteCountEquals(count: int) -> None:
self.assertEqual(count, RealmCount.objects.filter(property=property, subgroup=None)
.aggregate(Sum('value'))['value__sum'])
user = self.create_user(email='first@domain.tld')
stream, _ = self.create_stream_with_recipient()
do_invite_users(user, ['user1@domain.tld', 'user2@domain.tld'], [stream])
assertInviteCountEquals(2)
# We currently send emails when re-inviting users that haven't
# turned into accounts, so count them towards the total
do_invite_users(user, ['user1@domain.tld', 'user2@domain.tld'], [stream])
assertInviteCountEquals(4)
# Test mix of good and malformed invite emails
try:
do_invite_users(user, ['user3@domain.tld', 'malformed'], [stream])
except InvitationError:
pass
assertInviteCountEquals(4)
# Test inviting existing users
try:
do_invite_users(user, ['first@domain.tld', 'user4@domain.tld'], [stream])
except InvitationError:
pass
assertInviteCountEquals(5)
# Revoking invite should not give you credit
do_revoke_user_invite(PreregistrationUser.objects.filter(realm=user.realm).first())
assertInviteCountEquals(5)
# Resending invite should cost you
do_resend_user_invite_email(PreregistrationUser.objects.first())
assertInviteCountEquals(6)
class TestDeleteStats(AnalyticsTestCase):
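    # do_drop_all_analytics_tables should wipe every analytics model,
    # including FillState and Anomaly.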
def test_do_drop_all_analytics_tables(self) -> None:
user = self.create_user()
stream = self.create_stream_with_recipient()[0]
count_args = {'property': 'test', 'end_time': self.TIME_ZERO, 'value': 10}
UserCount.objects.create(user=user, realm=user.realm, **count_args)
StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
RealmCount.objects.create(realm=user.realm, **count_args)
InstallationCount.objects.create(**count_args)
FillState.objects.create(property='test', end_time=self.TIME_ZERO, state=FillState.DONE)
Anomaly.objects.create(info='test anomaly')
analytics = apps.get_app_config('analytics')
for table in list(analytics.models.values()):
self.assertTrue(table.objects.exists())
do_drop_all_analytics_tables()
for table in list(analytics.models.values()):
self.assertFalse(table.objects.exists())
def test_do_drop_single_stat(self) -> None:
user = self.create_user()
stream = self.create_stream_with_recipient()[0]
count_args_to_delete = {'property': 'to_delete', 'end_time': self.TIME_ZERO, 'value': 10}
count_args_to_save = {'property': 'to_save', 'end_time': self.TIME_ZERO, 'value': 10}
for count_args in [count_args_to_delete, count_args_to_save]:
UserCount.objects.create(user=user, realm=user.realm, **count_args)
StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
RealmCount.objects.create(realm=user.realm, **count_args)
InstallationCount.objects.create(**count_args)
FillState.objects.create(property='to_delete', end_time=self.TIME_ZERO, state=FillState.DONE)
FillState.objects.create(property='to_save', end_time=self.TIME_ZERO, state=FillState.DONE)
Anomaly.objects.create(info='test anomaly')
analytics = apps.get_app_config('analytics')
for table in list(analytics.models.values()):
self.assertTrue(table.objects.exists())
do_drop_single_stat('to_delete')
for table in list(analytics.models.values()):
if table._meta.db_table == 'analytics_anomaly':
self.assertTrue(table.objects.exists())
else:
self.assertFalse(table.objects.filter(property='to_delete').exists())
self.assertTrue(table.objects.filter(property='to_save').exists())
class TestActiveUsersAudit(AnalyticsTestCase):
def setUp(self) -> None:
super().setUp()
self.user = self.create_user()
self.stat = COUNT_STATS['active_users_audit:is_bot:day']
self.current_property = self.stat.property
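    # Helper: writes a RealmAuditLog entry of the given event type for the
    # user, timestamped roughly days_offset days before TIME_ZERO.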
def add_event(self, event_type: str, days_offset: float,
user: Optional[UserProfile]=None) -> None:
hours_offset = int(24*days_offset)
if user is None:
user = self.user
RealmAuditLog.objects.create(
realm=user.realm, modified_user=user, event_type=event_type,
event_time=self.TIME_ZERO - hours_offset*self.HOUR)
def test_user_deactivated_in_future(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 1)
self.add_event(RealmAuditLog.USER_DEACTIVATED, 0)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup'], [['false']])
def test_user_reactivated_in_future(self) -> None:
self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
self.add_event(RealmAuditLog.USER_REACTIVATED, 0)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, [], [])
def test_user_active_then_deactivated_same_day(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 1)
self.add_event(RealmAuditLog.USER_DEACTIVATED, .5)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, [], [])
def test_user_unactive_then_activated_same_day(self) -> None:
self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
self.add_event(RealmAuditLog.USER_REACTIVATED, .5)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup'], [['false']])
# Arguably these next two tests are duplicates of the _in_future tests, but are
# a guard against future refactorings where they may no longer be duplicates
def test_user_active_then_deactivated_with_day_gap(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 2)
self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
process_count_stat(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup', 'end_time'],
[['false', self.TIME_ZERO - self.DAY]])
def test_user_deactivated_then_reactivated_with_day_gap(self) -> None:
self.add_event(RealmAuditLog.USER_DEACTIVATED, 2)
self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
process_count_stat(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup'], [['false']])
def test_event_types(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 4)
self.add_event(RealmAuditLog.USER_DEACTIVATED, 3)
self.add_event(RealmAuditLog.USER_ACTIVATED, 2)
self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
for i in range(4):
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO - i*self.DAY)
self.assertTableState(UserCount, ['subgroup', 'end_time'],
[['false', self.TIME_ZERO - i*self.DAY] for i in [3, 1, 0]])
# Also tests that aggregation to RealmCount and InstallationCount is
# being done, and that we're storing the user correctly in UserCount
def test_multiple_users_realms_and_bots(self) -> None:
user1 = self.create_user()
user2 = self.create_user()
second_realm = Realm.objects.create(string_id='moo', name='moo')
user3 = self.create_user(realm=second_realm)
user4 = self.create_user(realm=second_realm, is_bot=True)
for user in [user1, user2, user3, user4]:
self.add_event(RealmAuditLog.USER_CREATED, 1, user=user)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup', 'user'],
[['false', user1], ['false', user2], ['false', user3], ['true', user4]])
self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
[[2, 'false', self.default_realm], [1, 'false', second_realm],
[1, 'true', second_realm]])
self.assertTableState(InstallationCount, ['value', 'subgroup'], [[3, 'false'], [1, 'true']])
self.assertTableState(StreamCount, [], [])
    # Not that interesting a test if you look at the SQL query at hand, but
    # almost all other CountStats have a start_date, so this guards against a
    # refactoring that adds one here.
# Also tests the slightly more end-to-end process_count_stat rather than
# do_fill_count_stat_at_hour. E.g. if one changes self.stat.frequency to
# CountStat.HOUR from CountStat.DAY, this will fail, while many of the
# tests above will not.
def test_update_from_two_days_ago(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 2)
process_count_stat(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup', 'end_time'],
[['false', self.TIME_ZERO], ['false', self.TIME_ZERO-self.DAY]])
    # A user with no relevant activity could happen e.g. for a system bot that
    # doesn't go through do_create_user. This mainly checks that that
    # situation doesn't throw an error.
def test_empty_realm_or_user_with_no_relevant_activity(self) -> None:
self.add_event('unrelated', 1)
self.create_user() # also test a user with no RealmAuditLog entries
Realm.objects.create(string_id='moo', name='moo')
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, [], [])
def test_max_audit_entry_is_unrelated(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 1)
self.add_event('unrelated', .5)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup'], [['false']])
    # Simultaneous related audit entries should not be allowed, so we don't test for that.
def test_simultaneous_unrelated_audit_entry(self) -> None:
self.add_event(RealmAuditLog.USER_CREATED, 1)
self.add_event('unrelated', 1)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['subgroup'], [['false']])
def test_simultaneous_max_audit_entries_of_different_users(self) -> None:
user1 = self.create_user()
user2 = self.create_user()
user3 = self.create_user()
self.add_event(RealmAuditLog.USER_CREATED, .5, user=user1)
self.add_event(RealmAuditLog.USER_CREATED, .5, user=user2)
self.add_event(RealmAuditLog.USER_CREATED, 1, user=user3)
self.add_event(RealmAuditLog.USER_DEACTIVATED, .5, user=user3)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(UserCount, ['user', 'subgroup'],
[[user1, 'false'], [user2, 'false']])
def test_end_to_end_with_actions_dot_py(self) -> None:
user1 = do_create_user('email1', 'password', self.default_realm, 'full_name', 'short_name')
user2 = do_create_user('email2', 'password', self.default_realm, 'full_name', 'short_name')
user3 = do_create_user('email3', 'password', self.default_realm, 'full_name', 'short_name')
user4 = do_create_user('email4', 'password', self.default_realm, 'full_name', 'short_name')
do_deactivate_user(user2)
do_activate_user(user3)
do_reactivate_user(user4)
end_time = floor_to_day(timezone_now()) + self.DAY
do_fill_count_stat_at_hour(self.stat, end_time)
for user in [user1, user3, user4]:
self.assertTrue(UserCount.objects.filter(
user=user, property=self.current_property, subgroup='false',
end_time=end_time, value=1).exists())
self.assertFalse(UserCount.objects.filter(user=user2).exists())
class TestRealmActiveHumans(AnalyticsTestCase):
def setUp(self) -> None:
super().setUp()
self.stat = COUNT_STATS['realm_active_humans::day']
self.current_property = self.stat.property
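    # realm_active_humans::day is derived from the 'active_users_audit:is_bot:day'
    # and '15day_actives::day' UserCount rows; these helpers write such rows directly.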
def mark_audit_active(self, user: UserProfile, end_time: Optional[datetime]=None) -> None:
if end_time is None:
end_time = self.TIME_ZERO
UserCount.objects.create(
user=user, realm=user.realm, property='active_users_audit:is_bot:day',
subgroup=ujson.dumps(user.is_bot), end_time=end_time, value=1)
def mark_15day_active(self, user: UserProfile, end_time: Optional[datetime]=None) -> None:
if end_time is None:
end_time = self.TIME_ZERO
UserCount.objects.create(
user=user, realm=user.realm, property='15day_actives::day',
end_time=end_time, value=1)
def test_basic_boolean_logic(self) -> None:
user = self.create_user()
self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
self.mark_15day_active(user, end_time=self.TIME_ZERO)
self.mark_audit_active(user, end_time=self.TIME_ZERO + self.DAY)
self.mark_15day_active(user, end_time=self.TIME_ZERO + self.DAY)
for i in [-1, 0, 1]:
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
self.assertTableState(RealmCount, ['value', 'end_time'], [[1, self.TIME_ZERO + self.DAY]])
def test_bots_not_counted(self) -> None:
bot = self.create_user(is_bot=True)
self.mark_audit_active(bot)
self.mark_15day_active(bot)
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
self.assertTableState(RealmCount, [], [])
def test_multiple_users_realms_and_times(self) -> None:
user1 = self.create_user()
user2 = self.create_user()
second_realm = Realm.objects.create(string_id='second', name='second')
user3 = self.create_user(realm=second_realm)
user4 = self.create_user(realm=second_realm)
user5 = self.create_user(realm=second_realm)
for user in [user1, user2, user3, user4, user5]:
self.mark_audit_active(user)
self.mark_15day_active(user)
for user in [user1, user3, user4]:
self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
self.mark_15day_active(user, end_time=self.TIME_ZERO - self.DAY)
for i in [-1, 0, 1]:
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
self.assertTableState(RealmCount, ['value', 'realm', 'end_time'],
[[2, self.default_realm, self.TIME_ZERO],
[3, second_realm, self.TIME_ZERO],
[1, self.default_realm, self.TIME_ZERO - self.DAY],
[2, second_realm, self.TIME_ZERO - self.DAY]])
# Check that adding spurious entries doesn't make a difference
self.mark_audit_active(user1, end_time=self.TIME_ZERO + self.DAY)
self.mark_15day_active(user2, end_time=self.TIME_ZERO + self.DAY)
self.mark_15day_active(user2, end_time=self.TIME_ZERO - self.DAY)
self.create_user()
third_realm = Realm.objects.create(string_id='third', name='third')
self.create_user(realm=third_realm)
RealmCount.objects.all().delete()
for i in [-1, 0, 1]:
do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
self.assertTableState(RealmCount, ['value', 'realm', 'end_time'],
[[2, self.default_realm, self.TIME_ZERO],
[3, second_realm, self.TIME_ZERO],
[1, self.default_realm, self.TIME_ZERO - self.DAY],
[2, second_realm, self.TIME_ZERO - self.DAY]])
def test_end_to_end(self) -> None:
user1 = do_create_user('email1', 'password', self.default_realm, 'full_name', 'short_name')
user2 = do_create_user('email2', 'password', self.default_realm, 'full_name', 'short_name')
do_create_user('email3', 'password', self.default_realm, 'full_name', 'short_name')
time_zero = floor_to_day(timezone_now()) + self.DAY
update_user_activity_interval(user1, time_zero)
update_user_activity_interval(user2, time_zero)
do_deactivate_user(user2)
for property in ['active_users_audit:is_bot:day', '15day_actives::day',
'realm_active_humans::day']:
FillState.objects.create(property=property, state=FillState.DONE, end_time=time_zero)
process_count_stat(COUNT_STATS[property], time_zero+self.DAY)
self.assertEqual(RealmCount.objects.filter(
property='realm_active_humans::day', end_time=time_zero+self.DAY, value=1).count(), 1)
self.assertEqual(RealmCount.objects.filter(property='realm_active_humans::day').count(), 1)
| [
"Any",
"Any",
"Any",
"UserProfile",
"Recipient",
"Any",
"Type[BaseCount]",
"int",
"models.Model",
"Type[BaseCount]",
"List[str]",
"List[List[object]]",
"str",
"CountStat",
"datetime",
"UserProfile",
"timedelta",
"timedelta",
"int",
"str",
"float",
"UserProfile",
"UserProfile"
] | [
2062,
2675,
3231,
3664,
3688,
3709,
4217,
4241,
4427,
4994,
5021,
5069,
7251,
7595,
7616,
27359,
27386,
27433,
40806,
45074,
45092,
53059,
53414
] | [
2065,
2678,
3234,
3675,
3697,
3712,
4232,
4244,
4439,
5009,
5030,
5087,
7254,
7604,
7624,
27370,
27395,
27442,
40809,
45077,
45097,
53070,
53425
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/tests/test_fixtures.py | from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data
from zerver.lib.test_classes import ZulipTestCase
# A very light test suite; the code being tested is not run in production.
class TestFixtures(ZulipTestCase):
def test_deterministic_settings(self) -> None:
# test basic business_hour / non_business_hour calculation
# test we get an array of the right length with frequency=CountStat.DAY
data = generate_time_series_data(
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0)
self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])
data = generate_time_series_data(
days=1, business_hours_base=2000, non_business_hours_base=1500,
growth=2, spikiness=0, frequency=CountStat.HOUR)
# test we get an array of the right length with frequency=CountStat.HOUR
self.assertEqual(len(data), 24)
# test that growth doesn't affect the first data point
self.assertEqual(data[0], 2000)
# test that the last data point is growth times what it otherwise would be
self.assertEqual(data[-1], 1500*2)
# test autocorrelation == 1, since that's the easiest value to test
data = generate_time_series_data(
days=1, business_hours_base=2000, non_business_hours_base=2000,
autocorrelation=1, frequency=CountStat.HOUR)
self.assertEqual(data[0], data[1])
self.assertEqual(data[0], data[-1])
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/tests/test_views.py | from datetime import datetime, timedelta
from typing import Dict, List, Optional
import mock
from django.utils.timezone import utc
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, \
RealmCount, UserCount, last_successful_fill
from analytics.views import get_chart_data, rewrite_client_arrays, \
sort_by_totals, sort_client_labels, stats
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import ceiling_to_day, \
ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, get_realm
class TestStatsEndpoint(ZulipTestCase):
def test_stats(self) -> None:
self.user = self.example_user('hamlet')
self.login(self.user.email)
result = self.client_get('/stats')
self.assertEqual(result.status_code, 200)
# Check that we get something back
self.assert_in_response("Zulip analytics for", result)
def test_guest_user_cant_access_stats(self) -> None:
self.user = self.example_user('polonius')
self.login(self.user.email)
result = self.client_get('/stats')
self.assert_json_error(result, "Not allowed for guest users", 400)
result = self.client_get('/json/analytics/chart_data')
self.assert_json_error(result, "Not allowed for guest users", 400)
def test_stats_for_realm(self) -> None:
user_profile = self.example_user('hamlet')
self.login(user_profile.email)
result = self.client_get('/stats/realm/zulip/')
self.assertEqual(result.status_code, 302)
user_profile = self.example_user('hamlet')
user_profile.is_staff = True
user_profile.save(update_fields=['is_staff'])
result = self.client_get('/stats/realm/not_existing_realm/')
self.assertEqual(result.status_code, 302)
result = self.client_get('/stats/realm/zulip/')
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
def test_stats_for_installation(self) -> None:
user_profile = self.example_user('hamlet')
self.login(user_profile.email)
result = self.client_get('/stats/installation')
self.assertEqual(result.status_code, 302)
user_profile = self.example_user('hamlet')
user_profile.is_staff = True
user_profile.save(update_fields=['is_staff'])
result = self.client_get('/stats/installation')
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
class TestGetChartData(ZulipTestCase):
def setUp(self) -> None:
self.realm = get_realm('zulip')
self.user = self.example_user('hamlet')
self.login(self.user.email)
self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i)
for i in range(4)]
self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i)
for i in range(4)]
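    # Helper: the expected series shape; insert_data writes into the third
    # time bucket, so the series has value i there and 0 elsewhere.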
def data(self, i: int) -> List[int]:
return [0, 0, i, 0]
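    # Helper: writes RealmCount rows (value 100+i) and UserCount rows (value
    # 200+i) for each subgroup at the third time bucket, and marks the stat's
    # FillState as DONE at the final bucket.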
def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]],
user_subgroups: List[str]) -> None:
if stat.frequency == CountStat.HOUR:
insert_time = self.end_times_hour[2]
fill_time = self.end_times_hour[-1]
if stat.frequency == CountStat.DAY:
insert_time = self.end_times_day[2]
fill_time = self.end_times_day[-1]
RealmCount.objects.bulk_create([
RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
value=100+i, realm=self.realm)
for i, subgroup in enumerate(realm_subgroups)])
UserCount.objects.bulk_create([
UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
value=200+i, realm=self.realm, user=self.user)
for i, subgroup in enumerate(user_subgroups)])
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
def test_number_of_humans(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)},
'display_order': None,
'result': 'success',
})
def test_messages_sent_over_time(self) -> None:
stat = COUNT_STATS['messages_sent:is_bot:hour']
self.insert_data(stat, ['true', 'false'], ['false'])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
'frequency': CountStat.HOUR,
'everyone': {'bot': self.data(100), 'human': self.data(101)},
'user': {'bot': self.data(0), 'human': self.data(200)},
'display_order': None,
'result': 'success',
})
def test_messages_sent_by_message_type(self) -> None:
stat = COUNT_STATS['messages_sent:message_type:day']
self.insert_data(stat, ['public_stream', 'private_message'],
['public_stream', 'private_stream'])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_message_type'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0),
'Private messages': self.data(101), 'Group private messages': self.data(0)},
'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
'Private messages': self.data(0), 'Group private messages': self.data(0)},
'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'],
'result': 'success',
})
def test_messages_sent_by_client(self) -> None:
stat = COUNT_STATS['messages_sent:client:day']
client1 = Client.objects.create(name='client 1')
client2 = Client.objects.create(name='client 2')
client3 = Client.objects.create(name='client 3')
client4 = Client.objects.create(name='client 4')
self.insert_data(stat, [client4.id, client3.id, client2.id],
[client3.id, client1.id])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_client'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'client 4': self.data(100), 'client 3': self.data(101),
'client 2': self.data(102)},
'user': {'client 3': self.data(200), 'client 1': self.data(201)},
'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
'result': 'success',
})
def test_include_empty_subgroups(self) -> None:
FillState.objects.create(
property='realm_active_humans::day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]})
self.assertFalse('user' in data)
FillState.objects.create(
property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]})
self.assertEqual(data['user'], {'human': [0], 'bot': [0]})
FillState.objects.create(
property='messages_sent:message_type:day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_message_type'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {
'Public streams': [0], 'Private streams': [0],
'Private messages': [0], 'Group private messages': [0]})
self.assertEqual(data['user'], {
'Public streams': [0], 'Private streams': [0],
'Private messages': [0], 'Group private messages': [0]})
FillState.objects.create(
property='messages_sent:client:day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_client'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {})
self.assertEqual(data['user'], {})
def test_start_and_end(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
# valid start and end
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'start': end_time_timestamps[1],
'end': end_time_timestamps[2]})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['end_times'], end_time_timestamps[1:3])
self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]})
        # start later than end
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'start': end_time_timestamps[2],
'end': end_time_timestamps[1]})
self.assert_json_error_contains(result, 'Start time is later than')
def test_min_length(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
# test min_length is too short to change anything
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'min_length': 2})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)})
# test min_length larger than filled data
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'min_length': 5})
self.assert_json_success(result)
data = result.json()
end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)})
def test_non_existent_chart(self) -> None:
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'does_not_exist'})
self.assert_json_error_contains(result, 'Unknown chart name')
def test_analytics_not_running(self) -> None:
# try to get data for a valid chart, but before we've put anything in the database
# (e.g. before update_analytics_counts has been run)
with mock.patch('logging.warning'):
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans'})
self.assert_json_error_contains(result, 'No analytics data available')
def test_get_chart_data_for_realm(self) -> None:
user_profile = self.example_user('hamlet')
self.login(user_profile.email)
result = self.client_get('/json/analytics/chart_data/realm/zulip/',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, "Must be an server administrator", 400)
user_profile = self.example_user('hamlet')
user_profile.is_staff = True
user_profile.save(update_fields=['is_staff'])
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, 'Invalid organization', 400)
result = self.client_get('/json/analytics/chart_data/realm/zulip',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
def test_get_chart_data_for_installation(self) -> None:
user_profile = self.example_user('hamlet')
self.login(user_profile.email)
result = self.client_get('/json/analytics/chart_data/installation',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, "Must be an server administrator", 400)
user_profile = self.example_user('hamlet')
user_profile.is_staff = True
user_profile.save(update_fields=['is_staff'])
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
result = self.client_get('/json/analytics/chart_data/installation',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
class TestGetChartDataHelpers(ZulipTestCase):
# last_successful_fill is in analytics/models.py, but get_chart_data is
# the only function that uses it at the moment
def test_last_successful_fill(self) -> None:
        self.assertIsNone(last_successful_fill('non-existent'))
a_time = datetime(2016, 3, 14, 19).replace(tzinfo=utc)
one_hour_before = datetime(2016, 3, 14, 18).replace(tzinfo=utc)
fillstate = FillState.objects.create(property='property', end_time=a_time,
state=FillState.DONE)
self.assertEqual(last_successful_fill('property'), a_time)
fillstate.state = FillState.STARTED
fillstate.save()
self.assertEqual(last_successful_fill('property'), one_hour_before)
def test_sort_by_totals(self) -> None:
empty = [] # type: List[int]
value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty}
self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd'])
def test_sort_client_labels(self) -> None:
data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]},
'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}}
self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
class TestTimeRange(ZulipTestCase):
def test_time_range(self) -> None:
HOUR = timedelta(hours=1)
DAY = timedelta(days=1)
a_time = datetime(2016, 3, 14, 22, 59).replace(tzinfo=utc)
floor_hour = datetime(2016, 3, 14, 22).replace(tzinfo=utc)
floor_day = datetime(2016, 3, 14).replace(tzinfo=utc)
# test start == end
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
# test start == end == boundary, and min_length == 0
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
# test start and end on different boundaries
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None),
[floor_hour, floor_hour+HOUR])
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None),
[floor_day, floor_day+DAY])
# test min_length
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4),
[floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR])
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4),
[floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY])
class TestMapArrays(ZulipTestCase):
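    # rewrite_client_arrays merges legacy client names into the user-facing
    # labels shown on /stats, summing their value arrays elementwise.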
def test_map_arrays(self) -> None:
a = {'desktop app 1.0': [1, 2, 3],
'desktop app 2.0': [10, 12, 13],
'desktop app 3.0': [21, 22, 23],
'website': [1, 2, 3],
'ZulipiOS': [1, 2, 3],
'ZulipElectron': [2, 5, 7],
'ZulipMobile': [1, 5, 7],
'ZulipPython': [1, 2, 3],
'API: Python': [1, 2, 3],
'SomethingRandom': [4, 5, 6],
'ZulipGitHubWebhook': [7, 7, 9],
'ZulipAndroid': [64, 63, 65]}
result = rewrite_client_arrays(a)
self.assertEqual(result,
{'Old desktop app': [32, 36, 39],
'Old iOS app': [1, 2, 3],
'Desktop app': [2, 5, 7],
'Mobile app': [1, 5, 7],
'Website': [1, 2, 3],
'Python API': [2, 4, 6],
'SomethingRandom': [4, 5, 6],
'GitHub webhook': [7, 7, 9],
'Old Android app': [64, 63, 65]})
| [
"int",
"CountStat",
"List[Optional[str]]",
"List[str]"
] | [
3124,
3204,
3232,
3289
] | [
3127,
3213,
3251,
3298
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/urls.py | from django.conf.urls import include, url
import analytics.views
from zerver.lib.rest import rest_dispatch
i18n_urlpatterns = [
# Server admin (user_profile.is_staff) visible stats pages
url(r'^activity$', analytics.views.get_activity,
name='analytics.views.get_activity'),
url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
name='analytics.views.get_realm_activity'),
url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
name='analytics.views.get_user_activity'),
url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
name='analytics.views.stats_for_realm'),
url(r'^stats/installation$', analytics.views.stats_for_installation,
name='analytics.views.stats_for_installation'),
# User-visible stats page
url(r'^stats$', analytics.views.stats,
name='analytics.views.stats'),
]
# These endpoints are a part of the API (V1), which uses:
# * REST verbs
# * Basic auth (username:password is email:apiKey)
# * Takes and returns json-formatted data
#
# See rest_dispatch in zerver.lib.rest for an explanation of auth methods used
#
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
# get data for the graphs at /stats
url(r'^analytics/chart_data$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data'}),
url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_realm'}),
url(r'^analytics/chart_data/installation$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_installation'}),
]
i18n_urlpatterns += [
url(r'^api/v1/', include(v1_api_and_json_patterns)),
url(r'^json/', include(v1_api_and_json_patterns)),
]
urlpatterns = i18n_urlpatterns
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | analytics/views.py |
import itertools
import json
import logging
import re
import time
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Any, Callable, Dict, List, \
Optional, Set, Tuple, Type, Union
import pytz
from django.conf import settings
from django.urls import reverse
from django.db import connection
from django.db.models import Sum
from django.db.models.query import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.template import RequestContext, loader
from django.utils.timezone import now as timezone_now, utc as timezone_utc
from django.utils.translation import ugettext as _
from jinja2 import Markup as mark_safe
import stripe
from analytics.lib.counts import COUNT_STATS, CountStat, process_count_stat
from analytics.lib.time_utils import time_range
from analytics.models import BaseCount, InstallationCount, \
RealmCount, StreamCount, UserCount, last_successful_fill, installation_epoch
from zerver.decorator import require_server_admin, require_server_admin_api, \
to_non_negative_int, to_utc_datetime, zulip_login_required, require_non_guest_user
from zerver.lib.exceptions import JsonableError
from zerver.lib.json_encoder_for_html import JSONEncoderForHTML
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.timestamp import ceiling_to_day, \
ceiling_to_hour, convert_to_UTC, timestamp_to_datetime
from zerver.models import Client, get_realm, Realm, \
UserActivity, UserActivityInterval, UserProfile
from zproject.settings import get_secret
def render_stats(request: HttpRequest, data_url_suffix: str, target_name: str,
for_installation: bool=False) -> HttpRequest:
page_params = dict(
data_url_suffix=data_url_suffix,
for_installation=for_installation,
debug_mode=False,
)
return render(request,
'analytics/stats.html',
context=dict(target_name=target_name,
page_params=JSONEncoderForHTML().encode(page_params)))
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
realm = request.user.realm
if request.user.is_guest:
# TODO: Make @zulip_login_required pass the UserProfile so we
# can use @require_non_guest_human_user
raise JsonableError(_("Not allowed for guest users"))
return render_stats(request, '', realm.name or realm.string_id)
@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
realm = get_realm(realm_str)
if realm is None:
return HttpResponseNotFound("Realm %s does not exist" % (realm_str,))
return render_stats(request, '/realm/%s' % (realm_str,), realm.name or realm.string_id)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(request: HttpRequest, user_profile: UserProfile,
realm_str: str, **kwargs: Any) -> HttpResponse:
realm = get_realm(realm_str)
if realm is None:
raise JsonableError(_("Invalid organization"))
return get_chart_data(request=request, user_profile=user_profile, realm=realm, **kwargs)
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
return render_stats(request, '/installation', 'Installation', True)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(request: HttpRequest, user_profile: UserProfile,
chart_name: str=REQ(), **kwargs: Any) -> HttpResponse:
return get_chart_data(request=request, user_profile=user_profile, for_installation=True, **kwargs)
@require_non_guest_user
@has_request_variables
def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: str=REQ(),
min_length: Optional[int]=REQ(converter=to_non_negative_int, default=None),
start: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
end: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
realm: Optional[Realm]=None, for_installation: bool=False) -> HttpResponse:
aggregate_table = RealmCount
if for_installation:
aggregate_table = InstallationCount
if chart_name == 'number_of_humans':
stats = [
COUNT_STATS['1day_actives::day'],
COUNT_STATS['realm_active_humans::day'],
COUNT_STATS['active_users_audit:is_bot:day']]
tables = [aggregate_table]
subgroup_to_label = {
stats[0]: {None: '_1day'},
stats[1]: {None: '_15day'},
stats[2]: {'false': 'all_time'}} # type: Dict[CountStat, Dict[Optional[str], str]]
labels_sort_function = None
include_empty_subgroups = True
elif chart_name == 'messages_sent_over_time':
stats = [COUNT_STATS['messages_sent:is_bot:hour']]
tables = [aggregate_table, UserCount]
subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
labels_sort_function = None
include_empty_subgroups = True
elif chart_name == 'messages_sent_by_message_type':
stats = [COUNT_STATS['messages_sent:message_type:day']]
tables = [aggregate_table, UserCount]
subgroup_to_label = {stats[0]: {'public_stream': _('Public streams'),
'private_stream': _('Private streams'),
'private_message': _('Private messages'),
'huddle_message': _('Group private messages')}}
labels_sort_function = lambda data: sort_by_totals(data['everyone'])
include_empty_subgroups = True
elif chart_name == 'messages_sent_by_client':
stats = [COUNT_STATS['messages_sent:client:day']]
tables = [aggregate_table, UserCount]
# Note that the labels are further re-written by client_label_map
subgroup_to_label = {stats[0]:
{str(id): name for id, name in Client.objects.values_list('id', 'name')}}
labels_sort_function = sort_client_labels
include_empty_subgroups = False
else:
raise JsonableError(_("Unknown chart name: %s") % (chart_name,))
# Most likely someone using our API endpoint. The /stats page does not
# pass a start or end in its requests.
if start is not None:
start = convert_to_UTC(start)
if end is not None:
end = convert_to_UTC(end)
if start is not None and end is not None and start > end:
raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
{'start': start, 'end': end})
if realm is None:
realm = user_profile.realm
if start is None:
if for_installation:
start = installation_epoch()
else:
start = realm.date_created
if end is None:
end = max(last_successful_fill(stat.property) or
datetime.min.replace(tzinfo=timezone_utc) for stat in stats)
if end is None or start > end:
logging.warning("User from realm %s attempted to access /stats, but the computed "
"start time: %s (creation of realm or installation) is later than the computed "
"end time: %s (last successful analytics update). Is the "
"analytics cron job running?" % (realm.string_id, start, end))
raise JsonableError(_("No analytics data available. Please contact your server administrator."))
assert len(set([stat.frequency for stat in stats])) == 1
end_times = time_range(start, end, stats[0].frequency, min_length)
data = {'end_times': end_times, 'frequency': stats[0].frequency} # type: Dict[str, Any]
aggregation_level = {InstallationCount: 'everyone', RealmCount: 'everyone', UserCount: 'user'}
# -1 is a placeholder value, since there is no relevant filtering on InstallationCount
id_value = {InstallationCount: -1, RealmCount: realm.id, UserCount: user_profile.id}
for table in tables:
data[aggregation_level[table]] = {}
for stat in stats:
data[aggregation_level[table]].update(get_time_series_by_subgroup(
stat, table, id_value[table], end_times, subgroup_to_label[stat], include_empty_subgroups))
if labels_sort_function is not None:
data['display_order'] = labels_sort_function(data)
else:
data['display_order'] = None
return json_success(data=data)
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
totals = [(sum(values), label) for label, values in value_arrays.items()]
totals.sort(reverse=True)
return [label for total, label in totals]
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
realm_order = sort_by_totals(data['everyone'])
user_order = sort_by_totals(data['user'])
label_sort_values = {} # type: Dict[str, float]
for i, label in enumerate(realm_order):
label_sort_values[label] = i
for i, label in enumerate(user_order):
label_sort_values[label] = min(i-.1, label_sort_values.get(label, i))
return [label for label, sort_value in sorted(label_sort_values.items(),
key=lambda x: x[1])]
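# Illustrative sketch (not part of the original module): with the invented data
# below, 'Desktop app' barely registers realm-wide but dominates this user's
# traffic, so the i - 0.1 nudge above moves it to the front of the ordering.
def _example_sort_client_labels() -> List[str]:
    data = {
        'everyone': {'Website': [100, 120], 'Mobile app': [50, 60], 'Desktop app': [10, 5]},
        'user': {'Desktop app': [9, 4], 'Website': [1, 2]},
    }
    # Returns ['Desktop app', 'Website', 'Mobile app']
    return sort_client_labels(data)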
def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
if table == RealmCount:
return RealmCount.objects.filter(realm_id=key_id)
elif table == UserCount:
return UserCount.objects.filter(user_id=key_id)
elif table == StreamCount:
return StreamCount.objects.filter(stream_id=key_id)
elif table == InstallationCount:
return InstallationCount.objects.all()
else:
raise AssertionError("Unknown table: %s" % (table,))
def client_label_map(name: str) -> str:
if name == "website":
return "Website"
if name.startswith("desktop app"):
return "Old desktop app"
if name == "ZulipElectron":
return "Desktop app"
if name == "ZulipAndroid":
return "Old Android app"
if name == "ZulipiOS":
return "Old iOS app"
if name == "ZulipMobile":
return "Mobile app"
if name in ["ZulipPython", "API: Python"]:
return "Python API"
if name.startswith("Zulip") and name.endswith("Webhook"):
return name[len("Zulip"):-len("Webhook")] + " webhook"
return name
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
mapped_arrays = {} # type: Dict[str, List[int]]
for label, array in value_arrays.items():
mapped_label = client_label_map(label)
if mapped_label in mapped_arrays:
for i in range(0, len(array)):
mapped_arrays[mapped_label][i] += value_arrays[label][i]
else:
mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
return mapped_arrays
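# Illustrative sketch (not part of the original module), mirroring the behavior
# exercised by the analytics tests: 'ZulipPython' and 'API: Python' both map to
# 'Python API' via client_label_map, so their arrays are summed element-wise,
# while unrecognized names pass through unchanged.
def _example_rewrite_client_arrays() -> Dict[str, List[int]]:
    arrays = {'ZulipPython': [1, 2, 3], 'API: Python': [1, 2, 3], 'SomethingRandom': [4, 5, 6]}
    # Returns {'Python API': [2, 4, 6], 'SomethingRandom': [4, 5, 6]}
    return rewrite_client_arrays(arrays)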
def get_time_series_by_subgroup(stat: CountStat,
table: Type[BaseCount],
key_id: int,
end_times: List[datetime],
subgroup_to_label: Dict[Optional[str], str],
include_empty_subgroups: bool) -> Dict[str, List[int]]:
queryset = table_filtered_to_id(table, key_id).filter(property=stat.property) \
.values_list('subgroup', 'end_time', 'value')
value_dicts = defaultdict(lambda: defaultdict(int)) # type: Dict[Optional[str], Dict[datetime, int]]
for subgroup, end_time, value in queryset:
value_dicts[subgroup][end_time] = value
value_arrays = {}
for subgroup, label in subgroup_to_label.items():
if (subgroup in value_dicts) or include_empty_subgroups:
value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]
if stat == COUNT_STATS['messages_sent:client:day']:
# HACK: We rewrite these arrays to collapse the Client objects
# with similar names into a single sum, and generally give
# them better names
return rewrite_client_arrays(value_arrays)
return value_arrays
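# Shape of the value returned above, with invented numbers: for
# messages_sent:is_bot:hour, subgroup_to_label {'false': 'human', 'true': 'bot'}
# and three end_times, the result looks like {'human': [10, 12, 9], 'bot': [3, 0, 1]};
# a label whose subgroup has no rows is included only when include_empty_subgroups
# is True (and then as all zeros).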
eastern_tz = pytz.timezone('US/Eastern')
def make_table(title: str, cols: List[str], rows: List[Any], has_row_class: bool=False) -> str:
if not has_row_class:
def fix_row(row: Any) -> Dict[str, Any]:
return dict(cells=row, row_class=None)
rows = list(map(fix_row, rows))
data = dict(title=title, cols=cols, rows=rows)
content = loader.render_to_string(
'analytics/ad_hoc_query.html',
dict(data=data)
)
return content
def dictfetchall(cursor: connection.cursor) -> List[Dict[str, Any]]:
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(list(zip([col[0] for col in desc], row)))
for row in cursor.fetchall()
]
def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
query = '''
select
r.string_id,
(now()::date - pub_date::date) age,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
join zerver_client c on c.id = m.sending_client_id
where
(not up.is_bot)
and
pub_date > now()::date - interval '8 day'
and
c.name not in ('zephyr_mirror', 'ZulipMonitoring')
group by
r.string_id,
age
order by
r.string_id,
age
'''
cursor = connection.cursor()
cursor.execute(query)
rows = dictfetchall(cursor)
cursor.close()
counts = defaultdict(dict) # type: Dict[str, Dict[int, int]]
for row in rows:
counts[row['string_id']][row['age']] = row['cnt']
result = {}
for string_id in counts:
raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
min_cnt = min(raw_cnts[1:])
max_cnt = max(raw_cnts[1:])
def format_count(cnt: int, style: Optional[str]=None) -> str:
if style is not None:
good_bad = style
elif cnt == min_cnt:
good_bad = 'bad'
elif cnt == max_cnt:
good_bad = 'good'
else:
good_bad = 'neutral'
return '<td class="number %s">%s</td>' % (good_bad, cnt)
cnts = (format_count(raw_cnts[0], 'neutral')
+ ''.join(map(format_count, raw_cnts[1:])))
result[string_id] = dict(cnts=cnts)
return result
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
now = timezone_now()
query = '''
SELECT
realm.string_id,
realm.date_created,
realm.plan_type,
coalesce(user_counts.dau_count, 0) dau_count,
coalesce(wau_counts.wau_count, 0) wau_count,
(
SELECT
count(*)
FROM zerver_userprofile up
WHERE up.realm_id = realm.id
AND is_active
AND not is_bot
) user_profile_count,
(
SELECT
count(*)
FROM zerver_userprofile up
WHERE up.realm_id = realm.id
AND is_active
AND is_bot
) bot_count
FROM zerver_realm realm
LEFT OUTER JOIN
(
SELECT
up.realm_id realm_id,
count(distinct(ua.user_profile_id)) dau_count
FROM zerver_useractivity ua
JOIN zerver_userprofile up
ON up.id = ua.user_profile_id
WHERE
up.is_active
AND (not up.is_bot)
AND
query in (
'/json/send_message',
'send_message_backend',
'/api/v1/send_message',
'/json/update_pointer',
'/json/users/me/pointer',
'update_pointer_backend'
)
AND
last_visit > now() - interval '1 day'
GROUP BY realm_id
) user_counts
ON user_counts.realm_id = realm.id
LEFT OUTER JOIN
(
SELECT
realm_id,
count(*) wau_count
FROM (
SELECT
realm.id as realm_id,
up.email
FROM zerver_useractivity ua
JOIN zerver_userprofile up
ON up.id = ua.user_profile_id
JOIN zerver_realm realm
ON realm.id = up.realm_id
WHERE up.is_active
AND (not up.is_bot)
AND
ua.query in (
'/json/send_message',
'send_message_backend',
'/api/v1/send_message',
'/json/update_pointer',
'/json/users/me/pointer',
'update_pointer_backend'
)
GROUP by realm.id, up.email
HAVING max(last_visit) > now() - interval '7 day'
) as wau_users
GROUP BY realm_id
) wau_counts
ON wau_counts.realm_id = realm.id
WHERE EXISTS (
SELECT *
FROM zerver_useractivity ua
JOIN zerver_userprofile up
ON up.id = ua.user_profile_id
WHERE
up.realm_id = realm.id
AND up.is_active
AND (not up.is_bot)
AND
query in (
'/json/send_message',
'/api/v1/send_message',
'send_message_backend',
'/json/update_pointer',
'/json/users/me/pointer',
'update_pointer_backend'
)
AND
last_visit > now() - interval '2 week'
)
ORDER BY dau_count DESC, string_id ASC
'''
cursor = connection.cursor()
cursor.execute(query)
rows = dictfetchall(cursor)
cursor.close()
# Fetch all the realm administrator users
realm_admins = defaultdict(list) # type: Dict[str, List[str]]
for up in UserProfile.objects.select_related("realm").filter(
is_realm_admin=True,
is_active=True
):
realm_admins[up.realm.string_id].append(up.email)
for row in rows:
row['date_created_day'] = row['date_created'].strftime('%Y-%m-%d')
row['plan_type_string'] = [
'', 'self hosted', 'limited', 'standard', 'open source'][row['plan_type']]
row['age_days'] = int((now - row['date_created']).total_seconds()
/ 86400)
row['is_new'] = row['age_days'] < 12 * 7
row['realm_admin_email'] = ', '.join(realm_admins[row['string_id']])
# get messages sent per day
counts = get_realm_day_counts()
for row in rows:
try:
row['history'] = counts[row['string_id']]['cnts']
except Exception:
row['history'] = ''
# estimate annual subscription revenue
total_amount = 0
if settings.BILLING_ENABLED:
from corporate.lib.stripe import estimate_customer_arr
from corporate.models import Customer
stripe.api_key = get_secret('stripe_secret_key')
estimated_arr = {}
try:
for stripe_customer in stripe.Customer.list(limit=100):
# TODO: could do a select_related to get the realm.string_id, potentially
customer = Customer.objects.filter(stripe_customer_id=stripe_customer.id).first()
if customer is not None:
estimated_arr[customer.realm.string_id] = estimate_customer_arr(stripe_customer)
except stripe.error.StripeError:
pass
for row in rows:
row['amount'] = estimated_arr.get(row['string_id'], None)
total_amount = sum(estimated_arr.values())
# augment data with realm_minutes
total_hours = 0.0
for row in rows:
string_id = row['string_id']
minutes = realm_minutes.get(string_id, 0.0)
hours = minutes / 60.0
total_hours += hours
row['hours'] = str(int(hours))
try:
row['hours_per_user'] = '%.1f' % (hours / row['dau_count'],)
except Exception:
pass
# formatting
for row in rows:
row['stats_link'] = realm_stats_link(row['string_id'])
row['string_id'] = realm_activity_link(row['string_id'])
# Count active sites
def meets_goal(row: Dict[str, int]) -> bool:
return row['dau_count'] >= 5
num_active_sites = len(list(filter(meets_goal, rows)))
# create totals
total_dau_count = 0
total_user_profile_count = 0
total_bot_count = 0
total_wau_count = 0
for row in rows:
total_dau_count += int(row['dau_count'])
total_user_profile_count += int(row['user_profile_count'])
total_bot_count += int(row['bot_count'])
total_wau_count += int(row['wau_count'])
total_row = dict(
string_id='Total',
plan_type_string="",
amount=total_amount,
stats_link = '',
date_created_day='',
realm_admin_email='',
dau_count=total_dau_count,
user_profile_count=total_user_profile_count,
bot_count=total_bot_count,
hours=int(total_hours),
wau_count=total_wau_count,
)
rows.insert(0, total_row)
content = loader.render_to_string(
'analytics/realm_summary_table.html',
dict(rows=rows, num_active_sites=num_active_sites,
now=now.strftime('%Y-%m-%dT%H:%M:%SZ'))
)
return content
def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
day_end = timestamp_to_datetime(time.time())
day_start = day_end - timedelta(hours=24)
output = "Per-user online duration for the last 24 hours:\n"
total_duration = timedelta(0)
all_intervals = UserActivityInterval.objects.filter(
end__gte=day_start,
start__lte=day_end
).select_related(
'user_profile',
'user_profile__realm'
).only(
'start',
'end',
'user_profile__email',
'user_profile__realm__string_id'
).order_by(
'user_profile__realm__string_id',
'user_profile__email'
)
by_string_id = lambda row: row.user_profile.realm.string_id
by_email = lambda row: row.user_profile.email
realm_minutes = {}
for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
realm_duration = timedelta(0)
output += '<hr>%s\n' % (string_id,)
for email, intervals in itertools.groupby(realm_intervals, by_email):
duration = timedelta(0)
for interval in intervals:
start = max(day_start, interval.start)
end = min(day_end, interval.end)
duration += end - start
total_duration += duration
realm_duration += duration
output += " %-*s%s\n" % (37, email, duration)
realm_minutes[string_id] = realm_duration.total_seconds() / 60
output += "\nTotal Duration: %s\n" % (total_duration,)
output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
content = mark_safe('<pre>' + output + '</pre>')
return content, realm_minutes
def sent_messages_report(realm: str) -> str:
title = 'Recently sent messages for ' + realm
cols = [
'Date',
'Humans',
'Bots'
]
query = '''
select
series.day::date,
humans.cnt,
bots.cnt
from (
select generate_series(
(now()::date - interval '2 week'),
now()::date,
interval '1 day'
) as day
) as series
left join (
select
pub_date::date pub_date,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
where
r.string_id = %s
and
(not up.is_bot)
and
pub_date > now() - interval '2 week'
group by
pub_date::date
order by
pub_date::date
) humans on
series.day = humans.pub_date
left join (
select
pub_date::date pub_date,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
where
r.string_id = %s
and
up.is_bot
and
pub_date > now() - interval '2 week'
group by
pub_date::date
order by
pub_date::date
) bots on
series.day = bots.pub_date
'''
cursor = connection.cursor()
cursor.execute(query, [realm, realm])
rows = cursor.fetchall()
cursor.close()
return make_table(title, cols, rows)
def ad_hoc_queries() -> List[Dict[str, str]]:
def get_page(query: str, cols: List[str], title: str) -> Dict[str, str]:
cursor = connection.cursor()
cursor.execute(query)
rows = cursor.fetchall()
rows = list(map(list, rows))
cursor.close()
def fix_rows(i: int,
fixup_func: Union[Callable[[Realm], mark_safe], Callable[[datetime], str]]) -> None:
for row in rows:
row[i] = fixup_func(row[i])
for i, col in enumerate(cols):
if col == 'Realm':
fix_rows(i, realm_activity_link)
elif col in ['Last time', 'Last visit']:
fix_rows(i, format_date_for_activity_reports)
content = make_table(title, cols, rows)
return dict(
content=content,
title=title
)
pages = []
###
for mobile_type in ['Android', 'ZulipiOS']:
title = '%s usage' % (mobile_type,)
query = '''
select
realm.string_id,
up.id user_id,
client.name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
client.name like '%s'
group by string_id, up.id, client.name
having max(last_visit) > now() - interval '2 week'
order by string_id, up.id, client.name
''' % (mobile_type,)
cols = [
'Realm',
'User id',
'Name',
'Hits',
'Last time'
]
pages.append(get_page(query, cols, title))
###
title = 'Desktop users'
query = '''
select
realm.string_id,
client.name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
client.name like 'desktop%%'
group by string_id, client.name
having max(last_visit) > now() - interval '2 week'
order by string_id, client.name
'''
cols = [
'Realm',
'Client',
'Hits',
'Last time'
]
pages.append(get_page(query, cols, title))
###
title = 'Integrations by realm'
query = '''
select
realm.string_id,
case
when query like '%%external%%' then split_part(query, '/', 5)
else client.name
end client_name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
(query in ('send_message_backend', '/api/v1/send_message')
and client.name not in ('Android', 'ZulipiOS')
and client.name not like 'test: Zulip%%'
)
or
query like '%%external%%'
group by string_id, client_name
having max(last_visit) > now() - interval '2 week'
order by string_id, client_name
'''
cols = [
'Realm',
'Client',
'Hits',
'Last time'
]
pages.append(get_page(query, cols, title))
###
title = 'Integrations by client'
query = '''
select
case
when query like '%%external%%' then split_part(query, '/', 5)
else client.name
end client_name,
realm.string_id,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
(query in ('send_message_backend', '/api/v1/send_message')
and client.name not in ('Android', 'ZulipiOS')
and client.name not like 'test: Zulip%%'
)
or
query like '%%external%%'
group by client_name, string_id
having max(last_visit) > now() - interval '2 week'
order by client_name, string_id
'''
cols = [
'Client',
'Realm',
'Hits',
'Last time'
]
pages.append(get_page(query, cols, title))
return pages
@require_server_admin
@has_request_variables
def get_activity(request: HttpRequest) -> HttpResponse:
duration_content, realm_minutes = user_activity_intervals() # type: Tuple[mark_safe, Dict[str, float]]
counts_content = realm_summary_table(realm_minutes) # type: str
data = [
('Counts', counts_content),
('Durations', duration_content),
]
for page in ad_hoc_queries():
data.append((page['title'], page['content']))
title = 'Activity'
return render(
request,
'analytics/activity.html',
context=dict(data=data, title=title, is_home=True),
)
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
fields = [
'user_profile__full_name',
'user_profile__email',
'query',
'client__name',
'count',
'last_visit',
]
records = UserActivity.objects.filter(
user_profile__realm__string_id=realm,
user_profile__is_active=True,
user_profile__is_bot=is_bot
)
records = records.order_by("user_profile__email", "-last_visit")
records = records.select_related('user_profile', 'client').only(*fields)
return records
def get_user_activity_records_for_email(email: str) -> List[QuerySet]:
fields = [
'user_profile__full_name',
'query',
'client__name',
'count',
'last_visit'
]
records = UserActivity.objects.filter(
user_profile__email=email
)
records = records.order_by("-last_visit")
records = records.select_related('user_profile', 'client').only(*fields)
return records
def raw_user_activity_table(records: List[QuerySet]) -> str:
cols = [
'query',
'client',
'count',
'last_visit'
]
def row(record: QuerySet) -> List[Any]:
return [
record.query,
record.client.name,
record.count,
format_date_for_activity_reports(record.last_visit)
]
rows = list(map(row, records))
title = 'Raw Data'
return make_table(title, cols, rows)
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, Any]]:
    #: `Any` used above should be `Union[int, datetime]`.
    #: However, the current version of `Union` does not work inside other functions.
    #: We could use something like:
    #: `Union[Dict[str, Dict[str, int]], Dict[str, Dict[str, datetime]]]`
    #: but that would require this long `Union` to be carried through all the inner functions.
summary = {} # type: Dict[str, Dict[str, Any]]
def update(action: str, record: QuerySet) -> None:
if action not in summary:
summary[action] = dict(
count=record.count,
last_visit=record.last_visit
)
else:
summary[action]['count'] += record.count
summary[action]['last_visit'] = max(
summary[action]['last_visit'],
record.last_visit
)
if records:
summary['name'] = records[0].user_profile.full_name
for record in records:
client = record.client.name
query = record.query
update('use', record)
if client == 'API':
m = re.match('/api/.*/external/(.*)', query)
if m:
client = m.group(1)
update(client, record)
if client.startswith('desktop'):
update('desktop', record)
if client == 'website':
update('website', record)
if ('send_message' in query) or re.search('/api/.*/external/.*', query):
update('send', record)
if query in ['/json/update_pointer', '/json/users/me/pointer', '/api/v1/update_pointer',
'update_pointer_backend']:
update('pointer', record)
update(client, record)
return summary
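# Example of the summary shape built above (values invented): for a user active on
# the website and via a webhook integration, roughly
# {'name': 'Full Name',
#  'use': {'count': 512, 'last_visit': <datetime>},
#  'send': {'count': 40, 'last_visit': <datetime>},
#  'website': {'count': 500, 'last_visit': <datetime>},
#  'jira': {'count': 12, 'last_visit': <datetime>}}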
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
if date:
return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M')
else:
return ''
def user_activity_link(email: str) -> mark_safe:
url_name = 'analytics.views.get_user_activity'
url = reverse(url_name, kwargs=dict(email=email))
email_link = '<a href="%s">%s</a>' % (url, email)
return mark_safe(email_link)
def realm_activity_link(realm_str: str) -> mark_safe:
url_name = 'analytics.views.get_realm_activity'
url = reverse(url_name, kwargs=dict(realm_str=realm_str))
realm_link = '<a href="%s">%s</a>' % (url, realm_str)
return mark_safe(realm_link)
def realm_stats_link(realm_str: str) -> mark_safe:
url_name = 'analytics.views.stats_for_realm'
url = reverse(url_name, kwargs=dict(realm_str=realm_str))
    stats_link = '<a href="{}"><i class="fa fa-pie-chart"></i></a>'.format(url)
return mark_safe(stats_link)
def realm_client_table(user_summaries: Dict[str, Dict[str, Dict[str, Any]]]) -> str:
exclude_keys = [
'internal',
'name',
'use',
'send',
'pointer',
'website',
'desktop',
]
rows = []
for email, user_summary in user_summaries.items():
email_link = user_activity_link(email)
name = user_summary['name']
for k, v in user_summary.items():
if k in exclude_keys:
continue
client = k
count = v['count']
last_visit = v['last_visit']
row = [
format_date_for_activity_reports(last_visit),
client,
name,
email_link,
count,
]
rows.append(row)
rows = sorted(rows, key=lambda r: r[0], reverse=True)
cols = [
'Last visit',
'Client',
'Name',
'Email',
'Count',
]
title = 'Clients'
return make_table(title, cols, rows)
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
rows = []
for k, v in user_summary.items():
if k == 'name':
continue
client = k
count = v['count']
last_visit = v['last_visit']
row = [
format_date_for_activity_reports(last_visit),
client,
count,
]
rows.append(row)
rows = sorted(rows, key=lambda r: r[0], reverse=True)
cols = [
'last_visit',
'client',
'count',
]
title = 'User Activity'
return make_table(title, cols, rows)
def realm_user_summary_table(all_records: List[QuerySet],
admin_emails: Set[str]) -> Tuple[Dict[str, Dict[str, Any]], str]:
user_records = {}
def by_email(record: QuerySet) -> str:
return record.user_profile.email
for email, records in itertools.groupby(all_records, by_email):
user_records[email] = get_user_activity_summary(list(records))
def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
if k in user_summary:
return user_summary[k]['last_visit']
else:
return None
def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
if k in user_summary:
return user_summary[k]['count']
else:
return ''
def is_recent(val: Optional[datetime]) -> bool:
age = timezone_now() - val
return age.total_seconds() < 5 * 60
rows = []
for email, user_summary in user_records.items():
email_link = user_activity_link(email)
sent_count = get_count(user_summary, 'send')
cells = [user_summary['name'], email_link, sent_count]
row_class = ''
for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']:
visit = get_last_visit(user_summary, field)
if field == 'use':
if visit and is_recent(visit):
row_class += ' recently_active'
if email in admin_emails:
row_class += ' admin'
val = format_date_for_activity_reports(visit)
cells.append(val)
row = dict(cells=cells, row_class=row_class)
rows.append(row)
def by_used_time(row: Dict[str, Any]) -> str:
return row['cells'][3]
rows = sorted(rows, key=by_used_time, reverse=True)
cols = [
'Name',
'Email',
'Total sent',
'Heard from',
'Message sent',
'Pointer motion',
'Desktop',
'ZulipiOS',
'Android',
]
title = 'Summary'
content = make_table(title, cols, rows, has_row_class=True)
return user_records, content
@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
data = [] # type: List[Tuple[str, str]]
all_user_records = {} # type: Dict[str, Any]
try:
admins = Realm.objects.get(string_id=realm_str).get_admin_users()
except Realm.DoesNotExist:
return HttpResponseNotFound("Realm %s does not exist" % (realm_str,))
admin_emails = {admin.email for admin in admins}
for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
all_records = list(get_user_activity_records_for_realm(realm_str, is_bot))
user_records, content = realm_user_summary_table(all_records, admin_emails)
all_user_records.update(user_records)
data += [(page_title, content)]
page_title = 'Clients'
content = realm_client_table(all_user_records)
data += [(page_title, content)]
page_title = 'History'
content = sent_messages_report(realm_str)
data += [(page_title, content)]
title = realm_str
return render(
request,
'analytics/activity.html',
context=dict(data=data, realm_link=None, title=title),
)
@require_server_admin
def get_user_activity(request: HttpRequest, email: str) -> HttpResponse:
records = get_user_activity_records_for_email(email)
data = [] # type: List[Tuple[str, str]]
user_summary = get_user_activity_summary(records)
content = user_activity_summary_table(user_summary)
data += [('Summary', content)]
content = raw_user_activity_table(records)
data += [('Info', content)]
title = email
return render(
request,
'analytics/activity.html',
context=dict(data=data, title=title),
)
| [
"HttpRequest",
"str",
"str",
"HttpRequest",
"HttpRequest",
"str",
"HttpRequest",
"UserProfile",
"str",
"Any",
"HttpRequest",
"HttpRequest",
"UserProfile",
"Any",
"HttpRequest",
"UserProfile",
"Dict[str, List[int]]",
"Dict[str, Dict[str, List[int]]]",
"Type[BaseCount]",
"int",
"str",
"Dict[str, List[int]]",
"CountStat",
"Type[BaseCount]",
"int",
"List[datetime]",
"Dict[Optional[str], str]",
"bool",
"str",
"List[str]",
"List[Any]",
"Any",
"connection.cursor",
"int",
"Dict[str, float]",
"Dict[str, int]",
"str",
"str",
"List[str]",
"str",
"int",
"Union[Callable[[Realm], mark_safe], Callable[[datetime], str]]",
"HttpRequest",
"str",
"bool",
"str",
"List[QuerySet]",
"QuerySet",
"List[QuerySet]",
"str",
"QuerySet",
"Optional[datetime]",
"str",
"str",
"str",
"Dict[str, Dict[str, Dict[str, Any]]]",
"Dict[str, Dict[str, Any]]",
"List[QuerySet]",
"Set[str]",
"QuerySet",
"Dict[str, Dict[str, datetime]]",
"str",
"Dict[str, Dict[str, str]]",
"str",
"Optional[datetime]",
"Dict[str, Any]",
"HttpRequest",
"str",
"HttpRequest",
"str"
] | [
1688,
1718,
1736,
2197,
2611,
2635,
2971,
2998,
3051,
3066,
3351,
3548,
3575,
3657,
3858,
3885,
8715,
9355,
9935,
9960,
10423,
11055,
11581,
11631,
11688,
11736,
11803,
11886,
12897,
12908,
12925,
13023,
13345,
14745,
15327,
21752,
24764,
26626,
26637,
26655,
26864,
26902,
31482,
32083,
32096,
32665,
33088,
33225,
33564,
34020,
34033,
35352,
35522,
35769,
36026,
36320,
37369,
37981,
38040,
38140,
38377,
38412,
38590,
38620,
38767,
39675,
40168,
40192,
41321,
41341
] | [
1699,
1721,
1739,
2208,
2622,
2638,
2982,
3009,
3054,
3069,
3362,
3559,
3586,
3660,
3869,
3896,
8735,
9386,
9950,
9963,
10426,
11075,
11590,
11646,
11691,
11750,
11827,
11890,
12900,
12917,
12934,
13026,
13362,
14748,
15343,
21766,
24767,
26629,
26646,
26658,
26867,
26964,
31493,
32086,
32100,
32668,
33102,
33233,
33578,
34023,
34041,
35370,
35525,
35772,
36029,
36356,
37394,
37995,
38048,
38148,
38407,
38415,
38615,
38623,
38785,
39689,
40179,
40195,
41332,
41344
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/__init__.py | # -*- coding: utf-8 -*-
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
VERSION = (0, 9, 'pre')
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/management/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/management/commands/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Confirmation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0002_realmcreationkey.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('confirmation', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='RealmCreationKey',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('creation_key', models.CharField(max_length=40, verbose_name='activation key')),
('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
],
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0003_emailchangeconfirmation.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-17 09:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('confirmation', '0002_realmcreationkey'),
]
operations = [
migrations.CreateModel(
name='EmailChangeConfirmation',
fields=[
],
options={
'proxy': True,
},
bases=('confirmation.confirmation',),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0004_remove_confirmationmanager.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-08 04:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('confirmation', '0003_emailchangeconfirmation'),
]
operations = [
migrations.DeleteModel(
name='EmailChangeConfirmation',
),
migrations.AlterModelOptions(
name='confirmation',
options={},
),
migrations.AddField(
model_name='confirmation',
name='type',
field=models.PositiveSmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='confirmation',
name='confirmation_key',
field=models.CharField(max_length=40),
),
migrations.AlterField(
model_name='confirmation',
name='date_sent',
field=models.DateTimeField(),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0005_confirmation_realm.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-30 00:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('zerver', '0124_stream_enable_notifications'),
('confirmation', '0004_remove_confirmationmanager'),
]
operations = [
migrations.AddField(
model_name='confirmation',
name='realm',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/0006_realmcreationkey_presume_email_valid.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-01-29 18:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('confirmation', '0005_confirmation_realm'),
]
operations = [
migrations.AddField(
model_name='realmcreationkey',
name='presume_email_valid',
field=models.BooleanField(default=False),
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/migrations/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/models.py | # -*- coding: utf-8 -*-
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
__revision__ = '$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $'
import datetime
from django.db import models
from django.db.models import CASCADE
from django.urls import reverse
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.utils.timezone import now as timezone_now
from zerver.lib.utils import generate_random_token
from zerver.models import PreregistrationUser, EmailChangeStatus, MultiuseInvite, \
UserProfile, Realm
from random import SystemRandom
import string
from typing import Any, Dict, Optional, Union
class ConfirmationKeyException(Exception):
WRONG_LENGTH = 1
EXPIRED = 2
DOES_NOT_EXIST = 3
def __init__(self, error_type: int) -> None:
super().__init__()
self.error_type = error_type
def render_confirmation_key_error(request: HttpRequest, exception: ConfirmationKeyException) -> HttpResponse:
if exception.error_type == ConfirmationKeyException.WRONG_LENGTH:
return render(request, 'confirmation/link_malformed.html')
if exception.error_type == ConfirmationKeyException.EXPIRED:
return render(request, 'confirmation/link_expired.html')
return render(request, 'confirmation/link_does_not_exist.html')
def generate_key() -> str:
generator = SystemRandom()
# 24 characters * 5 bits of entropy/character = 120 bits of entropy
return ''.join(generator.choice(string.ascii_lowercase + string.digits) for _ in range(24))
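# (Worked arithmetic for the comment above: the alphabet has 26 + 10 = 36 symbols,
# i.e. log2(36) ~= 5.17 bits per character, so 24 characters give ~124 bits;
# "5 bits/character" is the conservative floor behind the 120-bit estimate.)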
ConfirmationObjT = Union[MultiuseInvite, PreregistrationUser, EmailChangeStatus]
def get_object_from_key(confirmation_key: str,
confirmation_type: int) -> ConfirmationObjT:
# Confirmation keys used to be 40 characters
if len(confirmation_key) not in (24, 40):
raise ConfirmationKeyException(ConfirmationKeyException.WRONG_LENGTH)
try:
confirmation = Confirmation.objects.get(confirmation_key=confirmation_key,
type=confirmation_type)
except Confirmation.DoesNotExist:
raise ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST)
time_elapsed = timezone_now() - confirmation.date_sent
if time_elapsed.total_seconds() > _properties[confirmation.type].validity_in_days * 24 * 3600:
raise ConfirmationKeyException(ConfirmationKeyException.EXPIRED)
obj = confirmation.content_object
if hasattr(obj, "status"):
obj.status = getattr(settings, 'STATUS_ACTIVE', 1)
obj.save(update_fields=['status'])
return obj
def create_confirmation_link(obj: ContentType, host: str,
confirmation_type: int,
url_args: Optional[Dict[str, str]]=None) -> str:
key = generate_key()
Confirmation.objects.create(content_object=obj, date_sent=timezone_now(), confirmation_key=key,
realm=obj.realm, type=confirmation_type)
return confirmation_url(key, host, confirmation_type, url_args)
def confirmation_url(confirmation_key: str, host: str,
confirmation_type: int,
url_args: Optional[Dict[str, str]]=None) -> str:
if url_args is None:
url_args = {}
url_args['confirmation_key'] = confirmation_key
return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME, host,
reverse(_properties[confirmation_type].url_name, kwargs=url_args))
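# Illustration (values invented): with EXTERNAL_URI_SCHEME = 'https://' and
# host = 'chat.example.com', this returns 'https://chat.example.com' followed by
# whatever path reverse() produces for the confirmation type's url_name, with the
# 24-character key filled into its confirmation_key kwarg.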
class Confirmation(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
object_id = models.PositiveIntegerField() # type: int
content_object = GenericForeignKey('content_type', 'object_id')
date_sent = models.DateTimeField() # type: datetime.datetime
confirmation_key = models.CharField(max_length=40) # type: str
realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm]
# The following list is the set of valid types
USER_REGISTRATION = 1
INVITATION = 2
EMAIL_CHANGE = 3
UNSUBSCRIBE = 4
SERVER_REGISTRATION = 5
MULTIUSE_INVITE = 6
REALM_CREATION = 7
type = models.PositiveSmallIntegerField() # type: int
def __str__(self) -> str:
return '<Confirmation: %s>' % (self.content_object,)
class ConfirmationType:
def __init__(self, url_name: str,
validity_in_days: int=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS) -> None:
self.url_name = url_name
self.validity_in_days = validity_in_days
_properties = {
Confirmation.USER_REGISTRATION: ConfirmationType('check_prereg_key_and_redirect'),
Confirmation.INVITATION: ConfirmationType('check_prereg_key_and_redirect',
validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
Confirmation.EMAIL_CHANGE: ConfirmationType('zerver.views.user_settings.confirm_email_change'),
Confirmation.UNSUBSCRIBE: ConfirmationType('zerver.views.unsubscribe.email_unsubscribe',
validity_in_days=1000000), # should never expire
Confirmation.MULTIUSE_INVITE: ConfirmationType(
'zerver.views.registration.accounts_home_from_multiuse_invite',
validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
Confirmation.REALM_CREATION: ConfirmationType('check_prereg_key_and_redirect'),
}
def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
"""
Generate a unique link that a logged-out user can visit to unsubscribe from
Zulip e-mails without having to first log in.
"""
return create_confirmation_link(user_profile, user_profile.realm.host,
Confirmation.UNSUBSCRIBE,
url_args = {'email_type': email_type})
# Functions related to links generated by the generate_realm_creation_link.py
# management command.
# Note that being validated here will just allow the user to access the create_realm
# form, where they will enter their email and go through the regular
# Confirmation.REALM_CREATION pathway.
# Arguably RealmCreationKey should just be another ConfirmationObjT and we should
# add another Confirmation.type for this; it's this way for historical reasons.
def validate_key(creation_key: Optional[str]) -> Optional['RealmCreationKey']:
"""Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
if creation_key is None:
return None
try:
key_record = RealmCreationKey.objects.get(creation_key=creation_key)
except RealmCreationKey.DoesNotExist:
raise RealmCreationKey.Invalid()
time_elapsed = timezone_now() - key_record.date_created
if time_elapsed.total_seconds() > settings.REALM_CREATION_LINK_VALIDITY_DAYS * 24 * 3600:
raise RealmCreationKey.Invalid()
return key_record
def generate_realm_creation_url(by_admin: bool=False) -> str:
key = generate_key()
RealmCreationKey.objects.create(creation_key=key,
date_created=timezone_now(),
presume_email_valid=by_admin)
return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME,
settings.EXTERNAL_HOST,
reverse('zerver.views.create_realm',
kwargs={'creation_key': key}))
class RealmCreationKey(models.Model):
creation_key = models.CharField('activation key', max_length=40)
date_created = models.DateTimeField('created', default=timezone_now)
# True just if we should presume the email address the user enters
# is theirs, and skip sending mail to it to confirm that.
presume_email_valid = models.BooleanField(default=False) # type: bool
class Invalid(Exception):
pass
| [
"int",
"HttpRequest",
"ConfirmationKeyException",
"str",
"int",
"ContentType",
"str",
"int",
"str",
"str",
"int",
"str",
"UserProfile",
"str",
"Optional[str]"
] | [
959,
1081,
1105,
1834,
1882,
2817,
2836,
2889,
3278,
3289,
3334,
4536,
5620,
5645,
6504
] | [
962,
1092,
1129,
1837,
1885,
2828,
2839,
2892,
3281,
3292,
3337,
4539,
5631,
5648,
6517
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | confirmation/settings.py | # -*- coding: utf-8 -*-
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
from typing import Any, Dict
__revision__ = '$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $'
STATUS_ACTIVE = 1
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/lib/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/lib/stripe.py | import datetime
from functools import wraps
import logging
import os
from typing import Any, Callable, Dict, Optional, TypeVar, Tuple
import ujson
from django.conf import settings
from django.db import transaction
from django.utils.translation import ugettext as _
from django.utils.timezone import now as timezone_now
from django.core.signing import Signer
import stripe
from zerver.lib.exceptions import JsonableError
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.utils import generate_random_token
from zerver.lib.actions import do_change_plan_type
from zerver.models import Realm, UserProfile, RealmAuditLog
from corporate.models import Customer, Plan, Coupon, BillingProcessor
from zproject.settings import get_secret
STRIPE_PUBLISHABLE_KEY = get_secret('stripe_publishable_key')
stripe.api_key = get_secret('stripe_secret_key')
BILLING_LOG_PATH = os.path.join('/var/log/zulip'
if not settings.DEVELOPMENT
else settings.DEVELOPMENT_LOG_DIRECTORY,
'billing.log')
billing_logger = logging.getLogger('corporate.stripe')
log_to_file(billing_logger, BILLING_LOG_PATH)
log_to_file(logging.getLogger('stripe'), BILLING_LOG_PATH)
CallableT = TypeVar('CallableT', bound=Callable[..., Any])
MIN_INVOICED_SEAT_COUNT = 30
DEFAULT_INVOICE_DAYS_UNTIL_DUE = 30
def get_seat_count(realm: Realm) -> int:
return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()
def sign_string(string: str) -> Tuple[str, str]:
salt = generate_random_token(64)
signer = Signer(salt=salt)
return signer.sign(string), salt
def unsign_string(signed_string: str, salt: str) -> str:
signer = Signer(salt=salt)
return signer.unsign(signed_string)
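# Minimal round-trip sketch (illustrative): the salt must be stored alongside the
# signed value, since unsigning requires the same salt.
#   signed, salt = sign_string('42')
#   assert unsign_string(signed, salt) == '42'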
class BillingError(Exception):
# error messages
CONTACT_SUPPORT = _("Something went wrong. Please contact %s." % (settings.ZULIP_ADMINISTRATOR,))
TRY_RELOADING = _("Something went wrong. Please reload the page.")
# description is used only for tests
def __init__(self, description: str, message: str) -> None:
self.description = description
self.message = message
class StripeCardError(BillingError):
pass
class StripeConnectionError(BillingError):
pass
def catch_stripe_errors(func: CallableT) -> CallableT:
@wraps(func)
def wrapped(*args: Any, **kwargs: Any) -> Any:
if settings.DEVELOPMENT and not settings.TEST_SUITE: # nocoverage
if STRIPE_PUBLISHABLE_KEY is None:
raise BillingError('missing stripe config', "Missing Stripe config. "
"See https://zulip.readthedocs.io/en/latest/subsystems/billing.html.")
if not Plan.objects.exists():
raise BillingError('missing plans',
"Plan objects not created. Please run ./manage.py setup_stripe")
try:
return func(*args, **kwargs)
# See https://stripe.com/docs/api/python#error_handling, though
# https://stripe.com/docs/api/ruby#error_handling suggests there are additional fields, and
# https://stripe.com/docs/error-codes gives a more detailed set of error codes
except stripe.error.StripeError as e:
err = e.json_body.get('error', {})
billing_logger.error("Stripe error: %s %s %s %s" % (
e.http_status, err.get('type'), err.get('code'), err.get('param')))
if isinstance(e, stripe.error.CardError):
# TODO: Look into i18n for this
raise StripeCardError('card error', err.get('message'))
if isinstance(e, stripe.error.RateLimitError) or \
isinstance(e, stripe.error.APIConnectionError): # nocoverage TODO
raise StripeConnectionError(
'stripe connection error',
_("Something went wrong. Please wait a few seconds and try again."))
raise BillingError('other stripe error', BillingError.CONTACT_SUPPORT)
return wrapped # type: ignore # https://github.com/python/mypy/issues/1927
@catch_stripe_errors
def stripe_get_customer(stripe_customer_id: str) -> stripe.Customer:
return stripe.Customer.retrieve(stripe_customer_id, expand=["default_source"])
@catch_stripe_errors
def stripe_get_upcoming_invoice(stripe_customer_id: str) -> stripe.Invoice:
return stripe.Invoice.upcoming(customer=stripe_customer_id)
@catch_stripe_errors
def stripe_get_invoice_preview_for_downgrade(
stripe_customer_id: str, stripe_subscription_id: str,
stripe_subscriptionitem_id: str) -> stripe.Invoice:
return stripe.Invoice.upcoming(
customer=stripe_customer_id, subscription=stripe_subscription_id,
subscription_items=[{'id': stripe_subscriptionitem_id, 'quantity': 0}])
def preview_invoice_total_for_downgrade(stripe_customer: stripe.Customer) -> int:
stripe_subscription = extract_current_subscription(stripe_customer)
if stripe_subscription is None:
# Most likely situation is: user A goes to billing page, user B
# cancels subscription, user A clicks on "downgrade" or something
# else that calls this function.
billing_logger.error("Trying to extract subscription item that doesn't exist, for Stripe customer %s"
% (stripe_customer.id,))
raise BillingError('downgrade without subscription', BillingError.TRY_RELOADING)
for item in stripe_subscription['items']:
# There should only be one item, but we can't index into stripe_subscription['items']
stripe_subscriptionitem_id = item.id
return stripe_get_invoice_preview_for_downgrade(
stripe_customer.id, stripe_subscription.id, stripe_subscriptionitem_id).total
# This allows us to access /billing in tests without having to mock the
# whole invoice object
def upcoming_invoice_total(stripe_customer_id: str) -> int:
return stripe_get_upcoming_invoice(stripe_customer_id).total
# Return type should be Optional[stripe.Subscription], which throws a mypy error.
# Will fix once we add type stubs for the Stripe API.
def extract_current_subscription(stripe_customer: stripe.Customer) -> Any:
if not stripe_customer.subscriptions:
return None
for stripe_subscription in stripe_customer.subscriptions:
if stripe_subscription.status != "canceled":
return stripe_subscription
return None
def estimate_customer_arr(stripe_customer: stripe.Customer) -> int: # nocoverage
stripe_subscription = extract_current_subscription(stripe_customer)
if stripe_subscription is None:
return 0
# This is an overestimate for those paying by invoice
estimated_arr = stripe_subscription.plan.amount * stripe_subscription.quantity / 100.
if stripe_subscription.plan.interval == 'month':
estimated_arr *= 12
if stripe_customer.discount is not None:
estimated_arr *= 1 - stripe_customer.discount.coupon.percent_off/100.
return int(estimated_arr)
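# Worked example with invented numbers: a monthly plan at $8/seat is amount=800
# (cents), so with quantity=10 the subscription is 800 * 10 / 100 = $80/month,
# i.e. $960/year; a 15% coupon then gives int(960 * 0.85) = 816.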
@catch_stripe_errors
def do_create_customer(user: UserProfile, stripe_token: Optional[str]=None,
coupon: Optional[Coupon]=None) -> stripe.Customer:
realm = user.realm
stripe_coupon_id = None
if coupon is not None:
stripe_coupon_id = coupon.stripe_coupon_id
# We could do a better job of handling race conditions here, but if two
# people from a realm try to upgrade at exactly the same time, the main
# bad thing that will happen is that we will create an extra stripe
# customer that we can delete or ignore.
stripe_customer = stripe.Customer.create(
description="%s (%s)" % (realm.string_id, realm.name),
email=user.email,
metadata={'realm_id': realm.id, 'realm_str': realm.string_id},
source=stripe_token,
coupon=stripe_coupon_id)
event_time = timestamp_to_datetime(stripe_customer.created)
with transaction.atomic():
RealmAuditLog.objects.create(
realm=user.realm, acting_user=user, event_type=RealmAuditLog.STRIPE_CUSTOMER_CREATED,
event_time=event_time)
if stripe_token is not None:
RealmAuditLog.objects.create(
realm=user.realm, acting_user=user, event_type=RealmAuditLog.STRIPE_CARD_CHANGED,
event_time=event_time)
Customer.objects.create(realm=realm, stripe_customer_id=stripe_customer.id)
user.is_billing_admin = True
user.save(update_fields=["is_billing_admin"])
return stripe_customer
@catch_stripe_errors
def do_replace_payment_source(user: UserProfile, stripe_token: str) -> stripe.Customer:
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
stripe_customer.source = stripe_token
# Deletes existing card: https://stripe.com/docs/api#update_customer-source
# This can also have other side effects, e.g. it will try to pay certain past-due
# invoices: https://stripe.com/docs/api#update_customer
updated_stripe_customer = stripe.Customer.save(stripe_customer)
RealmAuditLog.objects.create(
realm=user.realm, acting_user=user, event_type=RealmAuditLog.STRIPE_CARD_CHANGED,
event_time=timezone_now())
return updated_stripe_customer
@catch_stripe_errors
def do_replace_coupon(user: UserProfile, coupon: Coupon) -> stripe.Customer:
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
stripe_customer.coupon = coupon.stripe_coupon_id
return stripe.Customer.save(stripe_customer)
@catch_stripe_errors
def do_subscribe_customer_to_plan(user: UserProfile, stripe_customer: stripe.Customer, stripe_plan_id: str,
seat_count: int, tax_percent: float, charge_automatically: bool) -> None:
if extract_current_subscription(stripe_customer) is not None:
# Most likely due to two people in the org going to the billing page,
# and then both upgrading their plan. We don't send clients
# real-time event updates for the billing pages, so this is more
# likely than it would be in other parts of the app.
billing_logger.error("Stripe customer %s trying to subscribe to %s, "
"but has an active subscription" % (stripe_customer.id, stripe_plan_id))
raise BillingError('subscribing with existing subscription', BillingError.TRY_RELOADING)
customer = Customer.objects.get(stripe_customer_id=stripe_customer.id)
if charge_automatically:
billing_method = 'charge_automatically'
days_until_due = None
else:
billing_method = 'send_invoice'
days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE
# Note that there is a race condition here, where if two users upgrade at exactly the
# same time, they will have two subscriptions, and get charged twice. We could try to
# reduce the chance of it with a well-designed idempotency_key, but it's not easy since
# we also need to be careful not to block the customer from retrying if their
# subscription attempt fails (e.g. due to insufficient funds).
# Success here implies the stripe_customer was charged: https://stripe.com/docs/billing/lifecycle#active
# Otherwise we should expect it to throw a stripe.error.
stripe_subscription = stripe.Subscription.create(
customer=stripe_customer.id,
billing=billing_method,
days_until_due=days_until_due,
items=[{
'plan': stripe_plan_id,
'quantity': seat_count,
}],
prorate=True,
tax_percent=tax_percent)
with transaction.atomic():
customer.has_billing_relationship = True
customer.save(update_fields=['has_billing_relationship'])
customer.realm.has_seat_based_plan = True
customer.realm.save(update_fields=['has_seat_based_plan'])
RealmAuditLog.objects.create(
realm=customer.realm,
acting_user=user,
event_type=RealmAuditLog.STRIPE_PLAN_CHANGED,
event_time=timestamp_to_datetime(stripe_subscription.created),
extra_data=ujson.dumps({'plan': stripe_plan_id, 'quantity': seat_count,
'billing_method': billing_method}))
current_seat_count = get_seat_count(customer.realm)
if seat_count != current_seat_count:
RealmAuditLog.objects.create(
realm=customer.realm,
event_type=RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET,
event_time=timestamp_to_datetime(stripe_subscription.created),
requires_billing_update=True,
extra_data=ujson.dumps({'quantity': current_seat_count}))
def process_initial_upgrade(user: UserProfile, plan: Plan, seat_count: int,
stripe_token: Optional[str]) -> None:
customer = Customer.objects.filter(realm=user.realm).first()
if customer is None:
stripe_customer = do_create_customer(user, stripe_token=stripe_token)
# elif instead of if since we want to avoid doing two round trips to
# stripe if we can
elif stripe_token is not None:
stripe_customer = do_replace_payment_source(user, stripe_token)
do_subscribe_customer_to_plan(
user=user,
stripe_customer=stripe_customer,
stripe_plan_id=plan.stripe_plan_id,
seat_count=seat_count,
# TODO: billing address details are passed to us in the request;
# use that to calculate taxes.
tax_percent=0,
charge_automatically=(stripe_token is not None))
do_change_plan_type(user, Realm.STANDARD)
def attach_discount_to_realm(user: UserProfile, percent_off: int) -> None:
coupon = Coupon.objects.get(percent_off=percent_off)
customer = Customer.objects.filter(realm=user.realm).first()
if customer is None:
do_create_customer(user, coupon=coupon)
else:
do_replace_coupon(user, coupon)
@catch_stripe_errors
def process_downgrade(user: UserProfile) -> None:
stripe_customer = stripe_get_customer(
Customer.objects.filter(realm=user.realm).first().stripe_customer_id)
subscription_balance = preview_invoice_total_for_downgrade(stripe_customer)
# If subscription_balance > 0, they owe us money. This is likely due to
# people they added in the last day, so we can just forgive it.
    # Stripe automatically forgives it when we delete the subscription, so there is nothing for us to do in that case.
if subscription_balance < 0:
stripe_customer.account_balance = stripe_customer.account_balance + subscription_balance
stripe_subscription = extract_current_subscription(stripe_customer)
# Wish these two could be transaction.atomic
stripe_subscription = stripe_subscription.delete()
stripe.Customer.save(stripe_customer)
with transaction.atomic():
user.realm.has_seat_based_plan = False
user.realm.save(update_fields=['has_seat_based_plan'])
RealmAuditLog.objects.create(
realm=user.realm,
acting_user=user,
event_type=RealmAuditLog.STRIPE_PLAN_CHANGED,
event_time=timestamp_to_datetime(stripe_subscription.canceled_at),
extra_data=ujson.dumps({'plan': None, 'quantity': stripe_subscription.quantity}))
# Doing this last, since it results in user-visible confirmation (via
# product changes) that the downgrade succeeded.
# Keeping it out of the transaction.atomic block because it will
# eventually have a lot of stuff going on.
do_change_plan_type(user, Realm.LIMITED)
## Process RealmAuditLog
def do_set_subscription_quantity(
customer: Customer, timestamp: int, idempotency_key: str, quantity: int) -> None:
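    # Sets the Stripe subscription quantity to an absolute value (used for
    # STRIPE_PLAN_QUANTITY_RESET log rows); the audit log timestamp becomes the
    # proration date, so Stripe prorates from the event time.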
stripe_customer = stripe_get_customer(customer.stripe_customer_id)
stripe_subscription = extract_current_subscription(stripe_customer)
stripe_subscription.quantity = quantity
stripe_subscription.proration_date = timestamp
stripe.Subscription.save(stripe_subscription, idempotency_key=idempotency_key)
def do_adjust_subscription_quantity(
customer: Customer, timestamp: int, idempotency_key: str, delta: int) -> None:
stripe_customer = stripe_get_customer(customer.stripe_customer_id)
stripe_subscription = extract_current_subscription(stripe_customer)
stripe_subscription.quantity = stripe_subscription.quantity + delta
stripe_subscription.proration_date = timestamp
stripe.Subscription.save(stripe_subscription, idempotency_key=idempotency_key)
def increment_subscription_quantity(
customer: Customer, timestamp: int, idempotency_key: str) -> None:
return do_adjust_subscription_quantity(customer, timestamp, idempotency_key, 1)
def decrement_subscription_quantity(
customer: Customer, timestamp: int, idempotency_key: str) -> None:
return do_adjust_subscription_quantity(customer, timestamp, idempotency_key, -1)
@catch_stripe_errors
def process_billing_log_entry(processor: BillingProcessor, log_row: RealmAuditLog) -> None:
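    # Mark the processor STARTED before we talk to Stripe, so that a crash
    # mid-call leaves the row in a state where it gets retried; the actual
    # Stripe update is dispatched on the RealmAuditLog event type below.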
processor.state = BillingProcessor.STARTED
processor.log_row = log_row
processor.save()
customer = Customer.objects.get(realm=log_row.realm)
timestamp = datetime_to_timestamp(log_row.event_time)
idempotency_key = 'process_billing_log_entry:%s' % (log_row.id,)
extra_args = {} # type: Dict[str, Any]
if log_row.extra_data is not None:
extra_args = ujson.loads(log_row.extra_data)
processing_functions = {
RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET: do_set_subscription_quantity,
RealmAuditLog.USER_CREATED: increment_subscription_quantity,
RealmAuditLog.USER_ACTIVATED: increment_subscription_quantity,
RealmAuditLog.USER_DEACTIVATED: decrement_subscription_quantity,
RealmAuditLog.USER_REACTIVATED: increment_subscription_quantity,
} # type: Dict[str, Callable[..., None]]
processing_functions[log_row.event_type](customer, timestamp, idempotency_key, **extra_args)
processor.state = BillingProcessor.DONE
processor.save()
def get_next_billing_log_entry(processor: BillingProcessor) -> Optional[RealmAuditLog]:
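    # Pick the next audit log row for this processor: the global processor
    # (realm=None) skips rows for realms that have their own processor, while
    # a realm processor only handles rows the global processor has moved past.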
if processor.state == BillingProcessor.STARTED:
return processor.log_row
assert processor.state != BillingProcessor.STALLED
if processor.state not in [BillingProcessor.DONE, BillingProcessor.SKIPPED]:
raise BillingError(
'unknown processor state',
"Check for typos, since this value is sometimes set by hand: %s" % (processor.state,))
if processor.realm is None:
realms_with_processors = BillingProcessor.objects.exclude(
realm=None).values_list('realm', flat=True)
query = RealmAuditLog.objects.exclude(realm__in=realms_with_processors)
else:
global_processor = BillingProcessor.objects.get(realm=None)
query = RealmAuditLog.objects.filter(
realm=processor.realm, id__lt=global_processor.log_row.id)
return query.filter(id__gt=processor.log_row.id,
requires_billing_update=True).order_by('id').first()
def run_billing_processor_one_step(processor: BillingProcessor) -> bool:
# Returns True if a row was processed, or if processing was attempted
log_row = get_next_billing_log_entry(processor)
if log_row is None:
if processor.realm is not None:
processor.delete()
return False
try:
process_billing_log_entry(processor, log_row)
return True
except Exception as e:
# Possible errors include processing subscription quantity entries
        # after a downgrade, since the downgrade code doesn't check that the
        # billing processor is up to date
billing_logger.error("Error on log_row.realm=%s, event_type=%s, log_row.id=%s, "
"processor.id=%s, processor.realm=%s" % (
processor.log_row.realm.string_id, processor.log_row.event_type,
processor.log_row.id, processor.id, processor.realm))
if isinstance(e, StripeCardError):
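            # A card error is a problem with the realm's card rather than with
            # our code: the global processor hands the realm off to a STALLED
            # realm processor and skips the row, so other realms keep being
            # processed; a realm processor just stalls itself.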
if processor.realm is None:
BillingProcessor.objects.create(log_row=processor.log_row,
realm=processor.log_row.realm,
state=BillingProcessor.STALLED)
processor.state = BillingProcessor.SKIPPED
else:
processor.state = BillingProcessor.STALLED
processor.save()
return True
raise
| [
"Realm",
"str",
"str",
"str",
"str",
"str",
"CallableT",
"Any",
"Any",
"str",
"str",
"str",
"str",
"str",
"stripe.Customer",
"str",
"stripe.Customer",
"stripe.Customer",
"UserProfile",
"UserProfile",
"str",
"UserProfile",
"Coupon",
"UserProfile",
"stripe.Customer",
"str",
"int",
"float",
"bool",
"UserProfile",
"Plan",
"int",
"Optional[str]",
"UserProfile",
"int",
"UserProfile",
"Customer",
"int",
"str",
"int",
"Customer",
"int",
"str",
"int",
"Customer",
"int",
"str",
"Customer",
"int",
"str",
"BillingProcessor",
"RealmAuditLog",
"BillingProcessor",
"BillingProcessor"
] | [
1476,
1605,
1769,
1780,
2168,
2182,
2397,
2462,
2477,
4282,
4464,
4648,
4677,
4718,
4990,
6030,
6295,
6596,
7193,
8722,
8749,
9455,
9476,
9769,
9799,
9832,
9883,
9901,
9930,
12909,
12928,
12946,
12993,
13832,
13858,
14167,
15823,
15844,
15866,
15881,
16272,
16293,
16315,
16327,
16746,
16767,
16789,
16943,
16964,
16986,
17148,
17175,
18267,
19308
] | [
1481,
1608,
1772,
1783,
2171,
2185,
2406,
2465,
2480,
4285,
4467,
4651,
4680,
4721,
5005,
6033,
6310,
6611,
7204,
8733,
8752,
9466,
9482,
9780,
9814,
9835,
9886,
9906,
9934,
12920,
12932,
12949,
13006,
13843,
13861,
14178,
15831,
15847,
15869,
15884,
16280,
16296,
16318,
16330,
16754,
16770,
16792,
16951,
16967,
16989,
17164,
17188,
18283,
19324
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/management/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/management/commands/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/management/commands/process_billing_updates.py | """\
Run BillingProcessors.
This management command is run via supervisor. Do not run on multiple
machines, as the code has not been made robust to race conditions from doing
so. (Alternatively, you can set `BILLING_PROCESSOR_ENABLED=False` on all but
one machine to make the command have no effect.)
"""
import time
from typing import Any
from django.conf import settings
from django.core.management.base import BaseCommand
from zerver.lib.context_managers import lockfile
from zerver.lib.management import sleep_forever
from corporate.lib.stripe import StripeConnectionError, \
run_billing_processor_one_step
from corporate.models import BillingProcessor
class Command(BaseCommand):
help = """Run BillingProcessors, to sync billing-relevant updates into Stripe.
Run this command under supervisor.
Usage: ./manage.py process_billing_updates
"""
def handle(self, *args: Any, **options: Any) -> None:
if not settings.BILLING_PROCESSOR_ENABLED:
sleep_forever()
with lockfile("/tmp/zulip_billing_processor.lockfile"):
while True:
for processor in BillingProcessor.objects.exclude(
state=BillingProcessor.STALLED):
try:
entry_processed = run_billing_processor_one_step(processor)
except StripeConnectionError:
time.sleep(5*60)
# Less load on the db during times of activity
# and more responsiveness when the load is low
if entry_processed:
time.sleep(10)
else:
time.sleep(2)
| [
"Any",
"Any"
] | [
891,
907
] | [
894,
910
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/management/commands/setup_stripe.py | from corporate.models import Plan, Coupon, Customer
from django.conf import settings
from zerver.lib.management import ZulipBaseCommand
from zproject.settings import get_secret
from typing import Any
import stripe
stripe.api_key = get_secret('stripe_secret_key')
class Command(ZulipBaseCommand):
help = """Script to add the appropriate products and plans to Stripe."""
def handle(self, *args: Any, **options: Any) -> None:
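        # Only intended for development and the test suite (asserted just
        # below): wipes the local billing tables and recreates the Stripe
        # products, plans, and coupons the app expects.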
assert (settings.DEVELOPMENT or settings.TEST_SUITE)
Customer.objects.all().delete()
Plan.objects.all().delete()
Coupon.objects.all().delete()
# Zulip Cloud offerings
product = stripe.Product.create(
name="Zulip Cloud Standard",
type='service',
statement_descriptor="Zulip Cloud Standard",
unit_label="user")
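        # Stripe amounts are in cents: 800 is $8/user/month, 8000 is $80/user/year.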
plan = stripe.Plan.create(
currency='usd',
interval='month',
product=product.id,
amount=800,
billing_scheme='per_unit',
nickname=Plan.CLOUD_MONTHLY,
usage_type='licensed')
Plan.objects.create(nickname=Plan.CLOUD_MONTHLY, stripe_plan_id=plan.id)
plan = stripe.Plan.create(
currency='usd',
interval='year',
product=product.id,
amount=8000,
billing_scheme='per_unit',
nickname=Plan.CLOUD_ANNUAL,
usage_type='licensed')
Plan.objects.create(nickname=Plan.CLOUD_ANNUAL, stripe_plan_id=plan.id)
coupon = stripe.Coupon.create(
duration='forever',
name='25% discount',
percent_off=25)
Coupon.objects.create(percent_off=25, stripe_coupon_id=coupon.id)
coupon = stripe.Coupon.create(
duration='forever',
name='85% discount',
percent_off=85)
Coupon.objects.create(percent_off=85, stripe_coupon_id=coupon.id)
| [
"Any",
"Any"
] | [
405,
421
] | [
408,
424
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-25 12:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('zerver', '0189_userprofile_add_some_emojisets'),
]
operations = [
migrations.CreateModel(
name='BillingProcessor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('state', models.CharField(max_length=20)),
('last_modified', models.DateTimeField(auto_now=True)),
('log_row', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.RealmAuditLog')),
('realm', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='Coupon',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent_off', models.SmallIntegerField(unique=True)),
('stripe_coupon_id', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name='Customer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('stripe_customer_id', models.CharField(max_length=255, unique=True)),
('has_billing_relationship', models.BooleanField(default=False)),
('realm', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='Plan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nickname', models.CharField(max_length=40, unique=True)),
('stripe_plan_id', models.CharField(max_length=255, unique=True)),
],
),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/migrations/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/models.py | import datetime
from django.db import models
from zerver.models import Realm, RealmAuditLog
class Customer(models.Model):
realm = models.OneToOneField(Realm, on_delete=models.CASCADE) # type: Realm
stripe_customer_id = models.CharField(max_length=255, unique=True) # type: str
# Becomes True the first time a payment successfully goes through, and never
# goes back to being False
has_billing_relationship = models.BooleanField(default=False) # type: bool
def __str__(self) -> str:
return "<Customer %s %s>" % (self.realm, self.stripe_customer_id)
class Plan(models.Model):
# The two possible values for nickname
CLOUD_MONTHLY = 'monthly'
CLOUD_ANNUAL = 'annual'
nickname = models.CharField(max_length=40, unique=True) # type: str
stripe_plan_id = models.CharField(max_length=255, unique=True) # type: str
class Coupon(models.Model):
percent_off = models.SmallIntegerField(unique=True) # type: int
stripe_coupon_id = models.CharField(max_length=255, unique=True) # type: str
def __str__(self) -> str:
return '<Coupon: %s %s %s>' % (self.percent_off, self.stripe_coupon_id, self.id)
class BillingProcessor(models.Model):
log_row = models.ForeignKey(RealmAuditLog, on_delete=models.CASCADE) # RealmAuditLog
# Exactly one processor, the global processor, has realm=None.
realm = models.OneToOneField(Realm, null=True, on_delete=models.CASCADE) # type: Realm
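    # State machine for syncing a log row to Stripe: STARTED while the row is
    # being processed and DONE on success; on a card error the global processor
    # marks the row SKIPPED and hands the realm off to a STALLED realm processor.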
DONE = 'done'
STARTED = 'started'
SKIPPED = 'skipped' # global processor only
STALLED = 'stalled' # realm processors only
state = models.CharField(max_length=20) # type: str
last_modified = models.DateTimeField(auto_now=True) # type: datetime.datetime
def __str__(self) -> str:
return '<BillingProcessor: %s %s %s>' % (self.realm, self.log_row, self.id)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/tests/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/tests/test_stripe.py | import datetime
from functools import wraps
from mock import Mock, patch
import operator
import os
import re
import sys
from typing import Any, Callable, Dict, List, Optional, TypeVar, Tuple, cast
import ujson
import json
from django.core import signing
from django.core.management import call_command
from django.core.urlresolvers import get_resolver
from django.http import HttpResponse
from django.utils.timezone import utc as timezone_utc
import stripe
from zerver.lib.actions import do_deactivate_user, do_create_user, \
do_activate_user, do_reactivate_user, activity_change_requires_seat_update
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
from zerver.models import Realm, UserProfile, get_realm, RealmAuditLog
from corporate.lib.stripe import catch_stripe_errors, \
do_subscribe_customer_to_plan, attach_discount_to_realm, \
get_seat_count, extract_current_subscription, sign_string, unsign_string, \
get_next_billing_log_entry, run_billing_processor_one_step, \
BillingError, StripeCardError, StripeConnectionError, stripe_get_customer, \
DEFAULT_INVOICE_DAYS_UNTIL_DUE, MIN_INVOICED_SEAT_COUNT
from corporate.models import Customer, Plan, Coupon, BillingProcessor
import corporate.urls
CallableT = TypeVar('CallableT', bound=Callable[..., Any])
GENERATE_STRIPE_FIXTURES = False
STRIPE_FIXTURES_DIR = "corporate/tests/stripe_fixtures"
fixture_data_file = open(os.path.join(os.path.dirname(__file__), 'stripe_fixtures.json'), 'r')
fixture_data = ujson.load(fixture_data_file)
def mock_create_customer(*args: Any, **kwargs: Any) -> stripe.Customer:
return stripe.util.convert_to_stripe_object(fixture_data["create_customer"])
def mock_create_subscription(*args: Any, **kwargs: Any) -> stripe.Subscription:
return stripe.util.convert_to_stripe_object(fixture_data["create_subscription"])
def mock_customer_with_subscription(*args: Any, **kwargs: Any) -> stripe.Customer:
return stripe.util.convert_to_stripe_object(fixture_data["customer_with_subscription"])
def mock_customer_with_canceled_subscription(*args: Any, **kwargs: Any) -> stripe.Customer:
customer = mock_customer_with_subscription()
customer.subscriptions.data[0].status = "canceled"
customer.subscriptions.data[0].canceled_at = 1532602160
return customer
def mock_customer_with_cancel_at_period_end_subscription(*args: Any, **kwargs: Any) -> stripe.Customer: # nocoverage
customer = mock_customer_with_subscription()
customer.subscriptions.data[0].canceled_at = 1532602243
customer.subscriptions.data[0].cancel_at_period_end = True
return customer
def mock_customer_with_account_balance(account_balance: int) -> Callable[[str, List[str]], stripe.Customer]:
def customer_with_account_balance(stripe_customer_id: str, expand: List[str]) -> stripe.Customer:
stripe_customer = mock_customer_with_subscription()
stripe_customer.account_balance = account_balance
return stripe_customer
return customer_with_account_balance
def mock_invoice_preview_for_downgrade(total: int=-1000) -> Callable[[str, str, Dict[str, Any]], stripe.Invoice]:
def invoice_preview(customer: str, subscription: str,
subscription_items: Dict[str, Any]) -> stripe.Invoice:
# TODO: Get a better fixture; this is not at all what these look like
stripe_invoice = stripe.util.convert_to_stripe_object(fixture_data["upcoming_invoice"])
stripe_invoice.total = total
return stripe_invoice
return invoice_preview
# TODO: check that this creates a token similar to what is created by our
# actual Stripe Checkout flows
def stripe_create_token(card_number: str="4242424242424242") -> stripe.Token:
return stripe.Token.create(
card={
"number": card_number,
"exp_month": 3,
"exp_year": 2033,
"cvc": "333",
"name": "Ada Starr",
"address_line1": "Under the sea,",
"address_city": "Pacific",
"address_zip": "33333",
"address_country": "United States",
})
def stripe_fixture_path(decorated_function_name: str, mocked_function_name: str, call_count: int) -> str:
    # Make the eventual filename a bit shorter, and also we conventionally
    # use test_* for the Python test files
if decorated_function_name[:5] == 'test_':
decorated_function_name = decorated_function_name[5:]
return "{}/{}:{}.{}.json".format(
STRIPE_FIXTURES_DIR, decorated_function_name, mocked_function_name[7:], call_count)
def fixture_files_for_function(decorated_function: CallableT) -> List[str]: # nocoverage
decorated_function_name = decorated_function.__name__
if decorated_function_name[:5] == 'test_':
decorated_function_name = decorated_function_name[5:]
return sorted(['{}/{}'.format(STRIPE_FIXTURES_DIR, f) for f in os.listdir(STRIPE_FIXTURES_DIR)
if f.startswith(decorated_function_name)])
def generate_and_save_stripe_fixture(decorated_function_name: str, mocked_function_name: str,
mocked_function: CallableT) -> Callable[[Any, Any], Any]: # nocoverage
def _generate_and_save_stripe_fixture(*args: Any, **kwargs: Any) -> Any:
# Note that mock is not the same as mocked_function, even though their
# definitions look the same
mock = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
fixture_path = stripe_fixture_path(decorated_function_name, mocked_function_name, mock.call_count)
try:
# Talk to Stripe
stripe_object = mocked_function(*args, **kwargs)
except stripe.error.StripeError as e:
with open(fixture_path, 'w') as f:
error_dict = e.__dict__
error_dict["headers"] = dict(error_dict["headers"])
f.write(json.dumps(error_dict, indent=2, separators=(',', ': '), sort_keys=True) + "\n")
raise e
with open(fixture_path, 'w') as f:
if stripe_object is not None:
f.write(str(stripe_object) + "\n")
else:
f.write("{}\n")
return stripe_object
return _generate_and_save_stripe_fixture
def read_stripe_fixture(decorated_function_name: str,
mocked_function_name: str) -> Callable[[Any, Any], Any]:
def _read_stripe_fixture(*args: Any, **kwargs: Any) -> Any:
mock = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
fixture_path = stripe_fixture_path(decorated_function_name, mocked_function_name, mock.call_count)
fixture = ujson.load(open(fixture_path, 'r'))
# Check for StripeError fixtures
if "json_body" in fixture:
requestor = stripe.api_requestor.APIRequestor()
# This function will raise the relevant StripeError according to the fixture
requestor.interpret_response(fixture["http_body"], fixture["http_status"], fixture["headers"])
return stripe.util.convert_to_stripe_object(fixture)
return _read_stripe_fixture
def normalize_fixture_data(decorated_function: CallableT) -> None: # nocoverage
    # Stripe IDs are all of the form cus_D7OT2jf5YAtZQ2
id_lengths = [
('cus', 14), ('sub', 14), ('si', 14), ('sli', 14), ('req', 14), ('tok', 24), ('card', 24)]
# We'll replace cus_D7OT2jf5YAtZQ2 with something like cus_NORMALIZED0001
pattern_translations = {
"%s_[A-Za-z0-9]{%d}" % (prefix, length): "%s_NORMALIZED%%0%dd" % (prefix, length - 10)
for prefix, length in id_lengths
}
# We'll replace "invoice_prefix": "A35BC4Q" with something like "invoice_prefix": "NORMA01"
pattern_translations.update({
'"invoice_prefix": "[A-Za-z0-9]{7}"': '"invoice_prefix": "NORMA%02d"',
'"fingerprint": "[A-Za-z0-9]{16}"': '"fingerprint": "NORMALIZED%06d"',
'"number": "[A-Za-z0-9]{7}-[A-Za-z0-9]{4}"': '"number": "NORMALI-%04d"',
})
normalized_values = {pattern: {}
for pattern in pattern_translations.keys()} # type: Dict[str, Dict[str, str]]
for fixture_file in fixture_files_for_function(decorated_function):
with open(fixture_file, "r") as f:
file_content = f.read()
for pattern, translation in pattern_translations.items():
for match in re.findall(pattern, file_content):
if match not in normalized_values[pattern]:
normalized_values[pattern][match] = translation % (len(normalized_values[pattern]) + 1,)
file_content = file_content.replace(match, normalized_values[pattern][match])
# Overwrite all IP addresses
file_content = re.sub(r'"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"', '"0.0.0.0"', file_content)
with open(fixture_file, "w") as f:
f.write(file_content)
def mock_stripe(*mocked_function_names: str,
generate: Optional[bool]=None) -> Callable[[CallableT], CallableT]:
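    # With generate=True the decorated test talks to the real Stripe API and
    # records each response as a fixture under STRIPE_FIXTURES_DIR; otherwise
    # the recorded fixtures are replayed, so the test runs offline.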
def _mock_stripe(decorated_function: CallableT) -> CallableT:
generate_fixture = generate
if generate_fixture is None:
generate_fixture = GENERATE_STRIPE_FIXTURES
mocked_function_names_ = ["stripe.{}".format(name) for name in mocked_function_names]
for mocked_function_name in mocked_function_names_:
mocked_function = operator.attrgetter(mocked_function_name)(sys.modules[__name__])
if generate_fixture:
side_effect = generate_and_save_stripe_fixture(
decorated_function.__name__, mocked_function_name, mocked_function) # nocoverage
else:
side_effect = read_stripe_fixture(decorated_function.__name__, mocked_function_name)
decorated_function = patch(mocked_function_name, side_effect=side_effect)(decorated_function)
@wraps(decorated_function)
def wrapped(*args: Any, **kwargs: Any) -> Any:
val = decorated_function(*args, **kwargs)
if generate_fixture: # nocoverage
normalize_fixture_data(decorated_function)
return val
return cast(CallableT, wrapped)
return _mock_stripe
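# Typical usage of mock_stripe, as in the tests below: list the Stripe calls
# the test makes and accept one Mock argument per mocked call, e.g.
#     @mock_stripe("Customer.create", "Subscription.create")
#     def test_example(self, mock2: Mock, mock1: Mock) -> None: ...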
# A Kandra is a fictional character that can become anything. Used as a
# wildcard when testing for equality.
class Kandra(object):
def __eq__(self, other: Any) -> bool:
return True
def process_all_billing_log_entries() -> None:
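    # Test helper: anchor a global processor at the first audit log row and run
    # it until every billing-relevant entry has been synced to (mocked) Stripe.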
assert not RealmAuditLog.objects.get(pk=1).requires_billing_update
processor = BillingProcessor.objects.create(
log_row=RealmAuditLog.objects.get(pk=1), realm=None, state=BillingProcessor.DONE)
while run_billing_processor_one_step(processor):
pass
class StripeTest(ZulipTestCase):
@mock_stripe("Product.create", "Plan.create", "Coupon.create", generate=False)
def setUp(self, mock3: Mock, mock2: Mock, mock1: Mock) -> None:
call_command("setup_stripe")
# legacy
self.token = 'token'
# The values below should be copied from stripe_fixtures.json
self.stripe_customer_id = 'cus_D7OT2jf5YAtZQL'
self.customer_created = 1529990750
self.stripe_coupon_id = Coupon.objects.get(percent_off=85).stripe_coupon_id
self.stripe_plan_id = 'plan_Do3xCvbzO89OsR'
self.subscription_created = 1529990751
self.quantity = 8
self.signed_seat_count, self.salt = sign_string(str(self.quantity))
def get_signed_seat_count_from_response(self, response: HttpResponse) -> Optional[str]:
match = re.search(r'name=\"signed_seat_count\" value=\"(.+)\"', response.content.decode("utf-8"))
return match.group(1) if match else None
def get_salt_from_response(self, response: HttpResponse) -> Optional[str]:
match = re.search(r'name=\"salt\" value=\"(\w+)\"', response.content.decode("utf-8"))
return match.group(1) if match else None
@patch("corporate.lib.stripe.billing_logger.error")
def test_catch_stripe_errors(self, mock_billing_logger_error: Mock) -> None:
@catch_stripe_errors
def raise_invalid_request_error() -> None:
raise stripe.error.InvalidRequestError(
"message", "param", "code", json_body={})
with self.assertRaises(BillingError) as context:
raise_invalid_request_error()
self.assertEqual('other stripe error', context.exception.description)
mock_billing_logger_error.assert_called()
@catch_stripe_errors
def raise_card_error() -> None:
error_message = "The card number is not a valid credit card number."
json_body = {"error": {"message": error_message}}
raise stripe.error.CardError(error_message, "number", "invalid_number",
json_body=json_body)
with self.assertRaises(StripeCardError) as context:
raise_card_error()
self.assertIn('not a valid credit card', context.exception.message)
self.assertEqual('card error', context.exception.description)
mock_billing_logger_error.assert_called()
def test_billing_not_enabled(self) -> None:
with self.settings(BILLING_ENABLED=False):
self.login(self.example_email("iago"))
response = self.client_get("/upgrade/")
self.assert_in_success_response(["Page not found (404)"], response)
@mock_stripe("Customer.retrieve", "Subscription.create", "Customer.create", "Token.create", "Invoice.upcoming")
def test_initial_upgrade(self, mock5: Mock, mock4: Mock, mock3: Mock, mock2: Mock, mock1: Mock) -> None:
user = self.example_user("hamlet")
self.login(user.email)
response = self.client_get("/upgrade/")
self.assert_in_success_response(['We can also bill by invoice'], response)
self.assertFalse(user.realm.has_seat_based_plan)
self.assertNotEqual(user.realm.plan_type, Realm.STANDARD)
self.assertFalse(Customer.objects.filter(realm=user.realm).exists())
# Click "Make payment" in Stripe Checkout
self.client_post("/upgrade/", {
'stripeToken': stripe_create_token().id,
'signed_seat_count': self.get_signed_seat_count_from_response(response),
'salt': self.get_salt_from_response(response),
'plan': Plan.CLOUD_ANNUAL})
# Check that we correctly created Customer and Subscription objects in Stripe
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
self.assertEqual(stripe_customer.default_source.id[:5], 'card_')
self.assertEqual(stripe_customer.description, "zulip (Zulip Dev)")
self.assertEqual(stripe_customer.discount, None)
self.assertEqual(stripe_customer.email, user.email)
self.assertEqual(dict(stripe_customer.metadata),
{'realm_id': str(user.realm.id), 'realm_str': 'zulip'})
stripe_subscription = extract_current_subscription(stripe_customer)
self.assertEqual(stripe_subscription.billing, 'charge_automatically')
self.assertEqual(stripe_subscription.days_until_due, None)
self.assertEqual(stripe_subscription.plan.id,
Plan.objects.get(nickname=Plan.CLOUD_ANNUAL).stripe_plan_id)
self.assertEqual(stripe_subscription.quantity, self.quantity)
self.assertEqual(stripe_subscription.status, 'active')
self.assertEqual(stripe_subscription.tax_percent, 0)
# Check that we correctly populated Customer and RealmAuditLog in Zulip
self.assertEqual(1, Customer.objects.filter(stripe_customer_id=stripe_customer.id,
realm=user.realm).count())
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', 'event_time').order_by('id'))
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created)),
(RealmAuditLog.STRIPE_CARD_CHANGED, timestamp_to_datetime(stripe_customer.created)),
# TODO: Add a test where stripe_customer.created != stripe_subscription.created
(RealmAuditLog.STRIPE_PLAN_CHANGED, timestamp_to_datetime(stripe_subscription.created)),
(RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra()),
])
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertTrue(realm.has_seat_based_plan)
self.assertEqual(realm.plan_type, Realm.STANDARD)
self.assertEqual(realm.max_invites, Realm.INVITES_STANDARD_REALM_DAILY_MAX)
# Check that we can no longer access /upgrade
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/billing/', response.url)
# Check /billing has the correct information
response = self.client_get("/billing/")
self.assert_not_in_success_response(['We can also bill by invoice'], response)
for substring in ['Your plan will renew on', '$%s.00' % (80 * self.quantity,),
'Card ending in 4242']:
self.assert_in_response(substring, response)
@mock_stripe("Token.create", "Invoice.upcoming", "Customer.retrieve", "Customer.create", "Subscription.create")
def test_billing_page_permissions(self, mock5: Mock, mock4: Mock, mock3: Mock,
mock2: Mock, mock1: Mock) -> None:
# Check that non-admins can access /upgrade via /billing, when there is no Customer object
self.login(self.example_email('hamlet'))
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Check that non-admins can sign up and pay
self.client_post("/upgrade/", {'stripeToken': stripe_create_token().id,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL})
# Check that the non-admin hamlet can still access /billing
response = self.client_get("/billing/")
self.assert_in_success_response(["for billing history or to make changes"], response)
# Check admins can access billing, even though they are not a billing admin
self.login(self.example_email('iago'))
response = self.client_get("/billing/")
self.assert_in_success_response(["for billing history or to make changes"], response)
# Check that a non-admin, non-billing admin user does not have access
self.login(self.example_email("cordelia"))
response = self.client_get("/billing/")
self.assert_in_success_response(["You must be an organization administrator"], response)
@mock_stripe("Token.create", "Customer.create", "Subscription.create", "Customer.retrieve")
def test_upgrade_with_outdated_seat_count(
self, mock4: Mock, mock3: Mock, mock2: Mock, mock1: Mock) -> None:
self.login(self.example_email("hamlet"))
new_seat_count = 123
# Change the seat count while the user is going through the upgrade flow
response = self.client_get("/upgrade/")
with patch('corporate.lib.stripe.get_seat_count', return_value=new_seat_count):
self.client_post("/upgrade/", {
'stripeToken': stripe_create_token().id,
'signed_seat_count': self.get_signed_seat_count_from_response(response),
'salt': self.get_salt_from_response(response),
'plan': Plan.CLOUD_ANNUAL})
# Check that the subscription call used the old quantity, not new_seat_count
stripe_customer = stripe_get_customer(
Customer.objects.get(realm=get_realm('zulip')).stripe_customer_id)
stripe_subscription = extract_current_subscription(stripe_customer)
self.assertEqual(stripe_subscription.quantity, self.quantity)
# Check that we have the STRIPE_PLAN_QUANTITY_RESET entry, and that we
# correctly handled the requires_billing_update field
audit_log_entries = list(RealmAuditLog.objects.order_by('-id')
.values_list('event_type', 'event_time',
'requires_billing_update')[:5])[::-1]
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created), False),
(RealmAuditLog.STRIPE_CARD_CHANGED, timestamp_to_datetime(stripe_customer.created), False),
# TODO: Ideally this test would force stripe_customer.created != stripe_subscription.created
(RealmAuditLog.STRIPE_PLAN_CHANGED, timestamp_to_datetime(stripe_subscription.created), False),
(RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET, timestamp_to_datetime(stripe_subscription.created), True),
(RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra(), False),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET).values_list('extra_data', flat=True).first()),
{'quantity': new_seat_count})
@mock_stripe("Token.create", "Customer.create", "Subscription.create", "Customer.retrieve", "Customer.save")
def test_upgrade_where_subscription_save_fails_at_first(
self, mock5: Mock, mock4: Mock, mock3: Mock, mock2: Mock, mock1: Mock) -> None:
user = self.example_user("hamlet")
self.login(user.email)
# From https://stripe.com/docs/testing#cards: Attaching this card to
# a Customer object succeeds, but attempts to charge the customer fail.
self.client_post("/upgrade/", {'stripeToken': stripe_create_token('4000000000000341').id,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL})
# Check that we created a Customer object with has_billing_relationship False
customer = Customer.objects.get(realm=get_realm('zulip'))
self.assertFalse(customer.has_billing_relationship)
original_stripe_customer_id = customer.stripe_customer_id
# Check that we created a customer in stripe, with no subscription
stripe_customer = stripe_get_customer(customer.stripe_customer_id)
self.assertFalse(extract_current_subscription(stripe_customer))
# Check that we correctly populated RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', flat=True).order_by('id'))
self.assertEqual(audit_log_entries, [RealmAuditLog.STRIPE_CUSTOMER_CREATED,
RealmAuditLog.STRIPE_CARD_CHANGED])
# Check that we did not update Realm
realm = get_realm("zulip")
self.assertFalse(realm.has_seat_based_plan)
# Check that we still get redirected to /upgrade
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Try again, with a valid card
self.client_post("/upgrade/", {'stripeToken': stripe_create_token().id,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL})
customer = Customer.objects.get(realm=get_realm('zulip'))
# Impossible to create two Customers, but check that we didn't
# change stripe_customer_id and that we updated has_billing_relationship
self.assertEqual(customer.stripe_customer_id, original_stripe_customer_id)
self.assertTrue(customer.has_billing_relationship)
# Check that we successfully added a subscription
stripe_customer = stripe_get_customer(customer.stripe_customer_id)
self.assertTrue(extract_current_subscription(stripe_customer))
# Check that we correctly populated RealmAuditLog
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', flat=True).order_by('id'))
self.assertEqual(audit_log_entries, [RealmAuditLog.STRIPE_CUSTOMER_CREATED,
RealmAuditLog.STRIPE_CARD_CHANGED,
RealmAuditLog.STRIPE_CARD_CHANGED,
RealmAuditLog.STRIPE_PLAN_CHANGED,
RealmAuditLog.REALM_PLAN_TYPE_CHANGED])
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertTrue(realm.has_seat_based_plan)
# Check that we can no longer access /upgrade
response = self.client_get("/upgrade/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/billing/', response.url)
def test_upgrade_with_tampered_seat_count(self) -> None:
self.login(self.example_email("hamlet"))
response = self.client_post("/upgrade/", {
'stripeToken': self.token,
'signed_seat_count': "randomsalt",
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL
})
self.assert_in_success_response(["Upgrade to Zulip Standard"], response)
self.assertEqual(response['error_description'], 'tampered seat count')
def test_upgrade_with_tampered_plan(self) -> None:
self.login(self.example_email("hamlet"))
response = self.client_post("/upgrade/", {
'stripeToken': self.token,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': "invalid"
})
self.assert_in_success_response(["Upgrade to Zulip Standard"], response)
self.assertEqual(response['error_description'], 'tampered plan')
def test_upgrade_with_insufficient_invoiced_seat_count(self) -> None:
self.login(self.example_email("hamlet"))
# Test invoicing for less than MIN_INVOICED_SEAT_COUNT
response = self.client_post("/upgrade/", {
'invoiced_seat_count': self.quantity,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL
})
self.assert_in_success_response(["Upgrade to Zulip Standard",
"at least %d users" % (MIN_INVOICED_SEAT_COUNT,)], response)
self.assertEqual(response['error_description'], 'lowball seat count')
# Test invoicing for less than your user count
with patch("corporate.views.MIN_INVOICED_SEAT_COUNT", 3):
response = self.client_post("/upgrade/", {
'invoiced_seat_count': self.quantity - 1,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL
})
self.assert_in_success_response(["Upgrade to Zulip Standard",
"at least %d users" % (self.quantity,)], response)
self.assertEqual(response['error_description'], 'lowball seat count')
@patch("corporate.lib.stripe.billing_logger.error")
def test_upgrade_with_uncaught_exception(self, mock1: Mock) -> None:
self.login(self.example_email("hamlet"))
with patch("corporate.views.process_initial_upgrade", side_effect=Exception):
response = self.client_post("/upgrade/", {
'stripeToken': self.token,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL
})
self.assert_in_success_response(["Upgrade to Zulip Standard",
"Something went wrong. Please contact"], response)
self.assertEqual(response['error_description'], 'uncaught exception during upgrade')
@mock_stripe("Customer.create", "Subscription.create", "Subscription.save",
"Customer.retrieve", "Invoice.list")
def test_upgrade_billing_by_invoice(self, mock5: Mock, mock4: Mock, mock3: Mock,
mock2: Mock, mock1: Mock) -> None:
user = self.example_user("hamlet")
self.login(user.email)
self.client_post("/upgrade/", {
'invoiced_seat_count': 123,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL})
process_all_billing_log_entries()
# Check that we correctly created a Customer in Stripe
stripe_customer = stripe_get_customer(Customer.objects.get(realm=user.realm).stripe_customer_id)
self.assertEqual(stripe_customer.email, user.email)
# It can take a second for Stripe to attach the source to the
# customer, and in particular it may not be attached at the time
# stripe_get_customer is called above, causing test flakes.
# So commenting the next line out, but leaving it here so future readers know what
# is supposed to happen here (e.g. the default_source is not None as it would be if
# we had not added a Subscription).
# self.assertEqual(stripe_customer.default_source.type, 'ach_credit_transfer')
# Check that we correctly created a Subscription in Stripe
stripe_subscription = extract_current_subscription(stripe_customer)
self.assertEqual(stripe_subscription.billing, 'send_invoice')
self.assertEqual(stripe_subscription.days_until_due, DEFAULT_INVOICE_DAYS_UNTIL_DUE)
self.assertEqual(stripe_subscription.plan.id,
Plan.objects.get(nickname=Plan.CLOUD_ANNUAL).stripe_plan_id)
self.assertEqual(stripe_subscription.quantity, get_seat_count(user.realm))
self.assertEqual(stripe_subscription.status, 'active')
# Check that we correctly created an initial Invoice in Stripe
for stripe_invoice in stripe.Invoice.list(customer=stripe_customer.id, limit=1):
self.assertTrue(stripe_invoice.auto_advance)
self.assertEqual(stripe_invoice.billing, 'send_invoice')
self.assertEqual(stripe_invoice.billing_reason, 'subscription_create')
# Transitions to 'open' after 1-2 hours
self.assertEqual(stripe_invoice.status, 'draft')
# Very important. Check that we're invoicing for 123, and not get_seat_count
self.assertEqual(stripe_invoice.amount_due, 8000*123)
# Check that we correctly updated Realm
realm = get_realm("zulip")
self.assertTrue(realm.has_seat_based_plan)
self.assertEqual(realm.plan_type, Realm.STANDARD)
# Check that we created a Customer in Zulip
self.assertEqual(1, Customer.objects.filter(stripe_customer_id=stripe_customer.id,
realm=realm).count())
# Check that RealmAuditLog has STRIPE_PLAN_QUANTITY_RESET, and doesn't have STRIPE_CARD_CHANGED
audit_log_entries = list(RealmAuditLog.objects.order_by('-id')
.values_list('event_type', 'event_time',
'requires_billing_update')[:4])[::-1]
self.assertEqual(audit_log_entries, [
(RealmAuditLog.STRIPE_CUSTOMER_CREATED, timestamp_to_datetime(stripe_customer.created), False),
(RealmAuditLog.STRIPE_PLAN_CHANGED, timestamp_to_datetime(stripe_subscription.created), False),
(RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET, timestamp_to_datetime(stripe_subscription.created), True),
(RealmAuditLog.REALM_PLAN_TYPE_CHANGED, Kandra(), False),
])
self.assertEqual(ujson.loads(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET).values_list('extra_data', flat=True).first()),
{'quantity': self.quantity})
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_subscription)
def test_redirect_for_billing_home(self, mock_customer_with_subscription: Mock) -> None:
user = self.example_user("iago")
self.login(user.email)
# No Customer yet; check that we are redirected to /upgrade
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
# Customer, but no billing relationship
customer = Customer.objects.create(
realm=user.realm, stripe_customer_id=self.stripe_customer_id,
has_billing_relationship=False)
response = self.client_get("/billing/")
self.assertEqual(response.status_code, 302)
self.assertEqual('/upgrade/', response.url)
customer.has_billing_relationship = True
customer.save()
with patch("corporate.views.upcoming_invoice_total", return_value=0):
response = self.client_get("/billing/")
self.assert_not_in_success_response(['We can also bill by invoice'], response)
self.assert_in_response('Your plan will renew on', response)
def test_get_seat_count(self) -> None:
realm = get_realm("zulip")
initial_count = get_seat_count(realm)
user1 = UserProfile.objects.create(realm=realm, email='user1@zulip.com', pointer=-1)
user2 = UserProfile.objects.create(realm=realm, email='user2@zulip.com', pointer=-1)
self.assertEqual(get_seat_count(realm), initial_count + 2)
# Test that bots aren't counted
user1.is_bot = True
user1.save(update_fields=['is_bot'])
self.assertEqual(get_seat_count(realm), initial_count + 1)
# Test that inactive users aren't counted
do_deactivate_user(user2)
self.assertEqual(get_seat_count(realm), initial_count)
def test_extract_current_subscription(self) -> None:
self.assertIsNone(extract_current_subscription(mock_create_customer()))
subscription = extract_current_subscription(mock_customer_with_subscription())
self.assertEqual(subscription["id"][:4], "sub_")
self.assertIsNone(extract_current_subscription(mock_customer_with_canceled_subscription()))
def test_subscribe_customer_to_second_plan(self) -> None:
with self.assertRaisesRegex(BillingError, 'subscribing with existing subscription'):
do_subscribe_customer_to_plan(self.example_user("iago"),
mock_customer_with_subscription(),
self.stripe_plan_id, self.quantity, 0, True)
def test_sign_string(self) -> None:
string = "abc"
signed_string, salt = sign_string(string)
self.assertEqual(string, unsign_string(signed_string, salt))
with self.assertRaises(signing.BadSignature):
unsign_string(signed_string, "randomsalt")
@patch("stripe.Customer.retrieve", side_effect=mock_create_customer)
@patch("stripe.Customer.create", side_effect=mock_create_customer)
def test_attach_discount_to_realm(self, mock_create_customer: Mock,
mock_retrieve_customer: Mock) -> None:
user = self.example_user('hamlet')
# Before customer exists
attach_discount_to_realm(user, 85)
mock_create_customer.assert_called_once_with(
description=Kandra(), email=self.example_email('hamlet'), metadata=Kandra(),
source=None, coupon=self.stripe_coupon_id)
mock_create_customer.reset_mock()
# For existing customer
Coupon.objects.create(percent_off=42, stripe_coupon_id='42OFF')
with patch.object(
stripe.Customer, 'save', autospec=True,
side_effect=lambda stripe_customer: self.assertEqual(stripe_customer.coupon, '42OFF')):
attach_discount_to_realm(user, 42)
mock_create_customer.assert_not_called()
@patch("stripe.Subscription.delete")
@patch("stripe.Customer.save")
@patch("stripe.Invoice.upcoming", side_effect=mock_invoice_preview_for_downgrade())
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_subscription)
def test_downgrade(self, mock_retrieve_customer: Mock, mock_upcoming_invoice: Mock,
mock_save_customer: Mock, mock_delete_subscription: Mock) -> None:
realm = get_realm('zulip')
realm.has_seat_based_plan = True
realm.plan_type = Realm.STANDARD
realm.save(update_fields=['has_seat_based_plan', 'plan_type'])
Customer.objects.create(
realm=realm, stripe_customer_id=self.stripe_customer_id, has_billing_relationship=True)
user = self.example_user('iago')
self.login(user.email)
response = self.client_post("/json/billing/downgrade", {})
self.assert_json_success(response)
mock_delete_subscription.assert_called()
mock_save_customer.assert_called()
realm = get_realm('zulip')
self.assertFalse(realm.has_seat_based_plan)
audit_log_entries = list(RealmAuditLog.objects.filter(acting_user=user)
.values_list('event_type', flat=True).order_by('id'))
# TODO: once we have proper mocks, test for event_time and extra_data in STRIPE_PLAN_CHANGED
self.assertEqual(audit_log_entries, [RealmAuditLog.STRIPE_PLAN_CHANGED,
RealmAuditLog.REALM_PLAN_TYPE_CHANGED])
self.assertEqual(realm.plan_type, Realm.LIMITED)
@patch("stripe.Customer.save")
@patch("stripe.Customer.retrieve", side_effect=mock_create_customer)
def test_downgrade_with_no_subscription(
self, mock_retrieve_customer: Mock, mock_save_customer: Mock) -> None:
realm = get_realm('zulip')
Customer.objects.create(
realm=realm, stripe_customer_id=self.stripe_customer_id, has_billing_relationship=True)
self.login(self.example_email('iago'))
response = self.client_post("/json/billing/downgrade", {})
self.assert_json_error_contains(response, 'Please reload')
self.assertEqual(ujson.loads(response.content)['error_description'], 'downgrade without subscription')
mock_save_customer.assert_not_called()
@patch("stripe.Subscription.delete")
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_account_balance(1234))
def test_downgrade_credits(self, mock_retrieve_customer: Mock,
mock_delete_subscription: Mock) -> None:
user = self.example_user('iago')
self.login(user.email)
Customer.objects.create(
realm=user.realm, stripe_customer_id=self.stripe_customer_id, has_billing_relationship=True)
# Check that positive balance is forgiven
with patch("stripe.Invoice.upcoming", side_effect=mock_invoice_preview_for_downgrade(1000)):
with patch.object(
stripe.Customer, 'save', autospec=True,
side_effect=lambda customer: self.assertEqual(customer.account_balance, 1234)):
response = self.client_post("/json/billing/downgrade", {})
self.assert_json_success(response)
# Check that negative balance is credited
with patch("stripe.Invoice.upcoming", side_effect=mock_invoice_preview_for_downgrade(-1000)):
with patch.object(
stripe.Customer, 'save', autospec=True,
side_effect=lambda customer: self.assertEqual(customer.account_balance, 234)):
response = self.client_post("/json/billing/downgrade", {})
self.assert_json_success(response)
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_subscription)
def test_replace_payment_source(self, mock_retrieve_customer: Mock) -> None:
user = self.example_user("iago")
self.login(user.email)
Customer.objects.create(realm=user.realm, stripe_customer_id=self.stripe_customer_id)
with patch.object(stripe.Customer, 'save', autospec=True,
side_effect=lambda customer: self.assertEqual(customer.source, "new_token")):
result = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps("new_token")})
self.assert_json_success(result)
log_entry = RealmAuditLog.objects.order_by('-id').first()
self.assertEqual(user, log_entry.acting_user)
self.assertEqual(RealmAuditLog.STRIPE_CARD_CHANGED, log_entry.event_type)
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_subscription)
def test_replace_payment_source_with_stripe_error(self, mock_retrieve_customer: Mock) -> None:
user = self.example_user("iago")
self.login(user.email)
Customer.objects.create(realm=user.realm, stripe_customer_id=self.stripe_customer_id)
with patch.object(stripe.Customer, 'save', autospec=True,
side_effect=stripe.error.StripeError('message', json_body={})):
response = self.client_post("/json/billing/sources/change",
{'stripe_token': ujson.dumps("new_token")})
self.assertEqual(ujson.loads(response.content)['error_description'], 'other stripe error')
self.assert_json_error_contains(response, 'Something went wrong. Please contact')
self.assertFalse(RealmAuditLog.objects.filter(
event_type=RealmAuditLog.STRIPE_CARD_CHANGED).exists())
@patch("stripe.Customer.create", side_effect=mock_create_customer)
@patch("stripe.Subscription.create", side_effect=mock_create_subscription)
@patch("stripe.Customer.retrieve", side_effect=mock_customer_with_subscription)
def test_billing_quantity_changes_end_to_end(
self, mock_customer_with_subscription: Mock, mock_create_subscription: Mock,
mock_create_customer: Mock) -> None:
self.login(self.example_email("hamlet"))
processor = BillingProcessor.objects.create(
log_row=RealmAuditLog.objects.order_by('id').first(), state=BillingProcessor.DONE)
def check_billing_processor_update(event_type: str, quantity: int) -> None:
def check_subscription_save(subscription: stripe.Subscription, idempotency_key: str) -> None:
self.assertEqual(subscription.quantity, quantity)
log_row = RealmAuditLog.objects.filter(
event_type=event_type, requires_billing_update=True).order_by('-id').first()
self.assertEqual(idempotency_key, 'process_billing_log_entry:%s' % (log_row.id,))
self.assertEqual(subscription.proration_date, datetime_to_timestamp(log_row.event_time))
with patch.object(stripe.Subscription, 'save', autospec=True,
side_effect=check_subscription_save):
run_billing_processor_one_step(processor)
# Test STRIPE_PLAN_QUANTITY_RESET
new_seat_count = 123
# change the seat count while the user is going through the upgrade flow
with patch('corporate.lib.stripe.get_seat_count', return_value=new_seat_count):
self.client_post("/upgrade/", {'stripeToken': self.token,
'signed_seat_count': self.signed_seat_count,
'salt': self.salt,
'plan': Plan.CLOUD_ANNUAL})
check_billing_processor_update(RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET, new_seat_count)
# Test USER_CREATED
user = do_create_user('newuser@zulip.com', 'password', get_realm('zulip'), 'full name', 'short name')
check_billing_processor_update(RealmAuditLog.USER_CREATED, self.quantity + 1)
# Test USER_DEACTIVATED
do_deactivate_user(user)
check_billing_processor_update(RealmAuditLog.USER_DEACTIVATED, self.quantity - 1)
# Test USER_REACTIVATED
do_reactivate_user(user)
check_billing_processor_update(RealmAuditLog.USER_REACTIVATED, self.quantity + 1)
# Test USER_ACTIVATED
# Not a proper use of do_activate_user, but it's fine to call it like this for this test
do_activate_user(user)
check_billing_processor_update(RealmAuditLog.USER_ACTIVATED, self.quantity + 1)
class RequiresBillingUpdateTest(ZulipTestCase):
def test_activity_change_requires_seat_update(self) -> None:
# Realm doesn't have a seat based plan
self.assertFalse(activity_change_requires_seat_update(self.example_user("hamlet")))
realm = get_realm("zulip")
realm.has_seat_based_plan = True
realm.save(update_fields=['has_seat_based_plan'])
# seat based plan + user not a bot
user = self.example_user("hamlet")
self.assertTrue(activity_change_requires_seat_update(user))
user.is_bot = True
user.save(update_fields=['is_bot'])
# seat based plan but user is a bot
self.assertFalse(activity_change_requires_seat_update(user))
def test_requires_billing_update_for_is_active_changes(self) -> None:
count = RealmAuditLog.objects.count()
realm = get_realm("zulip")
user1 = do_create_user('user1@zulip.com', 'password', realm, 'full name', 'short name')
do_deactivate_user(user1)
do_reactivate_user(user1)
# Not a proper use of do_activate_user, but it's fine to call it like this for this test
do_activate_user(user1)
self.assertEqual(count + 4,
RealmAuditLog.objects.filter(requires_billing_update=False).count())
realm.has_seat_based_plan = True
realm.save(update_fields=['has_seat_based_plan'])
user2 = do_create_user('user2@zulip.com', 'password', realm, 'full name', 'short name')
do_deactivate_user(user2)
do_reactivate_user(user2)
do_activate_user(user2)
self.assertEqual(4, RealmAuditLog.objects.filter(requires_billing_update=True).count())
class RequiresBillingAccessTest(ZulipTestCase):
def setUp(self) -> None:
hamlet = self.example_user("hamlet")
hamlet.is_billing_admin = True
hamlet.save(update_fields=["is_billing_admin"])
# mocked_function_name will typically be something imported from
# stripe.py. In theory we could have endpoints that need to mock
# multiple functions, but we'll cross that bridge when we get there.
def _test_endpoint(self, url: str, mocked_function_name: str,
request_data: Optional[Dict[str, Any]]={}) -> None:
# Normal users do not have access
self.login(self.example_email('cordelia'))
response = self.client_post(url, request_data)
self.assert_json_error_contains(response, "Must be a billing administrator or an organization")
# Billing admins have access
self.login(self.example_email('hamlet'))
with patch("corporate.views.{}".format(mocked_function_name)) as mocked1:
response = self.client_post(url, request_data)
self.assert_json_success(response)
mocked1.assert_called()
# Realm admins have access, even if they are not billing admins
self.login(self.example_email('iago'))
with patch("corporate.views.{}".format(mocked_function_name)) as mocked2:
response = self.client_post(url, request_data)
self.assert_json_success(response)
mocked2.assert_called()
def test_json_endpoints(self) -> None:
params = [
("/json/billing/sources/change", "do_replace_payment_source",
{'stripe_token': ujson.dumps('token')}),
("/json/billing/downgrade", "process_downgrade", {})
] # type: List[Tuple[str, str, Dict[str, Any]]]
for (url, mocked_function_name, data) in params:
self._test_endpoint(url, mocked_function_name, data)
# Make sure that we are testing all the JSON endpoints
# Quite a hack, but probably fine for now
string_with_all_endpoints = str(get_resolver('corporate.urls').reverse_dict)
json_endpoints = set([word.strip("\"'()[],$") for word in string_with_all_endpoints.split()
if 'json' in word])
self.assertEqual(len(json_endpoints), len(params))
class BillingProcessorTest(ZulipTestCase):
def add_log_entry(self, realm: Realm=get_realm('zulip'),
event_type: str=RealmAuditLog.USER_CREATED,
requires_billing_update: bool=True) -> RealmAuditLog:
return RealmAuditLog.objects.create(
realm=realm, event_time=datetime.datetime(2001, 2, 3, 4, 5, 6).replace(tzinfo=timezone_utc),
event_type=event_type, requires_billing_update=requires_billing_update)
def test_get_next_billing_log_entry(self) -> None:
second_realm = Realm.objects.create(string_id='second', name='second')
entry1 = self.add_log_entry(realm=second_realm)
realm_processor = BillingProcessor.objects.create(
realm=second_realm, log_row=entry1, state=BillingProcessor.DONE)
entry2 = self.add_log_entry()
# global processor
processor = BillingProcessor.objects.create(
log_row=entry2, state=BillingProcessor.STARTED)
# Test STARTED, STALLED, and typo'ed state entry
self.assertEqual(entry2, get_next_billing_log_entry(processor))
processor.state = BillingProcessor.STALLED
processor.save()
with self.assertRaises(AssertionError):
get_next_billing_log_entry(processor)
processor.state = 'typo'
processor.save()
with self.assertRaisesRegex(BillingError, 'unknown processor state'):
get_next_billing_log_entry(processor)
# Test global processor is handled correctly
processor.state = BillingProcessor.DONE
processor.save()
# test it ignores entries with requires_billing_update=False
entry3 = self.add_log_entry(requires_billing_update=False)
# test it ignores entries with realm processors
entry4 = self.add_log_entry(realm=second_realm)
self.assertIsNone(get_next_billing_log_entry(processor))
# test it does catch entries it should
entry5 = self.add_log_entry()
self.assertEqual(entry5, get_next_billing_log_entry(processor))
# Test realm processor is handled correctly
# test it gets the entry with its realm, and ignores the entry with
# requires_billing_update=False, when global processor is up ahead
processor.log_row = entry5
processor.save()
self.assertEqual(entry4, get_next_billing_log_entry(realm_processor))
# test it doesn't run past the global processor
processor.log_row = entry3
processor.save()
self.assertIsNone(get_next_billing_log_entry(realm_processor))
def test_run_billing_processor_logic_when_no_errors(self) -> None:
second_realm = Realm.objects.create(string_id='second', name='second')
entry1 = self.add_log_entry(realm=second_realm)
realm_processor = BillingProcessor.objects.create(
realm=second_realm, log_row=entry1, state=BillingProcessor.DONE)
entry2 = self.add_log_entry()
# global processor
processor = BillingProcessor.objects.create(
log_row=entry2, state=BillingProcessor.DONE)
# Test nothing to process
# test nothing changes, for global processor
self.assertFalse(run_billing_processor_one_step(processor))
self.assertEqual(2, BillingProcessor.objects.count())
# test realm processor gets deleted
self.assertFalse(run_billing_processor_one_step(realm_processor))
self.assertEqual(1, BillingProcessor.objects.count())
self.assertEqual(1, BillingProcessor.objects.filter(realm=None).count())
# Test something to process
processor.state = BillingProcessor.STARTED
processor.save()
realm_processor = BillingProcessor.objects.create(
realm=second_realm, log_row=entry1, state=BillingProcessor.STARTED)
Customer.objects.create(realm=get_realm('zulip'), stripe_customer_id='cust_1')
Customer.objects.create(realm=second_realm, stripe_customer_id='cust_2')
with patch('corporate.lib.stripe.do_adjust_subscription_quantity'):
# test return values
self.assertTrue(run_billing_processor_one_step(processor))
self.assertTrue(run_billing_processor_one_step(realm_processor))
# test no processors get added or deleted
self.assertEqual(2, BillingProcessor.objects.count())
@patch("corporate.lib.stripe.billing_logger.error")
def test_run_billing_processor_with_card_error(self, mock_billing_logger_error: Mock) -> None:
second_realm = Realm.objects.create(string_id='second', name='second')
entry1 = self.add_log_entry(realm=second_realm)
# global processor
processor = BillingProcessor.objects.create(
log_row=entry1, state=BillingProcessor.STARTED)
Customer.objects.create(realm=second_realm, stripe_customer_id='cust_2')
# card error on global processor should create a new realm processor
with patch('corporate.lib.stripe.do_adjust_subscription_quantity',
side_effect=stripe.error.CardError('message', 'param', 'code', json_body={})):
self.assertTrue(run_billing_processor_one_step(processor))
self.assertEqual(2, BillingProcessor.objects.count())
self.assertTrue(BillingProcessor.objects.filter(
realm=None, log_row=entry1, state=BillingProcessor.SKIPPED).exists())
self.assertTrue(BillingProcessor.objects.filter(
realm=second_realm, log_row=entry1, state=BillingProcessor.STALLED).exists())
mock_billing_logger_error.assert_called()
# card error on realm processor should change state to STALLED
realm_processor = BillingProcessor.objects.filter(realm=second_realm).first()
realm_processor.state = BillingProcessor.STARTED
realm_processor.save()
with patch('corporate.lib.stripe.do_adjust_subscription_quantity',
side_effect=stripe.error.CardError('message', 'param', 'code', json_body={})):
self.assertTrue(run_billing_processor_one_step(realm_processor))
self.assertEqual(2, BillingProcessor.objects.count())
self.assertTrue(BillingProcessor.objects.filter(
realm=second_realm, log_row=entry1, state=BillingProcessor.STALLED).exists())
mock_billing_logger_error.assert_called()
@patch("corporate.lib.stripe.billing_logger.error")
def test_run_billing_processor_with_uncaught_error(self, mock_billing_logger_error: Mock) -> None:
# This tests three different things:
# * That run_billing_processor_one_step passes through exceptions that
# are not StripeCardError
# * That process_billing_log_entry catches StripeErrors and re-raises them as BillingErrors
        # * That the processor is left in state STARTED for non-StripeCardError exceptions
entry1 = self.add_log_entry()
entry2 = self.add_log_entry()
processor = BillingProcessor.objects.create(
log_row=entry1, state=BillingProcessor.DONE)
Customer.objects.create(realm=get_realm('zulip'), stripe_customer_id='cust_1')
with patch('corporate.lib.stripe.do_adjust_subscription_quantity',
side_effect=stripe.error.StripeError('message', json_body={})):
with self.assertRaises(BillingError):
run_billing_processor_one_step(processor)
mock_billing_logger_error.assert_called()
# check processor.state is STARTED
self.assertTrue(BillingProcessor.objects.filter(
log_row=entry2, state=BillingProcessor.STARTED).exists())
| [
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"Any",
"int",
"str",
"List[str]",
"str",
"str",
"Dict[str, Any]",
"str",
"str",
"int",
"CallableT",
"str",
"str",
"CallableT",
"Any",
"Any",
"str",
"str",
"Any",
"Any",
"CallableT",
"str",
"CallableT",
"Any",
"Any",
"Any",
"Mock",
"Mock",
"Mock",
"HttpResponse",
"HttpResponse",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"Mock",
"str",
"int",
"stripe.Subscription",
"str",
"str",
"str",
"Mock",
"Mock"
] | [
1629,
1644,
1787,
1802,
1960,
1975,
2145,
2160,
2434,
2449,
2737,
2848,
2861,
3231,
3250,
3299,
4216,
4243,
4260,
4682,
5112,
5139,
5198,
5302,
5317,
6371,
6422,
6493,
6508,
7235,
9003,
9133,
10023,
10038,
10459,
10961,
10974,
10987,
11601,
11836,
12134,
13654,
13667,
13680,
13693,
13706,
17570,
17583,
17596,
17647,
17660,
19242,
19255,
19268,
19281,
21703,
21716,
21729,
21742,
21755,
27789,
28633,
28646,
28659,
28712,
28725,
32631,
35644,
35712,
36775,
36804,
36853,
36885,
38269,
38295,
39013,
39076,
40372,
41287,
42428,
42460,
42500,
42768,
42783,
42851,
42889,
47098,
47125,
53470,
55461
] | [
1632,
1647,
1790,
1805,
1963,
1978,
2148,
2163,
2437,
2452,
2740,
2851,
2870,
3234,
3253,
3313,
4219,
4246,
4263,
4691,
5115,
5142,
5207,
5305,
5320,
6374,
6425,
6496,
6511,
7244,
9006,
9142,
10026,
10041,
10462,
10965,
10978,
10991,
11613,
11848,
12138,
13658,
13671,
13684,
13697,
13710,
17574,
17587,
17600,
17651,
17664,
19246,
19259,
19272,
19285,
21707,
21720,
21733,
21746,
21759,
27793,
28637,
28650,
28663,
28716,
28729,
32635,
35648,
35716,
36779,
36808,
36857,
36889,
38273,
38299,
39017,
39080,
40376,
41291,
42432,
42464,
42504,
42771,
42786,
42870,
42892,
47101,
47128,
53474,
55465
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/urls.py | from typing import Any
from django.views.generic import TemplateView
from django.conf.urls import include, url
import corporate.views
from zerver.lib.rest import rest_dispatch
i18n_urlpatterns = [
# Zephyr/MIT
url(r'^zephyr/$', TemplateView.as_view(template_name='corporate/zephyr.html')),
url(r'^zephyr-mirror/$', TemplateView.as_view(template_name='corporate/zephyr-mirror.html')),
# Billing
url(r'^billing/$', corporate.views.billing_home, name='corporate.views.billing_home'),
url(r'^upgrade/$', corporate.views.initial_upgrade, name='corporate.views.initial_upgrade'),
] # type: Any
v1_api_and_json_patterns = [
url(r'^billing/downgrade$', rest_dispatch,
{'POST': 'corporate.views.downgrade'}),
url(r'^billing/sources/change', rest_dispatch,
{'POST': 'corporate.views.replace_payment_source'}),
]
# Make a copy of i18n_urlpatterns so that they appear without prefix for English
urlpatterns = list(i18n_urlpatterns)
urlpatterns += [
url(r'^api/v1/', include(v1_api_and_json_patterns)),
url(r'^json/', include(v1_api_and_json_patterns)),
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | corporate/views.py | from typing import Any, Dict, Optional, Tuple
import logging
from django.core import signing
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.utils import timezone
from django.utils.translation import ugettext as _, ugettext as err_
from django.shortcuts import redirect, render
from django.urls import reverse
from django.conf import settings
from zerver.decorator import zulip_login_required, require_billing_access
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_string
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import UserProfile, Realm
from corporate.lib.stripe import STRIPE_PUBLISHABLE_KEY, \
stripe_get_customer, upcoming_invoice_total, get_seat_count, \
extract_current_subscription, process_initial_upgrade, sign_string, \
unsign_string, BillingError, process_downgrade, do_replace_payment_source, \
MIN_INVOICED_SEAT_COUNT
from corporate.models import Customer, Plan
billing_logger = logging.getLogger('corporate.stripe')
def unsign_and_check_upgrade_parameters(user: UserProfile, plan_nickname: str,
signed_seat_count: str, salt: str) -> Tuple[Plan, int]:
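    # The seat count is signed together with a salt when the /upgrade page is
    # rendered (see initial_upgrade below) and verified here on submission, so
    # the submitted value (presumably a hidden form field) cannot be edited to
    # claim fewer seats than the realm actually has; unknown plan nicknames are
    # rejected outright.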
if plan_nickname not in [Plan.CLOUD_ANNUAL, Plan.CLOUD_MONTHLY]:
billing_logger.warning("Tampered plan during realm upgrade. user: %s, realm: %s (%s)."
% (user.id, user.realm.id, user.realm.string_id))
raise BillingError('tampered plan', BillingError.CONTACT_SUPPORT)
plan = Plan.objects.get(nickname=plan_nickname)
try:
seat_count = int(unsign_string(signed_seat_count, salt))
except signing.BadSignature:
billing_logger.warning("Tampered seat count during realm upgrade. user: %s, realm: %s (%s)."
% (user.id, user.realm.id, user.realm.string_id))
raise BillingError('tampered seat count', BillingError.CONTACT_SUPPORT)
return plan, seat_count
@zulip_login_required
def initial_upgrade(request: HttpRequest) -> HttpResponse:
if not settings.BILLING_ENABLED:
return render(request, "404.html")
user = request.user
error_message = ""
error_description = "" # only used in tests
customer = Customer.objects.filter(realm=user.realm).first()
if customer is not None and customer.has_billing_relationship:
return HttpResponseRedirect(reverse('corporate.views.billing_home'))
if request.method == 'POST':
try:
plan, seat_count = unsign_and_check_upgrade_parameters(
user, request.POST['plan'], request.POST['signed_seat_count'], request.POST['salt'])
if 'invoiced_seat_count' in request.POST:
min_required_seat_count = max(seat_count, MIN_INVOICED_SEAT_COUNT)
if int(request.POST['invoiced_seat_count']) < min_required_seat_count:
raise BillingError(
'lowball seat count',
"You must invoice for at least %d users." % (min_required_seat_count,))
seat_count = int(request.POST['invoiced_seat_count'])
process_initial_upgrade(user, plan, seat_count, request.POST.get('stripeToken', None))
except BillingError as e:
error_message = e.message
error_description = e.description
except Exception as e:
billing_logger.exception("Uncaught exception in billing: %s" % (e,))
error_message = BillingError.CONTACT_SUPPORT
error_description = "uncaught exception during upgrade"
else:
return HttpResponseRedirect(reverse('corporate.views.billing_home'))
seat_count = get_seat_count(user.realm)
signed_seat_count, salt = sign_string(str(seat_count))
context = {
'publishable_key': STRIPE_PUBLISHABLE_KEY,
'email': user.email,
'seat_count': seat_count,
'signed_seat_count': signed_seat_count,
'salt': salt,
'plan': "Zulip Standard",
'nickname_monthly': Plan.CLOUD_MONTHLY,
'nickname_annual': Plan.CLOUD_ANNUAL,
'error_message': error_message,
'cloud_monthly_price': 8,
'cloud_annual_price': 80,
'cloud_annual_price_per_month': 6.67,
} # type: Dict[str, Any]
response = render(request, 'corporate/upgrade.html', context=context)
response['error_description'] = error_description
return response
PLAN_NAMES = {
Plan.CLOUD_ANNUAL: "Zulip Standard (billed annually)",
Plan.CLOUD_MONTHLY: "Zulip Standard (billed monthly)",
}
@zulip_login_required
def billing_home(request: HttpRequest) -> HttpResponse:
user = request.user
customer = Customer.objects.filter(realm=user.realm).first()
if customer is None:
return HttpResponseRedirect(reverse('corporate.views.initial_upgrade'))
if not customer.has_billing_relationship:
return HttpResponseRedirect(reverse('corporate.views.initial_upgrade'))
if not user.is_realm_admin and not user.is_billing_admin:
context = {'admin_access': False} # type: Dict[str, Any]
return render(request, 'corporate/billing.html', context=context)
context = {'admin_access': True}
stripe_customer = stripe_get_customer(customer.stripe_customer_id)
if stripe_customer.account_balance > 0: # nocoverage, waiting for mock_stripe to mature
context.update({'account_charges': '{:,.2f}'.format(stripe_customer.account_balance / 100.)})
if stripe_customer.account_balance < 0: # nocoverage
context.update({'account_credits': '{:,.2f}'.format(-stripe_customer.account_balance / 100.)})
subscription = extract_current_subscription(stripe_customer)
if subscription:
plan_name = PLAN_NAMES[Plan.objects.get(stripe_plan_id=subscription.plan.id).nickname]
seat_count = subscription.quantity
# Need user's timezone to do this properly
renewal_date = '{dt:%B} {dt.day}, {dt.year}'.format(
dt=timestamp_to_datetime(subscription.current_period_end))
renewal_amount = upcoming_invoice_total(customer.stripe_customer_id)
# Can only get here by subscribing and then downgrading. We don't support downgrading
# yet, but keeping this code here since we will soon.
else: # nocoverage
plan_name = "Zulip Free"
seat_count = 0
renewal_date = ''
renewal_amount = 0
payment_method = None
stripe_source = stripe_customer.default_source
if stripe_source is not None:
if stripe_source.object == 'card':
# To fix mypy error, set Customer.default_source: Union[Source, Card] in stubs and debug
payment_method = "Card ending in %(last4)s" % \
{'last4': stripe_source.last4} # type: ignore # see above
context.update({
'plan_name': plan_name,
'seat_count': seat_count,
'renewal_date': renewal_date,
'renewal_amount': '{:,.2f}'.format(renewal_amount / 100.),
'payment_method': payment_method,
'publishable_key': STRIPE_PUBLISHABLE_KEY,
'stripe_email': stripe_customer.email,
})
return render(request, 'corporate/billing.html', context=context)
@require_billing_access
def downgrade(request: HttpRequest, user: UserProfile) -> HttpResponse:
try:
process_downgrade(user)
except BillingError as e:
return json_error(e.message, data={'error_description': e.description})
return json_success()
@require_billing_access
@has_request_variables
def replace_payment_source(request: HttpRequest, user: UserProfile,
stripe_token: str=REQ("stripe_token", validator=check_string)) -> HttpResponse:
try:
do_replace_payment_source(user, stripe_token)
except BillingError as e:
return json_error(e.message, data={'error_description': e.description})
return json_success()
| [
"UserProfile",
"str",
"str",
"str",
"HttpRequest",
"HttpRequest",
"HttpRequest",
"UserProfile",
"HttpRequest",
"UserProfile"
] | [
1172,
1200,
1264,
1275,
2122,
4729,
7376,
7395,
7686,
7705
] | [
1183,
1203,
1267,
1278,
2133,
4740,
7387,
7406,
7697,
7716
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | docs/conf.py | # -*- coding: utf-8 -*-
#
# zulip-contributor-docs documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 17 16:24:04 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
from typing import Any, Dict, List, Optional
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [] # type: List[str]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Zulip'
copyright = '2015-2018, The Zulip Team'
author = 'The Zulip Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.9+git'
# The full version, including alpha/beta/rc tags.
release = '1.9.0+git'
# This allows us to insert a warning that appears only on an unreleased
# version, e.g. to say that something is likely to have changed.
if release.endswith('+git'):
tags.add('unreleased')
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None # type: Optional[str]
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Read The Docs can't import sphinx_rtd_theme, so don't import it there.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'sphinx_rtd_theme'
html_style = None
html_theme_options = {'collapse_navigation': False}
using_rtd_theme = True
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'collapse_navigation': False,
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'zulip-contributor-docsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
} # type: Dict[str, str]
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'zulip-contributor-docs.tex', 'Zulip Documentation',
'The Zulip Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'zulip-contributor-docs', 'Zulip Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'zulip-contributor-docs', 'Zulip Documentation',
author, 'zulip-contributor-docs', 'Documentation for contributing to Zulip.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify
source_parsers = {
'.md': CommonMarkParser,
}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
def setup(app: Any) -> None:
app.add_config_value('recommonmark_config', {
'enable_eval_rst': True,
# Turn off recommonmark features we aren't using.
'enable_auto_doc_ref': False,
'auto_toc_tree_section': None,
'enable_auto_toc_tree': False,
'enable_math': False,
'enable_inline_math': False,
'url_resolver': lambda x: x,
}, True)
# Enable `eval_rst`, and any other features enabled in recommonmark_config.
# Docs: http://recommonmark.readthedocs.io/en/latest/auto_structify.html
# (But NB those docs are for master, not latest release.)
app.add_transform(AutoStructify)
# overrides for wide tables in RTD theme
app.add_stylesheet('theme_overrides.css') # path relative to _static
| [
"Any"
] | [
10182
] | [
10185
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | manage.py | #!/usr/bin/env python3
import os
import sys
import types
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
import scripts.lib.setup_path_on_import
from scripts.lib.zulip_tools import assert_not_running_as_root
if __name__ == "__main__":
assert_not_running_as_root()
if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not
os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)):
# The best way to detect running manage.py as another user in
# production before importing anything that would require that
# access is to check for access to /etc/zulip/zulip.conf (in
# which case it's a production server, not a dev environment)
        # and lack of access to /etc/zulip/zulip-secrets.conf (which
# should be only readable by root and zulip)
print("Error accessing Zulip secrets; manage.py in production must be run as the zulip user.")
sys.exit(1)
# Performance Hack: We make the pika.adapters.twisted_connection
# module unavailable, to save ~100ms of import time for most Zulip
# management commands for code we don't use. The correct
# long-term fix for this will be to get a setting integrated
# upstream to disable pika importing this.
# See https://github.com/pika/pika/issues/1128
sys.modules['pika.adapters.twisted_connection'] = types.ModuleType(
'pika.adapters.twisted_connection')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
from django.conf import settings
from django.core.management import execute_from_command_line
from django.core.management.base import CommandError
from scripts.lib.zulip_tools import log_management_command
log_management_command(" ".join(sys.argv), settings.MANAGEMENT_LOG_PATH)
os.environ.setdefault("PYTHONSTARTUP", os.path.join(BASE_DIR, "scripts/lib/pythonrc.py"))
if "--no-traceback" not in sys.argv and len(sys.argv) > 1:
sys.argv.append("--traceback")
try:
execute_from_command_line(sys.argv)
except CommandError as e:
print(e, file=sys.stderr)
sys.exit(1)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | pgroonga/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | pgroonga/migrations/0001_enable.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.contrib.postgres import operations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('zerver', '0001_initial'),
]
database_setting = settings.DATABASES["default"]
if "postgres" in database_setting["ENGINE"]:
operations = [
migrations.RunSQL("""
ALTER ROLE %(USER)s SET search_path TO %(SCHEMA)s,public,pgroonga,pg_catalog;
SET search_path = %(SCHEMA)s,public,pgroonga,pg_catalog;
ALTER TABLE zerver_message ADD COLUMN search_pgroonga text;
-- TODO: We want to use CREATE INDEX CONCURRENTLY but it can't be used in
-- transaction. Django uses transaction implicitly.
-- Django 1.10 may solve the problem.
CREATE INDEX zerver_message_search_pgroonga ON zerver_message
USING pgroonga(search_pgroonga pgroonga.text_full_text_search_ops);
""" % database_setting,
"""
SET search_path = %(SCHEMA)s,public,pgroonga,pg_catalog;
DROP INDEX zerver_message_search_pgroonga;
ALTER TABLE zerver_message DROP COLUMN search_pgroonga;
SET search_path = %(SCHEMA)s,public;
ALTER ROLE %(USER)s SET search_path TO %(SCHEMA)s,public;
""" % database_setting),
]
else:
operations = []
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | pgroonga/migrations/0002_html_escape_subject.py | # -*- coding: utf-8 -*-
from django.db import models, migrations, connection
from django.contrib.postgres import operations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.migrate import do_batch_update
def rebuild_pgroonga_index(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
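    # Backfill the search_pgroonga column in batches of 10000 rows, indexing the
    # HTML-escaped subject together with the already-rendered message content.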
with connection.cursor() as cursor:
do_batch_update(cursor, 'zerver_message', ['search_pgroonga'],
["escape_html(subject) || ' ' || rendered_content"],
escape=False, batch_size=10000)
class Migration(migrations.Migration):
atomic = False
dependencies = [
('pgroonga', '0001_enable'),
]
operations = [
migrations.RunPython(rebuild_pgroonga_index,
reverse_code=migrations.RunPython.noop)
]
| [
"StateApps",
"DatabaseSchemaEditor"
] | [
333,
359
] | [
342,
379
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | pgroonga/migrations/0003_v2_api_upgrade.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.contrib.postgres import operations
from django.conf import settings
class Migration(migrations.Migration):
atomic = False
dependencies = [
('pgroonga', '0002_html_escape_subject'),
]
database_setting = settings.DATABASES["default"]
operations = [
migrations.RunSQL(["""
ALTER ROLE %(USER)s SET search_path TO %(SCHEMA)s,public;
SET search_path = %(SCHEMA)s,public;
DROP INDEX zerver_message_search_pgroonga;
""" % database_setting, """
CREATE INDEX CONCURRENTLY zerver_message_search_pgroonga ON zerver_message
USING pgroonga(search_pgroonga pgroonga_text_full_text_search_ops_v2);
"""],
["""
ALTER ROLE %(USER)s SET search_path TO %(SCHEMA)s,public,pgroonga,pg_catalog;
SET search_path = %(SCHEMA)s,public,pgroonga,pg_catalog;
DROP INDEX zerver_message_search_pgroonga;
""" % database_setting, """
CREATE INDEX CONCURRENTLY zerver_message_search_pgroonga ON zerver_message
USING pgroonga(search_pgroonga pgroonga.text_full_text_search_ops);
"""])
]
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | pgroonga/migrations/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/clean_emoji_cache.py | #!/usr/bin/env python3
import argparse
import os
import sys
if False:
from typing import Set
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import generate_sha1sum_emoji, \
get_caches_to_be_purged, get_environment, get_recent_deployments, \
parse_cache_script_args, purge_unused_caches
ENV = get_environment()
EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
if ENV == "travis":
EMOJI_CACHE_PATH = os.path.join(os.environ["HOME"], "zulip-emoji-cache")
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
setups_to_check = set([ZULIP_PATH, ])
caches_in_use = set()
if ENV == "prod":
setups_to_check |= get_recent_deployments(threshold_days)
if ENV == "dev":
CACHE_SYMLINK = os.path.join(ZULIP_PATH, "static", "generated", "emoji")
CURRENT_CACHE = os.path.dirname(os.path.realpath(CACHE_SYMLINK))
caches_in_use.add(CURRENT_CACHE)
for setup_dir in setups_to_check:
emoji_link_path = os.path.join(setup_dir, "static/generated/emoji")
if not os.path.islink(emoji_link_path):
# This happens for a deployment directory extracted from a
# tarball, which just has a copy of the emoji data, not a symlink.
continue
caches_in_use.add(os.readlink(emoji_link_path))
return caches_in_use
def main(args: argparse.Namespace) -> None:
caches_in_use = get_caches_in_use(args.threshold_days)
purge_unused_caches(
EMOJI_CACHE_PATH, caches_in_use, "emoji cache", args)
if __name__ == "__main__":
args = parse_cache_script_args("This script cleans unused zulip emoji caches.")
main(args)
| [
"argparse.Namespace"
] | [
1440
] | [
1458
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/clean_node_cache.py | #!/usr/bin/env python3
import argparse
import os
import subprocess
import sys
if False:
from typing import Set
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from scripts.lib.node_cache import generate_sha1sum_node_modules
from scripts.lib.zulip_tools import get_caches_to_be_purged, \
get_environment, get_recent_deployments, parse_cache_script_args, \
purge_unused_caches
ENV = get_environment()
NODE_MODULES_CACHE_PATH = "/srv/zulip-npm-cache"
if ENV == "travis":
NODE_MODULES_CACHE_PATH = os.path.join(os.environ["HOME"], "zulip-npm-cache")
try:
subprocess.check_output(["/home/travis/zulip-yarn/bin/yarn", '--version'])
except OSError:
print('yarn not found. Most probably we are running static-analysis and '
'hence yarn is not installed. Exiting without cleaning npm cache.')
sys.exit(0)
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
setups_to_check = set([ZULIP_PATH, ])
caches_in_use = set()
if ENV == "prod":
setups_to_check |= get_recent_deployments(threshold_days)
if ENV == "dev":
# In dev always include the currently active cache in order
# not to break current installation in case dependencies
# are updated with bumping the provision version.
CURRENT_CACHE = os.path.dirname(os.path.realpath(os.path.join(ZULIP_PATH, "node_modules")))
caches_in_use.add(CURRENT_CACHE)
for setup_dir in setups_to_check:
node_modules_link_path = os.path.join(setup_dir, "node_modules")
if not os.path.islink(node_modules_link_path):
            # If node_modules isn't a symlink, then no node_modules
            # cache is associated with this setup.
continue
caches_in_use.add(os.readlink(node_modules_link_path))
return caches_in_use
def main(args: argparse.Namespace) -> None:
caches_in_use = get_caches_in_use(args.threshold_days)
purge_unused_caches(
NODE_MODULES_CACHE_PATH, caches_in_use, "node modules cache", args)
if __name__ == "__main__":
args = parse_cache_script_args("This script cleans unused zulip npm caches.")
main(args)
| [
"argparse.Namespace"
] | [
1927
] | [
1945
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/clean_venv_cache.py | #!/usr/bin/env python3
import argparse
import os
import sys
if False:
# Typing module isn't always available when this is run on older
# Python 3.4 (Trusty).
from typing import Set
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from scripts.lib.hash_reqs import expand_reqs, hash_deps
from scripts.lib.zulip_tools import get_caches_to_be_purged, \
get_environment, get_recent_deployments, parse_cache_script_args, \
purge_unused_caches
ENV = get_environment()
VENV_CACHE_DIR = '/srv/zulip-venv-cache'
if ENV == "travis":
VENV_CACHE_DIR = os.path.join(os.environ["HOME"], "zulip-venv-cache")
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
setups_to_check = set([ZULIP_PATH, ])
caches_in_use = set()
def add_current_venv_cache(venv_name: str) -> None:
CACHE_SYMLINK = os.path.join(os.path.dirname(ZULIP_PATH), venv_name)
CURRENT_CACHE = os.path.dirname(os.path.realpath(CACHE_SYMLINK))
caches_in_use.add(CURRENT_CACHE)
if ENV == "prod":
setups_to_check |= get_recent_deployments(threshold_days)
if ENV == "dev":
add_current_venv_cache("zulip-py3-venv")
add_current_venv_cache("zulip-thumbor-venv")
for path in setups_to_check:
reqs_dir = os.path.join(path, "requirements")
# If the target directory doesn't contain a requirements
# directory, skip it to avoid throwing an exception trying to
# list its requirements subdirectory.
if not os.path.exists(reqs_dir):
continue
for filename in os.listdir(reqs_dir):
requirements_file = os.path.join(reqs_dir, filename)
deps = expand_reqs(requirements_file)
hash_val = hash_deps(deps)
caches_in_use.add(os.path.join(VENV_CACHE_DIR, hash_val))
return caches_in_use
def main(args: argparse.Namespace) -> None:
caches_in_use = get_caches_in_use(args.threshold_days)
purge_unused_caches(
VENV_CACHE_DIR, caches_in_use, "venv cache", args)
if __name__ == "__main__":
args = parse_cache_script_args("This script cleans unused zulip venv caches.")
main(args)
| [
"str",
"argparse.Namespace"
] | [
870,
1930
] | [
873,
1948
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/hash_reqs.py | #!/usr/bin/env python3
import os
import sys
import argparse
import hashlib
if False:
from typing import Iterable, List, MutableSet
def expand_reqs_helper(fpath, visited):
# type: (str, MutableSet[str]) -> List[str]
if fpath in visited:
return []
else:
visited.add(fpath)
curr_dir = os.path.dirname(fpath)
result = [] # type: List[str]
for line in open(fpath):
if line.startswith('#'):
continue
dep = line.split(" #", 1)[0].strip() # remove comments and strip whitespace
if dep:
if dep.startswith('-r'):
child = os.path.join(curr_dir, dep[3:])
result += expand_reqs_helper(child, visited)
else:
result.append(dep)
return result
def expand_reqs(fpath):
# type: (str) -> List[str]
"""
Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
Removes comments from the output and recursively visits files specified inside `fpath`.
`fpath` can be either an absolute path or a relative path.
"""
absfpath = os.path.abspath(fpath)
output = expand_reqs_helper(absfpath, set())
return sorted(set(output))
def hash_deps(deps):
# type: (Iterable[str]) -> str
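    # For example, hash_deps(["django==1.11", "six"]) is the SHA1 hex digest of
    # the text "django==1.11\nsix\n".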
deps_str = "\n".join(deps) + "\n"
return hashlib.sha1(deps_str.encode('utf-8')).hexdigest()
def main():
# type: () -> int
description = ("Finds the SHA1 hash of list of dependencies in a requirements file"
" after recursively visiting all files specified in it.")
parser = argparse.ArgumentParser(description=description)
parser.add_argument("fpath", metavar="FILE",
help="Path to requirements file")
parser.add_argument("--print", dest="print_reqs", action='store_true',
help="Print all dependencies")
args = parser.parse_args()
deps = expand_reqs(args.fpath)
hash = hash_deps(deps)
print(hash)
if args.print_reqs:
for dep in deps:
print(dep)
return 0
if __name__ == "__main__":
sys.exit(main())
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/node_cache.py |
import os
import hashlib
if False:
from typing import Optional, List, IO, Tuple, Any
from scripts.lib.zulip_tools import subprocess_text_output, run
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
ZULIP_SRV_PATH = "/srv"
if 'TRAVIS' in os.environ:
# In Travis CI, we don't have root access
ZULIP_SRV_PATH = "/home/travis"
NODE_MODULES_CACHE_PATH = os.path.join(ZULIP_SRV_PATH, 'zulip-npm-cache')
YARN_BIN = os.path.join(ZULIP_SRV_PATH, 'zulip-yarn/bin/yarn')
DEFAULT_PRODUCTION = False
def get_yarn_args(production):
# type: (bool) -> List[str]
if production:
yarn_args = ["--prod"]
else:
yarn_args = []
return yarn_args
def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION):
# type: (Optional[str], bool) -> str
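    # The cache key is a SHA1 over package.json, yarn.lock (when present), the
    # yarn and node versions, and the sorted yarn arguments, so changing any of
    # these inputs selects a different node_modules cache directory.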
if setup_dir is None:
setup_dir = os.path.realpath(os.getcwd())
PACKAGE_JSON_FILE_PATH = os.path.join(setup_dir, 'package.json')
YARN_LOCK_FILE_PATH = os.path.join(setup_dir, 'yarn.lock')
sha1sum = hashlib.sha1()
sha1sum.update(subprocess_text_output(['cat', PACKAGE_JSON_FILE_PATH]).encode('utf8'))
if os.path.exists(YARN_LOCK_FILE_PATH):
# For backwards compatibility, we can't assume yarn.lock exists
sha1sum.update(subprocess_text_output(['cat', YARN_LOCK_FILE_PATH]).encode('utf8'))
sha1sum.update(subprocess_text_output([YARN_BIN, '--version']).encode('utf8'))
sha1sum.update(subprocess_text_output(['node', '--version']).encode('utf8'))
yarn_args = get_yarn_args(production=production)
sha1sum.update(''.join(sorted(yarn_args)).encode('utf8'))
return sha1sum.hexdigest()
def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None, copy_modules=False,
prefer_offline=False):
# type: (bool, Optional[IO[Any]], Optional[IO[Any]], bool, bool) -> None
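    # Build (or reuse) a node_modules tree keyed by the sha1sum above, then
    # point ./node_modules at the cached copy with a symlink.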
yarn_args = get_yarn_args(production=production)
if prefer_offline:
yarn_args.append("--prefer-offline")
sha1sum = generate_sha1sum_node_modules(production=production)
target_path = os.path.join(NODE_MODULES_CACHE_PATH, sha1sum)
cached_node_modules = os.path.join(target_path, 'node_modules')
success_stamp = os.path.join(target_path, '.success-stamp')
# Check if a cached version already exists
if not os.path.exists(success_stamp):
do_yarn_install(target_path,
yarn_args,
success_stamp,
stdout=stdout,
stderr=stderr,
copy_modules=copy_modules)
print("Using cached node modules from %s" % (cached_node_modules,))
cmds = [
['rm', '-rf', 'node_modules'],
["ln", "-nsf", cached_node_modules, 'node_modules'],
]
for cmd in cmds:
run(cmd, stdout=stdout, stderr=stderr)
def do_yarn_install(target_path, yarn_args, success_stamp, stdout=None, stderr=None,
copy_modules=False):
# type: (str, List[str], str, Optional[IO[Any]], Optional[IO[Any]], bool) -> None
cmds = [
['mkdir', '-p', target_path],
['cp', 'package.json', "yarn.lock", target_path],
]
cached_node_modules = os.path.join(target_path, 'node_modules')
if copy_modules:
print("Cached version not found! Copying node modules.")
cmds.append(["cp", "-rT", "prod-static/serve/node_modules", cached_node_modules])
else:
print("Cached version not found! Installing node modules.")
# Copy the existing node_modules to speed up install
if os.path.exists("node_modules"):
cmds.append(["cp", "-R", "node_modules/", cached_node_modules])
cd_exec = os.path.join(ZULIP_PATH, "scripts/lib/cd_exec")
if os.environ.get('CUSTOM_CA_CERTIFICATES'):
cmds.append([YARN_BIN, "config", "set", "cafile", os.environ['CUSTOM_CA_CERTIFICATES']])
cmds.append([cd_exec, target_path, YARN_BIN, "install", "--non-interactive"] +
yarn_args)
cmds.append(['touch', success_stamp])
for cmd in cmds:
run(cmd, stdout=stdout, stderr=stderr)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/pythonrc.py | try:
from django.conf import settings
from zerver.models import *
from zerver.lib.actions import * # type: ignore # Otherwise have duplicate imports with previous line
from analytics.models import *
except Exception:
import traceback
print("\nException importing Zulip core modules on startup!")
traceback.print_exc()
else:
print("\nSuccessfully imported Zulip settings, models, and actions functions.")
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/queue_workers.py | #!/usr/bin/env python3
import argparse
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(BASE_DIR)
import scripts.lib.setup_path_on_import
os.environ['DJANGO_SETTINGS_MODULE'] = 'zproject.settings'
import django
django.setup()
from zerver.worker.queue_processors import get_active_worker_queues
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--queue-type', action='store', dest='queue_type', default=None,
help="Specify which types of queues to list")
args = parser.parse_args()
for worker in sorted(get_active_worker_queues(args.queue_type)):
print(worker)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/setup_path_on_import.py | """
Use libraries from a virtualenv (by modifying sys.path) in production.
Also add Zulip's root directory to sys.path
"""
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
activate_this = os.path.join(
BASE_DIR,
"zulip-py3-venv",
"bin",
"activate_this.py")
if os.path.exists(activate_this):
# this file will exist in production
exec(open(activate_this).read(), {}, dict(__file__=activate_this))
sys.path.append(BASE_DIR)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/setup_venv.py |
import os
import shutil
import subprocess
from scripts.lib.zulip_tools import run, ENDC, WARNING, parse_lsb_release
from scripts.lib.hash_reqs import expand_reqs
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
VENV_CACHE_PATH = "/srv/zulip-venv-cache"
if 'TRAVIS' in os.environ:
# In Travis CI, we don't have root access
VENV_CACHE_PATH = "/home/travis/zulip-venv-cache"
if False:
# Don't add a runtime dependency on typing
from typing import List, Optional, Tuple, Set
VENV_DEPENDENCIES = [
"build-essential",
"libffi-dev",
"libfreetype6-dev", # Needed for image types with Pillow
"zlib1g-dev", # Needed to handle compressed PNGs with Pillow
"libjpeg-dev", # Needed to handle JPEGs with Pillow
"libldap2-dev",
"libmemcached-dev",
"python3-dev", # Needed to install typed-ast dependency of mypy
"python-dev",
"python3-pip",
"python-pip",
"python-virtualenv", # Trusty lacks `python3-virtualenv`.
# Fortunately we don't need the library,
# only the command, and this suffices.
"python3-six",
"python-six",
"libxml2-dev", # Used for installing talon
"libxslt1-dev", # Used for installing talon
"libpq-dev", # Needed by psycopg2
"libssl-dev", # Needed to build pycurl and other libraries
]
codename = parse_lsb_release()["DISTRIB_CODENAME"]
if codename != "trusty":
# Workaround for the fact that trusty has a different package name here.
VENV_DEPENDENCIES.append("virtualenv")
THUMBOR_VENV_DEPENDENCIES = [
"libcurl4-openssl-dev",
"libjpeg-dev",
"zlib1g-dev",
"libfreetype6-dev",
"libpng-dev",
"gifsicle",
]
def install_venv_deps(requirements_file):
# type: (str) -> None
pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
run(["pip", "install", "-U", "--requirement", pip_requirements])
run(["pip", "install", "--no-deps", "--requirement", requirements_file])
def get_index_filename(venv_path):
# type: (str) -> str
return os.path.join(venv_path, 'package_index')
def get_package_names(requirements_file):
# type: (str) -> List[str]
packages = expand_reqs(requirements_file)
cleaned = []
operators = ['~=', '==', '!=', '<', '>']
for package in packages:
if package.startswith("git+https://") and '#egg=' in package:
split_package = package.split("#egg=")
if len(split_package) != 2:
raise Exception("Unexpected duplicate #egg in package %s" % (package,))
# Extract the package name from Git requirements entries
package = split_package[1]
for operator in operators:
if operator in package:
package = package.split(operator)[0]
package = package.strip()
if package:
cleaned.append(package.lower())
return sorted(cleaned)
def create_requirements_index_file(venv_path, requirements_file):
# type: (str, str) -> str
"""
Creates a file, called package_index, in the virtual environment
directory that contains all the PIP packages installed in the
virtual environment. This file is used to determine the packages
that can be copied to a new virtual environment.
"""
index_filename = get_index_filename(venv_path)
packages = get_package_names(requirements_file)
with open(index_filename, 'w') as writer:
writer.write('\n'.join(packages))
writer.write('\n')
return index_filename
def get_venv_packages(venv_path):
# type: (str) -> Set[str]
"""
Returns the packages installed in the virtual environment using the
package index file.
"""
with open(get_index_filename(venv_path)) as reader:
return set(p.strip() for p in reader.read().split('\n') if p.strip())
def try_to_copy_venv(venv_path, new_packages):
# type: (str, Set[str]) -> bool
"""
Tries to copy packages from an old virtual environment in the cache
to the new virtual environment. The algorithm works as follows:
1. Find a virtual environment, v, from the cache that has the
highest overlap with the new requirements such that:
a. The new requirements only add to the packages of v.
b. The new requirements only upgrade packages of v.
2. Copy the contents of v to the new virtual environment using
virtualenv-clone.
3. Delete all .pyc files in the new virtual environment.
"""
if not os.path.exists(VENV_CACHE_PATH):
return False
venv_name = os.path.basename(venv_path)
overlaps = [] # type: List[Tuple[int, str, Set[str]]]
old_packages = set() # type: Set[str]
for sha1sum in os.listdir(VENV_CACHE_PATH):
curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
if (curr_venv_path == venv_path or
not os.path.exists(get_index_filename(curr_venv_path))):
continue
old_packages = get_venv_packages(curr_venv_path)
        # We only consider using old virtualenvs that only
# contain packages that we want in our new virtualenv.
if not (old_packages - new_packages):
overlap = new_packages & old_packages
overlaps.append((len(overlap), curr_venv_path, overlap))
target_log = get_logfile_name(venv_path)
source_venv_path = None
if overlaps:
# Here, we select the old virtualenv with the largest overlap
overlaps = sorted(overlaps)
_, source_venv_path, copied_packages = overlaps[-1]
print('Copying packages from {}'.format(source_venv_path))
clone_ve = "{}/bin/virtualenv-clone".format(source_venv_path)
cmd = "sudo {exe} {source} {target}".format(exe=clone_ve,
source=source_venv_path,
target=venv_path).split()
try:
# TODO: We can probably remove this in a few months, now
# that we can expect that virtualenv-clone is present in
# all of our recent virtualenvs.
run(cmd)
except Exception:
# Virtualenv-clone is not installed. Install it and try running
# the command again.
try:
run("{}/bin/pip install --no-deps virtualenv-clone".format(
source_venv_path).split())
run(cmd)
except Exception:
# virtualenv-clone isn't working, so just make a new venv
return False
run(["sudo", "chown", "-R",
"{}:{}".format(os.getuid(), os.getgid()), venv_path])
source_log = get_logfile_name(source_venv_path)
copy_parent_log(source_log, target_log)
create_log_entry(target_log, source_venv_path, copied_packages,
new_packages - copied_packages)
return True
return False
def get_logfile_name(venv_path):
# type: (str) -> str
return "{}/setup-venv.log".format(venv_path)
def create_log_entry(target_log, parent, copied_packages, new_packages):
# type: (str, str, Set[str], Set[str]) -> None
venv_path = os.path.dirname(target_log)
with open(target_log, 'a') as writer:
writer.write("{}\n".format(venv_path))
if copied_packages:
writer.write(
"Copied from {}:\n".format(parent))
writer.write("\n".join('- {}'.format(p) for p in sorted(copied_packages)))
writer.write("\n")
writer.write("New packages:\n")
writer.write("\n".join('- {}'.format(p) for p in sorted(new_packages)))
writer.write("\n\n")
def copy_parent_log(source_log, target_log):
# type: (str, str) -> None
if os.path.exists(source_log):
shutil.copyfile(source_log, target_log)
def do_patch_activate_script(venv_path):
# type: (str) -> None
"""
Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
is set to venv_path during the script's execution whenever it is sourced.
"""
# venv_path should be what we want to have in VIRTUAL_ENV after patching
script_path = os.path.join(venv_path, "bin", "activate")
file_obj = open(script_path)
lines = file_obj.readlines()
for i, line in enumerate(lines):
if line.startswith('VIRTUAL_ENV='):
lines[i] = 'VIRTUAL_ENV="%s"\n' % (venv_path,)
file_obj.close()
file_obj = open(script_path, 'w')
file_obj.write("".join(lines))
file_obj.close()
def setup_virtualenv(target_venv_path, requirements_file, virtualenv_args=None, patch_activate_script=False):
# type: (Optional[str], str, Optional[List[str]], bool) -> str
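    # The cache key is the sha1 of the fully expanded requirements file, as
    # computed by scripts/lib/hash_reqs.py, so identical dependency sets share a
    # single cached virtualenv under VENV_CACHE_PATH.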
# Check if a cached version already exists
path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
output = subprocess.check_output([path, requirements_file], universal_newlines=True)
sha1sum = output.split()[0]
if target_venv_path is None:
cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, 'venv')
else:
cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path))
success_stamp = os.path.join(cached_venv_path, "success-stamp")
if not os.path.exists(success_stamp):
do_setup_virtualenv(cached_venv_path, requirements_file, virtualenv_args or [])
open(success_stamp, 'w').close()
print("Using cached Python venv from %s" % (cached_venv_path,))
if target_venv_path is not None:
run(["sudo", "ln", "-nsf", cached_venv_path, target_venv_path])
if patch_activate_script:
do_patch_activate_script(target_venv_path)
activate_this = os.path.join(cached_venv_path, "bin", "activate_this.py")
exec(open(activate_this).read(), {}, dict(__file__=activate_this))
return cached_venv_path
def add_cert_to_pipconf():
# type: () -> None
conffile = os.path.expanduser("~/.pip/pip.conf")
confdir = os.path.expanduser("~/.pip/")
os.makedirs(confdir, exist_ok=True)
run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])
def do_setup_virtualenv(venv_path, requirements_file, virtualenv_args):
# type: (str, str, List[str]) -> None
# Setup Python virtualenv
new_packages = set(get_package_names(requirements_file))
run(["sudo", "rm", "-rf", venv_path])
if not try_to_copy_venv(venv_path, new_packages):
# Create new virtualenv.
run(["sudo", "mkdir", "-p", venv_path])
run(["sudo", "virtualenv"] + virtualenv_args + [venv_path])
run(["sudo", "chown", "-R",
"{}:{}".format(os.getuid(), os.getgid()), venv_path])
create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)
create_requirements_index_file(venv_path, requirements_file)
# Switch current Python context to the virtualenv.
activate_this = os.path.join(venv_path, "bin", "activate_this.py")
exec(open(activate_this).read(), {}, dict(__file__=activate_this))
# use custom certificate if needed
if os.environ.get('CUSTOM_CA_CERTIFICATES'):
print("Configuring pip to use custom CA certificates...")
add_cert_to_pipconf()
try:
install_venv_deps(requirements_file)
except subprocess.CalledProcessError:
# Might be a failure due to network connection issues. Retrying...
print(WARNING + "`pip install` failed; retrying..." + ENDC)
install_venv_deps(requirements_file)
run(["sudo", "chmod", "-R", "a+rX", venv_path])
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/lib/zulip_tools.py | #!/usr/bin/env python3
import argparse
import datetime
import errno
import hashlib
import logging
import os
import pwd
import re
import shlex
import shutil
import subprocess
import sys
import time
import json
import uuid
if False:
from typing import Sequence, Set, Any, Dict, List
DEPLOYMENTS_DIR = "/home/zulip/deployments"
LOCK_DIR = os.path.join(DEPLOYMENTS_DIR, "lock")
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M-%S'
# Color codes
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BLACKONYELLOW = '\x1b[0;30;43m'
WHITEONRED = '\x1b[0;37;41m'
BOLDRED = '\x1B[1;31m'
GREEN = '\x1b[32m'
YELLOW = '\x1b[33m'
BLUE = '\x1b[34m'
MAGENTA = '\x1b[35m'
CYAN = '\x1b[36m'
def parse_cache_script_args(description):
# type: (str) -> argparse.Namespace
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--threshold", dest="threshold_days", type=int, default=14,
nargs="?", metavar="<days>", help="Any cache which is not in "
"use by a deployment not older than threshold days(current "
"installation in dev) and older than threshold days will be "
"deleted. (defaults to 14)")
parser.add_argument(
"--dry-run", dest="dry_run", action="store_true",
help="If specified then script will only print the caches "
"that it will delete/keep back. It will not delete any cache.")
parser.add_argument(
"--verbose", dest="verbose", action="store_true",
help="If specified then script will print a detailed report "
"of what is being will deleted/kept back.")
args = parser.parse_args()
args.verbose |= args.dry_run # Always print a detailed report in case of dry run.
return args
def get_deployment_version(extract_path):
# type: (str) -> str
version = '0.0.0'
for item in os.listdir(extract_path):
item_path = os.path.join(extract_path, item)
if item.startswith('zulip-server') and os.path.isdir(item_path):
with open(os.path.join(item_path, 'version.py')) as f:
result = re.search('ZULIP_VERSION = "(.*)"', f.read())
if result:
version = result.groups()[0]
break
return version
def is_invalid_upgrade(current_version, new_version):
# type: (str, str) -> bool
if new_version > '1.4.3' and current_version <= '1.3.10':
return True
return False
def subprocess_text_output(args):
# type: (Sequence[str]) -> str
return subprocess.check_output(args, universal_newlines=True).strip()
def su_to_zulip():
# type: () -> None
pwent = pwd.getpwnam("zulip")
os.setgid(pwent.pw_gid)
os.setuid(pwent.pw_uid)
os.environ['HOME'] = os.path.abspath(os.path.join(DEPLOYMENTS_DIR, '..'))
def make_deploy_path():
# type: () -> str
timestamp = datetime.datetime.now().strftime(TIMESTAMP_FORMAT)
return os.path.join(DEPLOYMENTS_DIR, timestamp)
if __name__ == '__main__':
cmd = sys.argv[1]
if cmd == 'make_deploy_path':
print(make_deploy_path())
def get_dev_uuid_var_path(create_if_missing=False):
# type: (bool) -> str
zulip_path = os.path.realpath(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.realpath(__file__)))))
uuid_path = os.path.join(os.path.realpath(os.path.dirname(zulip_path)), ".zulip-dev-uuid")
if os.path.exists(uuid_path):
with open(uuid_path) as f:
zulip_uuid = f.read().strip()
else:
if create_if_missing:
zulip_uuid = str(uuid.uuid4())
# We need sudo here, since the path will be under /srv/ in the
# development environment.
subprocess.check_call(["sudo", "/bin/bash", "-c",
"echo %s > %s" % (zulip_uuid, uuid_path)])
else:
raise AssertionError("Missing UUID file; please run tools/provision!")
result_path = os.path.join(zulip_path, "var", zulip_uuid)
os.makedirs(result_path, exist_ok=True)
return result_path
def get_deployment_lock(error_rerun_script):
# type: (str) -> None
start_time = time.time()
got_lock = False
while time.time() - start_time < 300:
try:
os.mkdir(LOCK_DIR)
got_lock = True
break
except OSError:
print(WARNING + "Another deployment in progress; waiting for lock... " +
"(If no deployment is running, rmdir %s)" % (LOCK_DIR,) + ENDC)
sys.stdout.flush()
time.sleep(3)
if not got_lock:
print(FAIL + "Deployment already in progress. Please run\n" +
" %s\n" % (error_rerun_script,) +
"manually when the previous deployment finishes, or run\n" +
" rmdir %s\n" % (LOCK_DIR,) +
"if the previous deployment crashed." +
ENDC)
sys.exit(1)
def release_deployment_lock():
# type: () -> None
shutil.rmtree(LOCK_DIR)
def run(args, **kwargs):
# type: (Sequence[str], **Any) -> None
# Output what we're doing in the `set -x` style
print("+ %s" % (" ".join(map(shlex.quote, args)),))
if kwargs.get('shell'):
# With shell=True we can only pass string to Popen
args = " ".join(args)
try:
subprocess.check_call(args, **kwargs)
except subprocess.CalledProcessError:
print()
print(WHITEONRED + "Error running a subcommand of %s: %s" %
(sys.argv[0], " ".join(map(shlex.quote, args))) +
ENDC)
print(WHITEONRED + "Actual error output for the subcommand is just above this." +
ENDC)
print()
raise
def log_management_command(cmd, log_path):
# type: (str, str) -> None
log_dir = os.path.dirname(log_path)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
formatter = logging.Formatter("%(asctime)s: %(message)s")
file_handler = logging.FileHandler(log_path)
file_handler.setFormatter(formatter)
logger = logging.getLogger("zulip.management")
logger.addHandler(file_handler)
logger.setLevel(logging.INFO)
logger.info("Ran '%s'" % (cmd,))
def get_environment():
# type: () -> str
if os.path.exists(DEPLOYMENTS_DIR):
return "prod"
if os.environ.get("TRAVIS"):
return "travis"
return "dev"
def get_recent_deployments(threshold_days):
# type: (int) -> Set[str]
    # Returns a set of deployments not older than threshold days,
# including `/root/zulip` directory if it exists.
recent = set()
threshold_date = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
for dir_name in os.listdir(DEPLOYMENTS_DIR):
target_dir = os.path.join(DEPLOYMENTS_DIR, dir_name)
if not os.path.isdir(target_dir):
# Skip things like uwsgi sockets, symlinks, etc.
continue
if not os.path.exists(os.path.join(target_dir, "zerver")):
# Skip things like "lock" that aren't actually a deployment directory
continue
try:
date = datetime.datetime.strptime(dir_name, TIMESTAMP_FORMAT)
if date >= threshold_date:
recent.add(target_dir)
except ValueError:
# Always include deployments whose name is not in the format of a timestamp.
recent.add(target_dir)
# If it is a symlink then include the target as well.
if os.path.islink(target_dir):
recent.add(os.path.realpath(target_dir))
if os.path.exists("/root/zulip"):
recent.add("/root/zulip")
return recent
def get_threshold_timestamp(threshold_days):
# type: (int) -> int
    # Given a number of days, this function returns the timestamp
    # corresponding to that many days before now.
threshold = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
threshold_timestamp = int(time.mktime(threshold.utctimetuple()))
return threshold_timestamp
def get_caches_to_be_purged(caches_dir, caches_in_use, threshold_days):
# type: (str, Set[str], int) -> Set[str]
    # Given a directory containing caches, a list of caches in use
    # and a threshold in days, this function returns a list of caches
    # which can be purged. Remove the cache only if it is:
    # 1: Not in use by the current installation (in dev as well as in prod).
    # 2: Not in use by a deployment not older than `threshold_days` (in prod).
    # 3: Not in use by '/root/zulip'.
    # 4: Older than `threshold_days`.
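    # Illustrative example: with the default threshold of 14 days, a cache
    # directory created 30 days ago and not referenced by any recent
    # deployment (or the current installation) would be purged.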
caches_to_purge = set()
threshold_timestamp = get_threshold_timestamp(threshold_days)
for cache_dir_base in os.listdir(caches_dir):
cache_dir = os.path.join(caches_dir, cache_dir_base)
if cache_dir in caches_in_use:
# Never purge a cache which is in use.
continue
if os.path.getctime(cache_dir) < threshold_timestamp:
caches_to_purge.add(cache_dir)
return caches_to_purge
def purge_unused_caches(caches_dir, caches_in_use, cache_type, args):
# type: (str, Set[str], str, argparse.Namespace) -> None
all_caches = set([os.path.join(caches_dir, cache) for cache in os.listdir(caches_dir)])
caches_to_purge = get_caches_to_be_purged(caches_dir, caches_in_use, args.threshold_days)
caches_to_keep = all_caches - caches_to_purge
may_be_perform_purging(
caches_to_purge, caches_to_keep, cache_type, args.dry_run, args.verbose)
if args.verbose:
print("Done!")
def generate_sha1sum_emoji(zulip_path):
# type: (str) -> str
ZULIP_EMOJI_DIR = os.path.join(zulip_path, 'tools', 'setup', 'emoji')
sha = hashlib.sha1()
filenames = ['emoji_map.json', 'build_emoji', 'emoji_setup_utils.py', 'emoji_names.py']
for filename in filenames:
file_path = os.path.join(ZULIP_EMOJI_DIR, filename)
with open(file_path, 'rb') as reader:
sha.update(reader.read())
# Take into account the version of `emoji-datasource-google` package
# while generating success stamp.
PACKAGE_FILE_PATH = os.path.join(zulip_path, 'package.json')
with open(PACKAGE_FILE_PATH, 'r') as fp:
parsed_package_file = json.load(fp)
dependency_data = parsed_package_file['dependencies']
if 'emoji-datasource-google' in dependency_data:
emoji_datasource_version = dependency_data['emoji-datasource-google'].encode('utf-8')
else:
emoji_datasource_version = b"0"
sha.update(emoji_datasource_version)
return sha.hexdigest()
def may_be_perform_purging(dirs_to_purge, dirs_to_keep, dir_type, dry_run, verbose):
# type: (Set[str], Set[str], str, bool, bool) -> None
if dry_run:
print("Performing a dry run...")
else:
print("Cleaning unused %ss..." % (dir_type,))
for directory in dirs_to_purge:
if verbose:
print("Cleaning unused %s: %s" % (dir_type, directory))
if not dry_run:
subprocess.check_call(["sudo", "rm", "-rf", directory])
for directory in dirs_to_keep:
if verbose:
print("Keeping used %s: %s" % (dir_type, directory))
def parse_lsb_release():
# type: () -> Dict[str, str]
distro_info = {}
try:
# For performance reasons, we read /etc/lsb-release directly,
# rather than using the lsb_release command; this saves ~50ms
# in several places in provisioning and the installer
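        # A typical /etc/lsb-release consists of "KEY=value" lines; for
        # example (values are illustrative, not guaranteed):
        #   DISTRIB_ID=Ubuntu
        #   DISTRIB_CODENAME=xenial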
with open('/etc/lsb-release', 'r') as fp:
data = [line.strip().split('=') for line in fp]
for k, v in data:
if k not in ['DISTRIB_CODENAME', 'DISTRIB_ID']:
# We only return to the caller the values that we get
# from lsb_release in the exception code path.
continue
distro_info[k] = v
except FileNotFoundError:
# Unfortunately, Debian stretch doesn't yet have an
# /etc/lsb-release, so we instead fetch the pieces of data
# that we use from the `lsb_release` command directly.
vendor = subprocess_text_output(["lsb_release", "-is"])
codename = subprocess_text_output(["lsb_release", "-cs"])
distro_info = dict(
DISTRIB_CODENAME=codename,
DISTRIB_ID=vendor
)
return distro_info
def file_or_package_hash_updated(paths, hash_name, is_force, package_versions=[]):
# type: (List[str], str, bool, List[str]) -> bool
# Check whether the files or package_versions passed as arguments
# changed compared to the last execution.
sha1sum = hashlib.sha1()
for path in paths:
with open(path, 'rb') as file_to_hash:
sha1sum.update(file_to_hash.read())
    # The output of tools like build_pygments_data depends
# on the version of some pip packages as well.
for package_version in package_versions:
sha1sum.update(package_version.encode("utf-8"))
hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
new_hash = sha1sum.hexdigest()
with open(hash_path, 'a+') as hash_file:
hash_file.seek(0)
last_hash = hash_file.read()
if is_force or (new_hash != last_hash):
hash_file.seek(0)
hash_file.truncate()
hash_file.write(new_hash)
return True
return False
def is_root() -> bool:
if 'posix' in os.name and os.geteuid() == 0:
return True
return False
def assert_not_running_as_root() -> None:
script_name = os.path.abspath(sys.argv[0])
if is_root():
msg = ("{shortname} should not be run as root. Use `su zulip` to switch to the 'zulip'\n"
"user before rerunning this, or use \n su zulip -c '{name} ...'\n"
"to switch users and run this as a single command.").format(
name=script_name,
shortname=os.path.basename(script_name))
print(msg)
sys.exit(1)
def assert_running_as_root(strip_lib_from_paths: bool=False) -> None:
script_name = os.path.abspath(sys.argv[0])
# Since these Python scripts are run inside a thin shell wrapper,
# we need to replace the paths in order to ensure we instruct
# users to (re)run the right command.
if strip_lib_from_paths:
script_name = script_name.replace("scripts/lib/upgrade", "scripts/upgrade")
if not is_root():
print("{} must be run as root.".format(script_name))
sys.exit(1)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/nagios/cron_file_helper.py | import time
# Avoid requiring the typing module to be installed
if False:
from typing import Tuple
def nagios_from_file(results_file):
# type: (str) -> Tuple[int, str]
"""Returns a nagios-appropriate string and return code obtained by
parsing the desired file on disk. The file on disk should be of format
    %s|%s|%s|%s % (timestamp, ret, state, data)
This file is created by various nagios checking cron jobs such as
check-rabbitmq-queues and check-rabbitmq-consumers"""
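    # Illustrative example (hypothetical values): a results file containing
    #   1545703582|0|OK|queues normal
    # parses as timestamp=1545703582, ret=0, state='OK', data='queues normal'
    # and, if the timestamp is recent, yields (0, "OK: queues normal").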
data = open(results_file).read().strip()
pieces = data.split('|')
if not len(pieces) == 4:
state = 'UNKNOWN'
ret = 3
data = "Results file malformed"
else:
timestamp = int(pieces[0])
time_diff = time.time() - timestamp
if time_diff > 60 * 2:
ret = 3
state = 'UNKNOWN'
data = "Results file is stale"
else:
ret = int(pieces[1])
state = pieces[2]
data = pieces[3]
return (ret, "%s: %s" % (state, data))
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | scripts/setup/generate_secrets.py | #!/usr/bin/env python3
# This tool generates /etc/zulip/zulip-secrets.conf
import sys
import os
if False:
from typing import Dict, List, Optional
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(BASE_DIR)
import scripts.lib.setup_path_on_import
os.environ['DJANGO_SETTINGS_MODULE'] = 'zproject.settings'
from django.utils.crypto import get_random_string
import argparse
import uuid
import configparser
from zerver.lib.str_utils import force_str
from zerver.lib.utils import generate_random_token
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..'))
CAMO_CONFIG_FILENAME = '/etc/default/camo'
# Standard, 64-bit tokens
AUTOGENERATED_SETTINGS = [
'avatar_salt',
'initial_password_salt',
'local_database_password',
'rabbitmq_password',
'shared_secret',
'thumbor_key',
]
# TODO: We can eliminate this function if we refactor the install
# script to run generate_secrets before zulip-puppet-apply.
def generate_camo_config_file(camo_key):
# type: (str) -> None
camo_config = """ENABLED=yes
PORT=9292
CAMO_KEY=%s
""" % (camo_key,)
with open(CAMO_CONFIG_FILENAME, 'w') as camo_file:
camo_file.write(camo_config)
print("Generated Camo config file %s" % (CAMO_CONFIG_FILENAME,))
def generate_django_secretkey():
# type: () -> str
"""Secret key generation taken from Django's startproject.py"""
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
return get_random_string(50, chars)
def get_old_conf(output_filename):
# type: (str) -> Dict[str, str]
if not os.path.exists(output_filename) or os.path.getsize(output_filename) == 0:
return {}
secrets_file = configparser.RawConfigParser()
secrets_file.read(output_filename)
return dict(secrets_file.items("secrets"))
def generate_secrets(development=False):
# type: (bool) -> None
if development:
OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
else:
OUTPUT_SETTINGS_FILENAME = "/etc/zulip/zulip-secrets.conf"
current_conf = get_old_conf(OUTPUT_SETTINGS_FILENAME)
lines = [] # type: List[str]
if len(current_conf) == 0:
lines = ['[secrets]\n']
def need_secret(name):
# type: (str) -> bool
return name not in current_conf
def add_secret(name, value):
# type: (str, str) -> None
lines.append("%s = %s\n" % (name, value))
current_conf[name] = value
for name in AUTOGENERATED_SETTINGS:
if need_secret(name):
add_secret(name, generate_random_token(64))
if need_secret('secret_key'):
add_secret('secret_key', generate_django_secretkey())
if need_secret('camo_key'):
add_secret('camo_key', get_random_string(64))
# zulip_org_key is generated using os.urandom().
# zulip_org_id does not require a secure CPRNG,
# it only needs to be unique.
if need_secret('zulip_org_key'):
add_secret('zulip_org_key', get_random_string(64))
if need_secret('zulip_org_id'):
add_secret('zulip_org_id', str(uuid.uuid4()))
if not development:
# Write the Camo config file directly
generate_camo_config_file(current_conf['camo_key'])
if len(lines) == 0:
print("generate_secrets: No new secrets to generate.")
return
out = open(OUTPUT_SETTINGS_FILENAME, 'a')
# Write a newline at the start, in case there was no newline at
# the end of the file due to human editing.
out.write("\n" + force_str("".join(lines)))
out.close()
print("Generated new secrets in %s." % (OUTPUT_SETTINGS_FILENAME,))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--development', action='store_true', dest='development',
help='For setting up the developer env for zulip')
group.add_argument('--production', action='store_false', dest='development',
help='For setting up the production env for zulip')
results = parser.parse_args()
generate_secrets(results.development)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/commands/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/commands/crawl_with_status.py | from scrapy.commands.crawl import Command
from scrapy.exceptions import UsageError
from typing import List, Any
class StatusCommand(Command):
def run(self, args: List[str], opts: Any) -> None:
if len(args) < 1:
raise UsageError()
elif len(args) > 1:
raise UsageError(
"running 'scrapy crawl' with more than one spider is no longer supported")
spname = args[0]
crawler = self.crawler_process.create_crawler(spname)
self.crawler_process.crawl(crawler)
self.crawler_process.start()
        # Check whether the spider recorded any errors
        if crawler.spider.has_error:
            # Return a non-zero exit code if any errors occurred
self.exitcode = 1
| [
"List[str]",
"Any"
] | [
168,
185
] | [
177,
188
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/settings.py | # -*- coding: utf-8 -*-
# Scrapy settings for documentation_crawler project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'documentation_crawler'
SPIDER_MODULES = ['documentation_crawler.spiders']
NEWSPIDER_MODULE = 'documentation_crawler.spiders'
COMMANDS_MODULE = 'documentation_crawler.commands'
LOG_LEVEL = 'ERROR'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = ('Mozilla/5.0 (X11; Linux x86_64) '
'AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/54.0.2840.59 Safari/537.36')
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'documentation_crawler.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'documentation_crawler.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'documentation_crawler.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/spiders/__init__.py | # This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py | import os
import pathlib
from typing import List
from .common.spiders import BaseDocumentationSpider
def get_start_url() -> List[str]:
# Get index html file as start url and convert it to file uri
dir_path = os.path.dirname(os.path.realpath(__file__))
start_file = os.path.join(dir_path, os.path.join(*[os.pardir] * 4),
"docs/_build/html/index.html")
return [
pathlib.Path(os.path.abspath(start_file)).as_uri()
]
class DocumentationSpider(BaseDocumentationSpider):
name = "documentation_crawler"
deny_domains = ['localhost:9991']
deny = [r'\_sources\/.*\.txt']
start_urls = get_start_url()
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/spiders/check_help_documentation.py | import os
from posixpath import basename
from urllib.parse import urlparse
from .common.spiders import BaseDocumentationSpider
from typing import Any, List, Set
def get_images_dir(images_path: str) -> str:
    # Resolve the given images path relative to the repository root.
dir_path = os.path.dirname(os.path.realpath(__file__))
target_path = os.path.join(dir_path, os.path.join(*[os.pardir] * 4), images_path)
return os.path.realpath(target_path)
class UnusedImagesLinterSpider(BaseDocumentationSpider):
images_path = ""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.static_images = set() # type: Set[str]
self.images_static_dir = get_images_dir(self.images_path) # type: str
def _is_external_url(self, url: str) -> bool:
is_external = url.startswith('http') and self.start_urls[0] not in url
if self._has_extension(url) and 'localhost:9981/{}'.format(self.images_path) in url:
self.static_images.add(basename(urlparse(url).path))
return is_external or self._has_extension(url)
def closed(self, *args: Any, **kwargs: Any) -> None:
unused_images = set(os.listdir(self.images_static_dir)) - self.static_images
if unused_images:
exception_message = "The following images are not used in documentation " \
"and can be removed: {}"
self._set_error_state()
unused_images_relatedpath = [
os.path.join(self.images_path, img) for img in unused_images]
raise Exception(exception_message.format(', '.join(unused_images_relatedpath)))
class HelpDocumentationSpider(UnusedImagesLinterSpider):
name = "help_documentation_crawler"
start_urls = ['http://localhost:9981/help']
deny_domains = [] # type: List[str]
deny = ['/privacy']
images_path = "static/images/help"
class APIDocumentationSpider(UnusedImagesLinterSpider):
name = 'api_documentation_crawler'
start_urls = ['http://localhost:9981/api']
deny_domains = [] # type: List[str]
images_path = "static/images/api"
class PorticoDocumentationSpider(BaseDocumentationSpider):
name = 'portico_documentation_crawler'
start_urls = ['http://localhost:9981/hello',
'http://localhost:9981/history',
'http://localhost:9981/plans',
'http://localhost:9981/team',
'http://localhost:9981/apps',
'http://localhost:9981/integrations',
'http://localhost:9981/terms',
'http://localhost:9981/privacy',
'http://localhost:9981/features',
'http://localhost:9981/why-zulip',
'http://localhost:9981/for/open-source',
'http://localhost:9981/for/companies',
'http://localhost:9981/for/working-groups-and-communities',
'http://localhost:9981/for/mystery-hunt',
'http://localhost:9981/security']
deny_domains = [] # type: List[str]
| [
"str",
"Any",
"Any",
"str",
"Any",
"Any"
] | [
198,
574,
589,
814,
1149,
1164
] | [
201,
577,
592,
817,
1152,
1167
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/spiders/common/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py | import logging
import re
import scrapy
from scrapy import Request
from scrapy.linkextractors import IGNORED_EXTENSIONS
from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor
from scrapy.utils.url import url_has_any_extension
from typing import Any, Generator, List, Optional, Tuple
EXCLUDED_URLS = [
# Google calendar returns 404s on HEAD requests unconditionally
'https://calendar.google.com/calendar/embed?src=ktiduof4eoh47lmgcl2qunnc0o@group.calendar.google.com',
# Returns 409 errors to HEAD requests frequently
'https://medium.freecodecamp.org/',
# Returns 404 to HEAD requests unconditionally
'https://www.git-tower.com/blog/command-line-cheat-sheet/',
]
class BaseDocumentationSpider(scrapy.Spider):
name = None # type: Optional[str]
# Exclude domain address.
deny_domains = [] # type: List[str]
start_urls = [] # type: List[str]
deny = [] # type: List[str]
file_extensions = ['.' + ext for ext in IGNORED_EXTENSIONS] # type: List[str]
tags = ('a', 'area', 'img')
attrs = ('href', 'src')
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.has_error = False
def _set_error_state(self) -> None:
self.has_error = True
def _has_extension(self, url: str) -> bool:
return url_has_any_extension(url, self.file_extensions)
def _is_external_url(self, url: str) -> bool:
return url.startswith('http') or self._has_extension(url)
def check_existing(self, response: Any) -> None:
self.log(response)
def check_permalink(self, response: Any) -> None:
self.log(response)
xpath_template = "//*[@id='{permalink}' or @name='{permalink}']"
m = re.match(r".+\#(?P<permalink>.*)$", response.request.url) # Get anchor value.
if not m:
return
permalink = m.group('permalink')
# Check permalink existing on response page.
if not response.selector.xpath(xpath_template.format(permalink=permalink)):
self._set_error_state()
raise Exception(
"Permalink #{} is not found on page {}".format(permalink, response.request.url))
def parse(self, response: Any) -> Generator[Request, None, None]:
self.log(response)
for link in LxmlLinkExtractor(deny_domains=self.deny_domains, deny_extensions=['doc'],
tags=self.tags, attrs=self.attrs, deny=self.deny,
canonicalize=False).extract_links(response):
callback = self.parse # type: Any
dont_filter = False
method = 'GET'
if self._is_external_url(link.url):
callback = self.check_existing
method = 'HEAD'
elif '#' in link.url:
dont_filter = True
callback = self.check_permalink
yield Request(link.url, method=method, callback=callback, dont_filter=dont_filter,
errback=self.error_callback)
def retry_request_with_get(self, request: Request) -> Generator[Request, None, None]:
request.method = 'GET'
request.dont_filter = True
yield request
def exclude_error(self, url: str) -> bool:
if url in EXCLUDED_URLS:
return True
return False
def error_callback(self, failure: Any) -> Optional[Generator[Any, None, None]]:
if hasattr(failure.value, 'response') and failure.value.response:
response = failure.value.response
if self.exclude_error(response.url):
return None
if response.status == 404:
self._set_error_state()
raise Exception('Page not found: {}'.format(response))
if response.status == 405 and response.request.method == 'HEAD':
# Method 'HEAD' not allowed, repeat request with 'GET'
return self.retry_request_with_get(response.request)
self.log("Error! Please check link: {}".format(response), logging.ERROR)
elif isinstance(failure.type, IOError):
self._set_error_state()
else:
raise Exception(failure.value)
return None
| [
"Any",
"Any",
"str",
"str",
"Any",
"Any",
"Any",
"Request",
"str",
"Any"
] | [
1098,
1113,
1306,
1421,
1541,
1623,
2236,
3116,
3282,
3413
] | [
1101,
1116,
1309,
1424,
1544,
1626,
2239,
3123,
3285,
3416
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/droplets/add_mentor.py | # Allows a mentor to ssh into a Digital Ocean droplet. This is designed to be
# executed on the target machine.
#
# This script takes the username of the mentor as an argument:
#
# $ python3 add_mentor.py <mentor's username>
#
# Alternatively you can pass in --remove to remove their ssh key from the
# machine:
#
# $ python3 add_mentor.py --remove <mentor's username>
import os
import sys
from argparse import ArgumentParser
from typing import List
import socket
import re
import requests
parser = ArgumentParser(description='Give a mentor ssh access to this machine.')
parser.add_argument('username', help='Github username of the mentor.')
parser.add_argument('--remove', help='Remove their key from the machine.',
action='store_true', default=False)
# Wrap keys with line comments for easier key removal.
append_key = """\
#<{username}>{{{{
{key}
#}}}}<{username}>
"""
def get_mentor_keys(username: str) -> List[str]:
url = 'https://api.github.com/users/{}/keys'.format(username)
r = requests.get(url)
if r.status_code != 200:
print('Cannot connect to Github...')
sys.exit(1)
keys = r.json()
if not keys:
print('Mentor "{}" has no public key.'.format(username))
sys.exit(1)
return [key['key'] for key in keys]
if __name__ == '__main__':
args = parser.parse_args()
authorized_keys = os.path.expanduser('~/.ssh/authorized_keys')
if args.remove:
remove_re = re.compile('#<{0}>{{{{.+}}}}<{0}>(\n)?'.format(args.username),
re.DOTALL | re.MULTILINE)
with open(authorized_keys, 'r+') as f:
old_content = f.read()
new_content = re.sub(remove_re, '', old_content)
f.seek(0)
f.write(new_content)
f.truncate()
        print('Successfully removed {}\'s SSH key!'.format(args.username))
else:
keys = get_mentor_keys(args.username)
with open(authorized_keys, 'a') as f:
for key in keys:
f.write(append_key.format(username=args.username, key=key))
print('Successfully added {}\'s SSH key!'.format(args.username))
    print('Please let your mentor know that they can connect to this machine with:\n')
print(' $ ssh zulipdev@{}\n'.format(socket.gethostname()))
| [
"str"
] | [
929
] | [
932
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/droplets/create.py | # Creates a Droplet on Digital Ocean for remote Zulip development.
# Particularly useful for sprints/hackathons, interns, and other
# situations where one wants to quickly onboard new contributors.
#
# This script takes one argument: the name of the GitHub user for whom you want
# to create a Zulip developer environment. Requires Python 3.
#
# Requires python-digitalocean library:
# https://github.com/koalalorenzo/python-digitalocean
#
# Also requires Digital Ocean team membership for Zulip and api token:
# https://cloud.digitalocean.com/settings/api/tokens
#
# Copy conf.ini-template to conf.ini and populate with your api token.
#
# usage: python3 create.py <username>
import sys
import configparser
import urllib.error
import urllib.request
import json
import digitalocean
import time
import argparse
import os
from typing import Any, Dict, List
# initialize the argument parser
parser = argparse.ArgumentParser(description='Create a Zulip development VM Digital Ocean droplet.')
parser.add_argument("username", help="Github username for whom you want to create a Zulip dev droplet")
parser.add_argument('--tags', nargs='+', default=[])
parser.add_argument('-f', '--recreate', dest='recreate', action="store_true", default=False)
def get_config():
# type: () -> configparser.ConfigParser
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf.ini'))
return config
def user_exists(username):
# type: (str) -> bool
print("Checking to see if GitHub user {0} exists...".format(username))
user_api_url = "https://api.github.com/users/{0}".format(username)
try:
response = urllib.request.urlopen(user_api_url)
json.loads(response.read().decode())
print("...user exists!")
return True
except urllib.error.HTTPError as err:
print(err)
print("Does the github user {0} exist?".format(username))
sys.exit(1)
def get_keys(username):
# type: (str) -> List[Dict[str, Any]]
print("Checking to see that GitHub user has available public keys...")
apiurl_keys = "https://api.github.com/users/{0}/keys".format(username)
try:
response = urllib.request.urlopen(apiurl_keys)
userkeys = json.loads(response.read().decode())
if not userkeys:
print("No keys found. Has user {0} added ssh keys to their github account?".format(username))
sys.exit(1)
print("...public keys found!")
return userkeys
except urllib.error.HTTPError as err:
print(err)
print("Has user {0} added ssh keys to their github account?".format(username))
sys.exit(1)
def fork_exists(username):
# type: (str) -> bool
print("Checking to see GitHub user has forked zulip/zulip...")
apiurl_fork = "https://api.github.com/repos/{0}/zulip".format(username)
try:
response = urllib.request.urlopen(apiurl_fork)
json.loads(response.read().decode())
print("...fork found!")
return True
except urllib.error.HTTPError as err:
print(err)
print("Has user {0} forked zulip/zulip?".format(username))
sys.exit(1)
def exit_if_droplet_exists(my_token: str, username: str, recreate: bool) -> None:
print("Checking to see if droplet for {0} already exists...".format(username))
manager = digitalocean.Manager(token=my_token)
my_droplets = manager.get_all_droplets()
for droplet in my_droplets:
if droplet.name == "{0}.zulipdev.org".format(username):
if not recreate:
print("Droplet for user {0} already exists. Pass --recreate if you "
"need to recreate the droplet.".format(username))
sys.exit(1)
else:
print("Deleting existing droplet for {0}.".format(username))
droplet.destroy()
return
print("...No droplet found...proceeding.")
def set_user_data(username, userkeys):
# type: (str, List[Dict[str, Any]]) -> str
print("Setting cloud-config data, populated with GitHub user's public keys...")
ssh_authorized_keys = ""
    # spaces are important here - these need to be properly indented under
# ssh_authorized_keys:
for key in userkeys:
ssh_authorized_keys += "\n - {0}".format(key['key'])
# print(ssh_authorized_keys)
setup_repo = """\
cd /home/zulipdev/{1} && git remote add origin https://github.com/{0}/{1}.git && git fetch origin"""
server_repo_setup = setup_repo.format(username, "zulip")
python_api_repo_setup = setup_repo.format(username, "python-zulip-api")
cloudconf = """
#cloud-config
users:
- name: zulipdev
ssh_authorized_keys:{0}
runcmd:
- su -c '{1}' zulipdev
- su -c 'git clean -f' zulipdev
- su -c '{2}' zulipdev
- su -c 'git clean -f' zulipdev
- su -c 'git config --global core.editor nano' zulipdev
- su -c 'git config --global pull.rebase true' zulipdev
power_state:
mode: reboot
condition: True
""".format(ssh_authorized_keys, server_repo_setup, python_api_repo_setup)
print("...returning cloud-config data.")
return cloudconf
def create_droplet(my_token, template_id, username, tags, user_data):
# type: (str, str, str, List[str], str) -> str
droplet = digitalocean.Droplet(
token=my_token,
name='{0}.zulipdev.org'.format(username),
region='nyc3',
image=template_id,
size_slug='2gb',
user_data=user_data,
tags=tags,
backups=False)
print("Initiating droplet creation...")
droplet.create()
incomplete = True
while incomplete:
actions = droplet.get_actions()
for action in actions:
action.load()
print("...[{0}]: {1}".format(action.type, action.status))
if action.type == 'create' and action.status == 'completed':
incomplete = False
break
if incomplete:
time.sleep(15)
print("...droplet created!")
droplet.load()
print("...ip address for new droplet is: {0}.".format(droplet.ip_address))
return droplet.ip_address
def delete_existing_records(records: List[digitalocean.Record], record_name: str) -> None:
count = 0
for record in records:
if record.name == record_name and record.domain == 'zulipdev.org' and record.type == 'A':
record.destroy()
count = count + 1
if count:
print("Deleted {0} existing A records for {1}.zulipdev.org.".format(count, record_name))
def create_dns_record(my_token, username, ip_address):
# type: (str, str, str) -> None
domain = digitalocean.Domain(token=my_token, name='zulipdev.org')
domain.load()
records = domain.get_records()
delete_existing_records(records, username)
wildcard_name = "*." + username
delete_existing_records(records, wildcard_name)
print("Creating new A record for {0}.zulipdev.org that points to {1}.".format(username, ip_address))
domain.create_new_domain_record(type='A', name=username, data=ip_address)
print("Creating new A record for *.{0}.zulipdev.org that points to {1}.".format(username, ip_address))
domain.create_new_domain_record(type='A', name=wildcard_name, data=ip_address)
def print_completion(username):
# type: (str) -> None
print("""
COMPLETE! Droplet for GitHub user {0} is available at {0}.zulipdev.org.
Instructions for use are below. (copy and paste to the user)
------
Your remote Zulip dev server has been created!
- Connect to your server by running
`ssh zulipdev@{0}.zulipdev.org` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
- There is no password; your account is configured to use your ssh keys.
- Once you log in, you should see `(zulip-venv) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev.py`.
- While the dev server is running, you can see the Zulip server in your browser at
http://{0}.zulipdev.org:9991.
""".format(username))
print("See [Developing remotely](https://zulip.readthedocs.io/en/latest/development/remote.html) "
"for tips on using the remote dev instance and "
"[Git & GitHub Guide](https://zulip.readthedocs.io/en/latest/git/index.html) "
"to learn how to use Git with Zulip.\n")
print("Note that this droplet will automatically be deleted after a month of inactivity. "
"If you are leaving Zulip for more than a few weeks, we recommend pushing all of your "
"active branches to GitHub.")
print("------")
if __name__ == '__main__':
# define id of image to create new droplets from
# You can get this with something like the following. You may need to try other pages.
# Broken in two to satisfy linter (line too long)
# curl -X GET -H "Content-Type: application/json" -u <API_KEY>: "https://api.digitaloc
# ean.com/v2/images?page=5" | grep --color=always base.zulipdev.org
template_id = "36947213"
# get command line arguments
args = parser.parse_args()
print("Creating Zulip developer environment for GitHub user {0}...".format(args.username))
# get config details
config = get_config()
# see if droplet already exists for this user
user_exists(username=args.username)
# grab user's public keys
public_keys = get_keys(username=args.username)
# now make sure the user has forked zulip/zulip
fork_exists(username=args.username)
api_token = config['digitalocean']['api_token']
# does the droplet already exist?
exit_if_droplet_exists(my_token=api_token, username=args.username, recreate=args.recreate)
# set user_data
user_data = set_user_data(username=args.username, userkeys=public_keys)
# create droplet
ip_address = create_droplet(my_token=api_token,
template_id=template_id,
username=args.username,
tags=args.tags,
user_data=user_data)
# create dns entry
create_dns_record(my_token=api_token, username=args.username, ip_address=ip_address)
# print completion message
print_completion(username=args.username)
sys.exit(1)
| [
"str",
"str",
"bool",
"List[digitalocean.Record]",
"str"
] | [
3220,
3235,
3250,
6262,
6302
] | [
3223,
3238,
3254,
6287,
6305
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/js-dep-visualizer.py | #!/usr/bin/env python3
"""
$ ./tools/js-dep-visualizer.py
$ dot -Tpng var/zulip-deps.dot -o var/zulip-deps.png
"""
import os
import re
import subprocess
import sys
from collections import defaultdict
from typing import Any, DefaultDict, Dict, List, Set, Tuple
Edge = Tuple[str, str]
EdgeSet = Set[Edge]
Method = str
MethodDict = DefaultDict[Edge, List[Method]]
TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = os.path.dirname(TOOLS_DIR)
sys.path.insert(0, ROOT_DIR)
from tools.lib.graph import (
Graph,
make_dot_file,
best_edge_to_remove,
)
JS_FILES_DIR = os.path.join(ROOT_DIR, 'static/js')
OUTPUT_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.dot'))
PNG_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.png'))
def get_js_edges():
# type: () -> Tuple[EdgeSet, MethodDict]
names = set()
modules = [] # type: List[Dict[str, Any]]
for js_file in os.listdir(JS_FILES_DIR):
if not js_file.endswith('.js'):
continue
name = js_file[:-3] # remove .js
path = os.path.join(JS_FILES_DIR, js_file)
names.add(name)
modules.append(dict(
name=name,
path=path,
regex=re.compile(r'[^_]{}\.\w+\('.format(name))
))
comment_regex = re.compile(r'\s+//')
call_regex = re.compile(r'[^_](\w+\.\w+)\(')
methods = defaultdict(list) # type: DefaultDict[Edge, List[Method]]
edges = set()
for module in modules:
parent = module['name']
with open(module['path']) as f:
for line in f:
if comment_regex.match(line):
continue
if 'subs.forEach' in line:
continue
m = call_regex.search(line)
if not m:
continue
for g in m.groups():
child, method = g.split('.')
if (child not in names):
continue
if child == parent:
continue
tup = (parent, child)
edges.add(tup)
methods[tup].append(method)
return edges, methods
def find_edges_to_remove(graph, methods):
# type: (Graph, MethodDict) -> Tuple[Graph, List[Edge]]
EXEMPT_EDGES = [
# These are sensible dependencies, so don't cut them.
('rows', 'message_store'),
('filter', 'stream_data'),
('server_events', 'user_events'),
('compose_fade', 'stream_data'),
('narrow', 'message_list'),
('stream_list', 'topic_list',),
('subs', 'stream_muting'),
('hashchange', 'settings'),
('tutorial', 'narrow'),
('activity', 'resize'),
('hashchange', 'drafts'),
('compose', 'echo'),
('compose', 'resize'),
('settings', 'resize'),
('compose', 'unread_ops'),
('compose', 'drafts'),
('echo', 'message_edit'),
('echo', 'stream_list'),
('hashchange', 'narrow'),
('hashchange', 'subs'),
('message_edit', 'echo'),
('popovers', 'message_edit'),
('unread_ui', 'activity'),
('message_fetch', 'message_util'),
('message_fetch', 'resize'),
('message_util', 'resize'),
('notifications', 'tutorial'),
('message_util', 'unread_ui'),
('muting_ui', 'stream_list'),
('muting_ui', 'unread_ui'),
('stream_popover', 'subs'),
('stream_popover', 'muting_ui'),
('narrow', 'message_fetch'),
('narrow', 'message_util'),
('narrow', 'navigate'),
('unread_ops', 'unread_ui'),
('narrow', 'unread_ops'),
('navigate', 'unread_ops'),
('pm_list', 'unread_ui'),
('stream_list', 'unread_ui'),
('popovers', 'compose'),
('popovers', 'muting_ui'),
('popovers', 'narrow'),
('popovers', 'resize'),
('pm_list', 'resize'),
('notifications', 'navigate'),
('compose', 'socket'),
('stream_muting', 'message_util'),
('subs', 'stream_list'),
('ui', 'message_fetch'),
('ui', 'unread_ops'),
('condense', 'message_viewport'),
('compose_actions', 'compose'),
('compose_actions', 'resize'),
('settings_streams', 'stream_data'),
('drafts', 'hashchange'),
('settings_notifications', 'stream_edit'),
('compose', 'stream_edit'),
('subs', 'stream_edit'),
('narrow_state', 'stream_data'),
('stream_edit', 'stream_list'),
('reactions', 'emoji_picker'),
('message_edit', 'resize'),
] # type: List[Edge]
def is_exempt(edge):
# type: (Tuple[str, str]) -> bool
parent, child = edge
if edge == ('server_events', 'reload'):
return False
if parent in ['server_events', 'user_events', 'stream_events',
'message_events', 'reload']:
return True
if child == 'rows':
return True
return edge in EXEMPT_EDGES
APPROVED_CUTS = [
('stream_edit', 'stream_events'),
('unread_ui', 'pointer'),
('typing_events', 'narrow'),
('echo', 'message_events'),
('resize', 'navigate'),
('narrow', 'search'),
('subs', 'stream_events'),
('stream_color', 'tab_bar'),
('stream_color', 'subs'),
('stream_data', 'narrow'),
('unread', 'narrow'),
('composebox_typeahead', 'compose'),
('message_list', 'message_edit'),
('message_edit', 'compose'),
('message_store', 'compose'),
('settings_notifications', 'subs'),
('settings', 'settings_muting'),
('message_fetch', 'tutorial'),
('settings', 'subs'),
('activity', 'narrow'),
('compose', 'compose_actions'),
('compose', 'subs'),
('compose_actions', 'drafts'),
('compose_actions', 'narrow'),
('compose_actions', 'unread_ops'),
('drafts', 'compose'),
('drafts', 'echo'),
('echo', 'compose'),
('echo', 'narrow'),
('echo', 'pm_list'),
('echo', 'ui'),
('message_fetch', 'activity'),
('message_fetch', 'narrow'),
('message_fetch', 'pm_list'),
('message_fetch', 'stream_list'),
('message_fetch', 'ui'),
('narrow', 'ui'),
('message_util', 'compose'),
('subs', 'compose'),
('narrow', 'hashchange'),
('subs', 'hashchange'),
('navigate', 'narrow'),
('navigate', 'stream_list'),
('pm_list', 'narrow'),
('pm_list', 'stream_popover'),
('muting_ui', 'stream_popover'),
('popovers', 'stream_popover'),
('topic_list', 'stream_popover'),
('stream_edit', 'subs'),
('topic_list', 'narrow'),
('stream_list', 'narrow'),
('stream_list', 'pm_list'),
('stream_list', 'unread_ops'),
('notifications', 'ui'),
('notifications', 'narrow'),
('notifications', 'unread_ops'),
('typing', 'narrow'),
('message_events', 'compose'),
('stream_muting', 'stream_list'),
('subs', 'narrow'),
('unread_ui', 'pm_list'),
('unread_ui', 'stream_list'),
('overlays', 'hashchange'),
('emoji_picker', 'reactions'),
]
def cut_is_legal(edge):
# type: (Edge) -> bool
parent, child = edge
if child in ['reload', 'popovers', 'overlays', 'notifications',
'server_events', 'compose_actions']:
return True
return edge in APPROVED_CUTS
graph.remove_exterior_nodes()
removed_edges = list()
print()
while graph.num_edges() > 0:
edge = best_edge_to_remove(graph, is_exempt)
if edge is None:
print('we may not be allowing edge cuts!!!')
break
if cut_is_legal(edge):
graph = graph.minus_edge(edge)
graph.remove_exterior_nodes()
removed_edges.append(edge)
else:
for removed_edge in removed_edges:
print(removed_edge)
print()
edge_str = str(edge) + ','
print(edge_str)
for method in methods[edge]:
print(' ' + method)
break
return graph, removed_edges
def report_roadmap(edges, methods):
# type: (List[Edge], MethodDict) -> None
child_modules = {child for parent, child in edges}
module_methods = defaultdict(set) # type: DefaultDict[str, Set[str]]
callers = defaultdict(set) # type: DefaultDict[Tuple[str, str], Set[str]]
for parent, child in edges:
for method in methods[(parent, child)]:
module_methods[child].add(method)
callers[(child, method)].add(parent)
for child in sorted(child_modules):
print(child + '.js')
for method in module_methods[child]:
print(' ' + child + '.' + method)
for caller in sorted(callers[(child, method)]):
print(' ' + caller + '.js')
print()
print()
def produce_partial_output(graph):
# type: (Graph) -> None
print(graph.num_edges())
buffer = make_dot_file(graph)
graph.report()
with open(OUTPUT_FILE_PATH, 'w') as f:
f.write(buffer)
subprocess.check_call(["dot", "-Tpng", OUTPUT_FILE_PATH, "-o", PNG_FILE_PATH])
print()
print('See dot file here: {}'.format(OUTPUT_FILE_PATH))
print('See output png file: {}'.format(PNG_FILE_PATH))
def run():
# type: () -> None
edges, methods = get_js_edges()
graph = Graph(edges)
graph, removed_edges = find_edges_to_remove(graph, methods)
if graph.num_edges() == 0:
report_roadmap(removed_edges, methods)
else:
produce_partial_output(graph)
if __name__ == '__main__':
run()
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/__init__.py | [] | [] | [] |
|
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/capitalization.py |
from typing import List, Tuple, Set, Pattern, Match
import re
from bs4 import BeautifulSoup
# The phrases in this list will be ignored. The longest phrase is
# tried first; this removes the chance of smaller phrases changing
# the text before longer phrases are tried.
# The errors shown by `tools/check-capitalization` can be added to
# this list without any modification.
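# For example, "Zulip Team" below is longer than "Zulip", so it is matched and
# protected as a whole before the shorter phrase gets a chance to alter it.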
IGNORED_PHRASES = [
# Proper nouns and acronyms
r"Android",
r"API",
r"APNS",
r"App Store",
r"Botserver",
r"Cookie Bot",
r"Dropbox",
r"GitHub",
r"G Suite",
r"Google",
r"Gravatar",
r"Hamlet",
r"HTTP",
r"ID",
r"IDs",
r"IP",
r"JIRA",
r"JSON",
r"Kerberos",
r"LDAP",
r"Mac",
r"macOS",
r"MiB",
r"OTP",
r"Pivotal",
r"Play Store",
r'REMOTE_USER',
r'Slack',
r"SSO",
r'Terms of Service',
r'Tuesday',
r"URL",
r"Ubuntu",
r"Updown",
r"V5",
r"Webathena",
r"Windows",
r"WordPress",
r"XML",
r"Zephyr",
r"Zulip",
r"Zulip Team",
r"iPhone",
r"iOS",
r"Emoji One",
r"mailinator.com",
# Code things
r".zuliprc",
r"__\w+\.\w+__",
# Things using "I"
r"I say",
r"I want",
r"I'm",
# Specific short words
r"and",
r"bot",
r"e.g.",
r"etc.",
r"images",
r"enabled",
r"disabled",
r"zulip_org_id",
r"admins",
r"members",
# Placeholders
r"keyword",
r"streamname",
r"user@example.com",
# Fragments of larger strings
(r'your subscriptions on your Streams page'),
(r'Change notification settings for individual streams on your '
'<a href="/#streams">Streams page</a>.'),
(r'Looking for our '
'<a href="/integrations" target="_blank">Integrations</a> or '
'<a href="/api" target="_blank">API</a> documentation?'),
r'Most stream administration is done on the <a href="/#streams">Streams page</a>.',
r"one or more people...",
r"confirmation email",
r"invites remaining",
r"was too large; the maximum file size is 25MiB.",
r"selected message",
r"a-z",
# SPECIAL CASES
# Enter is usually capitalized
r"Press Enter to send",
# Because topics usually are lower-case, this would look weird if it were capitalized
r"more topics",
# For consistency with "more topics"
r"more conversations",
# Capital 'i' looks weird in reminders popover
r"in 1 hour",
r"in 20 minutes",
r"in 3 hours",
# We should probably just delete this string from translations
r'activation key',
# this is used as a topic
r'^hello$',
# These are used as example short names (e.g. an uncapitalized context):
r"^marketing$",
r"^cookie$",
r"^new_emoji$",
# Used to refer custom time limits
r"\bN\b",
# TO CLEAN UP
# Just want to avoid churning login.html right now
r"or Choose a user",
# This is a parsing bug in the tool
r"argument ",
# I can't find this one
r"text",
r"GIF",
# Emoji name placeholder
r"leafy green vegetable",
]
# Sort regexes in descending order of their lengths. As a result, the
# longer phrases will be ignored first.
IGNORED_PHRASES.sort(key=lambda regex: len(regex), reverse=True)
# Compile regexes to improve performance. This also extracts the
# text using BeautifulSoup and then removes extra whitespaces from
# it. This step enables us to add HTML in our regexes directly.
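# Illustrative example: a phrase written with HTML, such as
# '... on your <a href="/#streams">Streams page</a>.', is compiled against its
# rendered text ('... on your Streams page.'), i.e. the same form that
# get_safe_text() produces for the strings being checked.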
COMPILED_IGNORED_PHRASES = [
re.compile(' '.join(BeautifulSoup(regex, 'lxml').text.split()))
for regex in IGNORED_PHRASES
]
SPLIT_BOUNDARY = '?.!' # Used to split string into sentences.
SPLIT_BOUNDARY_REGEX = re.compile(r'[{}]'.format(SPLIT_BOUNDARY))
# Regexes which check capitalization in sentences.
DISALLOWED_REGEXES = [re.compile(regex) for regex in [
r'^[a-z]', # Checks if the sentence starts with a lower case character.
r'^[A-Z][a-z]+[\sa-z0-9]+[A-Z]', # Checks if an upper case character exists
# after a lower case character when the first character is in upper case.
]]
BANNED_WORDS = {
'realm': ('The term realm should not appear in user-facing strings. '
'Use organization instead.'),
}
def get_safe_phrase(phrase):
# type: (str) -> str
"""
Safe phrase is in lower case and doesn't contain characters which can
conflict with split boundaries. All conflicting characters are replaced
with low dash (_).
"""
phrase = SPLIT_BOUNDARY_REGEX.sub('_', phrase)
return phrase.lower()
def replace_with_safe_phrase(matchobj):
# type: (Match[str]) -> str
"""
The idea is to convert IGNORED_PHRASES into safe phrases, see
`get_safe_phrase()` function. The only exception is when the
IGNORED_PHRASE is at the start of the text or after a split
boundary; in this case, we change the first letter of the phrase
to upper case.
"""
ignored_phrase = matchobj.group(0)
safe_string = get_safe_phrase(ignored_phrase)
start_index = matchobj.start()
complete_string = matchobj.string
is_string_start = start_index == 0
# We expect that there will be one space between split boundary
# and the next word.
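    # Illustrative example: in the text "Hi. Zulip rocks", the ignored phrase
    # "Zulip" matches at index 4, and complete_string[4 - 2] is the '.' that
    # ends the previous sentence.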
punctuation = complete_string[max(start_index - 2, 0)]
is_after_split_boundary = punctuation in SPLIT_BOUNDARY
if is_string_start or is_after_split_boundary:
return safe_string.capitalize()
return safe_string
def get_safe_text(text):
# type: (str) -> str
"""
This returns text which is rendered by BeautifulSoup and is in the
form that can be split easily and has all IGNORED_PHRASES processed.
"""
soup = BeautifulSoup(text, 'lxml')
text = ' '.join(soup.text.split()) # Remove extra whitespaces.
for phrase_regex in COMPILED_IGNORED_PHRASES:
text = phrase_regex.sub(replace_with_safe_phrase, text)
return text
def is_capitalized(safe_text):
# type: (str) -> bool
sentences = SPLIT_BOUNDARY_REGEX.split(safe_text)
sentences = [sentence.strip()
for sentence in sentences if sentence.strip()]
if not sentences:
return False
for sentence in sentences:
for regex in DISALLOWED_REGEXES:
if regex.search(sentence):
return False
return True
def check_banned_words(text: str) -> List[str]:
lower_cased_text = text.lower()
errors = []
for word, reason in BANNED_WORDS.items():
if word in lower_cased_text:
# Hack: Should move this into BANNED_WORDS framework; for
# now, just hand-code the skips:
if 'realm_name' in lower_cased_text:
continue
kwargs = dict(word=word, text=text, reason=reason)
msg = "{word} found in '{text}'. {reason}".format(**kwargs)
errors.append(msg)
return errors
def check_capitalization(strings):
# type: (List[str]) -> Tuple[List[str], List[str], List[str]]
errors = []
ignored = []
banned_word_errors = []
for text in strings:
text = ' '.join(text.split()) # Remove extra whitespaces.
safe_text = get_safe_text(text)
has_ignored_phrase = text != safe_text
capitalized = is_capitalized(safe_text)
if not capitalized:
errors.append(text)
elif capitalized and has_ignored_phrase:
ignored.append(text)
banned_word_errors.extend(check_banned_words(text))
return sorted(errors), sorted(ignored), sorted(banned_word_errors)
| [
"str"
] | [
6321
] | [
6324
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/css_parser.py | from typing import Callable, List, Tuple, Union, Optional
####### Helpers
class Token:
def __init__(self, s, line, col):
# type: (str, int, int) -> None
self.s = s
self.line = line
self.col = col
class CssParserException(Exception):
def __init__(self, msg, token):
# type: (str, Token) -> None
self.msg = msg
self.token = token
def __str__(self):
# type: () -> str
return self.msg
def find_end_brace(tokens, i, end):
# type: (List[Token], int, int) -> int
depth = 0
while i < end:
s = tokens[i].s
if s == '{':
depth += 1
elif s == '}':
if depth == 0:
raise CssParserException('unexpected }', tokens[i])
elif depth == 1:
break
depth -= 1
i += 1
else:
raise CssParserException('missing }', tokens[i-1])
return i
def get_whitespace(tokens, i, end):
# type: (List[Token], int, int) -> Tuple[int, str]
text = ''
while (i < end) and ws(tokens[i].s[0]):
s = tokens[i].s
text += s
i += 1
return i, text
def get_whitespace_and_comments(tokens, i, end, line=None):
# type: (List[Token], int, int, Optional[int]) -> Tuple[int, str]
def is_fluff_token(token):
# type: (Token) -> bool
s = token.s
if ws(s[0]):
return True
elif s.startswith('/*'):
# For CSS comments, the caller may pass in a line
# number to indicate that they only want to get
# comments on the same line. (Subsequent comments
# will be attached to the next actual line of code.)
if line is None:
return True
if tokens[i].line == line:
return True
return False
text = ''
while (i < end) and is_fluff_token(tokens[i]):
s = tokens[i].s
text += s
i += 1
return i, text
def indent_count(s):
# type: (str) -> int
return len(s) - len(s.lstrip())
def dedent_block(s):
# type: (str) -> (str)
s = s.lstrip()
lines = s.split('\n')
non_blank_lines = [line for line in lines if line]
if len(non_blank_lines) <= 1:
return s
min_indent = min(indent_count(line) for line in lines[1:])
lines = [lines[0]] + [line[min_indent:] for line in lines[1:]]
return '\n'.join(lines)
def indent_block(s):
# type: (str) -> (str)
lines = s.split('\n')
lines = [
' ' + line if line else ''
for line in lines
]
return '\n'.join(lines)
def ltrim(s):
# type: (str) -> (str)
content = s.lstrip()
padding = s[:-1 * len(content)]
s = padding.replace(' ', '')[1:] + content
return s
def rtrim(s):
# type: (str) -> (str)
content = s.rstrip()
padding = s[len(content):]
s = content + padding.replace(' ', '')[:-1]
return s
############### Begin parsing here
def parse_sections(tokens, start, end):
# type: (List[Token], int, int) -> 'CssSectionList'
i = start
sections = []
while i < end:
start, pre_fluff = get_whitespace_and_comments(tokens, i, end)
if start >= end:
raise CssParserException('unexpected empty section', tokens[end-1])
i = find_end_brace(tokens, start, end)
section_end = i + 1
i, post_fluff = get_whitespace(tokens, i+1, end)
section = parse_section(
tokens=tokens,
start=start,
end=section_end,
pre_fluff=pre_fluff,
post_fluff=post_fluff
)
sections.append(section)
section_list = CssSectionList(
tokens=tokens,
sections=sections,
)
return section_list
def parse_section(tokens, start, end, pre_fluff, post_fluff):
# type: (List[Token], int, int, str, str) -> Union['CssNestedSection', 'CssSection']
assert not ws(tokens[start].s)
assert tokens[end-1].s == '}' # caller should strip trailing fluff
first_token = tokens[start].s
if first_token in ('@media', '@keyframes') or first_token.startswith('@-'):
i, selector_list = parse_selectors_section(tokens, start, end) # not technically selectors
section_list = parse_sections(tokens, i+1, end-1)
nested_section = CssNestedSection(
tokens=tokens,
selector_list=selector_list,
section_list=section_list,
pre_fluff=pre_fluff,
post_fluff=post_fluff,
)
return nested_section
else:
i, selector_list = parse_selectors_section(tokens, start, end)
declaration_block = parse_declaration_block(tokens, i, end)
section = CssSection(
tokens=tokens,
selector_list=selector_list,
declaration_block=declaration_block,
pre_fluff=pre_fluff,
post_fluff=post_fluff,
)
return section
def parse_selectors_section(tokens, start, end):
# type: (List[Token], int, int) -> Tuple[int, 'CssSelectorList']
start, pre_fluff = get_whitespace_and_comments(tokens, start, end)
assert pre_fluff == ''
i = start
text = ''
while i < end and tokens[i].s != '{':
s = tokens[i].s
text += s
i += 1
selector_list = parse_selectors(tokens, start, i)
return i, selector_list
def parse_selectors(tokens, start, end):
# type: (List[Token], int, int) -> 'CssSelectorList'
i = start
selectors = []
while i < end:
s = tokens[i].s
if s == ',':
selector = parse_selector(tokens, start, i)
selectors.append(selector)
i += 1
start = i
if s.startswith('/*'):
raise CssParserException('Comments in selector section are not allowed', tokens[i])
i += 1
selector = parse_selector(tokens, start, i)
selectors.append(selector)
selector_list = CssSelectorList(
tokens=tokens,
selectors=selectors,
)
return selector_list
def parse_selector(tokens, start, end):
# type: (List[Token], int, int) -> CssSelector
i, pre_fluff = get_whitespace_and_comments(tokens, start, end)
levels = []
last_i = None
while i < end:
token = tokens[i]
i += 1
if not ws(token.s[0]):
last_i = i
levels.append(token)
if last_i is None:
raise CssParserException('Missing selector', tokens[-1])
assert last_i is not None
start, post_fluff = get_whitespace_and_comments(tokens, last_i, end)
selector = CssSelector(
tokens=tokens,
pre_fluff=pre_fluff,
post_fluff=post_fluff,
levels=levels,
)
return selector
def parse_declaration_block(tokens, start, end):
# type: (List[Token], int, int) -> 'CssDeclarationBlock'
assert tokens[start].s == '{' # caller should strip leading fluff
assert tokens[end-1].s == '}' # caller should strip trailing fluff
i = start + 1
declarations = []
while i < end-1:
start = i
i, _ = get_whitespace_and_comments(tokens, i, end)
while (i < end) and (tokens[i].s != ';'):
i += 1
if i < end:
i, _ = get_whitespace_and_comments(tokens, i+1, end, line=tokens[i].line)
declaration = parse_declaration(tokens, start, i)
declarations.append(declaration)
declaration_block = CssDeclarationBlock(
tokens=tokens,
declarations=declarations,
)
return declaration_block
def parse_declaration(tokens, start, end):
# type: (List[Token], int, int) -> 'CssDeclaration'
i, pre_fluff = get_whitespace_and_comments(tokens, start, end)
if (i >= end) or (tokens[i].s == '}'):
raise CssParserException('Empty declaration or missing semicolon', tokens[i-1])
css_property = tokens[i].s
if tokens[i+1].s != ':':
raise CssParserException('We expect a colon here', tokens[i])
i += 2
start = i
while (i < end) and (tokens[i].s != ';') and (tokens[i].s != '}'):
i += 1
css_value = parse_value(tokens, start, i)
semicolon = (i < end) and (tokens[i].s == ';')
if semicolon:
i += 1
_, post_fluff = get_whitespace_and_comments(tokens, i, end, line=tokens[i].line)
declaration = CssDeclaration(
tokens=tokens,
pre_fluff=pre_fluff,
post_fluff=post_fluff,
css_property=css_property,
css_value=css_value,
semicolon=semicolon,
)
return declaration
def parse_value(tokens, start, end):
# type: (List[Token], int, int) -> 'CssValue'
i, pre_fluff = get_whitespace_and_comments(tokens, start, end)
if i < end:
value = tokens[i]
else:
raise CssParserException('Missing value', tokens[i-1])
i, post_fluff = get_whitespace_and_comments(tokens, i+1, end)
return CssValue(
tokens=tokens,
value=value,
pre_fluff=pre_fluff,
post_fluff=post_fluff,
)
#### Begin CSS classes here
class CssSectionList:
def __init__(self, tokens, sections):
# type: (List[Token], List[Union['CssNestedSection', 'CssSection']]) -> None
self.tokens = tokens
self.sections = sections
def text(self):
# type: () -> str
res = '\n\n'.join(section.text().strip() for section in self.sections) + '\n'
return res
class CssNestedSection:
def __init__(self, tokens, selector_list, section_list, pre_fluff, post_fluff):
# type: (List[Token], 'CssSelectorList', CssSectionList, str, str) -> None
self.tokens = tokens
self.selector_list = selector_list
self.section_list = section_list
self.pre_fluff = pre_fluff
self.post_fluff = post_fluff
def text(self):
# type: () -> str
res = ''
res += ltrim(self.pre_fluff)
res += self.selector_list.text().strip()
res += ' {\n'
res += indent_block(self.section_list.text().strip())
res += '\n}'
res += rtrim(self.post_fluff)
return res
class CssSection:
def __init__(self, tokens, selector_list, declaration_block, pre_fluff, post_fluff):
# type: (List[Token], 'CssSelectorList', 'CssDeclarationBlock', str, str) -> None
self.tokens = tokens
self.selector_list = selector_list
self.declaration_block = declaration_block
self.pre_fluff = pre_fluff
self.post_fluff = post_fluff
def text(self):
# type: () -> str
res = ''
res += rtrim(dedent_block(self.pre_fluff))
if res:
res += '\n'
res += self.selector_list.text().strip()
res += ' '
res += self.declaration_block.text()
res += '\n'
res += rtrim(self.post_fluff)
return res
class CssSelectorList:
def __init__(self, tokens, selectors):
# type: (List[Token], List['CssSelector']) -> None
self.tokens = tokens
self.selectors = selectors
def text(self):
# type: () -> str
return ',\n'.join(sel.text() for sel in self.selectors)
class CssSelector:
def __init__(self, tokens, pre_fluff, post_fluff, levels):
# type: (List[Token],str, str, List[Token]) -> None
self.tokens = tokens
self.pre_fluff = pre_fluff
self.post_fluff = post_fluff
self.levels = levels
def text(self):
# type: () -> str
res = ' '.join(level.s for level in self.levels)
return res
class CssDeclarationBlock:
def __init__(self, tokens, declarations):
# type: (List[Token], List['CssDeclaration']) -> None
self.tokens = tokens
self.declarations = declarations
def text(self):
# type: () -> str
res = '{\n'
for declaration in self.declarations:
res += ' ' + declaration.text()
res += '}'
return res
class CssDeclaration:
def __init__(self, tokens, pre_fluff, post_fluff, css_property, css_value, semicolon):
# type: (List[Token], str, str, str, 'CssValue', bool) -> None
self.tokens = tokens
self.pre_fluff = pre_fluff
self.post_fluff = post_fluff
self.css_property = css_property
self.css_value = css_value
self.semicolon = semicolon
def text(self):
# type: () -> str
res = ''
res += ltrim(self.pre_fluff).rstrip()
if res:
res += '\n '
res += self.css_property
res += ':'
value_text = self.css_value.text().rstrip()
if value_text.startswith('\n'):
res += value_text
elif '\n' in value_text:
res += ' '
res += ltrim(value_text)
else:
res += ' '
res += value_text.strip()
res += ';'
res += rtrim(self.post_fluff)
res += '\n'
return res
class CssValue:
def __init__(self, tokens, value, pre_fluff, post_fluff):
# type: (List[Token], Token, str, str) -> None
self.value = value
self.pre_fluff = pre_fluff
self.post_fluff = post_fluff
assert pre_fluff.strip() == ''
def text(self):
# type: () -> str
return self.pre_fluff + self.value.s + self.post_fluff
def parse(text):
# type: (str) -> CssSectionList
tokens = tokenize(text)
section_list = parse_sections(tokens, 0, len(tokens))
return section_list
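# A minimal usage sketch (added here, not in the original module) of the
# entry point above: a tiny rule is tokenized, parsed into a CssSectionList,
# and re-emitted via .text().
#
#     parse('p {color: red;}').text()
#     # -> the same rule re-emitted on three lines: the 'p {' selector line,
#     #    an indented 'color: red;' declaration, and the closing '}'.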
#### Begin tokenizer section here
def ws(c):
# type: (str) -> bool
return c in ' \t\n'
def tokenize(text):
# type: (str) -> List[Token]
class State:
def __init__(self):
# type: () -> None
self.i = 0
self.line = 1
self.col = 1
tokens = []
state = State()
def add_token(s, state):
# type: (str, State) -> None
# deep copy data
token = Token(s=s, line=state.line, col=state.col)
tokens.append(token)
def legal(offset):
# type: (int) -> bool
return state.i + offset < len(text)
def advance(n):
# type: (int) -> None
for _ in range(n):
state.i += 1
if state.i >= 0 and text[state.i - 1] == '\n':
state.line += 1
state.col = 1
else:
state.col += 1
def looking_at(s):
# type: (str) -> bool
return text[state.i:state.i+len(s)] == s
def get_field(terminator):
# type: (Callable[[str], bool]) -> str
offset = 0
paren_level = 0
while legal(offset) and (paren_level or not terminator(text[state.i + offset])):
c = text[state.i + offset]
if c == '(':
paren_level += 1
elif c == ')':
paren_level -= 1
offset += 1
return text[state.i:state.i+offset]
in_property = False
in_value = False
in_media_line = False
starting_media_section = False
while state.i < len(text):
c = text[state.i]
if c in '{};:,':
if c == ':':
in_property = False
in_value = True
elif c == ';':
in_property = True
in_value = False
elif c in '{':
if starting_media_section:
starting_media_section = False
else:
in_property = True
elif c == '}':
in_property = False
s = c
elif ws(c):
terminator = lambda c: not ws(c)
s = get_field(terminator)
elif looking_at('/*'):
# hacky
old_i = state.i
while (state.i < len(text)) and not looking_at('*/'):
state.i += 1
if not looking_at('*/'):
raise CssParserException('unclosed comment', tokens[-1])
s = text[old_i:state.i+2]
state.i = old_i
elif looking_at('@media'):
s = '@media'
in_media_line = True
starting_media_section = True
elif in_media_line:
in_media_line = False
terminator = lambda c: c == '{'
s = get_field(terminator)
s = s.rstrip()
elif in_property:
terminator = lambda c: ws(c) or c in ':{'
s = get_field(terminator)
elif in_value:
in_value = False
in_property = True
terminator = lambda c: c in ';}'
s = get_field(terminator)
s = s.rstrip()
else:
terminator = lambda c: ws(c) or c == ','
s = get_field(terminator)
add_token(s, state)
advance(len(s))
return tokens
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/find_add_class.py |
from typing import List, Set, Tuple
import os
import re
GENERIC_KEYWORDS = [
'active',
'alert',
'danger',
'condensed',
'disabled',
'enabled',
'error',
'expanded',
'fade-out',
'first',
'hide',
'in',
'show',
'notdisplayed',
'popover',
'no-border',
'rtl',
'second',
'selected',
'slide-left',
'success',
'text-error',
'warning',
'zoom-in', # TODO: clean these up, they are confusing
'zoom-out',
]
def raise_error(fn, i, line):
# type: (str, int, str) -> None
error = '''
In %s line %d there is the following line of code:
%s
Our tools want to be able to identify which modules
add which HTML/CSS classes, and we need two things to
happen:
- The code must explicitly name the class.
- Only one module can refer to that class (unless
it is something generic like an alert class).
If you get this error, you can usually address it by
refactoring your code to be more explicit, or you can
move the common code that sets the class to a library
module. If neither of those applies, you need to
modify %s
''' % (fn, i, line, __file__)
raise Exception(error)
def generic(html_class):
# type: (str) -> bool
for kw in GENERIC_KEYWORDS:
if kw in html_class:
return True
return False
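# Quick illustration (added, not in the original source) of how generic()
# treats substrings: any class containing one of the GENERIC_KEYWORDS is
# considered too generic to attribute to a single module.
#
#     generic('alert-error')          # -> True ('alert' and 'error' match)
#     generic('compose-send-button')  # -> False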
def display(fns):
# type: (List[str]) -> None
for tup in find(fns):
# this format is for code generation purposes
print(' ' * 8 + repr(tup) + ',')
def find(fns):
# type: (List[str]) -> List[Tuple[str, str]]
encountered = set() # type: Set[str]
tups = [] # type: List[Tuple[str, str]]
for full_fn in fns:
# Don't check frontend tests, since they may do all sorts of
# extra hackery that isn't of interest to us.
if full_fn.startswith("frontend_tests"):
continue
lines = list(open(full_fn))
fn = os.path.basename(full_fn)
module_classes = set() # type: Set[str]
for i, line in enumerate(lines):
if 'addClass' in line:
html_classes = [] # type: List[str]
m = re.search(r'''addClass\(['"](.*?)['"]''', line)
if m:
html_classes = [m.group(1)]
if not html_classes:
if 'bar-success' in line:
html_classes = ['bar-success', 'bar-danger']
elif fn == 'hotspots.js' and 'arrow_placement' in line:
html_classes = ['arrow-top', 'arrow-left', 'arrow-bottom', 'arrow-right']
elif 'color_class' in line:
continue
elif 'stream_dark' in line:
continue
elif 'opts.' in line:
continue
elif fn == 'signup.js' and 'class_to_add' in line:
html_classes = ['error', 'success']
elif fn == 'ui_report.js' and 'status_classes' in line:
html_classes = ['alert']
if not html_classes:
raise_error(full_fn, i, line)
for html_class in html_classes:
if generic(html_class):
continue
if html_class in module_classes:
continue
if html_class in encountered:
raise_error(full_fn, i, line)
tups.append((fn, html_class))
module_classes.add(html_class)
encountered.add(html_class)
return tups
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/gitlint-rules.py | from typing import Text, List
import gitlint
from gitlint.rules import LineRule, RuleViolation, CommitMessageTitle
from gitlint.options import StrOption
import re
# Word list from https://github.com/m1foley/fit-commit
# Copyright (c) 2015 Mike Foley
# License: MIT
# Ref: fit_commit/validators/tense.rb
WORD_SET = {
'adds', 'adding', 'added',
'allows', 'allowing', 'allowed',
'amends', 'amending', 'amended',
'bumps', 'bumping', 'bumped',
'calculates', 'calculating', 'calculated',
'changes', 'changing', 'changed',
'cleans', 'cleaning', 'cleaned',
'commits', 'committing', 'committed',
'corrects', 'correcting', 'corrected',
'creates', 'creating', 'created',
'darkens', 'darkening', 'darkened',
'disables', 'disabling', 'disabled',
'displays', 'displaying', 'displayed',
'documents', 'documenting', 'documented',
'drys', 'drying', 'dryed',
'ends', 'ending', 'ended',
'enforces', 'enforcing', 'enforced',
'enqueues', 'enqueuing', 'enqueued',
'extracts', 'extracting', 'extracted',
'finishes', 'finishing', 'finished',
'fixes', 'fixing', 'fixed',
'formats', 'formatting', 'formatted',
'guards', 'guarding', 'guarded',
'handles', 'handling', 'handled',
'hides', 'hiding', 'hid',
'increases', 'increasing', 'increased',
'ignores', 'ignoring', 'ignored',
'implements', 'implementing', 'implemented',
'improves', 'improving', 'improved',
'keeps', 'keeping', 'kept',
'kills', 'killing', 'killed',
'makes', 'making', 'made',
'merges', 'merging', 'merged',
'moves', 'moving', 'moved',
'permits', 'permitting', 'permitted',
'prevents', 'preventing', 'prevented',
'pushes', 'pushing', 'pushed',
'rebases', 'rebasing', 'rebased',
'refactors', 'refactoring', 'refactored',
'removes', 'removing', 'removed',
'renames', 'renaming', 'renamed',
'reorders', 'reordering', 'reordered',
'replaces', 'replacing', 'replaced',
'requires', 'requiring', 'required',
'restores', 'restoring', 'restored',
'sends', 'sending', 'sent',
'sets', 'setting',
'separates', 'separating', 'separated',
'shows', 'showing', 'showed',
'simplifies', 'simplifying', 'simplified',
'skips', 'skipping', 'skipped',
'sorts', 'sorting',
'speeds', 'speeding', 'sped',
'starts', 'starting', 'started',
'supports', 'supporting', 'supported',
'takes', 'taking', 'took',
    'testing', 'tested', # 'tests' excluded to reduce false negatives
'truncates', 'truncating', 'truncated',
'updates', 'updating', 'updated',
'uses', 'using', 'used'
}
imperative_forms = sorted([
'add', 'allow', 'amend', 'bump', 'calculate', 'change', 'clean', 'commit',
'correct', 'create', 'darken', 'disable', 'display', 'document', 'dry',
'end', 'enforce', 'enqueue', 'extract', 'finish', 'fix', 'format', 'guard',
'handle', 'hide', 'ignore', 'implement', 'improve', 'increase', 'keep',
'kill', 'make', 'merge', 'move', 'permit', 'prevent', 'push', 'rebase',
'refactor', 'remove', 'rename', 'reorder', 'replace', 'require', 'restore',
'send', 'separate', 'set', 'show', 'simplify', 'skip', 'sort', 'speed',
'start', 'support', 'take', 'test', 'truncate', 'update', 'use',
])
def head_binary_search(key, words):
# type: (Text, List[str]) -> str
""" Find the imperative mood version of `word` by looking at the first
3 characters. """
# Edge case: 'disable' and 'display' have the same 3 starting letters.
if key in ['displays', 'displaying', 'displayed']:
return 'display'
lower = 0
upper = len(words) - 1
while True:
if lower > upper:
# Should not happen
raise Exception("Cannot find imperative mood of {}".format(key))
mid = (lower + upper) // 2
imperative_form = words[mid]
if key[:3] == imperative_form[:3]:
return imperative_form
elif key < imperative_form:
upper = mid - 1
elif key > imperative_form:
lower = mid + 1
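# A brief sketch (added for illustration) of the lookup above: the search
# matches on the first three characters, so a conjugated form from WORD_SET
# resolves to its imperative counterpart in `imperative_forms`.
#
#     head_binary_search('added', imperative_forms)      # -> 'add'
#     head_binary_search('displayed', imperative_forms)  # -> 'display' (special-cased)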
class ImperativeMood(LineRule):
""" This rule will enforce that the commit message title uses imperative
mood. This is done by checking if the first word is in `WORD_SET`, if so
show the word in the correct mood. """
name = "title-imperative-mood"
id = "Z1"
target = CommitMessageTitle
error_msg = ('The first word in commit title should be in imperative mood '
'("{word}" -> "{imperative}"): "{title}"')
def validate(self, line, commit):
# type: (Text, gitlint.commit) -> List[RuleViolation]
violations = []
# Ignore the section tag (ie `<section tag>: <message body>.`)
words = line.split(': ', 1)[-1].split()
first_word = words[0].lower()
if first_word in WORD_SET:
imperative = head_binary_search(first_word, imperative_forms)
violation = RuleViolation(self.id, self.error_msg.format(
word=first_word,
imperative=imperative,
title=commit.message.title
))
violations.append(violation)
return violations
class TitleMatchRegexAllowException(LineRule):
"""Allows revert commits contrary to the built-in title-match-regex rule"""
name = 'title-match-regex-allow-exception'
id = 'Z2'
target = CommitMessageTitle
options_spec = [StrOption('regex', ".*", "Regex the title should match")]
def validate(self, title, commit):
# type: (Text, gitlint.commit) -> List[RuleViolation]
regex = self.options['regex'].value
pattern = re.compile(regex, re.UNICODE)
if not pattern.search(title) and not title.startswith("Revert \""):
violation_msg = u"Title does not match regex ({0})".format(regex)
return [RuleViolation(self.id, violation_msg, title)]
return []
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/graph.py |
from collections import defaultdict
from typing import Callable, DefaultDict, Iterator, List, Optional, Set, Tuple
Edge = Tuple[str, str]
EdgeSet = Set[Edge]
class Graph:
def __init__(self, tuples):
# type: (EdgeSet) -> None
self.children = defaultdict(list) # type: DefaultDict[str, List[str]]
self.parents = defaultdict(list) # type: DefaultDict[str, List[str]]
self.nodes = set() # type: Set[str]
for parent, child in tuples:
self.parents[child].append(parent)
self.children[parent].append(child)
self.nodes.add(parent)
self.nodes.add(child)
def copy(self):
# type: () -> 'Graph'
return Graph(self.edges())
def num_edges(self):
# type: () -> int
return len(self.edges())
def minus_edge(self, edge):
# type: (Edge) -> 'Graph'
edges = self.edges().copy()
edges.remove(edge)
return Graph(edges)
def edges(self):
# type: () -> EdgeSet
s = set()
for parent in self.nodes:
for child in self.children[parent]:
s.add((parent, child))
return s
def remove_exterior_nodes(self):
# type: () -> None
still_work_to_do = True
while still_work_to_do:
still_work_to_do = False # for now
for node in self.nodes:
if self.is_exterior_node(node):
self.remove(node)
still_work_to_do = True
break
def is_exterior_node(self, node):
# type: (str) -> bool
parents = self.parents[node]
children = self.children[node]
if not parents:
return True
if not children:
return True
if len(parents) > 1 or len(children) > 1:
return False
# If our only parent and child are the same node, then we could
# effectively be collapsed into the parent, so don't add clutter.
return parents[0] == children[0]
def remove(self, node):
# type: (str) -> None
for parent in self.parents[node]:
self.children[parent].remove(node)
for child in self.children[node]:
self.parents[child].remove(node)
self.nodes.remove(node)
def report(self):
# type: () -> None
print('parents/children/module')
tups = sorted([
(len(self.parents[node]), len(self.children[node]), node)
for node in self.nodes])
for tup in tups:
print(tup)
def best_edge_to_remove(orig_graph, is_exempt):
# type: (Graph, Callable[[Edge], bool]) -> Optional[Edge]
# expects an already reduced graph as input
orig_edges = orig_graph.edges()
def get_choices():
# type: () -> Iterator[Tuple[int, Edge]]
for edge in orig_edges:
if is_exempt(edge):
continue
graph = orig_graph.minus_edge(edge)
graph.remove_exterior_nodes()
size = graph.num_edges()
yield (size, edge)
choices = list(get_choices())
if not choices:
return None
min_size, best_edge = min(choices)
if min_size >= orig_graph.num_edges():
raise Exception('no edges work here')
return best_edge
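# Illustrative sketch (not in the original file): on an already reduced
# three-node cycle, removing any single edge lets remove_exterior_nodes()
# collapse everything else, so all candidates tie and min() breaks the tie
# by comparing the edge tuples themselves.
#
#     cycle = Graph({('a', 'b'), ('b', 'c'), ('c', 'a')})
#     best_edge_to_remove(cycle, lambda edge: False)  # -> ('a', 'b')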
def make_dot_file(graph):
# type: (Graph) -> str
buffer = 'digraph G {\n'
for node in graph.nodes:
buffer += node + ';\n'
for child in graph.children[node]:
buffer += '{} -> {};\n'.format(node, child)
buffer += '}'
return buffer
def test():
# type: () -> None
graph = Graph(set([
('x', 'a'),
('a', 'b'),
('b', 'c'),
('c', 'a'),
('c', 'd'),
('d', 'e'),
('e', 'f'),
('e', 'g'),
]))
graph.remove_exterior_nodes()
s = make_dot_file(graph)
open('zulip-deps.dot', 'w').write(s)
if __name__ == '__main__':
test()
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/html_branches.py | from typing import Dict, List, Optional, Set
import re
from collections import defaultdict
from .template_parser import (
tokenize,
Token,
)
class HtmlBranchesException(Exception):
# TODO: Have callers pass in line numbers.
pass
class HtmlTreeBranch:
"""
For <p><div id='yo'>bla<span class='bar'></span></div></p>, store a
representation of the tags all the way down to the leaf, which would
conceptually be something like "p div(#yo) span(.bar)".
"""
def __init__(self, tags, fn):
# type: (List['TagInfo'], Optional[str]) -> None
self.tags = tags
self.fn = fn
self.line = tags[-1].token.line
self.words = set() # type: Set[str]
for tag in tags:
for word in tag.words:
self.words.add(word)
def staircase_text(self):
# type: () -> str
"""
produces representation of a node in staircase-like format:
html
body.main-section
p#intro
"""
res = '\n'
indent = ' ' * 4
for t in self.tags:
res += indent + t.text() + '\n'
indent += ' ' * 4
return res
def text(self):
# type: () -> str
"""
produces one-line representation of branch:
html body.main-section p#intro
"""
return ' '.join(t.text() for t in self.tags)
class Node:
def __init__(self, token, parent): # FIXME parent parameter is not used!
# type: (Token, Optional[Node]) -> None
self.token = token
self.children = [] # type: List[Node]
self.parent = None # type: Optional[Node]
class TagInfo:
def __init__(self, tag, classes, ids, token):
# type: (str, List[str], List[str], Token) -> None
self.tag = tag
self.classes = classes
self.ids = ids
self.token = token
self.words = \
[self.tag] + \
['.' + s for s in classes] + \
['#' + s for s in ids]
def text(self):
# type: () -> str
s = self.tag
if self.classes:
s += '.' + '.'.join(self.classes)
if self.ids:
s += '#' + '#'.join(self.ids)
return s
def get_tag_info(token):
# type: (Token) -> TagInfo
s = token.s
tag = token.tag
classes = [] # type: List[str]
ids = [] # type: List[str]
searches = [
(classes, ' class="(.*?)"'),
(classes, " class='(.*?)'"),
(ids, ' id="(.*?)"'),
(ids, " id='(.*?)'"),
]
for lst, regex in searches:
m = re.search(regex, s)
if m:
for g in m.groups():
lst += split_for_id_and_class(g)
return TagInfo(tag=tag, classes=classes, ids=ids, token=token)
def split_for_id_and_class(element):
# type: (str) -> List[str]
# Here we split a given string which is expected to contain id or class
# attributes from HTML tags. This also takes care of template variables
    # in the string during the splitting process, e.g. 'red black {{ a|b|c }}'
    # is split as ['red', 'black', '{{ a|b|c }}'].
outside_braces = True # type: bool
lst = []
s = ''
for ch in element:
if ch == '{':
outside_braces = False
if ch == '}':
outside_braces = True
if ch == ' ' and outside_braces:
if not s == '':
lst.append(s)
s = ''
else:
s += ch
if not s == '':
lst.append(s)
return lst
def html_branches(text, fn=None):
# type: (str, Optional[str]) -> List[HtmlTreeBranch]
tree = html_tag_tree(text)
branches = [] # type: List[HtmlTreeBranch]
def walk(node, tag_info_list=None):
# type: (Node, Optional[List[TagInfo]]) -> None
info = get_tag_info(node.token)
if tag_info_list is None:
tag_info_list = [info]
else:
tag_info_list = tag_info_list[:] + [info]
if node.children:
for child in node.children:
walk(node=child, tag_info_list=tag_info_list)
else:
tree_branch = HtmlTreeBranch(tags=tag_info_list, fn=fn)
branches.append(tree_branch)
for node in tree.children:
walk(node, None)
return branches
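# A tiny worked example (added here for clarity, not part of the original
# module): each leaf of the tag tree becomes one HtmlTreeBranch whose text()
# lists the tags from the root down to that leaf.
#
#     [b.text() for b in html_branches('<div class="foo"><p>Hi</p></div>')]
#     # -> ['div.foo p']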
def html_tag_tree(text):
# type: (str) -> Node
tokens = tokenize(text)
top_level = Node(token=None, parent=None)
stack = [top_level]
for token in tokens:
# Add tokens to the Node tree first (conditionally).
if token.kind in ('html_start', 'html_singleton'):
parent = stack[-1]
node = Node(token=token, parent=parent)
parent.children.append(node)
# Then update the stack to have the next node that
# we will be appending to at the top.
if token.kind == 'html_start':
stack.append(node)
elif token.kind == 'html_end':
stack.pop()
return top_level
def build_id_dict(templates):
# type: (List[str]) -> (Dict[str, List[str]])
template_id_dict = defaultdict(list) # type: (Dict[str, List[str]])
for fn in templates:
text = open(fn).read()
list_tags = tokenize(text)
for tag in list_tags:
info = get_tag_info(tag)
for ids in info.ids:
template_id_dict[ids].append("Line " + str(info.token.line) + ":" + fn)
return template_id_dict
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/html_grep.py | from collections import defaultdict
from typing import Dict, List, Set
from .html_branches import html_branches, HtmlTreeBranch
def show_all_branches(fns):
# type: (List[str]) -> None
for fn in fns:
print(fn)
text = open(fn).read()
branches = html_branches(text, fn=fn)
for branch in branches:
print(branch.text())
print('---')
class Grepper:
'''
A Grepper object is optimized to do repeated
searches of words that can be found in our
HtmlTreeBranch objects.
'''
def __init__(self, fns):
# type: (List[str]) -> None
all_branches = [] # type: List[HtmlTreeBranch]
for fn in fns:
text = open(fn).read()
branches = html_branches(text, fn=fn)
all_branches += branches
self.word_dict = defaultdict(set) # type: Dict[str, Set[HtmlTreeBranch]]
for b in all_branches:
for word in b.words:
self.word_dict[word].add(b)
self.all_branches = set(all_branches)
def grep(self, word_set):
# type: (Set[str]) -> None
words = list(word_set) # type: List[str]
if len(words) == 0:
matches = self.all_branches
else:
matches = self.word_dict[words[0]]
for i in range(1, len(words)):
matches = matches & self.word_dict[words[i]]
branches = list(matches)
branches.sort(key=lambda branch: (branch.fn, branch.line))
for branch in branches:
print('%s %d' % (branch.fn, branch.line))
print(branch.staircase_text())
print('')
def grep(fns, words):
# type: (List[str], Set[str]) -> None
grepper = Grepper(fns)
grepper.grep(words)
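# Hedged usage sketch (the template path below is hypothetical): grep()
# builds a Grepper over the given templates and prints every HtmlTreeBranch
# whose accumulated words contain all of the requested terms.
#
#     grep(['templates/zerver/portico.html'], {'header'})
#     # prints "<file> <line>" followed by the staircase text of each match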
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/pretty_print.py |
from typing import Any, Dict, List
from .template_parser import (
tokenize,
Token,
is_django_block_tag,
)
import subprocess
def pretty_print_html(html, num_spaces=4):
# type: (str, int) -> str
# We use 1-based indexing for both rows and columns.
tokens = tokenize(html)
lines = html.split('\n')
# We will keep a stack of "start" tags so that we know
# when HTML ranges end. Note that some start tags won't
# be blocks from an indentation standpoint.
stack = [] # type: List[Dict[str, Any]]
# Seed our stack with a pseudo entry to make depth calculations
# easier.
info = dict(
block=False,
depth=-1,
line=-1,
token_kind='html_start',
tag='html',
extra_indent=0,
ignore_lines=[]) # type: Dict[str, Any]
stack.append(info)
# Our main job is to figure out offsets that we use to nudge lines
# over by.
offsets = {} # type: Dict[int, int]
# Loop through our start/end tokens, and calculate offsets. As
# we proceed, we will push/pop info dictionaries on/off a stack.
for token in tokens:
if token.kind in ('html_start', 'handlebars_start', 'handlebars_singleton',
'html_singleton', 'django_start') and stack[-1]['tag'] != 'pre':
# An HTML start tag should only cause a new indent if we
# are on a new line.
if (token.tag not in ('extends', 'include', 'else', 'elif') and
(is_django_block_tag(token.tag) or
token.kind != 'django_start')):
is_block = token.line > stack[-1]['line']
if is_block:
if (((token.kind == 'handlebars_start' and
stack[-1]['token_kind'] == 'handlebars_start') or
(token.kind == 'django_start' and
stack[-1]['token_kind'] == 'django_start')) and
not stack[-1]['indenting']):
info = stack.pop()
info['depth'] = info['depth'] + 1
info['indenting'] = True
info['adjust_offset_until'] = token.line
stack.append(info)
new_depth = stack[-1]['depth'] + 1
extra_indent = stack[-1]['extra_indent']
line = lines[token.line - 1]
adjustment = len(line)-len(line.lstrip()) + 1
offset = (1 + extra_indent + new_depth * num_spaces) - adjustment
info = dict(
block=True,
depth=new_depth,
actual_depth=new_depth,
line=token.line,
tag=token.tag,
token_kind=token.kind,
line_span=token.line_span,
offset=offset,
extra_indent=token.col - adjustment + extra_indent,
extra_indent_prev=extra_indent,
adjustment=adjustment,
indenting=True,
adjust_offset_until=token.line,
ignore_lines=[]
)
if token.kind in ('handlebars_start', 'django_start'):
info.update(dict(depth=new_depth - 1, indenting=False))
else:
info = dict(
block=False,
depth=stack[-1]['depth'],
actual_depth=stack[-1]['depth'],
line=token.line,
tag=token.tag,
token_kind=token.kind,
extra_indent=stack[-1]['extra_indent'],
ignore_lines=[]
)
stack.append(info)
elif (token.kind in ('html_end', 'handlebars_end', 'html_singleton_end',
'django_end', 'handlebars_singleton_end') and
(stack[-1]['tag'] != 'pre' or token.tag == 'pre')):
info = stack.pop()
if info['block']:
# We are at the end of an indentation block. We
# assume the whole block was formatted ok before, just
# possibly at an indentation that we don't like, so we
# nudge over all lines in the block by the same offset.
start_line = info['line']
end_line = token.line
if token.tag == 'pre':
offsets[start_line] = 0
offsets[end_line] = 0
stack[-1]['ignore_lines'].append(start_line)
stack[-1]['ignore_lines'].append(end_line)
else:
offsets[start_line] = info['offset']
line = lines[token.line - 1]
adjustment = len(line)-len(line.lstrip()) + 1
if adjustment == token.col and token.kind != 'html_singleton_end':
offsets[end_line] = (info['offset'] +
info['adjustment'] -
adjustment +
info['extra_indent'] -
info['extra_indent_prev'])
elif (start_line + info['line_span'] - 1 == end_line and
info['line_span'] > 1):
offsets[end_line] = (1 + info['extra_indent'] +
(info['depth'] + 1) * num_spaces) - adjustment
# We would like singleton tags and tags which spread over
# multiple lines to have 2 space indentation.
offsets[end_line] -= 2
elif token.line != info['line']:
offsets[end_line] = info['offset']
if token.tag != 'pre' and token.tag != 'script':
for line_num in range(start_line + 1, end_line):
# Be careful not to override offsets that happened
# deeper in the HTML within our block.
if line_num not in offsets:
line = lines[line_num - 1]
new_depth = info['depth'] + 1
if (line.lstrip().startswith('{{else}}') or
line.lstrip().startswith('{% else %}') or
line.lstrip().startswith('{% elif')):
new_depth = info['actual_depth']
extra_indent = info['extra_indent']
adjustment = len(line)-len(line.lstrip()) + 1
offset = (1 + extra_indent + new_depth * num_spaces) - adjustment
if line_num <= start_line + info['line_span'] - 1:
# We would like singleton tags and tags which spread over
# multiple lines to have 2 space indentation.
offset -= 2
offsets[line_num] = offset
elif (token.kind in ('handlebars_end', 'django_end') and
info['indenting'] and
line_num < info['adjust_offset_until'] and
line_num not in info['ignore_lines']):
offsets[line_num] += num_spaces
elif token.tag != 'pre':
for line_num in range(start_line + 1, end_line):
if line_num not in offsets:
offsets[line_num] = info['offset']
else:
for line_num in range(start_line + 1, end_line):
if line_num not in offsets:
offsets[line_num] = 0
stack[-1]['ignore_lines'].append(line_num)
# Now that we have all of our offsets calculated, we can just
# join all our lines together, fixing up offsets as needed.
formatted_lines = []
for i, line in enumerate(html.split('\n')):
row = i + 1
offset = offsets.get(row, 0)
pretty_line = line
if line.strip() == '':
pretty_line = ''
else:
if offset > 0:
pretty_line = (' ' * offset) + pretty_line
elif offset < 0:
pretty_line = pretty_line[-1 * offset:]
assert line.strip() == pretty_line.strip()
formatted_lines.append(pretty_line)
return '\n'.join(formatted_lines)
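# A small before/after sketch (added for illustration) of the offset logic
# above, using the default indent of four spaces per nesting level:
#
#     pretty_print_html('<div>\n<p>\nhi\n</p>\n</div>')
#     # -> '<div>\n    <p>\n        hi\n    </p>\n</div>'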
def validate_indent_html(fn):
# type: (str) -> int
file = open(fn)
html = file.read()
phtml = pretty_print_html(html)
file.close()
if not html.split('\n') == phtml.split('\n'):
temp_file = open('/var/tmp/pretty_html.txt', 'w')
temp_file.write(phtml)
temp_file.close()
print('Invalid Indentation detected in file: '
'%s\nDiff for the file against expected indented file:' % (fn), flush=True)
subprocess.call(['diff', fn, '/var/tmp/pretty_html.txt'], stderr=subprocess.STDOUT)
return 0
return 1
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/provision.py | #!/usr/bin/env python3
import os
import sys
import logging
import argparse
import platform
import subprocess
import glob
import hashlib
os.environ["PYTHONUNBUFFERED"] = "y"
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import run, subprocess_text_output, OKBLUE, ENDC, WARNING, \
get_dev_uuid_var_path, FAIL, parse_lsb_release, file_or_package_hash_updated
from scripts.lib.setup_venv import (
setup_virtualenv, VENV_DEPENDENCIES, THUMBOR_VENV_DEPENDENCIES
)
from scripts.lib.node_cache import setup_node_modules, NODE_MODULES_CACHE_PATH
from version import PROVISION_VERSION
if False:
from typing import Any
SUPPORTED_PLATFORMS = {
"Ubuntu": [
"trusty",
"xenial",
"bionic",
],
"Debian": [
"stretch",
],
}
VENV_PATH = "/srv/zulip-py3-venv"
VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
LOG_DIR_PATH = os.path.join(VAR_DIR_PATH, 'log')
UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'uploads')
TEST_UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'test_uploads')
COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'coverage')
NODE_TEST_COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'node-coverage')
is_travis = 'TRAVIS' in os.environ
is_circleci = 'CIRCLECI' in os.environ
# TODO: De-duplicate this with emoji_dump.py
EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
if is_travis:
# In Travis CI, we don't have root access
EMOJI_CACHE_PATH = "/home/travis/zulip-emoji-cache"
if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
print(FAIL + "Error: No Zulip git repository present!" + ENDC)
print("To setup the Zulip development environment, you should clone the code")
print("from GitHub, rather than using a Zulip production release tarball.")
sys.exit(1)
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
round(ram_gb, 2),))
print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
sys.exit(1)
try:
UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
os.makedirs(UUID_VAR_PATH, exist_ok=True)
if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
os.symlink(
os.path.join(ZULIP_PATH, 'README.md'),
os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
)
os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError:
print(FAIL + "Error: Unable to create symlinks."
"Make sure you have permission to create symbolic links." + ENDC)
print("See this page for more information:")
print(" https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
sys.exit(1)
if platform.architecture()[0] == '64bit':
arch = 'amd64'
elif platform.architecture()[0] == '32bit':
arch = "i386"
else:
logging.critical("Only x86 is supported;"
"ping zulip-devel@googlegroups.com if you want another architecture.")
sys.exit(1)
# Ideally we wouldn't need to install a dependency here, before we
# know the codename.
if not os.path.exists("/usr/bin/lsb_release"):
subprocess.check_call(["sudo", "apt-get", "install", "-y", "lsb-release"])
distro_info = parse_lsb_release()
vendor = distro_info['DISTRIB_ID']
codename = distro_info['DISTRIB_CODENAME']
if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
logging.critical("Unsupported platform: {} {}".format(vendor, codename))
sys.exit(1)
POSTGRES_VERSION_MAP = {
"stretch": "9.6",
"trusty": "9.3",
"xenial": "9.5",
"bionic": "10",
}
POSTGRES_VERSION = POSTGRES_VERSION_MAP[codename]
UBUNTU_COMMON_APT_DEPENDENCIES = [
"closure-compiler",
"memcached",
"rabbitmq-server",
"redis-server",
"hunspell-en-us",
"supervisor",
"git",
"yui-compressor",
"wget",
"ca-certificates", # Explicit dependency in case e.g. wget is already installed
"puppet", # Used by lint
"puppet-lint",
"gettext", # Used by makemessages i18n
"curl", # Used for fetching PhantomJS as wget occasionally fails on redirects
"netcat", # Used for flushing memcached
"moreutils", # Used for sponge command
"libfontconfig1", # Required by phantomjs
] + VENV_DEPENDENCIES + THUMBOR_VENV_DEPENDENCIES
APT_DEPENDENCIES = {
"stretch": UBUNTU_COMMON_APT_DEPENDENCIES + [
"postgresql-9.6",
"postgresql-9.6-tsearch-extras",
"postgresql-9.6-pgroonga",
],
"trusty": UBUNTU_COMMON_APT_DEPENDENCIES + [
"postgresql-9.3",
"postgresql-9.3-tsearch-extras",
"postgresql-9.3-pgroonga",
],
"xenial": UBUNTU_COMMON_APT_DEPENDENCIES + [
"postgresql-9.5",
"postgresql-9.5-tsearch-extras",
"postgresql-9.5-pgroonga",
],
"bionic": UBUNTU_COMMON_APT_DEPENDENCIES + [
"postgresql-10",
"postgresql-10-pgroonga",
"postgresql-10-tsearch-extras",
],
}
TSEARCH_STOPWORDS_PATH = "/usr/share/postgresql/%s/tsearch_data/" % (POSTGRES_VERSION,)
REPO_STOPWORDS_PATH = os.path.join(
ZULIP_PATH,
"puppet",
"zulip",
"files",
"postgresql",
"zulip_english.stop",
)
LOUD = dict(_out=sys.stdout, _err=sys.stderr)
user_id = os.getuid()
def setup_shell_profile(shell_profile):
# type: (str) -> None
shell_profile_path = os.path.expanduser(shell_profile)
def write_command(command):
# type: (str) -> None
if os.path.exists(shell_profile_path):
with open(shell_profile_path, 'r') as shell_profile_file:
lines = [line.strip() for line in shell_profile_file.readlines()]
if command not in lines:
with open(shell_profile_path, 'a+') as shell_profile_file:
shell_profile_file.writelines(command + '\n')
else:
with open(shell_profile_path, 'w') as shell_profile_file:
shell_profile_file.writelines(command + '\n')
source_activate_command = "source " + os.path.join(VENV_PATH, "bin", "activate")
write_command(source_activate_command)
write_command('cd /srv/zulip')
def install_apt_deps():
# type: () -> None
# setup-apt-repo does an `apt-get update`
run(["sudo", "./scripts/lib/setup-apt-repo"])
# By doing list -> set -> list conversion we remove duplicates.
deps_to_install = list(set(APT_DEPENDENCIES[codename]))
run(["sudo", "apt-get", "-y", "install", "--no-install-recommends"] + deps_to_install)
def main(options):
# type: (Any) -> int
# yarn and management commands expect to be run from the root of the
# project.
os.chdir(ZULIP_PATH)
# setup-apt-repo does an `apt-get update`
# hash the apt dependencies
sha_sum = hashlib.sha1()
    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
# hash the content of setup-apt-repo
sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
new_apt_dependencies_hash = sha_sum.hexdigest()
last_apt_dependencies_hash = None
apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
with open(apt_hash_file_path, 'a+') as hash_file:
hash_file.seek(0)
last_apt_dependencies_hash = hash_file.read()
if (new_apt_dependencies_hash != last_apt_dependencies_hash):
try:
install_apt_deps()
except subprocess.CalledProcessError:
# Might be a failure due to network connection issues. Retrying...
print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
# Since a common failure mode is for the caching in
# `setup-apt-repo` to optimize the fast code path to skip
# running `apt-get update` when the target apt repository
# is out of date, we run it explicitly here so that we
# recover automatically.
run(['sudo', 'apt-get', 'update'])
install_apt_deps()
with open(apt_hash_file_path, 'w') as hash_file:
hash_file.write(new_apt_dependencies_hash)
else:
print("No changes to apt dependencies, so skipping apt operations.")
# Here we install node.
proxy_env = [
"env",
"http_proxy=" + os.environ.get("http_proxy", ""),
"https_proxy=" + os.environ.get("https_proxy", ""),
"no_proxy=" + os.environ.get("no_proxy", ""),
]
run(["sudo", "-H"] + proxy_env + ["scripts/lib/install-node"])
# This is a wrapper around `yarn`, which we run last since
# it can often fail due to network issues beyond our control.
try:
# Hack: We remove `node_modules` as root to work around an
# issue with the symlinks being improperly owned by root.
if os.path.islink("node_modules"):
run(["sudo", "rm", "-f", "node_modules"])
run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
setup_node_modules(prefer_offline=True)
except subprocess.CalledProcessError:
print(WARNING + "`yarn install` failed; retrying..." + ENDC)
setup_node_modules()
# Install shellcheck.
run(["sudo", "scripts/lib/install-shellcheck"])
# Import tools/setup_venv.py instead of running it so that we get an
# activated virtualenv for the rest of the provisioning process.
from tools.setup import setup_venvs
setup_venvs.main()
setup_shell_profile('~/.bash_profile')
setup_shell_profile('~/.zprofile')
run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])
# create log directory `zulip/var/log`
os.makedirs(LOG_DIR_PATH, exist_ok=True)
# create upload directory `var/uploads`
os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
# create test upload directory `var/test_upload`
os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
# create coverage directory`var/coverage`
os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
# create linecoverage directory`var/node-coverage`
os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)
# `build_emoji` script requires `emoji-datasource` package which we install
# via npm and hence it should be executed after we are done installing npm
# packages.
if not os.path.isdir(EMOJI_CACHE_PATH):
run(["sudo", "mkdir", EMOJI_CACHE_PATH])
run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
run(["tools/setup/emoji/build_emoji"])
# copy over static files from the zulip_bots package
run(["tools/setup/generate_zulip_bots_static_files"])
webfont_paths = ["tools/setup/generate-custom-icon-webfont", "static/icons/fonts/template.hbs"]
webfont_paths += glob.glob('static/assets/icons/*')
if file_or_package_hash_updated(webfont_paths, "webfont_files_hash", options.is_force):
run(["tools/setup/generate-custom-icon-webfont"])
else:
print("No need to run `tools/setup/generate-custom-icon-webfont`.")
build_pygments_data_paths = ["tools/setup/build_pygments_data", "tools/setup/lang.json"]
from pygments import __version__ as pygments_version
if file_or_package_hash_updated(build_pygments_data_paths, "build_pygments_data_hash", options.is_force,
[pygments_version]):
run(["tools/setup/build_pygments_data"])
else:
print("No need to run `tools/setup/build_pygments_data`.")
run(["scripts/setup/generate_secrets.py", "--development"])
run(["tools/update-authors-json", "--use-fixture"])
email_source_paths = ["tools/inline-email-css", "templates/zerver/emails/email.css"]
email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
if file_or_package_hash_updated(email_source_paths, "last_email_source_files_hash", options.is_force):
run(["tools/inline-email-css"])
else:
print("No need to run `tools/inline-email-css`.")
if is_circleci or (is_travis and not options.is_production_travis):
run(["sudo", "service", "rabbitmq-server", "restart"])
run(["sudo", "service", "redis-server", "restart"])
run(["sudo", "service", "memcached", "restart"])
run(["sudo", "service", "postgresql", "restart"])
elif options.is_docker:
run(["sudo", "service", "rabbitmq-server", "restart"])
run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
run(["sudo", "service", "redis-server", "restart"])
run(["sudo", "service", "memcached", "restart"])
if not options.is_production_travis:
# The following block is skipped for the production Travis
# suite, because that suite doesn't make use of these elements
# of the development environment (it just uses the development
# environment to build a release tarball).
# Need to set up Django before using template_database_status
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
import django
django.setup()
from zerver.lib.test_fixtures import template_database_status, run_db_migrations
try:
from zerver.lib.queue import SimpleQueueClient
SimpleQueueClient()
rabbitmq_is_configured = True
except Exception:
rabbitmq_is_configured = False
if options.is_force or not rabbitmq_is_configured:
run(["scripts/setup/configure-rabbitmq"])
else:
print("RabbitMQ is already configured.")
migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
dev_template_db_status = template_database_status(
migration_status=migration_status_path,
settings="zproject.settings",
database_name="zulip",
)
if options.is_force or dev_template_db_status == 'needs_rebuild':
run(["tools/setup/postgres-init-dev-db"])
run(["tools/do-destroy-rebuild-database"])
elif dev_template_db_status == 'run_migrations':
run_db_migrations('dev')
elif dev_template_db_status == 'current':
print("No need to regenerate the dev DB.")
test_template_db_status = template_database_status()
if options.is_force or test_template_db_status == 'needs_rebuild':
run(["tools/setup/postgres-init-test-db"])
run(["tools/do-destroy-rebuild-test-database"])
elif test_template_db_status == 'run_migrations':
run_db_migrations('test')
elif test_template_db_status == 'current':
print("No need to regenerate the test DB.")
# Consider updating generated translations data: both `.mo`
# files and `language-options.json`.
paths = ['zerver/management/commands/compilemessages.py']
paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
paths += glob.glob('static/locale/*/translations.json')
if file_or_package_hash_updated(paths, "last_compilemessages_hash", options.is_force):
run(["./manage.py", "compilemessages"])
else:
print("No need to run `manage.py compilemessages`.")
run(["scripts/lib/clean-unused-caches"])
version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
print('writing to %s\n' % (version_file,))
open(version_file, 'w').write(PROVISION_VERSION + '\n')
print()
print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
return 0
if __name__ == "__main__":
description = ("Provision script to install Zulip")
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--force', action='store_true', dest='is_force',
default=False,
help="Ignore all provisioning optimizations.")
parser.add_argument('--production-travis', action='store_true',
dest='is_production_travis',
default=False,
help="Provision for Travis with production settings.")
parser.add_argument('--docker', action='store_true',
dest='is_docker',
default=False,
help="Provision for Docker.")
options = parser.parse_args()
sys.exit(main(options))
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/sanity_check.py | import os
import pwd
import sys
def check_venv(filename):
# type: (str) -> None
try:
import django
import ujson
import zulip
except ImportError:
print("You need to run %s inside a Zulip dev environment." % (filename,))
user_id = os.getuid()
user_name = pwd.getpwuid(user_id).pw_name
if user_name != 'vagrant' and user_name != 'zulipdev':
print("If you are using Vagrant, you can `vagrant ssh` to enter the Vagrant guest.")
else:
print("You can `source /srv/zulip-py3-venv/bin/activate` "
"to enter the Zulip development environment.")
sys.exit(1)
| [] | [] | [] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/template_parser.py | from typing import Callable, List, Optional, Text
class TemplateParserException(Exception):
def __init__(self, message):
# type: (str) -> None
self.message = message
def __str__(self):
# type: () -> str
return self.message
class TokenizationException(Exception):
def __init__(self, message, line_content=None):
# type: (str, Optional[str]) -> None
self.message = message
self.line_content = line_content
class TokenizerState:
def __init__(self):
# type: () -> None
self.i = 0
self.line = 1
self.col = 1
class Token:
def __init__(self, kind, s, tag, line, col, line_span):
# type: (str, str, str, int, int, int) -> None
self.kind = kind
self.s = s
self.tag = tag
self.line = line
self.col = col
self.line_span = line_span
def tokenize(text):
# type: (str) -> List[Token]
def advance(n):
# type: (int) -> None
for _ in range(n):
state.i += 1
if state.i >= 0 and text[state.i - 1] == '\n':
state.line += 1
state.col = 1
else:
state.col += 1
def looking_at(s):
# type: (str) -> bool
return text[state.i:state.i+len(s)] == s
def looking_at_htmlcomment():
# type: () -> bool
return looking_at("<!--")
def looking_at_handlebarcomment():
# type: () -> bool
return looking_at("{{!")
def looking_at_djangocomment():
# type: () -> bool
return looking_at("{#")
def looking_at_handlebarpartial() -> bool:
return looking_at("{{partial")
def looking_at_html_start():
# type: () -> bool
return looking_at("<") and not looking_at("</")
def looking_at_html_end():
# type: () -> bool
return looking_at("</")
def looking_at_handlebars_start():
# type: () -> bool
return looking_at("{{#") or looking_at("{{^")
def looking_at_handlebars_end():
# type: () -> bool
return looking_at("{{/")
def looking_at_django_start():
# type: () -> bool
return looking_at("{% ") and not looking_at("{% end")
def looking_at_django_end():
# type: () -> bool
return looking_at("{% end")
state = TokenizerState()
tokens = []
while state.i < len(text):
try:
if looking_at_htmlcomment():
s = get_html_comment(text, state.i)
tag = s[4:-3]
kind = 'html_comment'
elif looking_at_handlebarcomment():
s = get_handlebar_comment(text, state.i)
tag = s[3:-2]
kind = 'handlebar_comment'
elif looking_at_djangocomment():
s = get_django_comment(text, state.i)
tag = s[2:-2]
kind = 'django_comment'
elif looking_at_handlebarpartial():
s = get_handlebar_partial(text, state.i)
tag = s[9:-2]
kind = 'handlebars_singleton'
elif looking_at_html_start():
s = get_html_tag(text, state.i)
tag_parts = s[1:-1].split()
if not tag_parts:
raise TemplateParserException("Tag name missing")
tag = tag_parts[0]
if is_special_html_tag(s, tag):
kind = 'html_special'
elif is_self_closing_html_tag(s, tag):
kind = 'html_singleton'
else:
kind = 'html_start'
elif looking_at_html_end():
s = get_html_tag(text, state.i)
tag = s[2:-1]
kind = 'html_end'
elif looking_at_handlebars_start():
s = get_handlebars_tag(text, state.i)
tag = s[3:-2].split()[0]
kind = 'handlebars_start'
elif looking_at_handlebars_end():
s = get_handlebars_tag(text, state.i)
tag = s[3:-2]
kind = 'handlebars_end'
elif looking_at_django_start():
s = get_django_tag(text, state.i)
tag = s[3:-2].split()[0]
kind = 'django_start'
elif looking_at_django_end():
s = get_django_tag(text, state.i)
tag = s[6:-3]
kind = 'django_end'
else:
advance(1)
continue
except TokenizationException as e:
raise TemplateParserException('''%s at Line %d Col %d:"%s"''' %
(e.message, state.line, state.col,
e.line_content))
line_span = len(s.split('\n'))
token = Token(
kind=kind,
s=s,
tag=tag,
line=state.line,
col=state.col,
line_span=line_span
)
tokens.append(token)
advance(len(s))
def add_pseudo_end_token(kind: str) -> None:
token = Token(
kind=kind,
s='</' + tag + '>',
tag=tag,
line=state.line,
col=state.col,
line_span=1
)
tokens.append(token)
if kind == 'html_singleton':
            # Here we insert a pseudo html_singleton_end tag to make it easy
            # to detect the end of singleton HTML tags, which is needed in
            # some cases, e.g. by our HTML pretty printer.
add_pseudo_end_token('html_singleton_end')
if kind == 'handlebars_singleton':
            # We insert a pseudo handlebars end tag for singleton cases of
            # handlebars like the partials. This helps in indenting multi-line partials.
add_pseudo_end_token('handlebars_singleton_end')
return tokens
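# An illustrative sketch (not part of the original file) of the token stream
# produced for a template mixing HTML and Handlebars tags; plain text such
# as 'hi' is skipped rather than emitted as a token.
#
#     [(t.kind, t.tag) for t in tokenize('<p>{{#if foo}}hi{{/if}}</p>')]
#     # -> [('html_start', 'p'), ('handlebars_start', 'if'),
#     #     ('handlebars_end', 'if'), ('html_end', 'p')]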
def validate(fn=None, text=None, check_indent=True):
# type: (Optional[str], Optional[str], bool) -> None
assert fn or text
if fn is None:
fn = '<in memory file>'
if text is None:
text = open(fn).read()
tokens = tokenize(text)
class State:
def __init__(self, func):
# type: (Callable[[Token], None]) -> None
self.depth = 0
self.matcher = func
def no_start_tag(token):
# type: (Token) -> None
raise TemplateParserException('''
No start tag
fn: %s
end tag:
%s
line %d, col %d
''' % (fn, token.tag, token.line, token.col))
state = State(no_start_tag)
def start_tag_matcher(start_token):
# type: (Token) -> None
state.depth += 1
start_tag = start_token.tag.strip('~')
start_line = start_token.line
start_col = start_token.col
old_matcher = state.matcher
def f(end_token):
# type: (Token) -> None
end_tag = end_token.tag.strip('~')
end_line = end_token.line
end_col = end_token.col
if start_tag == 'a':
max_lines = 3
else:
max_lines = 1
problem = None
if (start_tag == 'code') and (end_line == start_line + 1):
problem = 'Code tag is split across two lines.'
if start_tag != end_tag:
problem = 'Mismatched tag.'
elif check_indent and (end_line > start_line + max_lines):
if end_col != start_col:
problem = 'Bad indentation.'
if problem:
raise TemplateParserException('''
fn: %s
%s
start:
%s
line %d, col %d
end tag:
%s
line %d, col %d
''' % (fn, problem, start_token.s, start_line, start_col, end_tag, end_line, end_col))
state.matcher = old_matcher
state.depth -= 1
state.matcher = f
for token in tokens:
kind = token.kind
tag = token.tag
if kind == 'html_start':
start_tag_matcher(token)
elif kind == 'html_end':
state.matcher(token)
elif kind == 'handlebars_start':
start_tag_matcher(token)
elif kind == 'handlebars_end':
state.matcher(token)
elif kind == 'django_start':
if is_django_block_tag(tag):
start_tag_matcher(token)
elif kind == 'django_end':
state.matcher(token)
if state.depth != 0:
raise TemplateParserException('Missing end tag')
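# A minimal usage sketch of validate(), assuming the TemplateParserException
# class defined earlier in this file; the sample template strings and the
# _demo_validate name are hypothetical, not part of the original module.
def _demo_validate() -> None:
    validate(text='<div>\n  <p>hi</p>\n</div>\n')  # balanced tags: no error
    try:
        validate(text='<div>\n</span>\n')  # mismatched end tag
    except TemplateParserException as e:
        print('caught:', e)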
def is_special_html_tag(s, tag):
# type: (str, str) -> bool
return tag in ['link', 'meta', '!DOCTYPE']
def is_self_closing_html_tag(s: Text, tag: Text) -> bool:
self_closing_tag = tag in [
'area',
'base',
'br',
'col',
'embed',
'hr',
'img',
'input',
'param',
'source',
'track',
'wbr',
]
singleton_tag = s.endswith('/>')
return self_closing_tag or singleton_tag
def is_django_block_tag(tag):
# type: (str) -> bool
return tag in [
'autoescape',
'block',
'comment',
'for',
'if',
'ifequal',
'verbatim',
'blocktrans',
'trans',
'raw',
'with',
]
def get_handlebars_tag(text, i):
# type: (str, int) -> str
end = i + 2
while end < len(text) - 1 and text[end] != '}':
end += 1
    if end + 1 >= len(text) or text[end] != '}' or text[end+1] != '}':
raise TokenizationException('Tag missing "}}"', text[i:end+2])
s = text[i:end+2]
return s
def get_django_tag(text, i):
# type: (str, int) -> str
end = i + 2
while end < len(text) - 1 and text[end] != '%':
end += 1
    if end + 1 >= len(text) or text[end] != '%' or text[end+1] != '}':
raise TokenizationException('Tag missing "%}"', text[i:end+2])
s = text[i:end+2]
return s
def get_html_tag(text, i):
# type: (str, int) -> str
quote_count = 0
end = i + 1
unclosed_end = 0
    while end < len(text) and (text[end] != '>' or (quote_count % 2 != 0 and text[end] != '<')):
if text[end] == '"':
quote_count += 1
if not unclosed_end and text[end] == '<':
unclosed_end = end
end += 1
if quote_count % 2 != 0:
if unclosed_end:
raise TokenizationException('Unbalanced Quotes', text[i:unclosed_end])
else:
raise TokenizationException('Unbalanced Quotes', text[i:end+1])
if end == len(text) or text[end] != '>':
raise TokenizationException('Tag missing ">"', text[i:end+1])
s = text[i:end+1]
return s
def get_html_comment(text, i):
# type: (str, int) -> str
end = i + 7
unclosed_end = 0
while end <= len(text):
if text[end-3:end] == '-->':
return text[i:end]
        if not unclosed_end and end < len(text) and text[end] == '<':
unclosed_end = end
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_comment(text, i):
# type: (str, int) -> str
end = i + 5
unclosed_end = 0
while end <= len(text):
if text[end-2:end] == '}}':
return text[i:end]
        if not unclosed_end and end < len(text) and text[end] == '<':
unclosed_end = end
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_django_comment(text, i):
# type: (str, int) -> str
end = i + 4
unclosed_end = 0
while end <= len(text):
if text[end-2:end] == '#}':
return text[i:end]
        if not unclosed_end and end < len(text) and text[end] == '<':
unclosed_end = end
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_partial(text, i):
# type: (str, int) -> str
end = i + 10
unclosed_end = 0
while end <= len(text):
if text[end-2:end] == '}}':
return text[i:end]
        if not unclosed_end and end < len(text) and text[end] == '<':
unclosed_end = end
end += 1
raise TokenizationException('Unclosed partial', text[i:unclosed_end])
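# A minimal usage sketch of the low-level extraction helpers above; the sample
# strings and the _demo_extractors name are hypothetical, not part of the
# original module. Each helper returns the full source text of the construct,
# or raises TokenizationException on malformed input.
def _demo_extractors() -> None:
    print(get_html_tag('<a href="x">link</a>', 0))    # '<a href="x">'
    print(get_handlebars_tag('{{#if flag}}...', 0))   # '{{#if flag}}'
    try:
        get_html_comment('<!-- oops <p>', 0)          # never closed
    except TokenizationException as e:
        print('caught:', e)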
| [
"str",
"Text",
"Text"
] | [
5133,
8996,
9007
] | [
5136,
9000,
9011
] |
archives/18-2-SKKU-OSS_2018-2-OSS-L5.zip | tools/lib/test_script.py | from typing import Optional, Tuple
import os
from distutils.version import LooseVersion
from version import PROVISION_VERSION
from scripts.lib.zulip_tools import get_dev_uuid_var_path
def get_major_version(v):
# type: (str) -> int
return int(v.split('.')[0])
def get_version_file():
# type: () -> str
uuid_var_path = get_dev_uuid_var_path()
return os.path.join(uuid_var_path, 'provision_version')
PREAMBLE = '''
Before we run tests, we make sure your provisioning version
is correct by looking at var/provision_version, which is at
version %s, and comparing it to the version in source
control (version.py), which is %s.
'''
def preamble(version):
# type: (str) -> str
text = PREAMBLE % (version, PROVISION_VERSION)
text += '\n'
return text
NEED_TO_DOWNGRADE = '''
It looks like you checked out a branch that expects an older
version of dependencies than the version you provisioned last.
This may be ok, but it's likely that you either want to rebase
your branch on top of upstream/master or re-provision your VM.
Do this: `./tools/provision`
'''
NEED_TO_UPGRADE = '''
It looks like you checked out a branch that has added
dependencies beyond what you last provisioned. Your command
is likely to fail until you add dependencies by provisioning.
Do this: `./tools/provision`
'''
def get_provisioning_status():
# type: () -> Tuple[bool, Optional[str]]
version_file = get_version_file()
if not os.path.exists(version_file):
# If the developer doesn't have a version_file written by
# a previous provision, then we don't do any safety checks
# here on the assumption that the developer is managing
# their own dependencies and not running provision.
return True, None
version = open(version_file).read().strip()
    # Normal path for people who provision--we're all good!
if version == PROVISION_VERSION:
return True, None
    # We may be more provisioned than the branch we just moved to. As
    # long as the major version hasn't changed, we should be ok.
if LooseVersion(version) > LooseVersion(PROVISION_VERSION):
if get_major_version(version) == get_major_version(PROVISION_VERSION):
return True, None
else:
return False, preamble(version) + NEED_TO_DOWNGRADE
return False, preamble(version) + NEED_TO_UPGRADE
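# A minimal usage sketch, not part of the original module: a caller such as a
# test runner might check provisioning status before doing any work and abort
# with the explanatory message. The _abort_if_unprovisioned name is
# hypothetical.
def _abort_if_unprovisioned():
    # type: () -> None
    import sys
    ok, message = get_provisioning_status()
    if not ok:
        print(message)
        sys.exit(1)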
| [] | [] | [] |