zip
stringlengths 19
109
| filename
stringlengths 4
185
| contents
stringlengths 0
30.1M
| type_annotations
sequencelengths 0
1.97k
| type_annotation_starts
sequencelengths 0
1.97k
| type_annotation_ends
sequencelengths 0
1.97k
|
---|---|---|---|---|---|
archives/097475_hansberger.zip | hansberger/users/forms.py | from django.contrib.auth import get_user_model, forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
User = get_user_model()
class UserChangeForm(forms.UserChangeForm):
    """Admin change form bound to the project's custom ``User`` model."""

    class Meta(forms.UserChangeForm.Meta):
        model = User
class UserCreationForm(forms.UserCreationForm):
    """User creation form that rejects duplicate usernames."""

    # BUG FIX: the original did
    #   error_message = forms.UserCreationForm.error_messages.update({...})
    # which (a) binds ``None`` (dict.update's return value) to a misnamed
    # attribute and (b) mutates the *parent* form's shared error_messages
    # dict as an import-time side effect. Build a subclass-local mapping
    # instead; ``clean_username`` reads the same key either way.
    error_messages = {
        **forms.UserCreationForm.error_messages,
        "duplicate_username": _("This username has already been taken."),
    }

    class Meta(forms.UserCreationForm.Meta):
        model = User

    def clean_username(self):
        """Return the username if unused, else raise ValidationError (EAFP)."""
        username = self.cleaned_data["username"]
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            return username
        raise ValidationError(self.error_messages["duplicate_username"])
| [] | [] | [] |
archives/097475_hansberger.zip | hansberger/users/migrations/0001_initial.py | import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: create the project's custom ``User`` model.

    Auto-generated by Django. Mirrors ``AbstractUser``'s fields plus the
    project's extra free-form ``name`` field; do not hand-edit field
    definitions here once applied.
    """

    initial = True
    dependencies = [("auth", "0008_alter_user_username_max_length")]
    operations = [
        migrations.CreateModel(
            name="User",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("password", models.CharField(max_length=128, verbose_name="password")),
                (
                    "last_login",
                    models.DateTimeField(
                        blank=True, null=True, verbose_name="last login"
                    ),
                ),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                (
                    "username",
                    models.CharField(
                        error_messages={
                            "unique": "A user with that username already exists."
                        },
                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                        max_length=150,
                        unique=True,
                        validators=[
                            django.contrib.auth.validators.UnicodeUsernameValidator()
                        ],
                        verbose_name="username",
                    ),
                ),
                (
                    "first_name",
                    models.CharField(
                        blank=True, max_length=30, verbose_name="first name"
                    ),
                ),
                (
                    "last_name",
                    models.CharField(
                        blank=True, max_length=150, verbose_name="last name"
                    ),
                ),
                (
                    "email",
                    models.EmailField(
                        blank=True, max_length=254, verbose_name="email address"
                    ),
                ),
                (
                    "is_staff",
                    models.BooleanField(
                        default=False,
                        help_text="Designates whether the user can log into this admin site.",
                        verbose_name="staff status",
                    ),
                ),
                (
                    "is_active",
                    models.BooleanField(
                        default=True,
                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                        verbose_name="active",
                    ),
                ),
                (
                    "date_joined",
                    models.DateTimeField(
                        default=django.utils.timezone.now, verbose_name="date joined"
                    ),
                ),
                # Project-specific addition on top of AbstractUser.
                (
                    "name",
                    models.CharField(
                        blank=True, max_length=255, verbose_name="Name of User"
                    ),
                ),
                (
                    "groups",
                    models.ManyToManyField(
                        blank=True,
                        help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.Group",
                        verbose_name="groups",
                    ),
                ),
                (
                    "user_permissions",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Specific permissions for this user.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.Permission",
                        verbose_name="user permissions",
                    ),
                ),
            ],
            options={
                "verbose_name_plural": "users",
                "verbose_name": "user",
                "abstract": False,
            },
            managers=[("objects", django.contrib.auth.models.UserManager())],
        )
    ]
| [] | [] | [] |
archives/097475_hansberger.zip | hansberger/users/migrations/__init__.py | [] | [] | [] |
|
archives/097475_hansberger.zip | hansberger/users/models.py | from django.contrib.auth.models import AbstractUser
from django.db.models import CharField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
class User(AbstractUser):
    """Project user model: AbstractUser plus one free-form ``name`` field."""

    # A single ``name`` field sidesteps first/last-name assumptions that
    # do not hold for naming conventions around the globe.
    name = CharField(_("Name of User"), blank=True, max_length=255)

    def get_absolute_url(self):
        """Return the canonical detail URL for this user."""
        url_kwargs = {"username": self.username}
        return reverse("users:detail", kwargs=url_kwargs)
| [] | [] | [] |
archives/097475_hansberger.zip | hansberger/users/tests/__init__.py | [] | [] | [] |
|
archives/097475_hansberger.zip | hansberger/users/tests/factories.py | from typing import Any, Sequence
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, Faker, post_generation
class UserFactory(DjangoModelFactory):
    """Factory producing ``User`` instances with fake credentials."""

    username = Faker("user_name")
    email = Faker("email")
    name = Faker("name")

    @post_generation
    def password(self, create: bool, extracted: Sequence[Any], **kwargs):
        """Set a password after generation.

        FIX: honour an explicitly passed password — ``UserFactory(password="s3cret")``
        — which the original silently ignored (``extracted`` was unused).
        Falls back to a random 42-character Faker password otherwise.
        """
        password = (
            extracted
            if extracted
            else Faker(
                "password",
                length=42,
                special_chars=True,
                digits=True,
                upper_case=True,
                lower_case=True,
            ).generate(extra_kwargs={})
        )
        self.set_password(password)

    class Meta:
        model = get_user_model()
        django_get_or_create = ["username"]
| [
"bool",
"Sequence[Any]"
] | [
325,
342
] | [
329,
355
] |
archives/097475_hansberger.zip | hansberger/users/tests/test_forms.py | import pytest
from hansberger.users.forms import UserCreationForm
from hansberger.users.tests.factories import UserFactory
pytestmark = pytest.mark.django_db
class TestUserCreationForm:
    """Exercise ``UserCreationForm.clean_username`` against the database."""

    def test_clean_username(self):
        # No user with these parameters exists yet.
        proto_user = UserFactory.build()
        payload = {
            "username": proto_user.username,
            "password1": proto_user._password,
            "password2": proto_user._password,
        }

        form = UserCreationForm(payload)
        assert form.is_valid()
        assert form.clean_username() == proto_user.username

        # Persist the user; a second form with identical data must now fail
        # with exactly one error, on the username field.
        form.save()
        form = UserCreationForm(payload)
        assert not form.is_valid()
        assert len(form.errors) == 1
        assert "username" in form.errors
| [] | [] | [] |
archives/097475_hansberger.zip | hansberger/users/tests/test_models.py | import pytest
from django.conf import settings
pytestmark = pytest.mark.django_db
def test_user_get_absolute_url(user: settings.AUTH_USER_MODEL):
    """A user's canonical URL is ``/users/<username>/``."""
    expected = f"/users/{user.username}/"
    assert user.get_absolute_url() == expected
| [
"settings.AUTH_USER_MODEL"
] | [
122
] | [
146
] |
archives/097475_hansberger.zip | hansberger/users/tests/test_urls.py | import pytest
from django.conf import settings
from django.urls import reverse, resolve
pytestmark = pytest.mark.django_db
def test_detail(user: settings.AUTH_USER_MODEL):
    """users:detail round-trips between reverse() and resolve()."""
    expected = f"/users/{user.username}/"
    assert reverse("users:detail", kwargs={"username": user.username}) == expected
    assert resolve(expected).view_name == "users:detail"
def test_list():
    """users:list round-trips between reverse() and resolve()."""
    url = reverse("users:list")
    assert url == "/users/"
    assert resolve("/users/").view_name == "users:list"
def test_update():
    """users:update round-trips between reverse() and resolve()."""
    url = reverse("users:update")
    assert url == "/users/~update/"
    assert resolve("/users/~update/").view_name == "users:update"
def test_redirect():
    """users:redirect round-trips between reverse() and resolve()."""
    url = reverse("users:redirect")
    assert url == "/users/~redirect/"
    assert resolve("/users/~redirect/").view_name == "users:redirect"
| [
"settings.AUTH_USER_MODEL"
] | [
148
] | [
172
] |
archives/097475_hansberger.zip | hansberger/users/tests/test_views.py | import pytest
from django.conf import settings
from django.test import RequestFactory
from hansberger.users.views import UserRedirectView, UserUpdateView
pytestmark = pytest.mark.django_db
class TestUserUpdateView:
    """
    TODO:
    extracting view initialization code as class-scoped fixture
    would be great if only pytest-django supported non-function-scoped
    fixture db access -- this is a work-in-progress for now:
    https://github.com/pytest-dev/pytest-django/pull/258
    """

    @staticmethod
    def _bound_view(user, request_factory):
        # Build a UserUpdateView whose request is authenticated as ``user``.
        view = UserUpdateView()
        request = request_factory.get("/fake-url/")
        request.user = user
        view.request = request
        return view

    def test_get_success_url(
        self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
    ):
        view = self._bound_view(user, request_factory)
        assert view.get_success_url() == f"/users/{user.username}/"

    def test_get_object(
        self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
    ):
        view = self._bound_view(user, request_factory)
        assert view.get_object() == user
class TestUserRedirectView:
    """The redirect view must send the requester to their own detail page."""

    def test_get_redirect_url(
        self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
    ):
        view = UserRedirectView()
        view.request = request_factory.get("/fake-url")
        view.request.user = user
        assert view.get_redirect_url() == f"/users/{user.username}/"
| [
"settings.AUTH_USER_MODEL",
"RequestFactory",
"settings.AUTH_USER_MODEL",
"RequestFactory",
"settings.AUTH_USER_MODEL",
"RequestFactory"
] | [
565,
608,
889,
932,
1221,
1264
] | [
589,
622,
913,
946,
1245,
1278
] |
archives/097475_hansberger.zip | hansberger/users/urls.py | from django.urls import path
from hansberger.users.views import (
user_list_view,
user_redirect_view,
user_update_view,
user_detail_view,
)
app_name = "users"

# Routes: tilde-prefixed paths come before the username catch-all so that
# '~update'/'~redirect' are never interpreted as usernames.
urlpatterns = [
    path("", user_list_view, name="list"),
    path("~redirect/", user_redirect_view, name="redirect"),
    path("~update/", user_update_view, name="update"),
    path("<str:username>/", user_detail_view, name="detail"),
]
| [] | [] | [] |
archives/097475_hansberger.zip | hansberger/users/views.py | from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse
from django.views.generic import DetailView, ListView, RedirectView, UpdateView
User = get_user_model()
class UserDetailView(LoginRequiredMixin, DetailView):
    """Show one user's detail page, looked up by username (login required)."""

    model = User
    slug_field = "username"
    slug_url_kwarg = "username"


user_detail_view = UserDetailView.as_view()
class UserListView(LoginRequiredMixin, ListView):
    """List all users (login required).

    The original carried ``slug_field``/``slug_url_kwarg`` copied from the
    detail view; ``ListView`` never consults them, so they are dropped.
    """

    model = User


user_list_view = UserListView.as_view()
class UserUpdateView(LoginRequiredMixin, UpdateView):
    """Let the logged-in user edit their own ``name`` field (login required)."""

    model = User
    fields = ["name"]

    def get_success_url(self):
        # Redirect back to the editor's own detail page after saving.
        return reverse("users:detail", kwargs={"username": self.request.user.username})

    def get_object(self):
        # Always edit the requesting user -- never an arbitrary pk from the URL.
        return User.objects.get(username=self.request.user.username)


user_update_view = UserUpdateView.as_view()
class UserRedirectView(LoginRequiredMixin, RedirectView):
    """Redirect the logged-in user to their own detail page."""

    permanent = False

    def get_redirect_url(self):
        # Target is always the requesting user's own detail page.
        return reverse("users:detail", kwargs={"username": self.request.user.username})


user_redirect_view = UserRedirectView.as_view()
| [] | [] | [] |
archives/097475_hansberger.zip | manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to the local settings module unless the environment overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django  # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # This allows easy placement of apps within the interior
    # hansberger directory.
    current_path = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(current_path, "hansberger"))
    execute_from_command_line(sys.argv)
| [] | [] | [] |
archives/0hoo_snowball.zip | app.py | from datetime import datetime
from collections import defaultdict
from flask import Flask, request, render_template, redirect, url_for
from bson.objectid import ObjectId
import db
from scrapper import parse_snowball
from utils import mean_or_zero
from etftag import ETFTag
app = Flask(__name__)
VERSION = 1.10  # displayed in the templates as the app version
INTEREST = 2.25  # baseline interest rate (%) shown on the ETF page
@app.route('/stocks')
@app.route('/stocks/<status>')
@app.route('/stocks/<status>/<alt>')
@app.route('/')
def stocks(status=None, alt=None):
    """Render the stock list, optionally narrowed by status and a saved filter.

    ``status`` selects a Mongo query (starred/owned/starredorowned/doubtful,
    'bad' disables the bad-stock filter); the ``filter_id`` query arg applies
    a user-defined filter/rank set. Extra portfolio stats are computed for the
    owned/starred views.
    """
    find = None
    stat = {}
    if status == 'starred':
        find = {'starred': True}
    elif status == 'owned':
        find = {'owned': True}
    elif status == 'starredorowned':
        find = {'$or': [{'starred': True}, {'owned': True}]}
    elif status == 'doubtful':
        find = {'doubtful': True}
    order_by = request.args.get('order_by', 'expected_rate')
    ordering = request.args.get('ordering', 'desc')
    filter_id = request.args.get('filter_id', None)
    filters = db.all_filters()
    current_filter = db.filter_by_id(filter_id) if filter_id else None
    stocks = db.all_stocks(
        order_by=order_by,
        ordering=ordering,
        find=find,
        filter_by_expected_rate=find is None,  # was ``find==None`` (PEP 8 E711)
        filter_bad=status != 'bad',
        filter_options=(current_filter.filter_options if current_filter else []),
        rank_options=(current_filter.rank_options if current_filter else []))
    if current_filter and current_filter.rank_options:
        order_by = None
    if status in ['owned', 'starred', 'starredorowned']:
        stat['low_pbr'] = len([stock for stock in stocks if stock.pbr <= 1])
        stat['high_expected_rate'] = len([stock for stock in stocks if stock.expected_rate >= 15])
        stat['fscore'] = len([stock for stock in stocks if stock.latest_fscore >= 3])
        stat['mean_expected_rate'] = mean_or_zero([stock.expected_rate for stock in stocks])
        stat['mean_expected_rate_by_low_pbr'] = mean_or_zero([stock.expected_rate_by_low_pbr for stock in stocks])
        stat['mean_future_roe'] = mean_or_zero([stock.future_roe for stock in stocks])
        qROE_numbers = [stock.QROEs[0][1] for stock in stocks if len(stock.QROEs) > 0]
        qROE_numbers = [float(roe_number) for roe_number in qROE_numbers if roe_number]
        stat['mean_qROEs'] = mean_or_zero(qROE_numbers)
        stat['qROEs_count'] = len(qROE_numbers)
    return render_template('stocks.html', VERSION=VERSION, stocks=stocks, order_by=order_by, ordering=ordering, status=status,
                           available_filter_options=db.available_filter_options, filters=filters,
                           current_filter=current_filter, stat=stat, available_rank_options=db.available_rank_options,
                           alt=alt)
@app.route('/stocks/filter/new')
def stocks_new_filter():
    """Create an empty filter with an auto-numbered default name."""
    existing = db.all_filters()
    new_filter = {
        'name': '새필터' + str(len(existing) + 1),
        'options': [],
    }
    filter_id = db.save_filter(new_filter)
    return redirect(url_for('stocks', filter_id=filter_id))
@app.route('/stocks/filter/<filter_id>/save', methods=['POST'])
def stocks_save_filter(filter_id):
    """Rename an existing saved filter from the submitted form."""
    if request.method == 'POST':
        current_filter = db.filter_by_id(filter_id)
        current_filter['name'] = request.form.get('filter_name', '')
        db.save_filter(current_filter)
    return redirect(url_for('stocks', filter_id=filter_id))
@app.route('/stocks/filter/<filter_id>/remove')
def stocks_remove_filter(filter_id):
    """Delete a saved filter and return to the stock list."""
    db.remove_filter(filter_id)
    return redirect(url_for('stocks'))
@app.route('/stocks/filter/<filter_id>/add_filter_option', methods=['POST'])
def stocks_add_filter_option(filter_id):
    """Append one filter option (key / direction / threshold) to a saved filter.

    Reads the option definition from the POSTed form, coerces the threshold
    to a float (0 on bad input), and persists the updated filter.
    """
    if request.method == 'POST':
        name = request.form.get('filter_name')
        key = request.form.get('filter_option_key')
        morethan = request.form.get('filter_option_morethan')
        morethan = morethan == 'morethan'
        try:
            value = float(request.form.get('filter_option_value', 0))
        except (TypeError, ValueError):
            # Was a bare ``except:`` -- catch only conversion failures so real
            # errors (including KeyboardInterrupt/SystemExit) propagate.
            value = 0
        selected = [filter_option for filter_option in db.available_filter_options if filter_option.key == key][0]
        new_filter_option = db.FilterOption(key, selected.title, morethan, value, selected.is_boolean)
        current_filter = db.filter_by_id(filter_id)
        options = current_filter.get('options', [])
        filter_option_dict = new_filter_option._asdict()
        filter_option_dict['_id'] = ObjectId()  # give the option its own id so it can be removed later
        options.append(filter_option_dict)
        current_filter['options'] = options
        current_filter['name'] = name
        db.save_filter(current_filter)
        return redirect(url_for('stocks', filter_id=current_filter['_id']))
@app.route('/stocks/filter/<filter_id>/remove_filter_option/<filter_option_id>')
def stocks_remove_filter_option(filter_id, filter_option_id):
    """Drop a single option (by its ObjectId) from a saved filter."""
    current_filter = db.filter_by_id(filter_id)
    target_id = ObjectId(filter_option_id)
    current_filter['options'] = [
        o for o in current_filter.get('options', []) if o.get('_id', None) != target_id
    ]
    db.save_filter(current_filter)
    return redirect(url_for('stocks', filter_id=current_filter['_id']))
@app.route('/stocks/filter/<filter_id>/add_rank_option')
def stocks_add_rank_option(filter_id):
    """Append a rank option (looked up by key) to a saved filter."""
    rank_option_key = request.args.get('key', None)
    if not rank_option_key:
        return redirect(url_for('stocks', filter_id=filter_id))
    matches = [r for r in db.available_rank_options if r.key == rank_option_key]
    if matches:
        current_filter = db.filter_by_id(filter_id)
        # NOTE(review): relies on 'options' already existing on the filter
        # (filters are created with 'options': []); a missing key would lose
        # the append -- confirm if filters can come from elsewhere.
        filter_options = current_filter.get('options', [])
        filter_options.append(matches[0]._asdict())
        db.save_filter(current_filter)
    return redirect(url_for('stocks', filter_id=filter_id, status='rank', alt='alt1'))
@app.route('/stocks/filter/<filter_id>/remove_rank_option')
def stocks_remove_rank_option(filter_id):
    """Remove a rank option (by key) from a saved filter."""
    rank_option_key = request.args.get('key', None)
    if not rank_option_key:
        return redirect(url_for('stocks', filter_id=filter_id))
    current_filter = db.filter_by_id(filter_id)
    kept = [o for o in current_filter.get('options', []) if o['key'] != rank_option_key]
    current_filter['options'] = kept
    db.save_filter(current_filter)
    return redirect(url_for('stocks', filter_id=filter_id, status='rank', alt='alt1'))
@app.route('/stocks/fill')
def stocks_fill_snowball_stats():
    """Refresh snowball statistics for every stock, then show the list."""
    # A plain loop: the original list comprehension was evaluated only for
    # its side effects and built a throwaway list.
    for s in db.all_stocks():
        s.fill_snowball_stat()
    return redirect(url_for('stocks'))
@app.route('/stock/<code>')
def stock(code):
    """Render the detail page for a single stock."""
    stock = db.stock_by_code(code)
    filters = db.all_filters()
    return render_template('stock_detail.html', VERSION=VERSION, stock=stock, filters=filters)
@app.route('/stock/<code>/records')
def stock_records(code):
    """Show yearly record history for a stock, limited to the last three years."""
    import historical
    stock = db.stock_by_code(code)
    cutoff_year = datetime.now().year - 2
    records_by_year = [
        data for data in historical.records_by_year(stock)
        if data[0].year >= cutoff_year
    ]
    return render_template('stock_records.html', VERSION=VERSION, stock=stock, records_by_year=records_by_year)
@app.route('/stock/refresh/<code>')
def stock_refresh(code):
    """Re-scrape a stock from the snowball source, then show its detail page."""
    parse_snowball(code)
    return redirect(url_for('stock', code=code))
@app.route('/stock/<code>/expected_rate')
def stock_expected_rate_by_price(code):
    """Show a stock's detail page with the expected rate at a hypothetical price."""
    stock = db.stock_by_code(code)
    try:
        expected_rate_price = float(request.args.get('price'))
    except (TypeError, ValueError):
        # TypeError: ``price`` absent (args.get returns None); ValueError:
        # non-numeric. The original caught only ValueError, so a missing
        # ``price`` query arg produced an unhandled 500.
        return redirect(url_for('stock', code=code))
    return render_template('stock_detail.html', VERSION=VERSION, stock=stock, expected_rate_price=expected_rate_price)
@app.route('/stock/<code>/my_price', methods=['POST'])
def stock_my_price(code):
    """Store the user's own target price for a stock."""
    if request.method == 'POST':
        target = db.stock_by_code(code)
        target['my_price'] = float(request.form.get('my_price', 0))
        db.save_stock(target)
    return redirect(url_for('stock_refresh', code=code))
@app.route('/stock/<code>/adjust', methods=['POST'])
def stock_adjusted_future_roe(code):
    """Persist a manual override of a stock's future ROE."""
    if request.method == 'POST':
        target = db.stock_by_code(code)
        target['adjusted_future_roe'] = float(request.form.get('adjusted_future_roe', 0))
        db.save_stock(target)
    return redirect(url_for('stock_refresh', code=code))
@app.route('/stock/<code>/adjustpbr', methods=['POST'])
def stock_adjusted_future_pbr(code):
    """Persist a manual override of a stock's future PBR."""
    if request.method == 'POST':
        target = db.stock_by_code(code)
        target['adjusted_future_pbr'] = float(request.form.get('adjusted_future_pbr', 0))
        db.save_stock(target)
    return redirect(url_for('stock_refresh', code=code))
@app.route('/stock/<code>/adjustpbr/clear')
def stock_clear_adjusted_future_pbr(code):
    """Reset the manual future-PBR override back to zero."""
    target = db.stock_by_code(code)
    target['adjusted_future_pbr'] = 0
    db.save_stock(target)
    return redirect(url_for('stock_refresh', code=code))
@app.route('/stock/<code>/note', methods=['POST'])
def stock_update_note(code):
    """Save the free-text note attached to a stock."""
    if request.method == 'POST':
        target = db.stock_by_code(code)
        target['note'] = str(request.form.get('note', ''))
        db.save_stock(target)
    return redirect(url_for('stock', code=code))
@app.route('/stock/<code>/clear')
def stock_clear_adjusted_future_roe(code):
    """Reset the manual future-ROE override back to zero."""
    target = db.stock_by_code(code)
    target['adjusted_future_roe'] = 0
    db.save_stock(target)
    return redirect(url_for('stock_refresh', code=code))
@app.route('/stock/<code>/<status>/<on>')
def stock_status(code, status, on):
    """Toggle a boolean flag; 'owned' and 'starred' are mutually exclusive."""
    target = db.stock_by_code(code)
    enabled = (on == 'on')
    target[status] = enabled
    if enabled:
        if status == 'owned':
            target['starred'] = False
        elif status == 'starred':
            target['owned'] = False
    db.save_stock(target)
    return redirect(url_for('stock', code=code))
@app.route('/stocks/add', methods=['POST'])
def add_stock():
    """Scrape and register a new stock by code, then return to the list."""
    if request.method == 'POST':
        code = request.form.get('code', None)
        if code:
            parse_snowball(code)
    # Use url_for like every other handler, instead of the bare relative
    # path 'stocks', which depends on the current request's URL.
    return redirect(url_for('stocks'))
@app.route('/stocks/<code>/remove')
def remove_stock(code):
    """Delete a stock record and return to the list."""
    db.remove_stock(code)
    return redirect(url_for('stocks'))
@app.route('/etfs/<etf_type>')
def etfs(etf_type='domestic'):
    """Render the ETF momentum page for a given ETF type."""
    # Domestic ETFs are ranked on 3-month momentum, others on 6-month.
    momentum_base = 'month3' if etf_type == 'domestic' else 'month6'
    momentum_base_kr = '3개월' if etf_type == 'domestic' else '6개월'
    order_by = request.args.get('order_by', momentum_base)
    ordering = request.args.get('ordering', 'desc')
    etfs = db.all_etf(order_by=order_by, ordering=ordering, etf_type=etf_type)
    bond_etfs = ['148070', '152380']  # bond ETF codes excluded from absolute momentum
    tags = defaultdict(list)
    for etf in etfs:
        for tag in etf.get('tags'):
            tags[tag].append(etf)
    tags = {k: ETFTag(k, v) for k, v in tags.items()}
    stat = {}
    # Momentum stats are computed only over ETFs with a non-zero momentum value.
    etfs_by_momentum_base = [etf for etf in db.all_etf(order_by=momentum_base, ordering='desc', etf_type=etf_type) if etf[momentum_base] != 0]
    no_bond_etfs = sorted([etf for etf in etfs_by_momentum_base if etf['code'] not in bond_etfs], key=lambda x: x.get(momentum_base, 0), reverse=True)
    stat['absolute_momentum_momentum_base_avg'] = mean_or_zero([etf[momentum_base] for etf in no_bond_etfs])
    # NOTE(review): both [0] lookups assume at least one qualifying ETF exists;
    # an empty ETF collection would raise IndexError here -- confirm intended.
    stat['absolute_momentum_high'] = no_bond_etfs[0]
    stat['relative_momentum_etf'] = etfs_by_momentum_base[0]
    tags = sorted(tags.values(), key=lambda t: getattr(t, momentum_base), reverse=True)
    filters = db.all_filters()
    return render_template('etfs.html', VERSION=VERSION, INTEREST=INTEREST, filters=filters, etfs=etfs, order_by=order_by, ordering=ordering,
                           stat=stat, tags=tags, etf_type=etf_type, momentum_base=momentum_base, momentum_base_kr=momentum_base_kr)
if __name__ == '__main__':
    # ``debug=True`` sets app.debug and enables the debugger/reloader --
    # the idiomatic form of assigning ``app.debug = True`` before run().
    app.run(debug=True)
archives/0hoo_snowball.zip | core_sample.py | import requests
from lxml import html
# Scrape endpoints; each takes a KRX stock code.
DAUM_BASIC = 'http://finance.daum.net/item/main.daum?code='
NAVER_COMPANY = 'http://companyinfo.stock.naver.com/v1/company/c1010001.aspx?cmp_cd='
NAVER_YEARLY = "http://companyinfo.stock.naver.com/v1/company/ajax/cF1001.aspx?cmp_cd=%s&fin_typ=0&freq_typ=Y"
def basic(code):
    """Scrape ``(title, price)`` for a stock code from the Daum quote page."""
    page = requests.get(DAUM_BASIC + code).content
    tree = html.fromstring(page)
    title = tree.xpath('//*[@id="topWrap"]/div[1]/h2')[0].text
    raw_price = tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[1]/em')[0].text
    return (title, float(raw_price.replace(',', '')))
def bps(code):
    """Scrape BPS (book value per share) for ``code`` from Naver Finance."""
    # Reuse the module-level NAVER_COMPANY constant instead of re-inlining
    # the identical URL string (consistency with basic()/snowball()).
    url = NAVER_COMPANY + code
    content = requests.get(url).content
    tree = html.fromstring(content)
    raw = tree.xpath('//*[@id="pArea"]/div[1]/div/table/tr[3]/td/dl/dt[2]/b')[0].text
    return int(raw.replace(',', ''))
def snowball(code):
    """Scrape yearly ROE figures; return (historic ROEs, last-3 estimates)."""
    content = requests.get(NAVER_YEARLY % code).content
    tree = html.fromstring(content)
    raw = tree.xpath('/html/body/table/tbody/tr[22]/td/span/text()')
    values = [float(v.replace(',', '')) for v in raw]
    split_at = len(values) - 3
    return values[:split_at], values[split_at:]
def future_bps(bps, future_roe, future=10):
    """Project BPS ``future`` years ahead at a constant annual ROE (percent)."""
    growth = 1 + future_roe / 100
    return int(bps * growth ** future)
def expected_rate(future_bps, price, future=10):
    """Annualised return (%) implied by ``price`` growing into ``future_bps``."""
    ratio = future_bps / price
    return (ratio ** (1.0 / future) - 1) * 100
def invest_price(future_bps, target_rate=15, future=10):
    """Price to pay now so ``future_bps`` yields ``target_rate``% per year."""
    discount = (1 + (target_rate / 100)) ** future
    return int(future_bps / discount)
| [] | [] | [] |
archives/0hoo_snowball.zip | db.py | from typing import Tuple, List, Optional, Dict
from types import FunctionType
from datetime import datetime
from functools import partial
from itertools import repeat
from statistics import mean, StatisticsError
from collections import UserDict, namedtuple
from pymongo import MongoClient, ASCENDING, DESCENDING
from bson.objectid import ObjectId
from utils import attr_or_key_getter, first_or_none
# Lightweight record types used throughout the module.
FScore = namedtuple('FScore', ['total_issued_stock', 'profitable', 'cfo'])
YearStat = namedtuple('YearStat', ['year', 'value', 'calculated'])
Quarter = namedtuple('Quarter', ['year', 'number', 'estimated'])
FilterOption = namedtuple('Filter', ['key', 'title', 'morethan', 'value', 'is_boolean'])
RankOption = namedtuple('Rank', ['key', 'title', 'asc', 'is_rankoption'])
# Aliases for (year, value) pairs.
YEAR_STAT = Tuple[int, int]
YEAR_FSCORE = Tuple[int, FScore]
# NOTE: connects at import time -- importing this module requires a
# reachable local MongoDB instance.
client = MongoClient()
db = client.snowball
DIVIDEND_TAX_RATE = 15.40  # dividend tax rate, percent
FUTURE = 10  # projection horizon in years
TARGET_RATE = 15  # target annual return, percent
THIS_YEAR = datetime.now().year
LAST_YEAR = THIS_YEAR - 1
# Catalog of rank options selectable in the UI (titles are Korean labels).
available_rank_options = [
    RankOption(key='rank_pbr', title='PBR', asc=True, is_rankoption=True),
    RankOption(key='rank_per', title='PER', asc=True, is_rankoption=True),
    RankOption(key='rank_last_year_gpa', title='GPA', asc=False, is_rankoption=True),
    RankOption(key='rank_dividend', title='배당', asc=False, is_rankoption=True),
    RankOption(key='rank_beta', title='저베타', asc=True, is_rankoption=True),
    RankOption(key='rank_month1', title='1개월', asc=False, is_rankoption=True),
    RankOption(key='rank_month3', title='3개월', asc=False, is_rankoption=True),
    RankOption(key='rank_month6', title='6개월', asc=False, is_rankoption=True),
    RankOption(key='rank_month12', title='12개월', asc=False, is_rankoption=True),
    RankOption(key='rank_relative_earning_rate', title='상대수익률', asc=False, is_rankoption=True),
    RankOption(key='rank_roic', title='ROIC', asc=False, is_rankoption=True),
]
# Catalog of filter options; morethan/value are filled in per saved filter.
available_filter_options = [
    FilterOption(key='expected_rate', title='기대수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='latest_fscore', title='FScore', morethan=None, value=None, is_boolean=False),
    FilterOption(key='future_roe', title='fROE', morethan=None, value=None, is_boolean=False),
    FilterOption(key='expected_rate_by_current_pbr', title='현P기대수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='expected_rate_by_low_pbr', title='저P기대수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='pbr', title='PBR', morethan=None, value=None, is_boolean=False),
    FilterOption(key='per', title='PER', morethan=None, value=None, is_boolean=False),
    FilterOption(key='last_year_pcr', title='PCR', morethan=None, value=None, is_boolean=False),
    FilterOption(key='last_year_psr', title='PSR', morethan=None, value=None, is_boolean=False),
    FilterOption(key='dividend_rate', title='배당률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='beta', title='베타', morethan=None, value=None, is_boolean=False),
    FilterOption(key='foreigner_weight', title='외국인비중', morethan=None, value=None, is_boolean=False),
    FilterOption(key='floating_rate', title='유동주식비율', morethan=None, value=None, is_boolean=False),
    FilterOption(key='month1', title='1개월수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='month3', title='3개월수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='month6', title='6개월수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='month12', title='12개월수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='relative_earning_rate', title='상대수익률', morethan=None, value=None, is_boolean=False),
    FilterOption(key='countable_last_four_years_roes_count', title='계산가능ROE수', morethan=None, value=None, is_boolean=False),
    FilterOption(key='roe_max_diff', title='ROE최대최소차', morethan=None, value=None, is_boolean=False),
    FilterOption(key='last_four_years_roe_max_diff', title='최근4년ROE최대최소차', morethan=None, value=None, is_boolean=False),
    FilterOption(key='calculable_pbr_count', title='계산가능PBR수', morethan=None, value=None, is_boolean=False),
    FilterOption(key='FCF_surplus_years', title='FCF흑자연수', morethan=None, value=None, is_boolean=False),
    FilterOption(key='rank_last_year_gpa', title='GPA순위', morethan=None, value=None, is_boolean=False),
    FilterOption(key='agg_rank', title='시총순위', morethan=None, value=None, is_boolean=False),
    FilterOption(key='rank_beta', title='저베타순위', morethan=None, value=None, is_boolean=False),
    FilterOption(key='is_closing_month_march', title='3월결산(참)', morethan=None, value=None, is_boolean=True),
    FilterOption(key='is_five_years_record_low', title='5년최저PBR(참)', morethan=None, value=None, is_boolean=True),
    FilterOption(key='has_consensus', title='컨센서스있음(참)', morethan=None, value=None, is_boolean=True),
    FilterOption(key='is_positive_consensus_roe', title='컨센서스>fROE(참)', morethan=None, value=None, is_boolean=True),
    FilterOption(key='is_starred', title='관심종목(참)', morethan=None, value=None, is_boolean=True),
    FilterOption(key='is_owned', title='보유종목(참)', morethan=None, value=None, is_boolean=True),
]
class Filter(UserDict):
    """A saved stock filter: a named list of filter- and rank-option dicts."""

    @property
    def filter_options(self) -> List[FilterOption]:
        """Options that restrict the stock list (everything non-rank)."""
        plain = [o for o in self['options'] if not o.get('is_rankoption', False)]
        return [
            FilterOption(
                key=o['key'],
                title=o['title'],
                morethan=o['morethan'],
                value=o['value'],
                is_boolean=o.get('is_boolean', False),
            )
            for o in plain
        ]

    @property
    def dict_filter_options(self) -> List[dict]:
        """Raw (dict) form of the non-rank options."""
        return [o for o in self['options'] if not o.get('is_rankoption', False)]

    @property
    def rank_options(self) -> List[RankOption]:
        """Options that rank the stock list rather than filter it."""
        return [
            RankOption(key=o['key'], title=o['title'], asc=o['asc'], is_rankoption=True)
            for o in self['options']
            if o.get('is_rankoption', False)
        ]
class ETF(UserDict):
    """Dict-backed ETF record loaded from Mongo."""

    @property
    def object_id(self) -> str:
        """Mongo ``_id`` of this record."""
        return self['_id']

    @property
    def tags(self) -> str:
        """Comma-joined tag list ('' when no tags are stored).

        ``tags`` may be missing on a record; the original joined
        ``self.get('tags')`` directly, raising TypeError on ``None``.
        """
        return ', '.join(self.get('tags') or [])
class Stock(UserDict):
def __hash__(self):
return hash(frozenset(self.items()))
@property
def object_id(self) -> str:
return self['_id']
@property
def is_starred(self) -> bool:
return self.get('starred', False)
@property
def is_owned(self) -> bool:
return self.get('owned', False)
@property
def current_price(self) -> int:
return int(self.get('current_price', 0))
@property
def price_arrow(self) -> str:
if self.get('price_diff') == 0:
return ''
else:
return '▲' if self.get('price_diff') > 0 else '▼'
@property
def price_color(self) -> str:
if self.get('price_diff') == 0:
return 'black'
else:
return 'red' if self.get('price_diff') > 0 else 'blue'
@property
def price_sign(self) -> str:
return '+' if self.get('price_diff') > 0 else ''
@property
def pbr(self) -> float:
return self.get('pbr', 0)
@property
def per(self) -> float:
return self.get('per', 0)
@property
def financial_statements_url(self) -> str:
return "http://companyinfo.stock.naver.com/v1/company/ajax/cF1001.aspx?cmp_cd=%s&fin_typ=0&freq_typ=Y" % (self['code'])
@property
def roes(self) -> List[Tuple[int, Optional[float]]]:
return self.year_stat('ROEs')
@property
def pbrs(self) -> List[Tuple[int, Optional[float]]]:
return self.year_stat('PBRs')
@property
def pers(self) -> List[Tuple[int, Optional[float]]]:
return self.year_stat('PERs')
@property
def epss(self) -> List[Tuple[int, Optional[float]]]:
return self.year_stat('EPSs')
@property
def countable_roes(self) -> List[Tuple[int, Optional[float]]]:
return [roe for roe in self.get('ROEs', []) if roe]
@property
def countable_last_four_years_roes_count(self) -> int:
return len(self.last_four_years_roe)
@property
def low_pbr(self) -> float:
try:
return min([year_pbr[1] for year_pbr in self.year_stat('PBRs', exclude_future=True) if year_pbr[1] > 0])
except ValueError:
return 0
@property
def high_pbr(self) -> float:
try:
return max([year_pbr[1] for year_pbr in self.year_stat('PBRs', exclude_future=True) if year_pbr[1] > 0])
except ValueError:
return 0
@property
def mid_pbr(self) -> float:
return (self.low_pbr + self.get('pbr')) / 2
@property
def adjusted_eps(self) -> int:
past_eps = [eps[1] for eps in self.year_stat('EPSs', exclude_future=True)]
if len(past_eps) < 3:
return 0
return int(((past_eps[-1] * 3) + (past_eps[-2] * 2) + past_eps[-3]) / 6)
@property
def mid_roe(self) -> float:
ROEs = self.countable_roes
return mean([mean(ROEs), min(ROEs)]) if len(ROEs) > 2 else 0
    @property
    def eps_growth(self) -> float:
        """Mean year-over-year EPS growth, in percent (0 on division by zero)."""
        EPSs = self.get('EPSs', [0, 0])
        try:
            # Average of pairwise ratios between consecutive EPS values.
            return mean([y/x - 1 for x, y in zip(EPSs[:-1], EPSs[1:])]) * 100
        except ZeroDivisionError:
            return 0
@property
def dividend_rate(self) -> float:
return self.get('dividend_rate', 0)
@property
def beta(self) -> float:
return self.get('beta', 0)
@property
def foreigner_weight(self) -> float:
return self.get('foreigner_weight', 0)
@property
def floating_rate(self) -> float:
return self.get('floating_rate', 0)
@property
def has_note(self) -> bool:
return len(self.get('note', '')) > 0
@property
def latest_fscore(self) -> int:
    """Sum of the three F-Score components for LAST_YEAR, or -1 when
    there is no F-Score entry for that year."""
    last_year_fscore = [f for f in self.fscores if f[0] == LAST_YEAR]
    if not last_year_fscore:
        return -1
    fscore = last_year_fscore[0][1]
    # Add the three 0/1 components directly; the original wrapped the
    # already-summed value in sum([...]), which was a no-op.
    return fscore.total_issued_stock + fscore.profitable + fscore.cfo
@property
def fscores(self) -> List[Tuple[int, FScore]]:
NPs = self.year_stat('NPs')
return [(np[0], self.fscore(np[0])) for np in NPs]
@property
def mean_per(self) -> float:
PERs = self.get('PERs', [])
return mean(PERs) if len(PERs) > 2 else 0
@property
def dividend_tax_adjust(self) -> float:
return self.get('dividend_rate', 0) * (DIVIDEND_TAX_RATE / 100)
@property
def last_four_years_roe(self) -> List[int]:
return [roe[1] for roe in self.four_years_roe(THIS_YEAR)]
def four_years_roe(self, year) -> List[Tuple[int, float]]:
return [roe for roe in self.year_stat('ROEs') if roe[1] and roe[0] >= (year - 4) and roe[0] < year]
@property
def calculated_roe_count(self) -> int:
return len(self.last_four_years_roe)
@property
def calculable_pbr_count(self) -> int:
return len([pbr for pbr in self.year_stat('PBRs', exclude_future=True) if pbr[1] > 0])
@property
def mean_roe(self) -> float:
return mean(self.last_four_years_roe) if self.last_four_years_roe else 0
@property
def future_roe(self) -> float:
return self.mean_roe - self.dividend_tax_adjust
@property
def expected_rate(self) -> float:
return self.calc_expected_rate(self.calc_future_bps, FUTURE)
@property
def invest_price(self) -> float:
future_bps = self.calc_future_bps(FUTURE)
return int(future_bps / ((1 + (1 * TARGET_RATE / 100)) ** FUTURE))
@property
def expected_rate_by_current_pbr(self) -> float:
return self.calc_expected_rate(self.calc_future_price_current_pbr, FUTURE)
@property
def expected_rate_by_low_pbr(self) -> float:
return self.calc_expected_rate(self.calc_future_price_low_pbr, FUTURE)
@property
def expected_rate_by_mid_pbr(self) -> float:
return self.calc_expected_rate(self.calc_future_price_low_current_mid_pbr, FUTURE)
@property
def expected_rate_by_adjusted_future_pbr(self) -> float:
return self.calc_expected_rate(self.calc_future_price_adjusted_future_pbr, FUTURE)
@property
def intrinsic_value(self) -> int:
return int((self.get('bps', 0) + (self.adjusted_eps * 10)) / 2)
@property
def intrinsic_discount_rate(self) -> float:
return (self.intrinsic_value / self.current_price ** (1.0 / 1) - 1) * 100
@property
def peg_current_per(self) -> float:
return self.get('per', 0) / self.eps_growth if self.eps_growth != 0 else 0
@property
def peg_mean_per(self) -> float:
return self.mean_per / self.eps_growth if self.eps_growth != 0 else 0
@property
def roe_max_diff(self) -> float:
ROEs = self.countable_roes
return max(ROEs) - min(ROEs) if len(ROEs) > 2 else 0
@property
def last_four_years_roe_max_diff(self) -> float:
    """Spread between the best and worst ROE of the last four years.

    Returns 0 when there is no ROE data for the period (max()/min()
    of an empty sequence raises ValueError). The original used a bare
    except, which would also hide unrelated bugs.
    """
    try:
        return max(self.last_four_years_roe) - min(self.last_four_years_roe)
    except ValueError:
        # No countable ROE entries.
        return 0
@property
def QROEs(self) -> List[Tuple[Quarter, float]]:
return [(Quarter(*qroe[0]), qroe[1]) for qroe in self.get('QROEs', [])]
@property
def QBPSs(self) -> List[Tuple[Quarter, int]]:
return [(Quarter(*qbps[0]), qbps[1]) for qbps in self.get('QBPSs', [])]
@property
def QROEs_QBPSs(self):
return zip(self.QROEs, self.QBPSs)
@property
def calculable(self) -> bool:
return self.get('bps', 0) > 0 and (self.get('adjusted_future_roe', 0) or self.future_roe) > 0
@property
def future_bps(self) -> int:
return self.calc_future_bps(FUTURE)
@property
def BPSs(self):
return self.year_stat('BPSs')
@property
def DEPTs(self):
return self.year_stat('DEPTs')
@property
def CFOs(self):
if self.get('CFOs') and type(self.get('CFOs')[0]) is int:
return self.year_stat('CFOs')
return self.get('CFOs', [(0, 0)])
@property
def CFIs(self):
if self.get('CFIs') and type(self.get('CFIs')[0]) is int:
return self.year_stat('CFIs')
return self.get('CFIs', [(0, 0)])
@property
def CFFs(self):
if self.get('CFFs') and type(self.get('CFFs')[0]) is int:
return self.year_stat('CFFs')
return self.get('CFFs', [(0, 0)])
@property
def FCFs(self):
if self.get('FCFs') and type(self.get('FCFs')[0]) is int:
return self.year_stat('FCFs')
return self.get('FCFs', [(0, 0)])
@property
def FCF_surplus_years(self):
return len([v for v in self.year_stat('FCFs', exclude_future=True) if v[1] > 0])
@property
def is_five_years_record_low(self):
return self.low_pbr > self.pbr
@property
def has_consensus(self) -> bool:
return len(self.consensus_roes) > 0
@property
def consensus_roes(self):
return [pair for pair in self.roes if pair[0] > LAST_YEAR]
@property
def mean_consensus_roe(self):
try:
return mean([pair[1] for pair in self.consensus_roes if pair[1]])
except StatisticsError:
return 0
@property
def is_positive_consensus_roe(self):
if not self.has_consensus:
return False
return self.mean_consensus_roe >= self.future_roe
@property
def TAs(self):
return self.year_stat('TAs', exclude_future=False)
@property
def rank_last_year_gpa(self):
return self.get('rank_last_year_gpa')
@property
def rank_pbr(self):
return self.get('rank_pbr')
@property
def is_closing_month_march(self):
return self.get('closing_month', 0) == 3
@property
def current_assets(self):
return self.get('current_assets', [])
@property
def current_liability(self):
return self.get('current_liability', [])
@property
def total_liability(self):
return self.get('total_liability', [])
@property
def current_ratio(self):
return [(c[0][0], (c[0][1] / c[1][1] if c[1][1] != 0 else 0) * 100) for c in zip(self.current_assets, self.current_liability)]
@property
def current_ratio_last_year(self):
if not self.current_ratio:
return 0
last_year = [c[1] for c in self.current_ratio if c[0] == LAST_YEAR]
return last_year[0] if last_year else 0
@property
def NCAV(self):
asset = [c[1] for c in self.current_assets if c[0] == LAST_YEAR]
liability = [c[1] for c in self.total_liability if c[0] == LAST_YEAR]
if not asset or not liability:
return 0
return asset[0] - liability[0]
@property
def NCAV_ratio(self):
return self.NCAV / self.get('agg_value', 1) * 100
def calc_gpa(self, gp):
if not gp[1]:
return None
TA = [TA for TA in self.TAs if TA[0] == gp[0]]
if not TA:
return None
TA = TA[0]
if not TA[1]:
return None
return gp[1] / TA[1]
@property
def GPAs(self):
return [(gp[0], self.calc_gpa(gp)) for gp in self.get('GPs', [])]
@property
def GPA_stat(self):
return zip(self.TAs, [v for v in self.get('GPs', []) if v[1]], [v for v in self.GPAs if v[1]])
@property
def last_year_gpa(self):
v = [gpa[1] for gpa in self.GPAs if gpa[0] == LAST_YEAR]
if not v or not v[0]:
return 0
return v[0]
@property
def last_year_pcr(self):
v = [pcr[1] for pcr in self.get('PCRs', []) if pcr[0] == LAST_YEAR]
if not v or not v[0]:
return 0
return v[0]
@property
def last_year_psr(self):
v = [psr[1] for psr in self.get('PSRs', []) if psr[0] == LAST_YEAR]
if not v or not v[0]:
return 0
return v[0]
@property
def agg_rank(self):
return self.get('agg_rank')
@property
def use_fnguide(self):
return self.get('use_fnguide', False)
@property
def month1(self):
return self.get('month1', 0)
@property
def month3(self):
return self.get('month3', 0)
@property
def month6(self):
return self.get('month6', 0)
@property
def month12(self):
return self.get('month12', 0)
@property
def relative_earning_rate(self):
return self.get('relative_earning_rate', -100)
@property
def sales_cost_ratio(self):
return [(s[0], c[1] / s[1] * 100) for c, s in zip(self.get('sales_cost', []), self.get('sales', []))]
@property
def SGA_ratio(self):
return [(s[0], c[1] / s[1] * 100) for c, s in zip(self.get('SGAs', []), self.get('sales', []))]
@property
def mean_ROIC(self):
values = [v[1] for v in self.get('ROICs', []) if v[1] > 0]
return mean(values) if values else 0
@property
def last_year_fcf(self):
fcf = [fcf[1] for fcf in self.FCFs if fcf[0] == LAST_YEAR]
return fcf[0] if fcf else 0
@property
def last_year_pfr(self):
fcf = self.last_year_fcf
if not fcf:
return 0
return self.get('agg_value', 1) / fcf
def value_by_year(self, key, year):
return first_or_none([v[1]for v in attr_or_key_getter(key, self, []) if v[0] == year])
def total_asset_turnover_by(self, year):
return first_or_none([v[1]for v in self.get('total_asset_turnover', []) if v[0] == year])
def net_working_capital_by(self, year):
return first_or_none([v[1]for v in self.get('net_working_capital', []) if v[0] == year])
def expected_rate_by_price(self, price: int) -> float:
return self.calc_expected_rate(self.calc_future_bps, FUTURE, price=price)
def calc_future_bps(self, future: int) -> int:
if not self.calculable:
return 0
bps = self.get('bps', 0)
adjusted_future_roe = self.get('adjusted_future_roe', 0)
future_roe = adjusted_future_roe or self.future_roe
return int(bps * ((1 + (1 * future_roe / 100)) ** future))
def calc_future_price_low_pbr(self, future: int) -> int:
return int(self.calc_future_bps(future) * self.low_pbr)
def calc_future_price_high_pbr(self, future: int) -> int:
return int(self.calc_future_bps(future) * self.high_pbr)
def calc_future_price_current_pbr(self, future: int) -> int:
return int(self.calc_future_bps(future) * self['pbr'])
def calc_future_price_low_current_mid_pbr(self, future: int) -> int:
return int(self.calc_future_bps(future) * self.mid_pbr)
def calc_future_price_adjusted_future_pbr(self, future: int) -> int:
return int(self.calc_future_bps(future) * self.get('adjusted_future_pbr', 0))
def calc_expected_rate(self, calc_bps, future: int, price: Optional[int] = None):
    """Annualized expected return (%) implied by a projected BPS.

    Args:
        calc_bps: callable mapping a horizon in years to a projected BPS.
        future: projection horizon in years.
        price: entry price; falls back to the current market price when
            falsy (None or 0).

    Returns 0 when the projected BPS is negative.
    NOTE(review): a zero `price` after the fallback would raise
    ZeroDivisionError — presumably current_price is always positive
    here; confirm against the scraper.
    """
    if not price:
        price = self.current_price
    future_bps = calc_bps(future)
    if future_bps < 0:
        return 0
    # Compound annual rate: ((V / P) ** (1 / n) - 1) * 100.
    return ((future_bps / price) ** (1.0 / future) - 1) * 100
def ten_year_prices(self, growth_rate: float = 0.15, years: int = 10) -> List[Tuple[int, float]]:
    """Project 'my_price' forward with compound growth.

    Args:
        growth_rate: yearly growth rate (default 0.15, i.e. the
            previously hard-coded 15%).
        years: number of years to project (default 10, as before).

    Returns:
        List of (year_index, projected_price) pairs, starting at year 1;
        empty list when 'my_price' is unset or zero.
    """
    price = self.get('my_price', 0)
    if not price:
        return []
    projections = []
    for year_index in range(1, years + 1):
        price = price + (price * growth_rate)
        projections.append((year_index, price))
    return projections
def fscore(self, year) -> FScore:
    """Build a simplified three-component F-Score for `year`.

    Components (each 0 or 1):
      - total_issued_stock: 1 when more than two issue-count records
        exist and they are all identical (no share dilution).
      - profitable: 1 when net profit ('NPs') for `year` is positive.
      - cfo: 1 when operating cash flow ('CFOs') for `year` is positive.
    """
    total_issued_stock = 0
    profitable = 0
    cfo = 0
    TIs = self.get('TIs', [])
    # len(set(...)) <= 1 with len > 2 means every record is the same value.
    if len(TIs) > 2 and len(set(TIs)) <= 1:
        total_issued_stock = 1
    NPs = self.year_stat('NPs')
    year_profit = [p[1] for p in NPs if p[0] == year]
    if len(year_profit) > 0 and year_profit[0] > 0:
        profitable = 1
    CFOs = self.CFOs
    if len(CFOs) > 0:
        # Older records store raw ints; normalize them into
        # (year, value) pairs before filtering by year.
        if type(CFOs[0]) is int:
            CFOs = self.year_stat('CFOs')
        year_cfo = [c[1] for c in CFOs if c[0] == year]
        if len(year_cfo) > 0 and year_cfo[0] > 0:
            cfo = 1
    return FScore(total_issued_stock=total_issued_stock, profitable=profitable, cfo=cfo)
def year_stat(self, stat, exclude_future=False) -> List[Tuple[int, Optional[float]]]:
    """Pair the raw `stat` list with calendar years.

    'last_year_index' anchors which element corresponds to LAST_YEAR;
    elements after that index are future estimates and can be dropped
    with exclude_future=True. Returns [(0, 0)] when the stat is missing
    so callers can iterate unconditionally.
    """
    stats = self.get(stat)
    if not stats:
        return [(0, 0)]
    last_year_index = self.get('last_year_index')
    assert(last_year_index is not None)
    if len(stats) < last_year_index:
        # Fewer entries than the anchor implies: assume the series ends
        # at LAST_YEAR and count years backwards from its tail.
        year = lambda idx: LAST_YEAR - len(stats) + idx + 1
        return [(year(idx), value) for idx, value in enumerate(stats)
                if not exclude_future or year(idx) <= LAST_YEAR]
    else:
        # Anchor on last_year_index: index offsets map directly to years.
        year = lambda idx: LAST_YEAR - (last_year_index - idx)
        return [(year(idx), value) for idx, value in enumerate(stats)
                if not exclude_future or year(idx) <= LAST_YEAR]
def __str__(self) -> str:
return '{} : {}'.format(self['title'], self['code'])
def make_filter_option_func(filter_option):
    """Build a predicate over raw stock dicts from a filter option.

    Boolean options return the attribute itself; numeric options
    compare it against the option's threshold in the configured
    direction.
    """
    def predicate(raw_stock):
        value = getattr(Stock(raw_stock), filter_option.key)
        if filter_option.is_boolean:
            return value
        if filter_option.morethan:
            return value >= filter_option.value
        return value <= filter_option.value
    return predicate
def update_rank_by(stocks: List[Stock], key: str, rank_key: str, reverse: bool):
    """Write a 1-based rank into `rank_key` for each stock, ordered by `key`.

    Stocks with a missing, zero, or negative value for `key` are not
    ranked competitively: they all receive the worst rank (len(stocks)).
    Every touched stock is persisted via save_stock.
    """
    def raw_value(stock):
        return attr_or_key_getter(key, stock, default_value=None)

    ranked = sorted((s for s in stocks if raw_value(s) and raw_value(s) > 0),
                    key=partial(attr_or_key_getter, key), reverse=reverse)
    for position, stock in enumerate(ranked, start=1):
        stock[rank_key] = position
        save_stock(stock)

    for stock in stocks:
        value = raw_value(stock)
        if not value or value < 0:
            stock[rank_key] = len(stocks)
            save_stock(stock)
def update_ranks():
    """Recompute every per-metric rank column across all stocks.

    Bug fix: the 6-month and 12-month momentum ranks previously
    overwrote 'rank_month3'; they now write 'rank_month6' and
    'rank_month12' respectively.
    """
    stocks = [Stock(s) for s in db.stocks.find()]
    update_rank_by(stocks, 'last_year_gpa', 'rank_last_year_gpa', reverse=True)
    update_rank_by(stocks, 'agg_value', 'agg_rank', reverse=True)
    update_rank_by(stocks, 'pbr', 'rank_pbr', reverse=False)
    update_rank_by(stocks, 'per', 'rank_per', reverse=False)
    update_rank_by(stocks, 'dividend_rate', 'rank_dividend', reverse=True)
    update_rank_by(stocks, 'beta', 'rank_beta', reverse=False)
    update_rank_by(stocks, 'floating_rate', 'rank_floating_rate', reverse=True)
    update_rank_by(stocks, 'foreigner_weight', 'rank_foreigner_weight', reverse=True)
    update_rank_by(stocks, 'month1', 'rank_month1', reverse=True)
    update_rank_by(stocks, 'month3', 'rank_month3', reverse=True)
    update_rank_by(stocks, 'month6', 'rank_month6', reverse=True)
    update_rank_by(stocks, 'month12', 'rank_month12', reverse=True)
    update_rank_by(stocks, 'relative_earning_rate', 'rank_relative_earning_rate', reverse=True)
    update_rank_by(stocks, 'NCAV_ratio', 'rank_ncav', reverse=True)
    update_rank_by(stocks, 'mean_ROIC', 'rank_roic', reverse=True)
    update_rank_by(stocks, 'current_ratio_last_year', 'rank_current_ratio', reverse=True)
    update_rank_by(stocks, 'last_year_pcr', 'rank_last_year_pcr', reverse=False)
    update_rank_by(stocks, 'last_year_psr', 'rank_last_year_psr', reverse=False)
    update_rank_by(stocks, 'last_year_pfr', 'rank_last_year_pfr', reverse=False)
def all_stocks(order_by='title', ordering='asc', find=None, filter_by_expected_rate=True, filter_bad=True, filter_options=None, rank_options=None) -> List[Stock]:
    """Load stocks from the DB, filter, sort, and optionally rank them.

    Args:
        order_by: attribute or key to sort by.
        ordering: 'asc' or anything else for descending.
        find: optional Mongo query passed to db.stocks.find.
        filter_by_expected_rate: keep only positive (or, with
            filter_bad=False, only negative) expected-rate stocks.
        filter_bad: selects which side of the expected-rate screen to keep.
        filter_options: explicit filter options; overrides the default screens.
        rank_options: rank keys to sum into 'total_rank' for final ordering.

    Fixes: mutable default arguments ([]) replaced with None, and the
    loop variable no longer shadows the builtin `dict`.
    """
    filter_options = filter_options or []
    rank_options = rank_options or []
    stocks = [Stock(doc) for doc in (db.stocks.find(find) if find else db.stocks.find())]
    filter_funcs = []
    if filter_options or rank_options:
        # Explicit options replace the default expected-rate screens.
        filter_by_expected_rate = False
        filter_bad = False
    if filter_by_expected_rate:
        filter_funcs.append(
            lambda s: (s.expected_rate > 0 and filter_bad) or (s.expected_rate < 0 and not filter_bad))
    for filter_option in filter_options:
        filter_funcs.append(make_filter_option_func(filter_option))
    stocks = sorted((s for s in stocks if all(f(s) for f in filter_funcs)),
                    key=partial(attr_or_key_getter, order_by), reverse=(ordering != 'asc'))
    if rank_options:
        for stock in stocks:
            stock['total_rank'] = sum(stock.get(r.key) for r in rank_options)
        return sorted(stocks, key=partial(attr_or_key_getter, 'total_rank'), reverse=False)
    return stocks
def stock_by_code(code) -> Stock:
return Stock(db.stocks.find_one({'code': code}))
def save_stock(stock) -> Stock:
    """Upsert `stock` by its 'code' and return the fresh copy from the DB."""
    existing = db.stocks.find_one({'code': stock['code']})
    if existing:
        print("update:", stock)
        db.stocks.update_one({'code': existing['code']}, {'$set': stock})
    else:
        db.stocks.insert_one(stock)
    return stock_by_code(stock['code'])
def unset_keys(keys_to_unsets):
for key in keys_to_unsets:
db.stocks.update({}, {'$unset':{key: 1}}, multi=True)
def all_filters():
dicts = db.filters.find()
return [Filter(f) for f in dicts]
def filter_by_id(filter_id) -> Filter:
return Filter(db.filters.find_one({'_id': ObjectId(filter_id)}))
def save_filter(filter):
filter_id = filter.get('_id', None)
if filter_id:
return db.filters.update_one({'_id': ObjectId(filter_id)}, {'$set': filter}).upserted_id
else:
return db.filters.insert_one(filter).inserted_id
def remove_filter(filter_id):
db.filters.delete_one({'_id': ObjectId(filter_id)})
def remove_stock(code):
db.stocks.remove({'code': code})
def save_prices(prices):
db.prices.insert_many(prices)
def get_latest_price(code):
return db.prices.find_one({'code': code}, sort=[('date', DESCENDING)])
def get_prices(code):
return list(db.prices.find({'code': code}, sort=[('date', ASCENDING)]))
def save_etf(etf) -> ETF:
exist = db.etf.find_one({'code': etf['code']})
if exist:
print("update:" ,etf)
db.etf.update_one({'code': exist['code']}, {'$set': etf})
else:
db.etf.insert_one(etf)
return etf_by_code(etf['code'])
def etf_by_code(code) -> ETF:
return ETF(db.etf.find_one({'code': code}))
def all_etf(order_by='title', ordering='asc', etf_type='domestic'):
ETFs = [ETF(dict) for dict in db.etf.find({'type': etf_type})]
ETFs = sorted(ETFs, key=partial(attr_or_key_getter, order_by), reverse=(ordering != 'asc'))
return ETFs | [
"int",
"int",
"int",
"int",
"int",
"int",
"int",
"int",
"List[Stock]",
"str",
"str",
"bool"
] | [
20096,
20232,
20572,
20699,
20830,
20967,
21105,
21256,
23747,
23765,
23780,
23794
] | [
20099,
20235,
20575,
20702,
20833,
20970,
21108,
21259,
23758,
23768,
23783,
23798
] |
archives/0hoo_snowball.zip | etftag.py | from utils import mean_or_zero
class ETFTag:
    """A named tag over a group of ETFs, sorted by 3-month return.

    Exposes the average 1/3/6/12-month returns of its member ETFs.
    """

    def __init__(self, tag, etfs=None):
        # `etfs=None` avoids the shared mutable-default-argument pitfall
        # of the original `etfs=[]`; passing a list still works unchanged.
        self.tag = tag
        self.etfs = sorted(etfs or [], key=lambda e: e.get('month3', 0), reverse=True)

    def _mean_return(self, key):
        # Average one return column across member ETFs.
        return mean_or_zero([etf[key] for etf in self.etfs])

    @property
    def month1(self):
        return self._mean_return('month1')

    @property
    def month3(self):
        return self._mean_return('month3')

    @property
    def month6(self):
        return self._mean_return('month6')

    @property
    def month12(self):
        return self._mean_return('month12')
archives/0hoo_snowball.zip | historical.py | from typing import List, Optional, Tuple
from datetime import datetime, timedelta
from collections import namedtuple
from statistics import mean
import itertools
import urllib.request
import json
from db import Stock
import db
KAKAO_DAY_CANDLES = "http://stock.kakao.com/api/securities/KOREA-A%s/day_candles.json?limit=%d&to=%s"
now = datetime.now()
THIS_YEAR = now.year
TODAY = now.strftime('%Y-%m-%d')
YESTERDAY = (now - timedelta(days=1)).strftime('%Y-%m-%d')
TWO_YEARS_AGO = now.replace(year=now.year-2, month=1, day=1).strftime('%Y-%m-%d')
Record = namedtuple('Record', ['date', 'price', 'expected_rate', 'bps', 'fROE'])
YearStat = namedtuple('YearStat', ['year', 'high_price', 'low_price', 'high_expected_rate', 'low_expected_rate', 'bps', 'fROE'])
Event = namedtuple('Event', ['date', 'record', 'buy'])
EventStat = namedtuple('EventStat', ['buy_count', 'sell_count', 'profit'])
def parse_date(date_str: str) -> datetime:
    """Parse the date portion of an ISO-style timestamp ('YYYY-MM-DD[T...]')."""
    day_part, _, _ = date_str.partition('T')
    return datetime.strptime(day_part, '%Y-%m-%d')
def load_kakao_json(code: str, date: str=YESTERDAY) -> Optional[List[dict]]:
    """Fetch day-candle price history for a stock code from the Kakao API.

    Args:
        code: 6-digit KRX stock code.
        date: upper bound date string ('%Y-%m-%d'), defaults to yesterday.

    Returns:
        The 'dayCandles' list from the JSON response, or None when the
        response carries no candle data.

    Fixes: the urlopen handle is now closed via a context manager
    (it leaked before), and the no-data path returns an explicit None.
    """
    url = KAKAO_DAY_CANDLES % (code, 90000, date)
    print(url)
    with urllib.request.urlopen(url) as response:
        data = json.loads(response.read().decode())
    if 'dayCandles' not in data:
        return None
    return data['dayCandles']
def parse_day_candles(code: str, date: str=YESTERDAY):
data = load_kakao_json(code, date)
prices = [{'code': code, 'price': d['tradePrice'], 'date': parse_date(d['date'])} for d in data]
first_date = prices[-1]['date']
if first_date.month != 1 and first_date.day != 1:
yesterday_of_first = first_date - timedelta(days=1)
data = load_kakao_json(code, date=yesterday_of_first.strftime('%Y-%m-%d'))
old = [{'code': code, 'price': d['tradePrice'], 'date': parse_date(d['date'])} for d in data]
prices = old + prices
latest = db.get_latest_price(code)
if latest:
prices = [p for p in prices if p['date'] > latest['date']]
if prices:
db.save_prices(prices)
def make_record(date, price, bps, stock) -> Record:
if not bps:
return Record(date=date, price=price, expected_rate=0, bps=0, fROE=0)
year = date.year
ROEs = [roe[1] for roe in stock.four_years_roe(year)]
if len(ROEs) < 1:
return Record(date=date, price=price, expected_rate=0, bps=0, fROE=0)
future_roe = mean(ROEs)
calc_future_bps = lambda future: int(bps * ((1 + (1 * future_roe / 100)) ** future))
expected_rate = stock.calc_expected_rate(calc_future_bps, 10, price)
return Record(date=date, price=price, expected_rate=expected_rate, bps=bps, fROE=future_roe)
def build_records(stock: Stock) -> List[Record]:
prices = db.get_prices(stock['code'])
if not prices:
parse_day_candles(stock['code'])
prices = db.get_prices(stock['code'])
else:
last_date = prices[-1]['date']
if last_date.strftime('%Y-%m-%d') != YESTERDAY:
prices = db.get_prices(stock['code'])
if not prices:
return
BPSs = {b[0]: b[1] for b in stock.year_stat('BPSs', exclude_future=True)}
return [make_record(p['date'], p['price'], BPSs.get(p['date'].year-1), stock) for p in prices]
def make_year_stat(year: int, records: List[Record]) -> YearStat:
    """Summarize one year's records into a YearStat (price and
    expected-rate extremes, plus the year's BPS and forward ROE)."""
    prices = [r.price for r in records]
    rates = [r.expected_rate for r in records]
    return YearStat(
        year=year,
        high_price=max(prices),
        low_price=min(prices),
        high_expected_rate=max(rates),
        low_expected_rate=min(rates),
        bps=records[0].bps,
        fROE=records[0].fROE,
    )
def records_by_year(stock: Stock) -> List[Tuple[YearStat, List[Record]]]:
records = build_records(stock)
by_year = [(k, list(list(g))) for k, g in itertools.groupby(records, lambda r: r.date.year)]
by_year = [(make_year_stat(year, records), records) for year, records in by_year]
events = simulate(by_year)
return [(year_stat, records, [e for e in events if e.date.year == year_stat.year]) for year_stat, records in by_year]
def simulate(by_year: List[Tuple[YearStat, List[Record]]]) -> List[Event]:
    """Replay a simple buy/sell rule over yearly grouped price records.

    Buy when a day's expected rate reaches the year's mid expected rate
    (midpoint of the year's high/low, floored at 13.5). While holding,
    sell when the expected rate has decayed by >= 1.2 points since the
    buy, or the price has risen >= 13% above the buy price.

    Returns the chronological list of buy/sell Events.
    """
    events = []
    last_buy_event = None  # the open position, if any
    for year_stat, records in by_year:
        # Midpoint of the year's expected-rate range, floored at 13.5.
        mid_expected_rate = mean([year_stat.high_expected_rate, year_stat.low_expected_rate])
        if mid_expected_rate < 13.5:
            mid_expected_rate = 13.5
        for r in records:
            if not last_buy_event and r.expected_rate >= mid_expected_rate:
                last_buy_event = Event(date=r.date, record=r, buy=True)
                events.append(last_buy_event)
            # Sell: expected rate dropped 1.2+ points, or price up 13%+.
            if last_buy_event and ((last_buy_event.record.expected_rate - r.expected_rate) >= 1.2
                    or (last_buy_event.record.price * 0.13 + last_buy_event.record.price) <= r.price):
                events.append(Event(date=r.date, record=r, buy=False))
                last_buy_event = None
    return events
| [
"str",
"str",
"str",
"Stock",
"int",
"List[Record]",
"Stock",
"List[Tuple[YearStat, List[Record]]]"
] | [
920,
1031,
1335,
2689,
3265,
3279,
3840,
4282
] | [
923,
1034,
1338,
2694,
3268,
3291,
3845,
4317
] |
archives/0hoo_snowball.zip | scrapper.py | from typing import List
import csv
import time
import random
from datetime import datetime
from statistics import mean
import urllib.request
import json
import codecs
from functools import partial
import requests
from lxml import html
import db
from db import Quarter
from utils import parse_float, parse_int, first_or_none, float_or_none
DAUM_BASIC = 'http://finance-service.daum.net/item/main.daum?code='
NAVER_COMPANY = 'http://companyinfo.stock.naver.com/v1/company/c1010001.aspx?cmp_cd='
NAVER_YEARLY = "http://companyinfo.stock.naver.com/v1/company/ajax/cF1001.aspx?cmp_cd=%s&fin_typ=0&freq_typ=Y"
NAVER_QUARTERLY = "http://companyinfo.stock.naver.com/v1/company/ajax/cF1001.aspx?cmp_cd=%s&fin_typ=0&freq_typ=Q"
NAVER_JSON1 = 'http://companyinfo.stock.naver.com/v1/company/cF4002.aspx?cmp_cd=%s&frq=0&rpt=1&finGubun=MAIN&frqTyp=0&cn='
NAVER_JSON5 = 'http://companyinfo.stock.naver.com/v1/company/cF4002.aspx?cmp_cd=%s&frq=0&rpt=5&finGubun=MAIN&frqTyp=0&cn='
NAVER = 'https://finance.naver.com/item/main.nhn?code='
FNGUIDE = 'http://comp.fnguide.com/SVO2/ASP/SVD_main.asp?pGB=1&gicode=A'
FNGUIDE_FINANCIAL_STMT = 'http://comp.fnguide.com/SVO2/ASP/SVD_Finance.asp?pGB=1&gicode=A%s&cID=&MenuYn=Y&ReportGB=&NewMenuID=103'
FNGUIDE_FINANCIAL_RATIO = 'http://comp.fnguide.com/SVO2/ASP/SVD_FinanceRatio.asp?pGB=1&gicode=A%s&cID=&MenuYn=Y&ReportGB=&NewMenuID=104'
FNGUIDE_INVEST_GUIDE = 'http://comp.fnguide.com/SVO2/asp/SVD_Invest.asp?pGB=1&gicode=A%s&cID=&MenuYn=Y&ReportGB=&NewMenuID=105&stkGb=701'
LAST_YEAR = str(datetime.now().year - 1)
def fill_company(filename: str='company.csv'):
random.seed()
with open(filename, newline='', encoding='UTF8') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
code = row['구글코드']
if code.startswith('KRX:'):
code = code[4:]
elif code.startswith('KOSDAQ:'):
code = code[7:]
parse_snowball(code)
time.sleep(random.random())
db.update_ranks()
def parse_snowball_stocks(filter_bad: bool=True, only_starred_owned: bool=False):
random.seed()
find = {'$or': [{'starred': True}, {'owned': True}]} if only_starred_owned else None
stocks = db.all_stocks(find=find, filter_bad=filter_bad)
print('{} 종목 수집'.format(len(stocks)))
for stock in stocks:
if stock.get('code', None):
parse_snowball(stock['code'])
time.sleep(random.random())
db.update_ranks()
def tree_from_url(url: str, decode: str=None):
content = requests.get(url).content
if decode:
content = content.decode(decode)
return html.fromstring(content)
def parse_basic(code):
print('종목 {} 기본...'.format(code))
url = DAUM_BASIC + code
print('다음 {}'.format(url))
tree = tree_from_url(url)
if not tree.xpath('//*[@id="topWrap"]/div[1]/h2'):
return False
title = tree.xpath('//*[@id="topWrap"]/div[1]/h2')[0].text
price = parse_float(tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[1]/em')[0].text)
diff = tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[2]/span')[0]
rate_diff = tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[3]/span')[0].text
exchange = tree.xpath('//*[@id="topWrap"]/div[1]/ul[1]/li[2]/a')[0].text
price_diff = parse_float(diff.text)
rate_diff = float(rate_diff.replace(',', '').replace('+', '').replace('-', '').replace('%', '').replace('%', ''))
is_price_down = diff.get('class').endswith('down')
if is_price_down:
price_diff = -abs(price_diff)
rate_diff = -abs(rate_diff)
per = parse_float(tree.xpath('//*[@id="stockContent"]/ul[2]/li[3]/dl[2]/dd')[0].text.split('/')[1])
pbr = parse_float(tree.xpath('//*[@id="stockContent"]/ul[2]/li[4]/dl[2]/dd')[0].text.split('/')[1])
trade_volume = parse_float(tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[5]/span[1]')[0].text)
trade_value = parse_float(tree.xpath('//*[@id="topWrap"]/div[1]/ul[2]/li[6]/span')[0].text)
agg_value = parse_float(tree.xpath('//*[@id="stockContent"]/ul[2]/li[2]/dl[2]/dd')[0].text)
print('종목명: {title} 현재가: {price}'.format(title=title, price=price))
stock = {
'code': code,
'title': title,
'current_price': price,
'price_diff': price_diff,
'rate_diff': rate_diff,
'per': per,
'pbr': pbr,
'trade_volume': trade_volume,
'trade_value': trade_value,
'exchange': exchange,
'agg_value': agg_value,
}
db.save_stock(stock)
return True
def quarter_from(text: str) -> Quarter:
if (not text) or ('/' not in text):
return None
estimated = text.endswith('(E)')
text = text[:-3] if estimated else text
comp = text.split('/')
return Quarter(year=int(comp[0]), number=int(int(comp[1]) / 3), estimated=estimated)
def parse_quarterly(code: str):
print('분기 {}'.format(code))
url = NAVER_QUARTERLY % (code)
tree = tree_from_url(url)
tds = tree.xpath("/html/body/table/tbody/tr[22]/td")
ROEs = [first_or_none(td.xpath('span/text()')) for td in tds]
while ROEs and ROEs[-1] is None:
ROEs.pop()
if len(ROEs) == 0:
print('*** 분기 ROE 정보가 없음 >>>')
return
ths = tree.xpath("/html/body/table/thead/tr[2]/th")
quarters = [quarter_from(th.text.strip()) for th in ths]
tds = tree.xpath("/html/body/table/tbody/tr[28]/td")
BPSs = [first_or_none(td.xpath('span/text()')) for td in tds]
QROEs = list(zip(quarters, ROEs))
QBPSs = list(zip(quarters, BPSs))
stock = {
'code': code,
'QROEs': QROEs,
'QBPSs': QBPSs,
}
stock = db.save_stock(stock)
def parse_naver_company(code: str):
url = NAVER_COMPANY + code
print('네이버 {}'.format(url))
tree = tree_from_url(url)
element = tree.xpath('//*[@id="pArea"]/div[1]/div/table/tr[3]/td/dl/dt[2]/b')
if not element:
print('수집 실패')
return False
bps = parse_int(element[0].text)
print('BPS: {}'.format(bps))
element = tree.xpath('//*[@id="pArea"]/div[1]/div/table/tr[3]/td/dl/dt[6]/b')
if element:
dividend_rate = parse_float(element[0].text)
print('배당률: {}'.format(dividend_rate))
else:
dividend_rate = 0
print('배당 수집 실패')
return False
stock = {
'code': code,
'bps': bps,
'dividend_rate': dividend_rate,
'use_fnguide': False,
}
stock = db.save_stock(stock)
return stock
def parse_snowball(code: str):
if not parse_basic(code):
print('수집 실패')
return
if parse_fnguide(code):
parse_fnguide_financial_statements(code)
parse_fnguide_financial_ratio(code)
parse_fnguide_invest_guide(code)
else:
print('FnGuide 수집실패')
if not parse_naver_company(code):
return
# print('종목 {} 스노우볼...'.format(code))
# url = NAVER_YEARLY % (code)
# print(url)
# tree = tree_from_url(url)
# try:
# years = list(filter(lambda x: x != '', map(lambda x: x.strip().split('/')[0], tree.xpath('/html/body/table/thead/tr[2]/th/text()'))))
# last_year_index = years.index(LAST_YEAR)
# except ValueError:
# return
# tds = tree.xpath('/html/body/table/tbody/tr[22]/td')
# ROEs = [first_or_none(td.xpath('span/text()')) for td in tds]
# while ROEs and ROEs[-1] is None:
# ROEs.pop()
# if len(ROEs) == 0:
# print('*** ROE 정보가 없음 >>>')
# return
# ROEs = [float_or_none(x) for x in ROEs]
# DEPTs = tree.xpath('/html/body/table/tbody/tr[24]/td/span/text()')
# DEPTs = [parse_float(x) for x in DEPTs]
# EPSs = tree.xpath('/html/body/table/tbody/tr[26]/td/span/text()')
# EPSs = [parse_float(x) for x in EPSs]
# PERs = tree.xpath('/html/body/table/tbody/tr[27]/td/span/text()')
# PERs = [parse_float(x) for x in PERs]
# BPSs = tree.xpath('/html/body/table/tbody/tr[28]/td/span/text()')
# BPSs = [parse_int(x) for x in BPSs]
# PBRs = tree.xpath('/html/body/table/tbody/tr[29]/td/span/text()')
# PBRs = [parse_float(x) for x in PBRs]
# #자산총계
# TAs = tree.xpath('/html/body/table/tbody/tr[8]/td/span/text()')
# TAs = [parse_int(x) for x in TAs]
# #당기순이익
# NPs = tree.xpath('/html/body/table/tbody/tr[5]/td/span/text()')
# NPs = [parse_int(x) for x in NPs]
# #영업활동현금흐름
# CFOs = tree.xpath('/html/body/table/tbody/tr[14]/td/span/text()')
# CFOs = [parse_int(x) for x in CFOs]
# #투자활동현금흐름
# CFIs = tree.xpath('/html/body/table/tbody/tr[15]/td/span/text()')
# CFIs = [parse_int(x) for x in CFIs]
# #투자활동현금흐름
# CFFs = tree.xpath('/html/body/table/tbody/tr[16]/td/span/text()')
# CFFs = [parse_int(x) for x in CFFs]
# CAPEXs = tree.xpath('/html/body/table/tbody/tr[17]/td/span/text()')
# CAPEXs = [parse_float(x) for x in CAPEXs]
# #잉여현금흐름
# FCFs = tree.xpath('/html/body/table/tbody/tr[18]/td/span/text()')
# FCFs = [parse_int(x) for x in FCFs]
# #발행주식수
# TIs = tree.xpath('/html/body/table/tbody/tr[33]/td/span/text()')
# TIs = [parse_int(x) for x in TIs]
# stock = {
# 'code': code,
# 'ROEs': ROEs,
# 'last_year_index': last_year_index,
# 'PBRs': PBRs,
# 'EPSs': EPSs,
# 'TAs': TAs,
# 'NPs': NPs,
# 'CFOs': CFOs,
# 'CFIs': CFIs,
# 'CFFs': CFFs,
# 'FCFs': FCFs,
# 'PERs': PERs,
# 'TIs': TIs,
# 'DEPTs': DEPTs,
# 'BPSs': BPSs,
# 'CAPEXs': CAPEXs,
# }
# stock = db.save_stock(stock)
#parse_quarterly(code)
#parse_json(code)
def parse_json(code: str):
print('종목 {} JSON...'.format(code))
url = NAVER_JSON1 % (code)
urlopen = urllib.request.urlopen(url)
data = json.loads(urlopen.read().decode())
GPs = []
if data and 'DATA' in data and data['DATA']:
yyyy = [int(y[:4]) for y in data['YYMM'] if len(y) > 4 and len(y.split('/')) > 2]
year_data_keys = {y: i+1 for i, y in enumerate(yyyy)}
for row in data['DATA']:
if 'ACC_NM' in row and row['ACC_NM'].startswith('매출총이익<당기'):
GPs = [(y, row['DATA' + str(year_data_keys[y])]) for y in sorted(list(year_data_keys.keys()))]
break
url = NAVER_JSON5 % (code)
urlopen = urllib.request.urlopen(url)
data = json.loads(urlopen.read().decode())
CPSs = []
PCRs = []
SPSs = []
PSRs = []
if data and 'DATA' in data and data['DATA']:
yyyy = [int(y[:4]) for y in data['YYMM'] if len(y) > 4 and len(y.split('/')) > 2]
year_data_keys = {y: i+1 for i, y in enumerate(yyyy)}
for row in data['DATA']:
if 'ACC_NM' in row and row['ACC_NM'].startswith('CPS'):
CPSs = [(y, row['DATA' + str(year_data_keys[y])]) for y in sorted(list(year_data_keys.keys()))]
elif 'ACC_NM' in row and row['ACC_NM'].startswith('PCR'):
PCRs = [(y, row['DATA' + str(year_data_keys[y])]) for y in sorted(list(year_data_keys.keys()))]
elif 'ACC_NM' in row and row['ACC_NM'].startswith('SPS'):
SPSs = [(y, row['DATA' + str(year_data_keys[y])]) for y in sorted(list(year_data_keys.keys()))]
elif 'ACC_NM' in row and row['ACC_NM'].startswith('PSR'):
PSRs = [(y, row['DATA' + str(year_data_keys[y])]) for y in sorted(list(year_data_keys.keys()))]
stock = {
'code': code,
'GPs': GPs,
'CPSs': CPSs,
'PCRs': PCRs,
'SPSs': SPSs,
'PSRs': PSRs,
}
print('GPs: {}'.format(GPs))
stock = db.save_stock(stock)
def parse_etf(code: str, tag: str, etf_type: str):
    """Scrape one ETF page from Naver Finance and save it via db.save_etf.

    *tag* is a comma-separated tag string; *etf_type* labels the record
    (the callers pass 'domestic' or 'international'). Silently returns when
    the page has no title node (invalid/delisted code).
    Fix: the bare `except:` (which also swallowed KeyboardInterrupt and
    SystemExit) is narrowed to the IndexError the [0] lookup can raise.
    """
    url = NAVER + code
    print(url)
    tree = tree_from_url(url, 'euc-kr')
    try:
        title = tree.xpath('//*[@id="middle"]/div[1]/div[1]/h2/a')[0].text
    except IndexError:
        # No title node on the page: not a valid ETF page, skip it.
        return
    # 1/3/6/12-month return figures from the summary table.
    month1 = parse_float(tree.xpath('//*[@id="tab_con1"]/div[5]/table/tbody/tr[1]/td/em')[0].text.strip())
    month3 = parse_float(tree.xpath('//*[@id="tab_con1"]/div[5]/table/tbody/tr[2]/td/em')[0].text.strip())
    month6 = parse_float(tree.xpath('//*[@id="tab_con1"]/div[5]/table/tbody/tr[3]/td/em')[0].text.strip())
    month12 = parse_float(tree.xpath('//*[@id="tab_con1"]/div[5]/table/tbody/tr[4]/td/em')[0].text.strip())
    company = tree.xpath('//table[contains(@class, "tbl_type1")]//td/span/text()')[2]
    cost = parse_float(tree.xpath('//table[contains(@class, "tbl_type1")]//td/em/text()')[0])
    tags = tag.split(',')
    db.save_etf({
        'code': code,
        'title': title,
        'company': company,
        'month1': month1,
        'month3': month3,
        'month6': month6,
        'month12': month12,
        'cost': cost,
        'tags': tags,
        'type': etf_type,
    })
def parse_etfs():
    """Read both ETF watch-list files and scrape every listed entry."""
    sources = [
        ('dual_etf.txt', 'domestic'),
        ('international_etf.txt', 'international'),
    ]
    for filename, etf_type in sources:
        with codecs.open(filename, 'r', 'utf-8') as f:
            lines = f.readlines()
        for line in lines:
            parse_line(line, etf_type)
def parse_line(line: str, etf_type: str):
    """Parse one watch-list line and scrape the ETF it names.

    The first whitespace-separated token is the comma-separated tag list,
    the last token is the ETF code; blank lines are ignored.
    """
    stripped = line.strip()
    if not stripped:
        return
    tokens = stripped.split(' ')
    parse_etf(tokens[-1], tokens[0], etf_type)
def parse_fnguide(code: str):
    """Scrape the FnGuide snapshot page for *code* and save classification,
    valuation ratios, momentum and the yearly highlight series.

    Returns False when the page has no title (invalid code), None when last
    year's column is missing from the highlights table, and True after a
    successful save. NOTE(review): callers treating the result as strictly
    boolean should confirm the None case is acceptable.
    """
    print('종목 {} FnGuide...'.format(code))
    url = FNGUIDE + code
    print('FnGuide {}'.format(url))
    tree = tree_from_url(url)
    # Company name; absent for invalid/delisted codes.
    title = first_or_none(tree.xpath('//*[@id="giName"]/text()'))
    if not title:
        return False
    # Market/sector label; the second space-separated token is the group.
    groups = first_or_none(tree.xpath('//*[@id="compBody"]/div[1]/div[1]/p/span[1]/text()'))
    groups = groups.split(' ')
    group = groups[1] if len(groups) > 1 else None
    subgroup = first_or_none(tree.xpath('//*[@id="compBody"]/div[1]/div[1]/p/span[4]/text()'))
    subgroup = subgroup.replace('\xa0', '')
    # Fiscal closing month: first token minus its trailing character
    # (presumably the '월' suffix — confirm against the live page).
    closing_month = first_or_none(tree.xpath('//*[@id="compBody"]/div[1]/div[1]/p/span[6]/text()'))
    closing_month = parse_int(closing_month.split(' ')[0][:-1])
    forward_per = parse_float(first_or_none(tree.xpath('//*[@id="corp_group2"]/dl[2]/dd/text()')))
    group_per = parse_float(first_or_none(tree.xpath('//*[@id="corp_group2"]/dl[3]/dd/text()')))
    dividend_rate = parse_float(first_or_none(tree.xpath('//*[@id="corp_group2"]/dl[5]/dd/text()')))
    relative_earning_rate = parse_float(first_or_none(tree.xpath('//*[@id="svdMainChartTxt13"]/text()')))
    # Momentum cells in page order; missing trailing cells default to 0.
    momentums = tree.xpath('//*[@id="svdMainGrid1"]/table/tbody/tr[3]/td[1]/span/text()')
    momentums = [parse_float(m) for m in momentums]
    month1 = momentums[0] if len(momentums) >= 1 else 0
    month3 = momentums[1] if len(momentums) >= 2 else 0
    month6 = momentums[2] if len(momentums) >= 3 else 0
    month12 = momentums[3] if len(momentums) >= 4 else 0
    foreigner_weight = parse_float(first_or_none(tree.xpath('//*[@id="svdMainGrid1"]/table/tbody/tr[3]/td[2]/text()')))
    beta = parse_float(first_or_none(tree.xpath('//*[@id="svdMainGrid1"]/table/tbody/tr[4]/td[2]/text()')))
    # Share counts split on '/ '; a non-zero second figure means preferred
    # stock exists.
    stocks = first_or_none(tree.xpath('//*[@id="svdMainGrid1"]/table/tbody/tr[7]/td[1]/text()'))
    stocks = stocks.split('/ ')
    has_preferred_stock = False if stocks[1] == '0' else True
    floating_rate = parse_float(first_or_none(tree.xpath('//*[@id="svdMainGrid1"]/table/tbody/tr[6]/td[2]/text()')))
    YoY = parse_float(first_or_none(tree.xpath('//*[@id="svdMainGrid2"]/table/tbody/tr/td[4]/span/text()')))
    consensus_point = parse_float(first_or_none(tree.xpath('//*[@id="svdMainGrid9"]/table/tbody/tr/td[1]/text()')))
    consensus_price = parse_int(first_or_none(tree.xpath('//*[@id="svdMainGrid9"]/table/tbody/tr/td[2]/text()')))
    consensus_count = parse_int(first_or_none(tree.xpath('//*[@id="svdMainGrid9"]/table/tbody/tr/td[5]/text()')))
    bps = parse_int(first_or_none(tree.xpath('//*[@id="highlight_D_A"]/table/tbody/tr[19]/td[3]/text()')))
    # Locate last year's column in the yearly highlights table; without it
    # the series below cannot be anchored, so bail out.
    # NOTE(review): list.index compares the string year labels against
    # LAST_YEAR — confirm LAST_YEAR is a string in this module.
    try:
        years = tree.xpath('//*[@id="highlight_D_Y"]/table/thead/tr[2]/th/div/text()')
        years = [x.split('/')[0] for x in years]
        last_year_index = years.index(LAST_YEAR)
    except ValueError:
        print("** 작년 데이터 없음 **")
        return
    # Yearly highlight rows, addressed by fixed row positions in the table.
    NPs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[3]/td/text()')
    NPs = [parse_float(x) for x in NPs]
    TAs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[6]/td/text()')
    TAs = [parse_float(x) for x in TAs]
    ROEs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[17]/td/text()')
    ROEs = [parse_float(x) for x in ROEs]
    EPSs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[18]/td/text()')
    EPSs = [parse_float(x) for x in EPSs]
    BPSs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[19]/td/text()')
    BPSs = [parse_float(x) for x in BPSs]
    DPSs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[20]/td/text()')
    DPSs = [parse_float(x) for x in DPSs]
    PERs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[21]/td/text()')
    PERs = [parse_float(x) for x in PERs]
    PBRs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[22]/td/text()')
    PBRs = [parse_float(x) for x in PBRs]
    DEPTs = tree.xpath('//*[@id="highlight_D_Y"]/table/tbody/tr[7]/td/text()')
    DEPTs = [parse_float(x) for x in DEPTs]
    stock = {
        'code': code,
        'group': group,
        'subgroup': subgroup,
        'closing_month': closing_month,
        'forward_per': forward_per,
        'group_per': group_per,
        'dividend_rate': dividend_rate,
        'relative_earning_rate': relative_earning_rate,
        'month1': month1,
        'month3': month3,
        'month6': month6,
        'month12': month12,
        'foreigner_weight': foreigner_weight,
        'beta': beta,
        'has_preferred_stock': has_preferred_stock,
        'floating_rate': floating_rate,
        'YoY': YoY,
        'consensus_point': consensus_point,
        'consensus_price': consensus_price,
        'consensus_count': consensus_count,
        'bps': bps,
        'use_fnguide': True,
        'last_year_index': last_year_index,
        'NPs': NPs,
        'TAs': TAs,
        'ROEs': ROEs,
        'EPSs': EPSs,
        'BPSs': BPSs,
        'DPSs': DPSs,
        'PERs': PERs,
        'PBRs': PBRs,
        'DEPTs': DEPTs,
    }
    db.save_stock(stock)
    return True
def row_values_table(table, row_headers: List[str], key: str) -> List[float]:
    """Return the numeric cell values of the table row whose header is *key*.

    *row_headers* must be ordered like the table's tbody rows. Returns []
    when the header is absent. Fix: the return annotation previously said
    List[str], but every value goes through parse_float.
    """
    try:
        i = row_headers.index(key)
    except ValueError:
        # Header not present in this table.
        return []
    return [parse_float(v) for v in table.xpath('tbody/tr')[i].xpath('td//text()')]
def row_values_table_by_index(table, index: int) -> List[float]:
    """Return the numeric cell values of the *index*-th tbody row.

    Returns [] when the row does not exist. Fixes: the two sequential
    except clauses are merged (only IndexError can actually occur here,
    since parse_float never raises ValueError), and the return annotation
    is corrected from List[str] to List[float].
    """
    try:
        row = table.xpath('tbody/tr')[index]
    except (ValueError, IndexError):
        return []
    return [parse_float(v) for v in row.xpath('td//text()')]
def parse_fnguide_financial_table(tree) -> dict:
    """Extract yearly balance-sheet series (current assets, current
    liabilities, total liabilities) from FnGuide's annual table
    ('divDaechaY').

    Returns {} when the table is absent or when the year and value counts
    disagree. Each series is a list of (year, value) pairs.
    """
    if not tree.xpath('//*[@id="divDaechaY"]'):
        return {}
    # First header cell is not a year label, hence the [1:] slice.
    years = tree.xpath('//*[@id="divDaechaY"]/table/thead/tr/th/text()')
    years = [int(y.split('/')[0]) for y in years[1:]]
    row_headers = tree.xpath("//*[@id='divDaechaY']/table/tbody/tr/th//text()")
    row_headers = [h.strip() for h in row_headers if h.strip()]
    # Drop the expander pseudo-header and non-breaking spaces.
    row_headers = [h.replace('\xa0', '') for h in row_headers if h != '계산에 참여한 계정 펼치기']
    row_values = partial(row_values_table, tree.xpath("//*[@id='divDaechaY']/table")[0], row_headers)
    current_assets = row_values('유동자산')
    current_liability = row_values('유동부채')
    total_liability = row_values('부채')
    print(years)
    print(current_assets)
    print(current_liability)
    print(total_liability)
    # Length mismatch suggests a page-layout change; refuse partial data.
    if len(years) != len(current_assets):
        return {}
    current_assets = list(zip(years, current_assets))
    current_liability = list(zip(years, current_liability))
    total_liability = list(zip(years, total_liability))
    return {
        'current_assets': current_assets,
        'current_liability': current_liability,
        'total_liability': total_liability,
    }
def parse_fnguide_profit_table(tree) -> dict:
    """Extract yearly income-statement series (sales, cost of sales, SG&A,
    gross profit) from FnGuide's annual table ('divSonikY').

    Returns {} when the table is absent. Each series is a list of
    (year, value) pairs.
    """
    if not tree.xpath('//*[@id="divSonikY"]'):
        return {}
    years = tree.xpath('//*[@id="divSonikY"]/table/thead/tr/th/text()')
    years = [int(y.split('/')[0]) for y in years if len(y.split('/')) > 1]
    row_headers = tree.xpath("//*[@id='divSonikY']/table/tbody/tr/th//text()")
    row_headers = [h.strip() for h in row_headers if h.strip()]
    # Drop the expander pseudo-header and non-breaking spaces.
    row_headers = [h.replace('\xa0', '') for h in row_headers if h != '계산에 참여한 계정 펼치기']
    row_values = partial(row_values_table, tree.xpath("//*[@id='divSonikY']/table")[0], row_headers)
    # Each row is truncated to the year columns — presumably to guard
    # against extra trailing cells; confirm against the live page.
    sales = row_values('매출액')[:len(years)]
    GPs = row_values('매출총이익')[:len(years)]
    GPs = list(zip(years, GPs))
    sales_cost = row_values('매출원가')[:len(years)]
    SGAs = row_values('판매비와관리비')[:len(years)]
    sales = list(zip(years, sales))
    sales_cost = list(zip(years, sales_cost))
    SGAs = list(zip(years, SGAs))
    print(sales)
    print(sales_cost)
    print(SGAs)
    return {
        'sales': sales,
        'sales_cost': sales_cost,
        'SGAs': SGAs,
        'GPs': GPs,
    }
def parse_fnguide_profit_flow(tree) -> dict:
    """Extract yearly operating/investing/financing cash flows
    (CFOs/CFIs/CFFs) from FnGuide's annual cash-flow table ('divCashY').

    Returns {} when the table is absent; each series is a list of
    (year, value) pairs.
    """
    if not tree.xpath('//*[@id="divCashY"]'):
        return {}
    raw_years = tree.xpath('//*[@id="divCashY"]/table/thead/tr/th/text()')
    years = [int(label.split('/')[0]) for label in raw_years if len(label.split('/')) > 1]
    headers = tree.xpath("//*[@id='divCashY']/table/tbody/tr/th//text()")
    headers = [h.strip() for h in headers if h.strip()]
    headers = [h.replace('\xa0', '') for h in headers if h != '계산에 참여한 계정 펼치기']
    values_for = partial(row_values_table, tree.xpath("//*[@id='divCashY']/table")[0], headers)
    return {
        'CFOs': list(zip(years, values_for('영업활동으로인한현금흐름')[:len(years)])),
        'CFIs': list(zip(years, values_for('투자활동으로인한현금흐름')[:len(years)])),
        'CFFs': list(zip(years, values_for('재무활동으로인한현금흐름')[:len(years)])),
    }
def parse_fnguide_financial_statements(code: str) -> bool:
    """Scrape the FnGuide financial-statements page for *code* and persist
    the balance-sheet, income-statement and cash-flow series together."""
    print('종목 {} FnGuide 재무재표 ...'.format(code))
    url = FNGUIDE_FINANCIAL_STMT % (code)
    print('FnGuide 재무재표 {}'.format(url))
    tree = tree_from_url(url)
    stock = {'code': code}
    stock.update(parse_fnguide_financial_table(tree))
    stock.update(parse_fnguide_profit_table(tree))
    stock.update(parse_fnguide_profit_flow(tree))
    db.save_stock(stock)
    return True
def parse_fnguide_financial_ratio(code: str) -> bool:
    """Scrape yearly financial ratios (net-debt ratio, interest coverage,
    ROIC, turnover ratios, ...) from the FnGuide ratio page and persist them.

    Returns False when the ratio table is absent, True after saving.
    Each series is stored as (year, value) pairs.
    """
    print('종목 {} FnGuide 재무비율 ...'.format(code))
    url = FNGUIDE_FINANCIAL_RATIO % (code)
    print('FnGuide 재무비율 {}'.format(url))
    tree = tree_from_url(url)
    if not tree.xpath('//*[@id="compBody"]/div[2]/div[3]/div[2]/table'):
        return False
    years = tree.xpath('//*[@id="compBody"]/div[2]/div[3]/div[2]/table/thead/tr/th/text()')
    years = [int(y.split('/')[0]) for y in years if len(y.split('/')) > 1]
    # Row headers may appear as plain th text, inside links, or inside divs;
    # union all three xpath shapes.
    header1 = '//*[@id="compBody"]/div[2]/div[3]/div[2]/table/tbody/tr/th/text()'
    header2 = '//*[@id="compBody"]/div[2]/div[3]/div[2]/table/tbody/tr/th//a//text()'
    header3 = '//*[@id="compBody"]/div[2]/div[3]/div[2]/table/tbody/tr/th/div/text()'
    row_headers = tree.xpath(' | '.join([header1, header2, header3]))
    row_headers = [h.strip() for h in row_headers if h.strip()]
    row_headers = [h.replace('\xa0', '') for h in row_headers if h != '계산에 참여한 계정 펼치기']
    row_values = partial(row_values_table, tree.xpath('//*[@id="compBody"]/div[2]/div[3]/div[2]/table')[0], row_headers)
    # Every series is truncated to the year columns before zipping.
    loan_rate = list(zip(years, row_values('순차입금비율')[:len(years)]))
    net_current_loan = list(zip(years, row_values('순차입부채')[:len(years)]))
    interest_cost = list(zip(years, row_values('이자비용')[:len(years)]))
    interest_coverage = list(zip(years, row_values('이자보상배율')[:len(years)]))
    ROICs = list(zip(years, row_values('ROIC')[:len(years)]))
    NOPATs = list(zip(years, row_values('세후영업이익')[:len(years)]))
    ICs = list(zip(years, row_values('영업투하자본')[:len(years)]))
    total_asset_turnover = list(zip(years, row_values('총자산회전율')[:len(years)]))
    net_working_capital_turnover = list(zip(years, row_values('순운전자본회전율')[:len(years)]))
    net_working_capital = list(zip(years, row_values('순운전자본')[:len(years)]))
    stock = {
        'code': code,
        'loan_rate': loan_rate,
        'net_current_loan': net_current_loan,
        'interest_cost': interest_cost,
        'interest_coverage': interest_coverage,
        'ROICs': ROICs,
        'NOPATs': NOPATs,
        'ICs': ICs,
        'total_asset_turnover': total_asset_turnover,
        'net_working_capital_turnover': net_working_capital_turnover,
        'net_working_capital': net_working_capital,
    }
    db.save_stock(stock)
    return True
def parse_fnguide_invest_guide(code: str) -> bool:
    """Scrape the FCFs series from the FnGuide invest-guide page for *code*.

    Returns False when the expected table is absent, True after saving.
    """
    print('종목 {} FnGuide 투자지표 ...'.format(code))
    url = FNGUIDE_INVEST_GUIDE % (code)
    print('FnGuide 투자지표 {}'.format(url))
    tree = tree_from_url(url)
    if not tree.xpath('//*[@id="compBody"]/div[2]/div[5]/div[2]/table'):
        return False
    years = tree.xpath('//*[@id="compBody"]/div[2]/div[5]/div[2]/table/thead/tr/th/text()')
    years = [int(y.split('/')[0]) for y in years if len(y.split('/')) > 1]
    row_values = partial(row_values_table_by_index, tree.xpath('//*[@id="compBody"]/div[2]/div[5]/div[2]/table')[0])
    # Row 50 is hard-coded — presumably the FCF line in FnGuide's fixed
    # table layout; confirm if the page layout changes.
    FCFs = list(zip(years, row_values(50)))
    stock = {
        'code': code,
        'FCFs': FCFs,
    }
    db.save_stock(stock)
return True | [
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"str",
"List[str]",
"str",
"int",
"str",
"str",
"str"
] | [
2521,
4622,
4924,
5787,
6635,
9958,
12013,
12023,
12038,
13514,
13529,
13690,
18796,
18812,
19049,
22665,
23091,
25541
] | [
2524,
4625,
4927,
5790,
6638,
9961,
12016,
12026,
12041,
13517,
13532,
13693,
18805,
18815,
19052,
22668,
23094,
25544
] |
archives/0hoo_snowball.zip | script.py | import argparse
import scrapper
# Command-line interface: each flag selects exactly one scrapper action.
parser = argparse.ArgumentParser(description='Snowball utility')
parser.add_argument('--basic', help='입력된 종목코드의 기본 정보를 가지고 온다')
parser.add_argument('--snowball', help='입력된 종목코드의 스노우볼 정보를 가지고 온다')
parser.add_argument('--mysnowball', action='store_true', help='관심종목과 소유종목의 스노우볼 정보를 가지고 온다')
parser.add_argument('--allsnowball', action='store_true', help='모든 기대수익률이 0이상인 종목의 스노우볼 정보를 가지고 온다')
parser.add_argument('--allminus', action='store_true', help='기대수익률이 0이하인 종목의 스노우볼 정보를 가지고 온다')
parser.add_argument('--fill', action='store_true', help='company.csv 파일에 있는 종목을 전부 추가한다')
parser.add_argument('--sample', action='store_true', help='sample.csv 파일에 있는 종목을 추가한다')
parser.add_argument('--etf', action='store_true', help='ETF 듀얼 모멘텀 정보를 수집한다')
if __name__ == '__main__':
    args = parser.parse_args()
    # Dispatch the first matching flag; when several are passed, the
    # earlier branch wins and the rest are ignored.
    if args.basic:
        scrapper.parse_basic(args.basic)
    elif args.snowball:
        scrapper.parse_snowball(args.snowball)
    elif args.mysnowball:
        scrapper.parse_snowball_stocks(filter_bad=True, only_starred_owned=True)
    elif args.allsnowball:
        scrapper.parse_snowball_stocks(filter_bad=True)
    elif args.allminus:
        scrapper.parse_snowball_stocks(filter_bad=False)
    elif args.fill:
        scrapper.fill_company()
    elif args.sample:
        scrapper.fill_company(filename='sample.csv')
    elif args.etf:
        scrapper.parse_etfs()
| [] | [] | [] |
archives/0hoo_snowball.zip | setup.py | from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
# cx_Freeze build configuration: freezes the web app (app.py) and the CLI
# (script.py) into console executables. The listed packages are forced into
# the bundle because automatic dependency detection misses them.
buildOptions = dict(packages = [
    'idna',
    'jinja2.ext',
], excludes = [])
base = 'Console'
executables = [
    Executable('app.py', base=base),
    Executable('script.py', base=base),
]
setup(name='snowball',
      version = '1.0',
      description = '',
      options = dict(build_exe = buildOptions),
      executables = executables)
| [] | [] | [] |
archives/0hoo_snowball.zip | test.py | import unittest
from datetime import datetime
from statistics import mean
from db import Stock, DIVIDEND_TAX_RATE
# Most recently completed calendar year; the yearly test fixtures below are
# anchored to it.
LAST_YEAR = datetime.now().year - 1
class StockTest(unittest.TestCase):
    """Tests for basic Stock construction and the dividend-tax adjustment."""
    def test_dict_to_stock(self):
        # Stock wraps a plain dict and stays subscriptable.
        stock_dict = {
            'code': '0001',
        }
        self.assertIsNotNone(Stock(stock_dict))
        self.assertEqual(stock_dict['code'], Stock(stock_dict)['code'])
    def test_dividend_tax_adjust(self):
        # Without a dividend rate the adjustment is zero; with one it is
        # dividend_rate * DIVIDEND_TAX_RATE / 100.
        stock_dict = {
            'code': '0001',
        }
        self.assertEqual(0.0, Stock(stock_dict).dividend_tax_adjust)
        dividend_rate = 3.5
        stock_dict['dividend_rate'] = dividend_rate
        stock = Stock(stock_dict)
        self.assertAlmostEqual(0.539, Stock(stock_dict).dividend_tax_adjust)
        self.assertEqual(dividend_rate * (DIVIDEND_TAX_RATE / 100), Stock(stock_dict).dividend_tax_adjust)
class StockYearStatTest(unittest.TestCase):
    """Tests for Stock's yearly statistics: year alignment via
    last_year_index, ROE/PBR/PER aggregates, future BPS projections and
    expected rates of return."""
    def test_roe_year_stat_empty(self):
        stock_dict = {
            'code': '0001',
        }
        empty_year_stat = [(0, 0)]
        self.assertEqual(empty_year_stat, Stock(stock_dict).year_stat('ROEs'))
    def test_roe_year_stat_should_have_last_year_index(self):
        # year_stat requires last_year_index to anchor values to years.
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0],
        }
        self.assertRaises(AssertionError, Stock(stock_dict).year_stat, 'ROEs')
        stock_dict['last_year_index'] = 0
        self.assertEqual([(LAST_YEAR, 3.0)], Stock(stock_dict).roes)
        stock_dict['last_year_index'] = 1
        self.assertEqual([(LAST_YEAR - 1, 3.0)], Stock(stock_dict).roes)
    def test_roe_year_stat(self):
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, 4.0, 10.0],
            'last_year_index': 2,
        }
        roes = Stock(stock_dict).year_stat('ROEs')
        self.assertEqual(4, len(roes))
        expected_roes = [(LAST_YEAR-2, 3.0), (LAST_YEAR-1, 5.0), (LAST_YEAR, 4.0), (LAST_YEAR+1, 10)]
        self.assertEqual(expected_roes, roes)
    def test_countable_roe(self):
        # None entries are kept by year_stat but excluded from the mean.
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, None, 10.0],
            'last_year_index': 3,
        }
        stock = Stock(stock_dict)
        self.assertEqual([(2014, 3.0), (2015, 5.0), (2016, None), (2017, 10)], stock.year_stat('ROEs'))
        self.assertEqual([3.0, 5.0, 10.0], stock.countable_roes)
        self.assertEqual(mean([3.0, 5.0, 10.0]), stock.mean_roe)
    def test_last_four_years_roe(self):
        # Only values up to (and including) last year count, capped at four.
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, 4.0, 10.0],
            'last_year_index': 2,
        }
        last_four_years = Stock(stock_dict).last_four_years_roe
        self.assertEqual(3, len(last_four_years))
        self.assertEqual([3.0, 5.0, 4.0], last_four_years)
        stock_dict['last_year_index'] = 3
        last_four_years = Stock(stock_dict).last_four_years_roe
        self.assertEqual(4, len(last_four_years))
        self.assertEqual([3.0, 5.0, 4.0, 10.0], last_four_years)
        stock_dict['last_year_index'] = 0
        last_four_years = Stock(stock_dict).last_four_years_roe
        self.assertEqual(1, len(last_four_years))
        self.assertEqual([3.0], last_four_years)
    def test_mean_roe(self):
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, 4.0, 10.0],
            'last_year_index': 2,
        }
        self.assertEqual(mean([3.0, 5.0, 4.0]), Stock(stock_dict).mean_roe)
        stock_dict['last_year_index'] = 1
        self.assertEqual(mean([3.0, 5.0]), Stock(stock_dict).mean_roe)
    def test_calculated_roe_count(self):
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, 4.0, 10.0],
            'last_year_index': 2,
        }
        self.assertEqual(3, Stock(stock_dict).calculated_roe_count)
        stock_dict['last_year_index'] = 3
        self.assertEqual(4, Stock(stock_dict).calculated_roe_count)
        stock_dict['last_year_index'] = 0
        self.assertEqual(1, Stock(stock_dict).calculated_roe_count)
    def test_future_roe(self):
        # future_roe = mean_roe minus the dividend-tax adjustment.
        stock_dict = {
            'code': '0001',
            'ROEs': [3.0, 5.0, 4.0, 10.0],
            'last_year_index': 2,
        }
        stock = Stock(stock_dict)
        self.assertEqual(0.0, stock.dividend_tax_adjust)
        self.assertEqual(stock.mean_roe, stock.future_roe)
        stock_dict['dividend_rate'] = 4.5
        stock = Stock(stock_dict)
        self.assertAlmostEqual(0.693, stock.dividend_tax_adjust)
        self.assertEqual(stock.mean_roe - stock.dividend_tax_adjust, stock.future_roe)
    def test_calc_future_bps(self):
        # BPS is compounded by future_roe; adjusted_future_roe overrides it.
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'last_year_index': 2,
            'dividend_rate': 4.5,
        }
        stock = Stock(stock_dict)
        self.assertAlmostEqual(10.64, stock.future_roe, places=1)
        self.assertEqual(int(1000 + 1000 * 0.1064), stock.calc_future_bps(1))
        self.assertEqual(2748, stock.calc_future_bps(10))
        stock['adjusted_future_roe'] = 12.0
        self.assertEqual(1973, stock.calc_future_bps(6))
        self.assertEqual(3105, stock.calc_future_bps(10))
    def test_expected_rate(self):
        stock_dict = {
            'code': '0001',
            'current_price': 1200,
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'last_year_index': 2,
            'dividend_rate': 4.5,
        }
        self.assertAlmostEqual(8.63, Stock(stock_dict).expected_rate, places=1)
        stock_dict['current_price'] = 1000
        self.assertAlmostEqual(10.63, Stock(stock_dict).expected_rate, places=1)
        stock_dict['current_price'] = 800
        self.assertAlmostEqual(13.13, Stock(stock_dict).expected_rate, places=1)
    def test_invest_price(self):
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'last_year_index': 2,
            'dividend_rate': 4.5,
        }
        self.assertEqual(679, Stock(stock_dict).invest_price)
        stock_dict['bps'] = 1800
        self.assertEqual(1222, Stock(stock_dict).invest_price)
        stock_dict['ROEs'] = [15.0, 18.0, 20.0, 22.0]
        self.assertEqual(2133, Stock(stock_dict).invest_price)
    def test_calculable_pbr_count(self):
        stock_dict = {
            'code': '0001',
        }
        self.assertEqual(0, Stock(stock_dict).calculable_pbr_count)
        stock_dict['PBRs'] = [1.0, 0.8]
        stock_dict['last_year_index'] = 1
        self.assertEqual(2, Stock(stock_dict).calculable_pbr_count)
        stock_dict['last_year_index'] = 0
        self.assertEqual(1, Stock(stock_dict).calculable_pbr_count)
        stock_dict['PBRs'] = [1.0, 0.8, 2, 1.3]
        stock_dict['last_year_index'] = 1
        self.assertEqual(2, Stock(stock_dict).calculable_pbr_count)
        stock_dict['last_year_index'] = 3
        self.assertEqual(4, Stock(stock_dict).calculable_pbr_count)
    def test_expected_rate_by_current_pbr(self):
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'last_year_index': 2,
            'dividend_rate': 4.5,
            'current_price': 1200
        }
        stock = Stock(stock_dict)
        self.assertAlmostEqual(8.63, stock.expected_rate, places=1)
        stock['pbr'] = float(stock['current_price'] / stock['bps'])
        self.assertAlmostEqual(1.2, stock['pbr'], places=1)
        self.assertEqual(int(stock.calc_future_bps(1) * stock['pbr']), stock.calc_future_price_current_pbr(1))
        self.assertEqual(1327, stock.calc_future_price_current_pbr(1))
        self.assertAlmostEqual(10.63, stock.expected_rate_by_current_pbr, places=1)
    def test_expected_rate_by_low_pbr(self):
        # Zero PBR entries are ignored when finding the historical low.
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'last_year_index': 2,
            'dividend_rate': 4.5,
            'current_price': 1200
        }
        stock = Stock(stock_dict)
        stock['PBRs'] = [1.0, 0.8, 0.7, 0.5]
        self.assertEqual(0.7, stock.low_pbr)
        stock['PBRs'] = [0.0, 0.8, 0.7, 0.5]
        self.assertEqual(0.7, stock.low_pbr)
        self.assertEqual(774, stock.calc_future_price_low_pbr(1))
        self.assertAlmostEqual(4.82, stock.expected_rate_by_low_pbr, places=1)
    def test_expected_rate_by_mid_pbr(self):
        # mid_pbr is the midpoint of the current and low PBR.
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'ROEs': [11.0, 8.0, 15.0, 10.0],
            'PBRs': [0.0, 0.8, 0.7, 0.5],
            'last_year_index': 2,
            'dividend_rate': 4.5,
            'current_price': 1200,
            'pbr': 0.9
        }
        stock = Stock(stock_dict)
        self.assertEqual(0.7, stock.low_pbr)
        self.assertEqual((stock['pbr'] + stock.low_pbr) / 2.0, stock.mid_pbr)
        self.assertAlmostEqual(6.23, stock.expected_rate_by_mid_pbr, places=1)
    def test_adjusted_eps(self):
        # Weighted EPS needs at least three years of data.
        stock_dict = {
            'code': '0001',
        }
        self.assertEqual(0, Stock(stock_dict).adjusted_eps)
        stock_dict['EPSs'] = [1000, 1500]
        stock_dict['last_year_index'] = 2
        self.assertEqual(0, Stock(stock_dict).adjusted_eps)
        stock_dict['EPSs'] = [1000, 1500, 2000]
        self.assertEqual(1666, Stock(stock_dict).adjusted_eps)
    def test_intrinsic_value(self):
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'EPSs': [100, 150, 200],
            'last_year_index': 2,
        }
        stock = Stock(stock_dict)
        self.assertEqual(int((stock['bps'] + (stock.adjusted_eps * 10)) / 2), stock.intrinsic_value)
    def test_intrinsic_discount_rate(self):
        stock_dict = {
            'code': '0001',
            'bps': 1000,
            'EPSs': [100, 150, 200],
            'last_year_index': 2,
            'current_price': 1200
        }
        self.assertAlmostEqual(10.83, Stock(stock_dict).intrinsic_discount_rate, places=1)
    def test_eps_growth(self):
        stock_dict = {
            'code': '0001',
        }
        self.assertEqual(0, Stock(stock_dict).eps_growth)
        stock_dict['EPSs'] = [100, 150, 200]
        self.assertAlmostEqual(41.66, Stock(stock_dict).eps_growth, places=1)
    def test_peg_current_per(self):
        # PEG needs both a PER and an EPS growth figure.
        stock_dict = {
            'code': '0001',
        }
        self.assertEqual(0, Stock(stock_dict).peg_current_per)
        stock_dict['per'] = 6
        self.assertEqual(0, Stock(stock_dict).peg_current_per)
        stock_dict['EPSs'] = [100, 110, 130]
        self.assertAlmostEqual(0.42, Stock(stock_dict).peg_current_per, places=1)
        stock_dict['per'] = 10
        self.assertAlmostEqual(0.70, Stock(stock_dict).peg_current_per, places=1)
    def test_peg_mean_per(self):
        stock_dict = {
            'code': '0001',
        }
        stock = Stock(stock_dict)
        self.assertEqual(0, stock.mean_per)
        stock_dict['PERs'] = [8, 5.5, 11.5]
        stock = Stock(stock_dict)
        self.assertAlmostEqual(8.33, stock.mean_per, places=1)
        stock_dict['EPSs'] = [100, 110, 130]
        stock = Stock(stock_dict)
        self.assertAlmostEqual(0.59, stock.peg_mean_per, places=1)
    def test_fscore(self):
        pass
    def test_roe_max_diff(self):
        stock_dict = {
            'code': '0001',
        }
        stock = Stock(stock_dict)
        self.assertEqual(0, stock.roe_max_diff)
        stock_dict['ROEs'] = [10, 5, 11]
        stock = Stock(stock_dict)
        self.assertEqual(6, stock.roe_max_diff)
    def test_quarter_roes(self):
        # Quarterly keys are ((year, quarter, is_estimate), value) pairs.
        stock_dict = {
            'code': '0001',
            'QROEs': [
                ((2016, 4, False), 12.5),
                ((2017, 1, False), 15.5),
                ((2017, 2, False), 17.0),
                ((2017, 3, False), 11.3),
                ((2017, 4, False), 10.9),
            ]
        }
        stock = Stock(stock_dict)
        print(stock.QROEs)
if __name__ == '__main__':
unittest.main() | [] | [] | [] |
archives/0hoo_snowball.zip | utils.py | import statistics
def mean_or_zero(iter):
    """Arithmetic mean of *iter*, or 0 when the data is empty."""
    try:
        result = statistics.mean(iter)
    except statistics.StatisticsError:
        result = 0
    return result
def parse_float(str):
    """Parse a number string such as '1,234.5%' into a float.

    Returns 0 when parsing fails or the input is not a string (e.g. None).
    """
    try:
        cleaned = str.replace(',', '').replace('%', '')
        return float(cleaned)
    except (ValueError, AttributeError):
        return 0
def parse_int(str):
    """Parse a comma-grouped integer string such as '1,234'.

    Returns 0 when parsing fails or the input is not a string (e.g. None).
    """
    try:
        digits = str.replace(',', '')
        return int(digits)
    except (ValueError, AttributeError):
        return 0
def attr_or_key_getter(name, obj, default_value=0):
    """Look *name* up as an attribute of *obj*, falling back to a mapping
    lookup with *default_value* when the attribute is missing."""
    _missing = object()
    value = getattr(obj, name, _missing)
    if value is _missing:
        return obj.get(name, default_value)
    return value
def first_or_none(iter):
    """First element of an indexable sequence, or None when it is empty."""
    try:
        first = iter[0]
    except IndexError:
        return None
    return first
def float_or_none(x):
    """Convert a comma-grouped number string to float; None for falsy input."""
    if not x:
        return None
    return float(x.replace(',', ''))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/client.py | from collections import deque, namedtuple
# we'll use infinity as a default distance to nodes.
import requests
# Sentinel distance for vertices not yet reached by Dijkstra.
inf = float('inf')
Edge = namedtuple('Edge', 'start, end, cost')


def make_edge(edge):
    """Build an Edge from an API dict with 'source'/'destination'/'cost' keys."""
    return Edge(start=edge['source'], end=edge['destination'], cost=edge['cost'])
class Graph:
    """Directed graph over (start, end, cost) edges with Dijkstra search."""

    def __init__(self, edges):
        # Edges arrive as API dicts; normalize them into Edge tuples.
        self.edges = [make_edge(edge) for edge in edges]

    @property
    def vertices(self):
        """Set of every vertex appearing as an edge endpoint."""
        return set(
            sum(
                ([edge.start, edge.end] for edge in self.edges), []
            )
        )

    def get_node_pairs(self, n1, n2):
        # Directed graph: only the (n1, n2) orientation identifies an edge.
        return [[n1, n2]]

    def remove_edge(self, n1, n2):
        """Remove every edge running from n1 to n2."""
        node_pairs = self.get_node_pairs(n1, n2)
        edges = self.edges[:]
        for edge in edges:
            if [edge.start, edge.end] in node_pairs:
                self.edges.remove(edge)

    def add_edge(self, n1, n2, cost=1):
        """Add an edge from n1 to n2 with the given cost.

        Raises:
            ValueError: if an n1 -> n2 edge already exists.
        """
        node_pairs = self.get_node_pairs(n1, n2)
        for edge in self.edges:
            if [edge.start, edge.end] in node_pairs:
                # BUG FIX: the ValueError was previously *returned*, not
                # raised, so duplicate inserts silently appeared to succeed.
                raise ValueError('Edge {} {} already exists'.format(n1, n2))
        self.edges.append(Edge(start=n1, end=n2, cost=cost))

    @property
    def neighbours(self):
        """Map each vertex to its set of (neighbour, cost) pairs."""
        neighbours = {vertex: set() for vertex in self.vertices}
        for edge in self.edges:
            neighbours[edge.start].add((edge.end, edge.cost))
        return neighbours

    def dijkstra(self, source, dest):
        """Shortest path from source to dest as a deque of vertices.

        Returns an empty deque when dest is unreachable from source.
        """
        assert source in self.vertices, 'Such source node doesn\'t exist'
        distances = {vertex: inf for vertex in self.vertices}
        previous_vertices = {
            vertex: None for vertex in self.vertices
        }
        distances[source] = 0
        vertices = self.vertices.copy()

        while vertices:
            # Greedily settle the closest unsettled vertex.
            current_vertex = min(
                vertices, key=lambda vertex: distances[vertex])
            vertices.remove(current_vertex)
            if distances[current_vertex] == inf:
                # Remaining vertices are unreachable.
                break
            for neighbour, cost in self.neighbours[current_vertex]:
                alternative_route = distances[current_vertex] + cost
                if alternative_route < distances[neighbour]:
                    distances[neighbour] = alternative_route
                    previous_vertices[neighbour] = current_vertex

        # Walk the predecessor chain backwards to rebuild the path.
        path, current_vertex = deque(), dest
        while previous_vertices[current_vertex] is not None:
            path.appendleft(current_vertex)
            current_vertex = previous_vertices[current_vertex]
        if path:
            path.appendleft(current_vertex)
        return path
# Demo/integration script: POST the sample edges to the local graphx API,
# read them back, and run Dijkstra on the reconstructed graph.
base_url = 'http://localhost:8021/v1'
edges = [
    {
        "source": "A",
        "destination": "B",
        "cost": 7
    },
    {
        "source": "A",
        "destination": "C",
        "cost": 9
    },
    {
        "source": "A",
        "destination": "F",
        "cost": 14
    },
    {
        "source": "B",
        "destination": "C",
        "cost": 10
    },
    {
        "source": "B",
        "destination": "D",
        "cost": 15
    },
    {
        "source": "C",
        "destination": "D",
        "cost": 11
    },
    {
        "source": "C",
        "destination": "F",
        "cost": 2
    },
    {
        "source": "D",
        "destination": "E",
        "cost": 6
    },
    {
        "source": "E",
        "destination": "F",
        "cost": 9
    }
]
# Create each edge server-side, then fetch the full edge list back.
for edge in edges:
    r = requests.post('{}/edges'.format(base_url), json=edge)
r = requests.get('{}/edges'.format(base_url))
response = r.json()
graph = Graph(response)
print("\npath: \n")
print(graph.dijkstra("A", "E"))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/graph2.py | from collections import defaultdict
from collections import deque, namedtuple
# we'll use infinity as a default distance to nodes.
# Distance placeholder for vertices not yet reached.
inf = float('inf')
Edge = namedtuple('Edge', 'start, end, cost')


def make_edge(start, end, cost=1):
    """Build an Edge; cost defaults to 1 for unweighted input tuples."""
    return Edge(start=start, end=end, cost=cost)
class Graph:
    """Directed graph built from (start, end[, cost]) tuples with Dijkstra
    shortest-path search."""

    def __init__(self, edges):
        """Validate and store the edge list.

        Raises:
            ValueError: if any edge tuple is not of length 2 or 3.
        """
        wrong_edges = [i for i in edges if len(i) not in [2, 3]]
        if wrong_edges:
            raise ValueError('Wrong edges data: {}'.format(wrong_edges))
        self.edges = [make_edge(*edge) for edge in edges]

    @property
    def vertices(self):
        """Set of every vertex appearing as an edge endpoint."""
        return set(
            sum(
                ([edge.start, edge.end] for edge in self.edges), []
            )
        )

    def get_node_pairs(self, n1, n2):
        # Directed graph: only the (n1, n2) orientation identifies an edge.
        return [[n1, n2]]

    def remove_edge(self, n1, n2):
        """Remove every edge running from n1 to n2."""
        node_pairs = self.get_node_pairs(n1, n2)
        edges = self.edges[:]
        for edge in edges:
            if [edge.start, edge.end] in node_pairs:
                self.edges.remove(edge)

    def add_edge(self, n1, n2, cost=1):
        """Add an edge from n1 to n2 with the given cost.

        Raises:
            ValueError: if an n1 -> n2 edge already exists.
        """
        node_pairs = self.get_node_pairs(n1, n2)
        for edge in self.edges:
            if [edge.start, edge.end] in node_pairs:
                # BUG FIX: the ValueError was previously *returned*, not
                # raised, so duplicate inserts silently appeared to succeed.
                raise ValueError('Edge {} {} already exists'.format(n1, n2))
        self.edges.append(Edge(start=n1, end=n2, cost=cost))

    @property
    def neighbours(self):
        """Map each vertex to its set of (neighbour, cost) pairs."""
        neighbours = {vertex: set() for vertex in self.vertices}
        for edge in self.edges:
            neighbours[edge.start].add((edge.end, edge.cost))
        return neighbours

    def dijkstra(self, source, dest):
        """Shortest path from source to dest as a deque of vertices.

        Returns an empty deque when dest is unreachable from source.
        """
        assert source in self.vertices, 'Such source node doesn\'t exist'
        distances = {vertex: inf for vertex in self.vertices}
        previous_vertices = {
            vertex: None for vertex in self.vertices
        }
        distances[source] = 0
        vertices = self.vertices.copy()

        while vertices:
            # Greedily settle the closest unsettled vertex.
            current_vertex = min(
                vertices, key=lambda vertex: distances[vertex])
            vertices.remove(current_vertex)
            if distances[current_vertex] == inf:
                # Remaining vertices are unreachable.
                break
            for neighbour, cost in self.neighbours[current_vertex]:
                alternative_route = distances[current_vertex] + cost
                if alternative_route < distances[neighbour]:
                    distances[neighbour] = alternative_route
                    previous_vertices[neighbour] = current_vertex

        # Walk the predecessor chain backwards to rebuild the path.
        path, current_vertex = deque(), dest
        while previous_vertices[current_vertex] is not None:
            path.appendleft(current_vertex)
            current_vertex = previous_vertices[current_vertex]
        if path:
            path.appendleft(current_vertex)
        return path
# Demo: build the classic 6-vertex example graph and print the shortest
# path from "a" to "e".
graph = Graph([
    ("a", "b", 7), ("a", "c", 9), ("a", "f", 14), ("b", "c", 10),
    ("b", "d", 15), ("c", "d", 11), ("c", "f", 2), ("d", "e", 6),
    ("e", "f", 9)])
print(graph.dijkstra("a", "e"))
#
# g = MemoryGraph()
#
# g.add_node("a")
# g.add_node("b")
# g.add_node("c")
# g.add_node("d")
# g.add_node("e")
# g.add_node("f")
#
# g.add_edge("a", "b", 7)
# g.add_edge("a", "c", 9)
# g.add_edge("a", "f", 14)
# g.add_edge("b", "c", 10)
# g.add_edge("b", "d", 15)
# g.add_edge("c", "d", 11)
# g.add_edge("c", "f", 2)
# g.add_edge("d", "e", 6)
# g.add_edge("e", "f", 9)
#
# print(dijsktra(g, 'a'))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/setup.py | from glob import glob
from os.path import abspath, dirname, join as pjoin
import pkg_resources
from setuptools import setup, find_packages
# Absolute path of the directory containing this setup.py.
root = dirname(abspath(__file__))
def execfile(fname, globs, locs=None):
    """Execute the Python source file *fname* in the given namespaces,
    like Python 2's execfile; *locs* defaults to *globs*.

    Note: this runs arbitrary code from *fname* — only use on trusted files.
    Fix: the file handle is now closed via a context manager instead of
    being left to the garbage collector.
    """
    locs = locs or globs
    with open(fname) as source_file:
        source = source_file.read()
    exec(compile(source, fname, "exec"), globs, locs)
# Discover the single top-level package under src/ (src-layout project).
source_path = 'src'
packages = find_packages(source_path)
# Packages containing a dot are subpackages; only dotless names are roots.
root_packages = [
    package
    for package in packages
    if "." not in package
]
# The build assumes exactly one root package.
assert len(root_packages) == 1
package = root_packages[0]
package_directory = pjoin(root, source_path, package)
def get_variable_from_file(filepath, variable):
    """Execute *filepath* (relative to the package directory) and return
    the value it binds to *variable*."""
    target = pjoin(package_directory, filepath)
    namespace = {}
    execfile(target, namespace)
    return namespace[variable]
# Version is read from the package's _version.py without importing it.
version = get_variable_from_file('_version.py', '__version__')
with open('requirements.txt') as f:
    required = f.read().splitlines()
# Requirements containing 'http' are dropped — presumably URL-pinned
# entries that install_requires cannot express; confirm.
required = [requirement for requirement in required if 'http' not in requirement]
setup(
    name=package,
    version=version,
    python_requires='>=3.6',
    author="PyMedPhys Contributors",
    author_email="developers@pymedphys.com",
    description='',
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
        'Programming Language :: Python :: 3.7',
        'Topic :: Scientific/Engineering :: Medical Science Apps.',
        'Topic :: Scientific/Engineering :: Physics',
        'Intended Audience :: Science/Research',
        'Intended Audience :: Healthcare Industry'
    ],
    install_requires=required,
    packages=packages,
    package_dir={'': source_path},
    include_package_data=True,
    package_data={package: []},
    license='AGPL-3.0-or-later',
    extras_require={
        'test': [
            'pytest'
        ]
    }
)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/__init__.py | # project/__init__.py
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/_version.py | version_info = [0, 0, 1]
__version__ = "0.0.1"
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/app/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/app/main.py | import falcon
from graphx.configurations.app import settings
from falcon_marshmallow import JSONEnforcer, EmptyRequestDropper
from graphx.configurations.app.middlewares import RequestLoader
from injector_provider import InjectorProvider
from registry.services import Container, Registry
app = falcon.API(middleware=[
JSONEnforcer(),
EmptyRequestDropper(),
RequestLoader()
])
container = Container()
container.set(settings.Props.DI_PROVIDER, InjectorProvider())
container.set(settings.Props.FALCON, app)
service_registry = Registry()
for service in settings.services:
service_registry.register(service)
service_registry.boot(container)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/app/middlewares.py | import logging
import falcon
from falcon import HTTPUnprocessableEntity, HTTPBadRequest
from falcon_marshmallow import Marshmallow
from falcon_marshmallow.middleware import get_stashed_content
from marshmallow import ValidationError, Schema
log = logging.getLogger(__name__)
class HTTPValidationError(falcon.HTTPError):
"""
HTTPError that stores a dictionary of validation error messages.
"""
def __init__(self, status, errors=None, *args, **kwargs):
self.errors = errors
super().__init__(status, *args, **kwargs)
def to_dict(self, *args, **kwargs):
"""
Override `falcon.HTTPError` to include error messages in responses.
"""
ret = super().to_dict(*args, **kwargs)
if self.errors is not None:
ret['errors'] = self.errors
return ret
class RequestLoader(Marshmallow):
def process_resource(self, *args, **kwargs):
try:
self.process_resource_inner(*args, **kwargs)
except ValidationError as err:
raise HTTPValidationError(status=falcon.status_codes.HTTP_400, errors=err.messages)
except ValueError as err:
raise falcon.HTTPError(status=falcon.status_codes.HTTP_400, title='Validation Error', description=str(err))
def process_resource_inner(self, req, resp, resource, params):
# type: (Request, Response, object, dict) -> None
"""Deserialize request body with any resource-specific schemas
Store deserialized data on the ``req.context`` object
under the ``req_key`` provided to the class constructor
or on the ``json`` key if none was provided.
If a Marshmallow schema is defined on the passed ``resource``,
use it to deserialize the request body.
If no schema is defined and the class was instantiated with
``force_json=True``, request data will be deserialized with
any ``json_module`` passed to the class constructor or
``simplejson`` by default.
:param falcon.Request req: the request object
:param falcon.Response resp: the response object
:param object resource: the resource object
:param dict params: any parameters parsed from the url
:rtype: None
:raises falcon.HTTPBadRequest: if the data cannot be
deserialized or decoded
"""
log.debug(
'Marshmallow.process_resource(%s, %s, %s, %s)',
req, resp, resource, params
)
if req.content_length in (None, 0):
return
sch = self._get_schema(resource, req.method, 'request')
if sch is not None:
if not isinstance(sch, Schema):
raise TypeError(
'The schema and <method>_schema properties of a resource '
'must be instantiated Marshmallow schemas.'
)
try:
body = get_stashed_content(req)
parsed = self._json.loads(body)
except UnicodeDecodeError:
raise HTTPBadRequest('Body was not encoded as UTF-8')
except self._json.JSONDecodeError:
raise HTTPBadRequest('Request must be valid JSON')
log.info(sch)
data = sch.load(parsed)
req.context[self._req_key] = data
elif self._force_json:
body = get_stashed_content(req)
try:
req.context[self._req_key] = self._json.loads(body)
except (ValueError, UnicodeDecodeError):
raise HTTPBadRequest(
description=(
'Could not decode the request body, either because '
'it was not valid JSON or because it was not encoded '
'as UTF-8.'
)
)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/app/settings.py | from graphx.configurations.infrastructure.db import DataBaseService
from graphx.configurations.infrastructure.environment import EnvironmentService
from graphx.configurations.infrastructure.logging import LoggingService
from graphx.configurations.infrastructure.rest.swagger.registry import SwaggerService
from graphx.core.rest.registry import NodeService
from registry.services import Props as BaseProps
services = [
LoggingService(),
EnvironmentService(),
DataBaseService(),
NodeService(),
SwaggerService()
]
class Props(BaseProps):
DI_PROVIDER = 0
FALCON = 1
APP_URL = 'APP_URL'
NEO_URL = 'NEO_URL'
NEO_USERNAME = 'NEO_USERNAME'
NEO_PASSWORD = 'NEO_PASSWORD'
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/db/__init__.py | from graphx.configurations.infrastructure.db.definitions import GraphConfigurator
from registry.services import BootableService, Container
class DataBaseService(BootableService):
def boot(self, container: Container):
from graphx.configurations.app.settings import Props
url = container.get(Props.NEO_URL)
username = container.get(Props.NEO_USERNAME)
password = container.get(Props.NEO_PASSWORD)
configurator = GraphConfigurator(url, username, password)
provider = container.get(Props.DI_PROVIDER)
provider.add_configurator(configurator)
| [
"Container"
] | [
212
] | [
221
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/db/definitions.py | from dataclasses import dataclass
from injector import Module, singleton, provider
from py2neo import Graph
@dataclass
class GraphConfigurator(Module):
url: str
username: str
password: str
@singleton
@provider
def provide_graph(self) -> Graph:
# graph = Graph(uri=self.url, user=self.username, password=self.password)
graph = Graph("http://neo4j:7474/db/data/", user=self.username, password=self.password)
# graph.create()
return graph.begin()
# return graph
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/environment/__init__.py | import os
from registry.services import BootableService, Container
class EnvironmentService(BootableService):
def boot(self, container: Container):
from graphx.configurations.app.settings import Props
container.set(Props.APP_URL, os.environ.get(Props.APP_URL.value))
container.set(Props.NEO_URL, os.environ.get(Props.NEO_URL.value))
container.set(Props.NEO_USERNAME, os.environ.get(Props.NEO_USERNAME.value))
container.set(Props.NEO_PASSWORD, os.environ.get(Props.NEO_PASSWORD.value))
| [
"Container"
] | [
144
] | [
153
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/logging/__init__.py | import logging as registry_logging
import sys
import registry.services
class LoggingService(registry.services.BootableService):
def boot(self, app: registry.services.Container):
registry_logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=registry_logging.DEBUG)
registry_logging.getLogger().addHandler(registry_logging.StreamHandler(sys.stdout))
| [
"registry.services.Container"
] | [
155
] | [
182
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/rest/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/rest/swagger/__init__.py | import json
import falcon
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from falcon import Request
from falcon.response import Response
from falcon_apispec import FalconPlugin
from graphx.core.rest.resources import NodeCollection, EdgeCollection
from graphx.core.rest.schemas import Node, Edge
class SwaggerResource:
def __init__(self):
from graphx.configurations.app.settings import Props
from graphx.configurations.app.main import app
from graphx.configurations.app.main import container
# todo: should be moved to env vars
self.spec = APISpec(title='graphx',
version='1.0.0',
openapi_version='2.0',
plugins=[
FalconPlugin(app),
MarshmallowPlugin(),
])
injector = container.get(Props.DI_PROVIDER).get_injector()
self.spec.components.schema('Node', schema=injector.get(Node))
self.spec.path(resource=injector.get(NodeCollection))
self.spec.components.schema('Edge', schema=injector.get(Edge))
self.spec.path(resource=injector.get(EdgeCollection))
def on_get(self, req: Request, resp: Response):
resp.status = falcon.HTTP_200
resp.body = json.dumps(self.spec.to_dict(), ensure_ascii=False)
| [
"Request",
"Response"
] | [
1278,
1293
] | [
1285,
1301
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/configurations/infrastructure/rest/swagger/registry.py | import os
from registry.services import BootableService, Container
from falcon_swagger_ui import register_swaggerui_app
class SwaggerService(BootableService):
def boot(self, container: Container):
from graphx.configurations.app import settings
from graphx.configurations.infrastructure.rest.swagger import SwaggerResource
falcon = container.get(settings.Props.FALCON)
swagger_resource = SwaggerResource()
falcon.add_route('/v1/swagger.json', swagger_resource)
page_title = 'Swagger UI'
favicon_url = 'https://falconframework.org/favicon-32x32.png'
swagger_ui_url = '/v1/docs' # without trailing slash
schema_url = '{}/v1/swagger.json'.format(container.get(settings.Props.APP_URL))
register_swaggerui_app(
falcon, swagger_ui_url, schema_url,
page_title=page_title,
favicon_url=favicon_url,
config={'supportedSubmitMethods': ['get', 'post', 'put'], }
)
| [
"Container"
] | [
192
] | [
201
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/data_providers/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/data_providers/data_provider.py | from abc import ABCMeta, abstractmethod
from typing import Generic, TypeVar, Any, List
N = TypeVar('N')
E = TypeVar('E')
class DataProvider(Generic[N, E], metaclass=ABCMeta):
@abstractmethod
def save(self, node: N) -> None:
""" ads a node to the graph
Args:
node (N): The node entity
Returns:
None
"""
pass
@abstractmethod
def find_all_nodes(self) -> List[N]:
""" returns a list of nodes
Returns:
List[N] list of nodes
"""
pass
@abstractmethod
def add_edge(self, edge: E) -> None:
""" ads an edge
Args:
source: source node
destination: destination node
cost: cost of distance
Returns:
None
"""
pass
@abstractmethod
def find_by_id(self, id: str) -> N:
""" finds a node by id
Args:
id: Node id
Returns:
N
Raises:
EntityNotFoundException
"""
pass
@abstractmethod
def find_shortest_path(self, source: N, destination: N) -> Any:
""" finds the shortest path
Args:
source: Source node
destination: Destination node
Returns:
Any should be shortest path object
"""
| [
"N",
"E",
"str",
"N",
"N"
] | [
223,
654,
978,
1276,
1292
] | [
224,
655,
981,
1277,
1293
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/data_providers/memory.py | from dataclasses import dataclass, field
from typing import Any, List, Dict
from graphx.core.data_providers.data_provider import DataProvider
from graphx.core.entities import Edge, Node
from graphx.core.exceptions import EntityAlreadyExistsException
@dataclass
class MemoryNodeRepository(DataProvider[Node, Edge]):
nodes: Dict[str, Node] = field(default_factory=lambda: {})
edges: List[Edge] = field(default_factory=lambda: [])
def save(self, node: Node) -> None:
if node.id in self.nodes:
raise EntityAlreadyExistsException('Node already exists!')
self.nodes[node.id] = node
def add_edge(self, edge: Edge) -> None:
if self.edge_exists(edge):
raise EntityAlreadyExistsException('Edge already exists')
self.edges.append(edge)
def edge_exists(self, edge: Edge):
# todo shall only compare source and dest
duplicates = [existing_edge for existing_edge in self.edges if edge == existing_edge]
return len(duplicates) > 0
def find_all_nodes(self) -> List[Node]:
return [v for k, v in self.nodes.items()]
def find_by_id(self, id: str) -> Node:
pass
def find_all_edges(self) -> List[Edge]:
return self.edges
def find_shortest_path(self, source: Node, destination: Node) -> Any:
pass
| [
"Node",
"Edge",
"Edge",
"str",
"Node",
"Node"
] | [
464,
649,
834,
1145,
1285,
1304
] | [
468,
653,
838,
1148,
1289,
1308
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/data_providers/neo.py | from py2neo.ogm import GraphObject, Property, RelatedTo
from typing import Any
from py2neo import Graph as NeoGraph
from injector import inject
from dataclasses import dataclass
from graphx.core.data_providers.data_provider import DataProvider
class Node(GraphObject):
__primarykey__ = "id"
id = Property()
name = Property()
edges = RelatedTo('Node')
@inject
@dataclass
class NeoNodeRepository(DataProvider[Node]):
graph: NeoGraph
def save(self, node: Node) -> None:
self.graph.merge(node)
def add_edge(self, source: Node, destination: Node, cost: int) -> None:
pass
def find_by_id(self, id: str) -> Node:
pass
def find_shortest_path(self, source: Node, destination: Node) -> Any:
pass
| [
"Node",
"Node",
"Node",
"int",
"str",
"Node",
"Node"
] | [
485,
563,
582,
594,
651,
720,
739
] | [
489,
567,
586,
597,
654,
724,
743
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/entities.py | from dataclasses import dataclass
@dataclass
class Node:
id: str
name: str
@dataclass
class Edge:
source: str
destination: str
cost: int
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/exceptions.py | class EntityNotFoundException(Exception):
pass
class EntityAlreadyExistsException(Exception):
pass
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/assemblers.py | from typing import List
from graphx.core.entities import Node, Edge
from graphx.core.rest.schemas import Node as NodeResource
from graphx.core.rest.schemas import Edge as EdgeResource
class NodeAssembler(object):
@staticmethod
def assemble_collection(nodes: List[Node]) -> List[NodeResource]:
return [NodeResource.from_domain_object(node) for node in nodes]
class EdgeAssembler(object):
@staticmethod
def assemble_collection(edges: List[Edge]) -> List[EdgeResource]:
return [EdgeResource.from_domain_object(edge) for edge in edges]
| [
"List[Node]",
"List[Edge]"
] | [
269,
461
] | [
279,
471
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/definitions.py | from injector import Module, singleton, provider
from graphx.core.data_providers.memory import MemoryNodeRepository
from graphx.core.rest.resources import NodeCollection, EdgeCollection
from graphx.core.use_cases import AddNode
from graphx.core.use_cases.add_edge import AddEdge
from graphx.core.use_cases.find_all_edges import FindAllEdges
from graphx.core.use_cases.find_all_nodes import FindAllNodes
class NodeConfigurator(Module):
@singleton
@provider
def node_collection(self) -> NodeCollection:
return NodeCollection(self.__injector__.get(AddNode), self.__injector__.get(FindAllNodes))
@singleton
@provider
def edge_collection(self) -> EdgeCollection:
return EdgeCollection(self.__injector__.get(AddEdge), self.__injector__.get(FindAllEdges))
@singleton
@provider
def repository(self) -> MemoryNodeRepository:
return MemoryNodeRepository()
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/registry.py | from graphx.configurations.app import settings
from graphx.core.rest.definitions import NodeConfigurator
from graphx.core.rest.resources import NodeCollection, EdgeCollection
from registry.services import BootableService, Container
class NodeService(BootableService):
def boot(self, container: Container):
falcon = container.get(settings.Props.FALCON)
provider = container.get(settings.Props.DI_PROVIDER)
provider.add_configurator(NodeConfigurator)
injector = provider.get_injector()
falcon.add_route("/v1/nodes", injector.get(NodeCollection))
falcon.add_route("/v1/edges", injector.get(EdgeCollection))
| [
"Container"
] | [
300
] | [
309
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/resources.py | import json
import logging
import falcon
from graphx.core.data_providers.memory import Node
from graphx.core.entities import Edge
from graphx.core.exceptions import EntityAlreadyExistsException
from graphx.core.rest.assemblers import NodeAssembler, EdgeAssembler
from graphx.core.rest.schemas import Node as NodeSchema
from graphx.core.rest.schemas import Edge as EdgeSchema
from graphx.core.use_cases import AddNode
from graphx.core.use_cases.add_edge import AddEdge
from graphx.core.use_cases.find_all_edges import FindAllEdges
from graphx.core.use_cases.find_all_nodes import FindAllNodes
class NodeCollection(object):
schema = NodeSchema()
def __init__(self, add_node: AddNode, find_all_nodes: FindAllNodes):
self.add_node = add_node
self.find_all_nodes = find_all_nodes
def on_post(self, req, resp):
"""
---
summary: Add a node
responses:
201:
description: Created
schema: Node
"""
node_resource = req.context['json']
node = Node(id=node_resource['id'], name=node_resource['name'])
try:
self.add_node.execute(node)
resp.body = json.dumps(node_resource)
resp.status = falcon.status_codes.HTTP_201
except EntityAlreadyExistsException:
# todo response error body
resp.status = falcon.status_codes.HTTP_422
def on_get(self, req, resp):
"""
---
summary: Find all nodes
responses:
200:
description: OK
"""
nodes = self.find_all_nodes.execute()
schema = NodeSchema(many=True)
result = schema.dump(NodeAssembler.assemble_collection(nodes)) # OR UserSchema().dump(users, many=True)
resp.body = json.dumps(result)
resp.status = falcon.status_codes.HTTP_200
class EdgeCollection(object):
schema = EdgeSchema()
def __init__(self, add_edge: AddEdge, find_all_edges: FindAllEdges):
self.add_use_case = add_edge
self.find_all_use_case = find_all_edges
def on_post(self, req, resp):
"""
---
summary: Add an edge
responses:
201:
description: Created
schema: Edge
"""
edge_resource = req.context['json']
edge = Edge(edge_resource['source'], edge_resource['destination'], edge_resource['cost'])
try:
self.add_use_case.execute(edge)
resp.body = json.dumps(edge_resource)
resp.status = falcon.status_codes.HTTP_201
except EntityAlreadyExistsException:
# todo response error body
resp.status = falcon.status_codes.HTTP_422
def on_get(self, req, resp):
"""
---
summary: Find all edges
responses:
200:
description: OK
"""
edges = self.find_all_use_case.execute()
schema = EdgeSchema(many=True)
result = schema.dump(EdgeAssembler.assemble_collection(edges)) # OR UserSchema().dump(users, many=True)
resp.body = json.dumps(result)
resp.status = falcon.status_codes.HTTP_200
| [
"AddNode",
"FindAllNodes",
"AddEdge",
"FindAllEdges"
] | [
686,
711,
2149,
2174
] | [
693,
723,
2156,
2186
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/rest/schemas.py | from marshmallow import Schema, fields
from graphx.core.entities import Node as DomainNode
class Node(Schema):
"""Node schema"""
id = fields.String(required=False)
name = fields.String(required=False)
@classmethod
def from_domain_object(cls, node: DomainNode):
object = cls()
object.id = node.id
object.name = node.name
return object
class Edge(Schema):
"""Edge schema"""
source = fields.String()
destination = fields.String()
cost = fields.Integer()
@classmethod
def from_domain_object(cls, edge):
object = cls()
object.source = edge.source
object.destination = edge.destination
object.cost = edge.cost
return object
| [
"DomainNode"
] | [
272
] | [
282
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/use_cases/__init__.py | from .add_node import AddNode
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/use_cases/add_edge.py | from dataclasses import dataclass
from injector import inject
from graphx.core.data_providers.memory import MemoryNodeRepository
from graphx.core.entities import Edge
@inject
@dataclass
class AddEdge:
repository: MemoryNodeRepository
def execute(self, edge: Edge):
# todo assert nodes exist before adding
self.repository.add_edge(edge)
| [
"Edge"
] | [
271
] | [
275
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/use_cases/add_node.py | from dataclasses import dataclass
from injector import inject
from graphx.core.data_providers.memory import Node, MemoryNodeRepository
@inject
@dataclass
class AddNode:
repository: MemoryNodeRepository
def execute(self, node: Node):
self.repository.save(node)
| [
"Node"
] | [
239
] | [
243
] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/use_cases/find_all_edges.py | from dataclasses import dataclass
from typing import List
from injector import inject
from graphx.core.data_providers.memory import Edge, MemoryNodeRepository
@inject
@dataclass
class FindAllEdges:
repository: MemoryNodeRepository
def execute(self) -> List[Edge]:
return self.repository.find_all_edges()
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/src/graphx/core/use_cases/find_all_nodes.py | from dataclasses import dataclass
from typing import List
from injector import inject
from graphx.core.data_providers.memory import Node, MemoryNodeRepository
@inject
@dataclass
class FindAllNodes:
repository: MemoryNodeRepository
def execute(self) -> List[Node]:
return self.repository.find_all_nodes()
| [] | [] | [] |
archives/0mars_graphx.zip | packages/graphx/tests/__init__.py | import sys
import os
sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../src/"))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/setup.py | from os.path import abspath, dirname, join as pjoin
from setuptools import setup, find_packages
root = dirname(abspath(__file__))
def execfile(fname, globs, locs=None):
locs = locs or globs
exec(compile(open(fname).read(), fname, "exec"), globs, locs)
source_path = 'src'
packages = find_packages(source_path)
root_packages = [
package
for package in packages
if "." not in package
]
assert len(root_packages) == 1
package = root_packages[0]
package_directory = pjoin(root, source_path, package)
def get_variable_from_file(filepath, variable):
filepath_in_package = pjoin(package_directory, filepath)
globs = {}
execfile(filepath_in_package, globs)
variable_value = globs[variable]
return variable_value
version = get_variable_from_file('_version.py', '__version__')
with open('requirements.txt') as f:
required = f.read().splitlines()
required = [requirement for requirement in required if 'http' not in requirement]
setup(
name=package,
version=version,
python_requires='>=3.6',
description='',
classifiers=[
'Development Status :: Stable',
'License :: OSI Approved :: General Public License v3 or later (AGPLv3+)',
'Programming Language :: Python :: 3.7',
'Intended Audience :: Developers'
],
packages=packages,
package_dir={'': source_path},
include_package_data=True,
package_data={package: []},
install_requires=required,
license='AGPL-3.0-or-later',
extras_require={
'test': [
'pytest'
]
}
)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/src/injector_provider/__init__.py | from .providers import InjectorProvider
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/src/injector_provider/_version.py | version_info = [0, 0, 1]
__version__ = "0.0.1"
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/src/injector_provider/providers.py | from typing import List
from injector import Injector
class InjectorProvider(object):
def __init__(self):
self.configurators: List = []
self.tainted: bool = True
self.injector: Injector = None
def get_injector(self) -> Injector:
if self.tainted:
self.injector = Injector(self.configurators)
self.clean()
return self.injector
else:
return self.injector
def add_configurator(self, configurator) -> None:
self.configurators.append(configurator)
self.taint()
def taint(self) -> None:
self.tainted = True
def clean(self) -> None:
self.tainted = False
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/tests/__init__.py | import sys
import os
sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../src/"))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/injector_provider/tests/test_injector_provider.py | from unittest.mock import patch
from injector import Injector, Module
from injector_provider.providers import InjectorProvider
class TestObjectGraphBuilder:
def test_can_build_without_any_configurations(self):
provider = InjectorProvider()
assert isinstance(provider.get_injector(), Injector)
@patch('injector_provider.providers.Injector.__init__')
def test_add_class(self, mocked_injector_init):
mocked_injector_init.return_value = None
provider = InjectorProvider()
class Configurator(Module):
pass
configurator1 = Configurator()
provider.add_configurator(configurator1)
provider.get_injector()
mocked_injector_init.assert_called_once_with([configurator1])
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/setup.py | from glob import glob
from os.path import abspath, dirname, join as pjoin
from setuptools import setup, find_packages
root = dirname(abspath(__file__))
def execfile(fname, globs, locs=None):
locs = locs or globs
exec(compile(open(fname).read(), fname, "exec"), globs, locs)
source_path = 'src'
packages = find_packages(source_path)
root_packages = [
package
for package in packages
if "." not in package
]
assert len(root_packages) == 1
package = root_packages[0]
package_directory = pjoin(root, source_path, package)
def get_variable_from_file(filepath, variable):
filepath_in_package = pjoin(package_directory, filepath)
globs = {}
execfile(filepath_in_package, globs)
variable_value = globs[variable]
return variable_value
version = get_variable_from_file('_version.py', '__version__')
install_requires = get_variable_from_file(
'_install_requires.py', 'install_requires')
setup(
name=package,
version=version,
python_requires='>=3.6',
author="PyMedPhys Contributors",
author_email="developers@pymedphys.com",
description='',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Medical Science Apps.',
'Topic :: Scientific/Engineering :: Physics',
'Intended Audience :: Science/Research',
'Intended Audience :: Healthcare Industry'
],
packages=packages,
package_dir={'': source_path},
include_package_data=True,
package_data={package: []},
license='AGPL-3.0-or-later',
install_requires=install_requires,
extras_require={
'test': [
'pytest'
]
}
)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/_install_requires.py | install_requires = [
"networkx",
"semver",
"stdlib_list"
]
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/_version.py | version_info = [0, 10, 0, 'dev0']
__version__ = "0.10.0dev0"
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/app/__init__.py | from .api import package_wheels
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/app/api.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
from .wheels import build_wheels_with_yarn, copy_wheels
def package_wheels(pymedphys_dir):
app_directory = os.path.join(pymedphys_dir, 'app')
wheels_directory = os.path.join(app_directory, 'public', 'python-wheels')
packages_directory = os.path.join(pymedphys_dir, 'packages')
build_wheels_with_yarn()
copy_wheels(packages_directory, wheels_directory)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/app/wheels.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import shutil
from glob import glob
import subprocess
import json
WHITELIST = (
'pymedphys_base',
'pymedphys_coordsandscales',
'pymedphys_dicom',
'pymedphys_fileformats',
'pymedphys_utilities',
'pymedphys_mudensity',
'pymedphys_gamma',
'pymedphys')
def build_wheels_with_yarn():
yarn = shutil.which("yarn")
subprocess.call([yarn, "pypi:clean"])
for package in WHITELIST:
subprocess.call(
[yarn, "lerna", "run", "pypi:build", "--scope={}".format(package)])
def copy_wheels(packages_dir, new_dir):
wheel_filepaths = glob(os.path.join(packages_dir, '*', 'dist', '*.whl'))
pymedphys_wheel_urls = []
for filepath in wheel_filepaths:
filename = os.path.basename(filepath)
if not filename.split('-')[0] in WHITELIST:
continue
pymedphys_wheel_urls.append(filename)
new_filepath = os.path.join(new_dir, filename)
shutil.copy(filepath, new_filepath)
filenames_filepath = os.path.join(new_dir, 'paths.json')
with open(filenames_filepath, 'w') as filenames_file:
json.dump(pymedphys_wheel_urls, filenames_file)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/clean/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/clean/core.py | import shutil
try:
shutil.rmtree('dist')
except FileNotFoundError:
pass
try:
shutil.rmtree('build')
except FileNotFoundError:
pass
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/docs/__init__.py | from .api import pre_docs_build
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/docs/api.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
from ..draw import draw_all
from .graphs import write_graphs_rst
def pre_docs_build(pymedphys_dir):
    """Regenerate the dependency-graph SVGs and their RST index.

    Runs before the Sphinx build so that ``<pymedphys_dir>/docs/graphs``
    holds up-to-date drawings for every package.
    """
    graphs_dir = os.path.join(pymedphys_dir, 'docs', 'graphs')
    draw_all(graphs_dir)
    write_graphs_rst(graphs_dir)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/docs/graphs.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import textwrap
from glob import glob
from ..draw.utilities import remove_postfix
ROOT = os.getcwd()
def write_graphs_rst(save_directory):
    """Generate ``graphs.rst`` indexing every SVG graph in *save_directory*.

    For each ``<module>.svg`` found, an RST section titled with the module
    name is emitted that inlines the SVG via ``.. raw:: html``.
    """
    search_string = os.path.join(save_directory, "*.svg")
    # Sorting key is the (root, ext) tuple -- effectively alphabetical here.
    svg_files = [
        os.path.basename(filepath)
        for filepath in sorted(glob(search_string), key=os.path.splitext)
    ]

    modules = [remove_postfix(filepath, '.svg') for filepath in svg_files]

    # Image paths are relative to the location of the generated RST file.
    images_paths = ["../graphs/{}.svg".format(module) for module in modules]

    sections = ".. This is automatically generated. DO NOT DIRECTLY EDIT.\n\n"
    for module, images_path in zip(modules, images_paths):
        # RST titles need an over/underline at least as long as the text.
        header_border = '*' * len(module)
        sections += textwrap.dedent("""\
            {0}
            {1}
            {0}

            `Back to pymedphys <#pymedphys>`_

            .. raw:: html
                :file: {2}

        """.format(header_border, module, images_path))

    save_file = os.path.join(save_directory, 'graphs.rst')
    with open(save_file, 'w') as file:
        file.write(sections)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/__init__.py | from .api import draw_all
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/api.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
from .packages import draw_packages
from .directories import draw_directory_modules
from .files import draw_file_modules
def draw_all(save_directory):
    """Draw every dependency-graph variant into *save_directory*."""
    for renderer in (draw_packages, draw_directory_modules, draw_file_modules):
        renderer(save_directory)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/directories.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import networkx as nx
from copy import copy
from ..tree import PackageTree
from .utilities import (
save_dot_file, remove_prefix, get_levels, create_labels, create_href)
ROOT = os.getcwd()
def draw_directory_modules(save_directory):
    """Draw one module-level dependency graph per internal package.

    Builds, for every package under ``packages/`` (except the ``pymedphys``
    metapackage), the map of which top-level modules depend on which, then
    delegates the drawing to ``build_graph_for_a_module``.
    """
    package_tree = PackageTree(os.path.join(ROOT, 'packages'))

    internal_packages = copy(package_tree.roots)
    internal_packages.remove('pymedphys')  # the metapackage is not graphed

    # Direct children of each package node are its directory modules.
    module_paths = [
        item
        for package in internal_packages
        for item in package_tree.digraph.neighbors(package)
    ]

    # Filesystem path -> dotted module name.
    modules = {
        item: os.path.splitext(item)[0].replace(os.sep, '.')
        for item in module_paths
    }

    # For each module, the set of ``package.module`` names it depends on,
    # truncated to the first two dotted components.
    dependencies = {
        module.replace(os.sep, '.'): {
            '.'.join(item.split('.')[0:2])
            for item in
            package_tree.descendants_dependencies(module)['internal_module'] +
            package_tree.descendants_dependencies(module)['internal_package']
            # package_tree.descendants_dependencies(module)['internal_file'] +
            # list(package_tree.imports[module]['internal_module']) +
            # list(package_tree.imports[module]['internal_package']) +
            # list(package_tree.imports[module]['internal_file'])
        }
        for module in modules.keys()
    }

    # Invert the dependency map to get, per module, who depends on it.
    dependents = {  # type: ignore
        key: set() for key in dependencies.keys()
    }

    try:
        for key, values in dependencies.items():
            for item in values:
                dependents[item].add(key)  # type: ignore
    except KeyError:
        # A module depends on a name that was never mapped; dump the state
        # to aid diagnosis, then re-raise.
        print("\n{}".format(dependents.keys()))
        print("\n{}".format(dependencies))
        raise

    for package in internal_packages:
        build_graph_for_a_module(
            package, package_tree, dependencies, dependents, save_directory)
def build_graph_for_a_module(graphed_package, package_tree, dependencies,
                             dependents, save_directory):
    """Render ``<package>.svg``: the module graph of one internal package.

    The package's own modules are drawn inside a dashed cluster, ranked by
    internal dependency depth; modules of other packages that depend on (or
    are depended on by) this package appear outside the cluster.

    Parameters mirror ``draw_directory_modules``: *dependencies* and
    *dependents* map dotted module names to sets of dotted module names.
    """
    print(graphed_package)
    current_modules = sorted([
        item.replace(os.sep, '.')
        for item in package_tree.digraph.neighbors(graphed_package)
    ])

    outfilepath = os.path.join(
        save_directory, "{}.svg".format(graphed_package.replace(os.sep, '.')))

    if not current_modules:
        # Package with no modules: draw just an empty dashed cluster.
        dot_file_contents = """
        strict digraph {{
            subgraph cluster_0 {{
                "";
                label = "{}";
                style = dashed;
            }}
        }}
        """.format(graphed_package)
        save_dot_file(dot_file_contents, outfilepath)
        return

    # Intra-package relationships drive the rank (column) of each module.
    module_internal_relationships = {
        module.replace(os.sep, '.'): [
            '.'.join(item.split('.')[0:2])
            for item in
            package_tree.descendants_dependencies(module)['internal_module']
        ]
        for module in sorted(list(package_tree.digraph.neighbors(graphed_package)))
    }

    levels = get_levels(module_internal_relationships)

    internal_nodes = sorted(list(set(module_internal_relationships.keys())))
    external_nodes = set()
    for module in current_modules:
        external_nodes |= dependencies[module]
        external_nodes |= dependents[module]
    external_nodes = sorted(list(external_nodes))

    all_nodes = internal_nodes + external_nodes

    def simplify(text):
        # Display names drop this package's prefix and the pymedphys_ prefix.
        text = remove_prefix(text, "{}.".format(graphed_package))
        text = remove_prefix(text, 'pymedphys_')
        return text

    label_map = {
        node: simplify(node)
        for node in all_nodes
    }

    nodes = ""
    for level in range(max(levels.keys()) + 1):
        if levels[level]:
            grouped_packages = '"; "'.join(sorted(list(levels[level])))
            nodes += """
            {{ rank = same; "{}"; }}
        """.format(grouped_packages)

    edges = ""
    current_packages = ""
    current_dependents = set()
    current_dependencies = set()

    for module in current_modules:
        current_packages += '"{}";\n'.format(module)

        for dependency in sorted(list(dependencies[module])):
            edges += '"{}" -> "{}";\n'.format(module, dependency)
            if dependency not in current_modules:
                current_dependencies.add(dependency)

        for dependent in sorted(list(dependents[module])):
            edges += '"{}" -> "{}";\n'.format(dependent, module)
            if dependent not in current_modules:
                current_dependents.add(dependent)

    # External dependents share one rank; external dependencies another.
    external_ranks = ""
    if current_dependents:
        grouped_dependents = '"; "'.join(sorted(list(current_dependents)))
        external_ranks += '{{ rank = same; "{}"; }}\n'.format(
            grouped_dependents)

    if current_dependencies:
        grouped_dependencies = '"; "'.join(sorted(list(current_dependencies)))
        external_ranks += '{{ rank = same; "{}"; }}\n'.format(
            grouped_dependencies)

    external_labels = create_labels(label_map)

    dot_file_contents = """
    strict digraph {{
        rankdir = LR;

        subgraph cluster_0 {{
            {}
            label = "{}";
            URL = "{}";
            style = dashed;
            {}
        }}

        {}

        {}

        {}
    }}
    """.format(
        current_packages, graphed_package, create_href(graphed_package),
        nodes, external_labels, external_ranks, edges)

    save_dot_file(dot_file_contents, outfilepath)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/files.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import textwrap
import networkx as nx
from copy import copy
from ..tree import PackageTree
from .utilities import (
save_dot_file, remove_prefix, get_levels, remove_prefix, remove_postfix,
convert_path_to_package, create_labels, create_href)
ROOT = os.getcwd()
def draw_file_modules(save_directory):
    """Write one ``<package>.<directory>.svg`` file-level graph per directory.

    For every directory module of every internal package (except the
    ``pymedphys`` metapackage), the directory's files are drawn inside a
    nested cluster, ranked by their internal dependency depth, together
    with the modules that depend on them or that they depend on.
    """
    package_tree = PackageTree(os.path.join(ROOT, 'packages'))

    internal_packages = copy(package_tree.roots)
    internal_packages.remove('pymedphys')  # the metapackage is not graphed

    directory_module_paths = [
        module_path
        for package in internal_packages
        for module_path in package_tree.digraph.neighbors(package)
    ]

    file_module_paths = [
        item
        for directory_module_path in directory_module_paths
        for item in package_tree.digraph.neighbors(directory_module_path)
    ]

    # Filesystem path -> dotted module name, for directories and files alike.
    module_map = {
        item: convert_path_to_package(item)
        for item in directory_module_paths + file_module_paths
    }

    # Dotted module -> {import category -> [dotted dependency, ...]}.
    dependencies = {
        convert_path_to_package(module): {
            key: [
                convert_path_to_package(item)
                for item in descendants_dependencies
            ]
            for key, descendants_dependencies in package_tree.imports[module].items()
        }
        for module in module_map.keys()
    }

    dependents = {
        key: [] for key in dependencies.keys()
    }

    # Invert the map; dependencies outside the mapped set are skipped.
    for file_module, dependency_map in dependencies.items():
        for where, values in dependency_map.items():
            for item in values:
                try:
                    dependents[item].append(file_module)
                except KeyError:
                    pass

    for directory_module_path in directory_module_paths:
        directory_module = convert_path_to_package(directory_module_path)
        print(directory_module)
        package_name = directory_module.split('.')[0]

        # The directory module itself plus each file module it contains.
        current_modules = [
            convert_path_to_package(item)
            for item in package_tree.digraph.neighbors(directory_module_path)
        ] + [directory_module]

        outfilepath = os.path.join(
            save_directory, "{}.svg".format(directory_module))

        if len(current_modules) <= 1:
            # Directory with no files: draw just an empty dashed cluster.
            dot_file_contents = """
            strict digraph {{
                subgraph cluster_0 {{
                    "";
                    label = "{}";
                    style = dashed;
                }}
            }}
            """.format(directory_module)
            save_dot_file(dot_file_contents, outfilepath)
            continue

        all_current_dependencies = {
            module: dependencies[module]
            for module in current_modules
        }

        # Only internal import categories are drawn.
        keys_to_keep = {'internal_package', 'internal_module', 'internal_file'}

        current_dependencies = {
            module: [
                item
                for key, values in dependencies[module].items()
                if key in keys_to_keep
                for item in values
            ]
            for module in current_modules
        }

        current_dependents = {
            module: dependents[module]
            for module in current_modules
        }

        all_nodes = sorted(list(set([
            *current_dependencies.keys(),
            *[
                item
                for a_list in current_dependencies.values()
                for item in a_list],
            *current_dependents.keys(),
            *[
                item
                for a_list in current_dependents.values()
                for item in a_list]
        ])))

        # Dependencies that stay within this cluster drive the ranking.
        internal_dependencies = {
            key: [
                value
                for value in values
                if value in current_modules
            ]
            for key, values in current_dependencies.items()
            if key in current_modules
        }

        internal_ranks = ""
        levels = get_levels(internal_dependencies)

        for level in range(max(levels.keys()) + 1):
            if levels[level]:
                grouped_packages = '"; "'.join(sorted(list(levels[level])))
                internal_ranks += textwrap.dedent("""\
                    {{ rank = same; "{}"; }}
                """.format(grouped_packages))

        # Nodes of the same package but another directory belong in the
        # outer (package) cluster rather than the inner (directory) one.
        in_same_module_other_dir = [
            node
            for node in all_nodes
            if node.startswith(package_name)
            and not node.startswith(directory_module)]

        if in_same_module_other_dir:
            in_same_module_other_dir_string = '"{}";'.format(
                '";\n"'.join(in_same_module_other_dir))
        else:
            in_same_module_other_dir_string = ''

        def simplify(text):
            # Display names drop the package prefix and the pymedphys_ prefix.
            text = remove_prefix(text, "{}.".format(package_name))
            text = remove_prefix(text, 'pymedphys_')
            return text

        label_map = {
            node: simplify(node)
            for node in all_nodes
        }

        label_map_str = ""
        for node, label in label_map.items():
            label_map_str += '"{}" [label="{}"] {};\n'.format(
                node, label, get_github_url(node))

        edges = ""
        for module in sorted(current_modules):
            for dependency in sorted(list(current_dependencies[module])):
                edges += '"{}" -> "{}";\n'.format(module, dependency)

            for dependent in sorted(list(current_dependents[module])):
                edges += '"{}" -> "{}";\n'.format(dependent, module)

        dot_file_contents = textwrap.dedent("""\
            strict digraph {{
                rankdir = LR;

                subgraph cluster_0 {{
                    {}
                    label = "{}";
                    URL = "{}";
                    style = dashed;

                    subgraph cluster_1 {{
                        {}
                        label = "{}";
                        URL = "{}"
                    }}
                }}
            {}
            {}}}
        """).format(
            in_same_module_other_dir_string,
            package_name,
            create_href(package_name),
            textwrap.indent(internal_ranks, ' '*12),
            directory_module,
            create_href(directory_module),
            textwrap.indent(label_map_str, ' '*4),
            textwrap.indent(edges, ' '*4))

        save_dot_file(dot_file_contents, outfilepath)
def get_github_url(module):
    """Build a Graphviz ``[URL=...]`` attribute linking *module* to GitHub.

    A three-component dotted name (``package.directory.file``) is treated as
    a file and gets a ``.py`` suffix; shorter names are treated as
    directories within the package's ``src`` tree.
    """
    parts = module.split('.')
    url_path = '/'.join(parts)
    if len(parts) == 3:
        url_path += '.py'

    url = "https://github.com/pymedphys/pymedphys/blob/master/packages/{}/src/{}".format(
        parts[0], url_path
    )
    return '[URL="{}"]'.format(url)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/packages.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import networkx as nx
from ..tree.build import PackageTree
from .utilities import save_dot_file, create_link
ROOT = os.getcwd()
def draw_packages(save_directory):
    """Draw the top-level dependency graph of all internal packages."""
    print('pymedphys')
    tree = PackageTree('packages').package_dependencies_dict
    tree.pop('pymedphys')  # the metapackage itself is not drawn

    internal_packages = tuple(tree.keys())

    dag = nx.DiGraph()
    for package in sorted(tree, reverse=True):
        internal_deps = tree[package]['internal']
        dag.add_node(package)
        dag.add_nodes_from(internal_deps)
        dag.add_edges_from((package, dep) for dep in internal_deps)

    levels = get_levels(dag, internal_packages)

    dot_contents = build_dot_contents(dag, levels)
    save_dot_file(dot_contents, os.path.join(save_directory, 'pymedphys.svg'))
def get_levels(dag, internal_packages):
    """Rank nodes: externals at level 0, internals above their dependencies.

    Returns a dict mapping each level number to the list of packages on it.
    """
    ordering = list(nx.topological_sort(dag))

    level_map = {}
    for package in reversed(ordering):
        if package not in internal_packages:
            level_map[package] = 0
            continue

        dependency_levels = {0}
        for dependency in nx.descendants(dag, package):
            if dependency in internal_packages and dependency in level_map:
                dependency_levels.add(level_map[dependency])

        level_map[package] = max(dependency_levels) + 1

    levels = {
        level: []
        for level in range(max(level_map.values()) + 1)
    }
    for package, level in level_map.items():
        levels[level].append(package)

    return levels
def remove_prefix(text, prefix):
    """Strip *prefix* from *text*; raise ``ValueError`` if it is absent."""
    if not text.startswith(prefix):
        raise ValueError("Prefix not found.")
    return text[len(prefix):]
def build_dot_contents(dag, levels):
    """Serialise *dag* into Graphviz dot source, ranking nodes by *levels*.

    Node names have the ``pymedphys_`` prefix stripped for display; each
    node carries a URL attribute linking to its docs anchor.
    """
    nodes = ""
    for level in range(max(levels.keys()) + 1):
        if levels[level]:
            trimmed_nodes = [
                '"{}" {}'.format(
                    remove_prefix(node, 'pymedphys_'), create_link(node))
                for node in levels[level]
            ]
            grouped_packages = '; '.join(trimmed_nodes)
            # Nodes on the same level render at the same rank (column).
            nodes += """
            {{ rank = same; {}; }}
        """.format(grouped_packages)

    edges = ""
    for edge in dag.edges():
        trimmed_edge = [
            remove_prefix(node, 'pymedphys_') for node in edge
        ]
        edges += "{} -> {};\n".format(*trimmed_edge)

    dot_file_contents = """
    strict digraph {{
        rankdir = LR;
        {}\n{}
    }}
    """.format(nodes, edges)

    return dot_file_contents
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/draw/utilities.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import shutil
import subprocess
import networkx as nx
def save_dot_file(dot_contents, outfilepath):
    """Transitively reduce *dot_contents* with ``tred`` and render it to SVG.

    Writes the rendered SVG to *outfilepath* and the reduced dot source
    next to it (same name, ``.dot`` extension).  If Graphviz is not on the
    PATH, a notice is printed and nothing is written.

    Fixes over the previous revision: the ``tred`` pipe is no longer read
    from an unmanaged ``Popen`` (whose unused return code silently hid
    failures), and the dead ``output`` variable is gone.
    """
    tred = shutil.which("tred")
    dot = shutil.which("dot")

    if not tred or not dot:
        print(
            "Graph not drawn, please install graphviz and add it to "
            "your path.\nOn Windows this is done with "
            "`choco install graphviz.portable`.\n")
        return

    with open("temp.dot", 'w') as file:
        file.write(dot_contents)

    try:
        # Transitive reduction: drop edges already implied by longer paths.
        # check_output raises CalledProcessError if tred fails, instead of
        # silently writing an empty reduced file.
        data = subprocess.check_output([tred, 'temp.dot'])

        with open("temp_reduced.dot", 'wb') as file:
            file.write(data)

        subprocess.check_output(
            [dot, '-Tsvg', 'temp_reduced.dot', '-o', 'temp.svg'])

        shutil.move("temp.svg", outfilepath)
        shutil.move("temp_reduced.dot", os.path.splitext(
            outfilepath)[0] + ".dot")
    finally:
        os.remove("temp.dot")
def remove_prefix(text, prefix):
    """Return *text* without its leading *prefix*, or unchanged if absent."""
    return text[len(prefix):] if text.startswith(prefix) else text
def get_levels(dependency_map):
    """Rank each node one level above the highest-ranked of its descendants.

    Returns a dict mapping each level number to the list of nodes on it.
    """
    dag = dag_from_hashmap_of_lists(dependency_map)

    level_map = {}
    for node in reversed(list(nx.topological_sort(dag))):
        known_levels = {
            level_map[descendant]
            for descendant in sorted(nx.descendants(dag, node))
            if descendant in level_map
        }
        level_map[node] = max(known_levels | {0}) + 1

    levels = {
        level: []
        for level in range(max(level_map.values()) + 1)
    }
    for node, level in level_map.items():
        levels[level].append(node)

    return levels
def dag_from_hashmap_of_lists(dictionary):
    """Build a DiGraph with an edge from each key to each of its values."""
    dag = nx.DiGraph()

    for key in sorted(dictionary, reverse=True):
        values = sorted(dictionary[key], reverse=True)
        dag.add_node(key)
        dag.add_nodes_from(values)
        dag.add_edges_from((key, value) for value in values)

    return dag
def remove_postfix(text, postfix):
    """Return *text* without its trailing *postfix*, or unchanged if absent.

    An empty *postfix* is a no-op: the previous ``text[:-len(postfix)]``
    slice evaluated to ``text[:0]`` for ``postfix == ""`` and wrongly
    returned an empty string.
    """
    if postfix and text.endswith(postfix):
        return text[:-len(postfix)]
    return text
def convert_path_to_package(path):
    """Translate a relative file path into a dotted module name."""
    dotted = path.replace(os.sep, '.')
    return remove_postfix(dotted, '.py')
def create_href(text):
    """Map a module name to its docs anchor (underscores/dots -> dashes)."""
    anchor = text.replace('_', '-').replace('.', '-')
    return '#' + anchor
def create_link(text):
    """Wrap the docs anchor for *text* in a Graphviz URL attribute."""
    href = create_href(text)
    return '[URL="' + href + '"]'
def create_labels(label_map):
    """Emit one dot ``[label=...]`` + URL line per node in *label_map*."""
    lines = [
        '"{}" [label="{}"] {};\n'.format(node, label, create_link(node))
        for node, label in label_map.items()
    ]
    return "".join(lines)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/parse/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/parse/imports.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import ast
from stdlib_list import stdlib_list
# Names of every standard-library module, used to classify imports.
STDLIB = set(stdlib_list())

# AST node types produced by plain ``import`` statements.
IMPORT_TYPES = {
    type(ast.parse('import george').body[0]),  # type: ignore
    type(ast.parse('import george as macdonald').body[0])}  # type: ignore

# AST node type produced by ``from ... import ...`` statements.
IMPORT_FROM_TYPES = {
    type(ast.parse('from george import macdonald').body[0])  # type: ignore
}

ALL_IMPORT_TYPES = IMPORT_TYPES.union(IMPORT_FROM_TYPES)

# Import-name -> PyPI distribution-name corrections.
CONVERSIONS = {
    'attr': 'attrs',
    'PIL': 'Pillow',
    'Image': 'Pillow',
    'mpl_toolkits': 'matplotlib',
    'dateutil': 'python_dateutil'
}
def get_imports(filepath, relative_filepath, internal_packages, depth):
    """Classify every import in the Python file at *filepath*.

    Returns a dict grouping the imported names into ``stdlib``,
    ``external``, ``internal_package``, ``internal_module`` and
    ``internal_file`` sets.  *relative_filepath* is the path relative to
    the packages root; *depth* (1 or 2) is how many directories deep the
    file sits inside its package, which determines how relative imports
    resolve.
    """
    with open(filepath, 'r') as file:
        data = file.read()

    parsed = ast.parse(data)
    imports = [
        node for node in ast.walk(parsed) if type(node) in ALL_IMPORT_TYPES]

    stdlib_imports = set()
    external_imports = set()
    internal_package_imports = set()
    internal_module_imports = set()
    internal_file_imports = set()

    def get_base_converted_module(name):
        # Reduce to the top-level name, mapped through known PyPI renames.
        name = name.split('.')[0]

        try:
            name = CONVERSIONS[name]
        except KeyError:
            pass

        return name

    def add_level_0(name):
        # Classify an absolute (level-0) import.
        base_converted = get_base_converted_module(name)

        if base_converted in STDLIB:
            stdlib_imports.add(base_converted)
        elif base_converted in internal_packages:
            internal_package_imports.add(name)
        else:
            external_imports.add(base_converted)

    for an_import in imports:

        if type(an_import) in IMPORT_TYPES:
            for alias in an_import.names:
                add_level_0(alias.name)

        elif type(an_import) in IMPORT_FROM_TYPES:
            if an_import.level == 0:
                add_level_0(an_import.module)

            elif an_import.level == 1 and depth == 2:
                # ``from . import x`` two levels deep targets a sibling
                # file module of the same directory.
                module_path = (
                    relative_filepath.split(os.sep)[0:2] + [an_import.module])
                internal_file_imports.add('.'.join(module_path))

            elif (
                    (an_import.level == 1 and depth == 1) or
                    (an_import.level == 2 and depth == 2)):
                # Relative import that resolves to a directory module of
                # the same package.
                module_path = (
                    relative_filepath.split(os.sep)[0:1] + [an_import.module])
                internal_module_imports.add('.'.join(module_path))

            else:
                raise ValueError(
                    "Unexpected depth and import level of relative "
                    "import")

        else:
            raise TypeError("Unexpected import type")

    return {
        'stdlib': stdlib_imports,
        'external': external_imports,
        'internal_package': internal_package_imports,
        'internal_module': internal_module_imports,
        'internal_file': internal_file_imports
    }
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/propagate/__init__.py | [] | [] | [] |
|
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/propagate/dependencies.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import textwrap
import json
from glob import glob
ROOT = os.getcwd()
def main():
    """Propagate ``dependencies.json`` into every package's metadata.

    Regenerates each package's ``_install_requires.py`` (PyPI requirement
    strings with version pins) and rewrites the internal entries of each
    ``package.json``'s ``dependencies`` from the pinned npm versions.
    """
    with open(os.path.join(ROOT, 'dependencies.json'), 'r') as file:
        dependencies_data = json.load(file)

    tree = dependencies_data['tree']
    pypi_pins = dependencies_data['pins']['pypi']
    npm_pins = dependencies_data['pins']['npm']

    internal_packages = [
        os.path.basename(filepath)
        for filepath in glob(os.path.join(ROOT, 'packages', '*'))
    ]

    # The tree and the pin tables must cover exactly the packages on disk;
    # on mismatch, print both differences before re-raising.
    try:
        assert set(internal_packages) == set(tree.keys())
    except AssertionError:
        print("Internal packages not in tree: {}".format(
            set(internal_packages).difference(set(tree.keys()))))
        print("Tree packages not in internal: {}".format(
            set(tree.keys()).difference(set(internal_packages))))
        raise

    try:
        assert set(internal_packages) == set(pypi_pins['internal'].keys())
    except AssertionError:
        internal = set(internal_packages)
        pypi = set(pypi_pins['internal'].keys())
        print("Internal packages not in pinned: {}".format(
            internal.difference(pypi)))
        print("Pinned packages not in internal: {}".format(
            pypi.difference(internal)))
        raise

    assert set(internal_packages) == set(npm_pins['internal'].keys())

    for package, dependency_store in tree.items():
        # Build the sorted PyPI requirement list from internal + external
        # dependencies; a missing pin means an unpinned requirement.
        install_requires = []

        keys_to_keep = {'internal', 'external'}

        for where, dependencies in dependency_store.items():
            if where in keys_to_keep:
                for dependency in dependencies:
                    try:
                        pin = " " + pypi_pins[where][dependency]
                    except KeyError:
                        pin = ""

                    requirement_string = dependency + pin
                    install_requires.append(requirement_string)

        install_requires.sort()

        install_requires_filepath = os.path.join(
            ROOT, "packages", package, "src", package, "_install_requires.py")

        install_requires_contents = textwrap.dedent("""\
            install_requires = {}
        """).format(json.dumps(install_requires, indent=4))

        with open(install_requires_filepath, 'w') as file:
            file.write(install_requires_contents)

    for package, dependency_store in tree.items():
        # Rewrite package.json dependencies: pinned internal versions plus
        # whatever external dependencies were already declared.
        internal_dependencies = {
            dependency: npm_pins['internal'][dependency]
            for dependency in dependency_store['internal']
        }

        package_json_filepath = os.path.join(
            ROOT, "packages", package, "package.json")

        with open(package_json_filepath, 'r') as file:
            data = json.load(file)

        try:
            # NOTE: 'package' below is comprehension-local in Python 3 and
            # does not clobber the loop variable of the same name.
            external_dependencies = {
                package: pin
                for package, pin in data['dependencies'].items()
                if package not in internal_packages
            }
        except KeyError:
            external_dependencies = {}

        data['dependencies'] = {
            **internal_dependencies,
            **external_dependencies
        }

        with open(package_json_filepath, 'w') as file:
            json.dump(data, file, indent=2, sort_keys=True)
if __name__ == "__main__":
main()
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/propagate/versions.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import json
from glob import glob
import textwrap
import semver
# Monorepo root: two directories above the package dir this script runs in.
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(os.getcwd())))
def main():
    """Propagate the version in package.json into _version.py and the repo pins.

    Reads the semver string from the current package's package.json, rewrites
    src/<package>/_version.py to match, then updates this package's pypi and
    npm version pins inside the repo-level dependencies.json.
    """
    # Exactly one _version.py is expected under src/<package>/ — the package
    # name is taken from the directory that contains it.
    version_filepath = glob(os.path.join(
        "src", "*", "_version.py"))[0]
    package_name = os.path.split(os.path.dirname(version_filepath))[-1]
    with open('package.json', 'r') as file:
        data = json.load(file)
    semver_string = data['version']
    # Split "MAJOR.MINOR.PATCH[-PRERELEASE...]" on '.' and '-'; the first
    # three fields are numeric, any remaining prerelease fields are re-joined
    # into a single trailing string.
    loaded_version_info = semver_string.replace(
        '.', ' ').replace('-', ' ').split(' ')
    version_info = [
        int(number)
        for number in loaded_version_info[0:3]
    ] + [''.join(loaded_version_info[3::])]  # type: ignore
    # Dotted numeric part plus the raw prerelease suffix (empty when none).
    __version__ = '.'.join(
        map(str, version_info[:3])) + ''.join(version_info[3:])  # type: ignore
    version_file_contents = textwrap.dedent("""\
        version_info = {}
        __version__ = "{}"
    """.format(version_info, __version__))
    with open(version_filepath, 'w') as file:
        file.write(version_file_contents)
    # Pre-1.0 packages only promise compatibility within a minor release;
    # post-1.0 within a major release — choose the pin range accordingly.
    semver_parsed = semver.parse(semver_string)
    if semver_parsed['major'] == 0:
        upper_limit = semver.bump_minor(semver_string)
        npm_version_prepend = "~"
    else:
        upper_limit = semver.bump_major(semver_string)
        npm_version_prepend = "^"
    dependencies_filepath = os.path.join(ROOT, "dependencies.json")
    with open(dependencies_filepath, 'r') as file:
        dependencies_data = json.load(file)
    dependencies_data['pins']['pypi']['internal'][package_name] = (
        ">= {}, < {}".format(__version__, upper_limit))
    dependencies_data['pins']['npm']['internal'][package_name] = (
        "{}{}".format(npm_version_prepend, semver_string))
    with open(dependencies_filepath, 'w') as file:
        json.dump(dependencies_data, file, indent=2, sort_keys=True)
if __name__ == "__main__":
main()
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/tree/__init__.py | from .build import build_tree, test_tree, PackageTree
from .check import is_imports_json_up_to_date, update_imports_json
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/tree/build.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import json
from copy import copy, deepcopy
import difflib
import networkx as nx
from ..parse.imports import get_imports
DEPENDENCIES_JSON_FILEPATH = 'dependencies.json'
DEFAULT_EXCLUDE_DIRS = {'node_modules', '__pycache__', 'dist', '.tox', 'build'}
DEFAULT_EXCLUDE_FILES = {'__init__.py', '_version.py', '_install_requires.py'}
DEFAULT_KEYS_TO_KEEP = {'stdlib', 'internal', 'external'}
class PackageTree:
    """Dependency analysis of a monorepo's packages/ directory.

    Walks every package laid out as <package>/src/<package>/..., parses the
    imports of each module, and exposes both per-module import data and an
    aggregated, memoised package-level dependency graph.
    """

    def __init__(self, directory, exclude_dirs=None, exclude_files=None):
        if exclude_dirs is None:
            exclude_dirs = DEFAULT_EXCLUDE_DIRS
        if exclude_files is None:
            exclude_files = DEFAULT_EXCLUDE_FILES
        self.exclude_dirs = exclude_dirs
        self.exclude_files = exclude_files
        # Assigning the property triggers the directory walk and graph build.
        self.directory = directory

    def trim_path(self, path):
        """Convert an absolute module path to one relative to the inner src package.

        Expects the <package>/src/<package>/... layout (the asserts enforce
        it); a package __init__.py is trimmed to its directory path.
        """
        relpath = os.path.relpath(path, self.directory)
        split = relpath.split(os.sep)
        assert split[0] == split[2]
        assert split[1] == 'src'
        if split[-1] == '__init__.py':
            split = split[:-1]
        return os.path.join(*split[2:])

    def expand_path(self, path):
        """Inverse of trim_path: map a trimmed module path back to a real file path."""
        split = path.split(os.sep)
        relpath = os.path.join(split[0], 'src', path)
        if not relpath.endswith('.py'):
            # Directory-style module -> its __init__.py file.
            relpath = os.path.join(relpath, '__init__.py')
        return os.path.join(self.directory, relpath)

    def build_directory_digraph(self):
        """Walk the tree and build the module containment digraph.

        Nodes are trimmed module paths; edges point from a package to the
        modules/subpackages it contains.  Each node's depth is recorded for
        later import resolution.  Raises ValueError when no modules exist.
        """
        digraph = nx.DiGraph()
        depth = {}
        for root, dirs, files in os.walk(self._directory, topdown=True):
            # Prune excluded directories in place so os.walk skips them.
            dirs[:] = [d for d in dirs if d not in self.exclude_dirs]
            if '__init__.py' in files:
                module = self.trim_path(os.path.join(root, '__init__.py'))
                current_depth = module.count(os.sep) + 1
                files[:] = [f for f in files if f not in self.exclude_files]
                digraph.add_node(module)
                depth[module] = current_depth
                parent_init = os.path.join(
                    os.path.dirname(root), '__init__.py')
                if os.path.exists(parent_init):
                    # Link the containing package down to this subpackage.
                    digraph.add_edge(self.trim_path(parent_init), module)
                for f in files:
                    if f.endswith('.py'):
                        filepath = self.trim_path(os.path.join(root, f))
                        digraph.add_node(filepath)
                        depth[filepath] = current_depth
                        digraph.add_edge(module, filepath)
        if not digraph.nodes:
            raise ValueError('Directory provided does not contain modules')
        self.digraph = digraph
        self.depth = depth
        self.calc_properties()

    def calc_properties(self):
        """Derive roots and per-module imports, and reset the memoisation cache."""
        # Roots are top-level packages: nodes with no incoming containment edge.
        self.roots = [n for n, d in self.digraph.in_degree() if d == 0]
        self.imports = {
            filepath: get_imports(
                self.expand_path(filepath), filepath, self.roots,
                self.depth[filepath])
            for filepath in self.digraph.nodes()
        }
        self._cache = {}
        self._cache['descendants_dependencies'] = {}

    @property
    def directory(self):
        # Root directory under analysis; setting it rebuilds everything.
        return self._directory

    @directory.setter
    def directory(self, value):
        self._directory = value
        self.build_directory_digraph()

    def descendants_dependencies(self, filepath):
        """Union of *filepath*'s imports with those of all its descendants.

        Returns a dict of sorted lists keyed by import category; results are
        memoised per filepath.
        """
        try:
            return self._cache['descendants_dependencies'][filepath]
        except KeyError:
            dependencies = deepcopy(self.imports[filepath])
            for descendant in nx.descendants(self.digraph, filepath):
                for key in dependencies:
                    dependencies[key] |= self.imports[descendant][key]
            for key in dependencies:
                dependencies[key] = list(dependencies[key])
                dependencies[key].sort()
            self._cache['descendants_dependencies'][filepath] = dependencies
            return dependencies

    @property
    def package_dependencies_dict(self):
        """Aggregated {package: {stdlib|internal|external: [top-level deps]}} mapping (memoised)."""
        try:
            return self._cache['package_dependencies_dict']
        except KeyError:
            key_map = {
                'internal_package': 'internal',
                'external': 'external',
                'stdlib': 'stdlib'
            }
            tree = {
                package: {
                    # Collapse dotted module names to their top-level package.
                    key_map[key]: sorted(
                        list({package.split('.')[0] for package in packages}))
                    for key, packages in self.descendants_dependencies(package).items()
                    if key in key_map.keys()
                }
                for package in self.roots
            }
            self._cache['package_dependencies_dict'] = tree
            return tree

    @property
    def package_dependencies_digraph(self):
        """Directed graph of internal package-to-package dependencies (memoised)."""
        try:
            return self._cache['package_dependencies_digraph']
        except KeyError:
            dag = nx.DiGraph()
            for key, values in self.package_dependencies_dict.items():
                dag.add_node(key)
                dag.add_nodes_from(values['internal'])
                edge_tuples = [
                    (key, value) for value in values['internal']
                ]
                dag.add_edges_from(edge_tuples)
            self._cache['package_dependencies_digraph'] = dag
            return dag

    def is_acyclic(self):
        """Return True when the internal package dependency graph has no cycles."""
        return nx.is_directed_acyclic_graph(self.package_dependencies_digraph)
def build_tree(directory):
    """Recompute the package dependency tree and store it in dependencies.json."""
    tree = PackageTree(directory).package_dependencies_dict
    with open(DEPENDENCIES_JSON_FILEPATH, 'r') as dependencies_file:
        contents = json.load(dependencies_file)
    contents['tree'] = tree
    with open(DEPENDENCIES_JSON_FILEPATH, 'w') as dependencies_file:
        json.dump(contents, dependencies_file, indent=2, sort_keys=True)
def test_tree(directory):
    """Assert the dependency graph is acyclic and matches dependencies.json."""
    tree = PackageTree(directory)
    assert tree.is_acyclic()
    assert_tree_unchanged(tree.package_dependencies_dict)
def assert_tree_unchanged(tree):
    """Raise AssertionError when *tree* differs from the tree in dependencies.json.

    The unified diff between the stored and freshly calculated trees is
    printed, and — unlike the original bare ``raise AssertionError`` — also
    attached to the exception so failures are diagnosable from the traceback.
    """
    with open(DEPENDENCIES_JSON_FILEPATH, 'r') as file:
        data = json.load(file)
    file_data = json.dumps(data['tree'], sort_keys=True, indent=2)
    calced_data = json.dumps(tree, sort_keys=True, indent=2)
    if file_data != calced_data:
        diff = '\n'.join(difflib.unified_diff(
            file_data.split('\n'), calced_data.split('\n')))
        print(diff)
        raise AssertionError(
            "Stored dependency tree is out of date:\n{}".format(diff))
| [] | [] | [] |
archives/0mars_graphx.zip | packages/monomanage/src/monomanage/tree/check.py | # Copyright (C) 2019 Simon Biggs
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version (the "AGPL-3.0+").
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License and the additional terms for more
# details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ADDITIONAL TERMS are also included as allowed by Section 7 of the GNU
# Affero General Public License. These additional terms are Sections 1, 5,
# 6, 7, 8, and 9 from the Apache License, Version 2.0 (the "Apache-2.0")
# where all references to the definition "License" are instead defined to
# mean the AGPL-3.0+.
# You should have received a copy of the Apache-2.0 along with this
# program. If not, see <http://www.apache.org/licenses/LICENSE-2.0>.
import os
import sys
import json
import subprocess
import shutil
from .build import PackageTree
def serialise_imports(imports):
    """Serialise a {module_path: {category: set_of_names}} mapping to pretty JSON.

    Path separators are normalised to '/' and every set of names becomes a
    sorted list, so the output is deterministic across platforms and runs.
    """
    normalised = {
        path.replace(os.sep, '/'): {
            category: sorted(names)
            for category, names in groups.items()
        }
        for path, groups in imports.items()
    }
    return json.dumps(normalised, sort_keys=True, indent=2)
def is_imports_json_up_to_date(directory):
    """Return True when imports.json matches a fresh scan of the packages/ tree."""
    imports_json_path = os.path.join(directory, 'imports.json')
    packages_path = os.path.join(directory, 'packages')
    with open(imports_json_path) as imports_file:
        recorded = json.dumps(json.load(imports_file), sort_keys=True, indent=2)
    current = serialise_imports(PackageTree(packages_path).imports)
    return recorded == current
def commit_hook(directory):
    """Pre-commit hook: regenerate the dependency tree when it is stale.

    When imports.json is out of date this runs `yarn tree`, stages the
    regenerated files with git, then exits non-zero so the user reruns
    their commit with the refreshed tree included.
    """
    if is_imports_json_up_to_date(directory):
        return
    print(
        "\n \033[1;31;1mThe dependency tree is out of date."
        "\033[1;32;1m Will now run `yarn tree` to update.\n"
        " \033[1;34;1mYou will need to rerun `git commit` after "
        "this is complete.\033[0;0m\n"
    )
    sys.stdout.flush()
    yarn = shutil.which("yarn")
    git = shutil.which("git")
    subprocess.call([yarn, "tree"])
    # Stage every file `yarn tree` may have regenerated.
    for pattern in ("imports.json", "dependencies.json", "*package.json",
                    "*_install_requires.py", "*.dot"):
        subprocess.call([git, "add", pattern])
    subprocess.call([git, "status"])
    print(
        "\n \033[1;31;1mThe dependency tree was out of date.\n"
        " \033[1;32;1mThe command `yarn tree` has been run for "
        "you.\n"
        " \033[1;34;1mPlease rerun your commit.\033[0;0m\n"
        " To prevent this message in the future run `yarn tree` "
        "whenever you change the dependency structure of "
        "PyMedPhys.\n")
    sys.exit(1)
def update_imports_json(directory):
    """Rewrite imports.json from a fresh scan of the packages/ directory."""
    serialised = serialise_imports(
        PackageTree(os.path.join(directory, 'packages')).imports)
    with open(os.path.join(directory, 'imports.json'), 'w') as imports_file:
        imports_file.write(serialised)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/registry/setup.py | from os.path import abspath, dirname, join as pjoin
from setuptools import setup, find_packages
root = dirname(abspath(__file__))
def execfile(fname, globs, locs=None):
    """Execute the Python file *fname* using *globs*/*locs* as its namespaces.

    Replacement for the Python 2 ``execfile`` builtin.  When *locs* is
    omitted the globals mapping doubles as the local namespace, mirroring
    module-level execution.
    """
    locs = locs or globs
    # Close the file handle deterministically instead of relying on garbage
    # collection (the original `open(fname).read()` leaked the handle and
    # triggers ResourceWarning under -W).
    with open(fname) as source_file:
        source = source_file.read()
    exec(compile(source, fname, "exec"), globs, locs)
# Package source lives under src/ (the "src layout").
source_path = 'src'
packages = find_packages(source_path)
# Keep only top-level packages (no dots); exactly one is expected.
root_packages = [
    package
    for package in packages
    if "." not in package
]
assert len(root_packages) == 1
package = root_packages[0]
package_directory = pjoin(root, source_path, package)
def get_variable_from_file(filepath, variable):
    """Execute *filepath* (relative to the package directory) and return the
    value it binds to *variable*."""
    namespace = {}
    execfile(pjoin(package_directory, filepath), namespace)
    return namespace[variable]
# Single-source the version from the package's _version.py file.
version = get_variable_from_file('_version.py', '__version__')
setup(
    name=package,
    version=version,
    python_requires='>=3.6',
    description='',
    classifiers=[
        'Development Status :: Stable',
        'License :: OSI Approved :: General Public License v3 or later (AGPLv3+)',
        'Programming Language :: Python :: 3.7',
        'Intended Audience :: Developers'
    ],
    packages=packages,
    package_dir={'': source_path},  # map the root package namespace to src/
    include_package_data=True,
    package_data={package: []},
    license='AGPL-3.0-or-later',
    extras_require={
        'test': [
            'pytest'
        ]
    }
)
| [] | [] | [] |
archives/0mars_graphx.zip | packages/registry/src/registry/__init__.py | from . import services
| [] | [] | [] |
archives/0mars_graphx.zip | packages/registry/src/registry/_version.py | version_info = [0, 1, 0]
__version__ = "0.1.0"
| [] | [] | [] |
archives/0mars_graphx.zip | packages/registry/src/registry/services.py | from abc import ABC, abstractmethod
from enum import Enum
class Props(Enum):
    """Base enum for keys used with Container; defines no members itself —
    applications subclass it with their own property members."""
    pass
class Container(object):
    """Minimal key-value store mapping Props members to arbitrary values."""

    def __init__(self) -> None:
        # Backing store: {Props member -> bound value}.
        self.vars = {}

    def set(self, prop: Props, value) -> None:
        """Bind *value* to *prop*, overwriting any previous binding."""
        self.vars[prop] = value

    def get(self, key: Props):
        """Return the value bound to *key*; raises KeyError when unset."""
        return self.vars[key]
class BootableService(ABC):
    """Interface for services that can be booted against a Container."""

    @abstractmethod
    def boot(self, container: Container):
        """Boot the service, resolving what it needs from *container*.

        Subclasses must override this; calling the base implementation
        raises NotImplementedError.
        """
        # Bug fix: the original raised `NotImplemented(...)`.  NotImplemented
        # is a non-callable sentinel, so that line crashed with
        # "TypeError: 'NotImplementedType' object is not callable" instead of
        # raising the intended exception.
        raise NotImplementedError('Service not implemented')
class Registry(object):
    """Collects BootableService instances and boots them all on demand."""

    def __init__(self):
        self.services: list = []

    def register(self, service: BootableService):
        """Queue *service* to be booted later."""
        self.services.append(service)

    def boot(self, container: Container):
        """Boot every registered service against *container*, in registration order."""
        for registered_service in self.services:
            registered_service.boot(container)
| [
"Props",
"Props",
"Container",
"BootableService",
"Container"
] | [
188,
259,
378,
648,
735
] | [
193,
264,
387,
663,
744
] |
archives/0mars_monoskel.zip | packages/injector_provider/setup.py | from os.path import abspath, dirname, join as pjoin
from setuptools import setup, find_packages
root = dirname(abspath(__file__))
def execfile(fname, globs, locs=None):
    """Execute the Python file *fname* using *globs*/*locs* as its namespaces.

    Replacement for the Python 2 ``execfile`` builtin.  When *locs* is
    omitted the globals mapping doubles as the local namespace, mirroring
    module-level execution.
    """
    locs = locs or globs
    # Close the file handle deterministically instead of relying on garbage
    # collection (the original `open(fname).read()` leaked the handle and
    # triggers ResourceWarning under -W).
    with open(fname) as source_file:
        source = source_file.read()
    exec(compile(source, fname, "exec"), globs, locs)
# Package source lives under src/ (the "src layout").
source_path = 'src'
packages = find_packages(source_path)
# Keep only top-level packages (no dots); exactly one is expected.
root_packages = [
    package
    for package in packages
    if "." not in package
]
assert len(root_packages) == 1
package = root_packages[0]
package_directory = pjoin(root, source_path, package)
def get_variable_from_file(filepath, variable):
    """Execute *filepath* (relative to the package directory) and return the
    value it binds to *variable*."""
    namespace = {}
    execfile(pjoin(package_directory, filepath), namespace)
    return namespace[variable]
# Single-source the version from the package's _version.py file.
version = get_variable_from_file('_version.py', '__version__')
with open('requirements.txt') as f:
    required = f.read().splitlines()
# Drop URL-based requirements (e.g. direct/VCS links), which install_requires
# cannot consume.  NOTE(review): matching the substring 'http' also drops any
# package whose *name* contains "http" — confirm that is intended.
required = [requirement for requirement in required if 'http' not in requirement]
setup(
    name=package,
    version=version,
    python_requires='>=3.6',
    description='',
    classifiers=[
        'Development Status :: Stable',
        'License :: OSI Approved :: General Public License v3 or later (AGPLv3+)',
        'Programming Language :: Python :: 3.7',
        'Intended Audience :: Developers'
    ],
    packages=packages,
    package_dir={'': source_path},  # map the root package namespace to src/
    include_package_data=True,
    package_data={package: []},
    install_requires=required,  # from requirements.txt, URL entries filtered out above
    license='AGPL-3.0-or-later',
    extras_require={
        'test': [
            'pytest'
        ]
    }
)
| [] | [] | [] |
archives/0mars_monoskel.zip | packages/injector_provider/src/injector_provider/__init__.py | from .providers import InjectorProvider
| [] | [] | [] |
archives/0mars_monoskel.zip | packages/injector_provider/src/injector_provider/_version.py | version_info = [0, 0, 1]
__version__ = "0.0.1"
| [] | [] | [] |
archives/0mars_monoskel.zip | packages/injector_provider/src/injector_provider/providers.py | from typing import List, Union
from injector import Injector
class InjectorProvider(object):
    """Lazily builds and caches an Injector from registered configurators.

    The cached Injector is rebuilt on the next request after any new
    configurator is added (the provider is then "tainted").
    """

    def __init__(self):
        self.configurators: List = []  # configurators handed to Injector on build
        self.tainted: bool = True      # True -> cache is stale and must be rebuilt
        self.injector: Union[Injector, None] = None  # cached instance, built on demand

    def get_injector(self) -> Injector:
        """Return the cached Injector, rebuilding it first when tainted."""
        if not self.tainted:
            return self.injector
        self.injector = Injector(self.configurators)
        self.clean()
        return self.injector

    def add_configurator(self, configurator) -> None:
        """Register *configurator* and mark the cached Injector stale."""
        self.configurators.append(configurator)
        self.taint()

    def taint(self) -> None:
        """Mark the cached Injector as stale."""
        self.tainted = True

    def clean(self) -> None:
        """Mark the cached Injector as fresh."""
        self.tainted = False
archives/0mars_monoskel.zip | packages/injector_provider/tests/__init__.py | import sys
import os
sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../src/"))
| [] | [] | [] |