hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f8bdfba3ce0bde25189979ebc289968a2512c766 | 1,400 | py | Python | util/plot_pbt.py | Linus4world/3D-MRI-style-transfer | 6747f0b235b8a6e773a941c222d594d9eedc6a35 | [
"BSD-3-Clause"
] | 1 | 2022-01-03T16:08:35.000Z | 2022-01-03T16:08:35.000Z | util/plot_PBT.py | Linus4world/mrs-gan | 64669251584a7421cce3a5173983a2275dcb438a | [
"BSD-2-Clause"
] | null | null | null | util/plot_PBT.py | Linus4world/mrs-gan | 64669251584a7421cce3a5173983a2275dcb438a | [
"BSD-2-Clause"
] | 1 | 2022-02-11T13:26:38.000Z | 2022-02-11T13:26:38.000Z | import math
import matplotlib.pyplot as plt
import json
import os
import warnings
warnings.filterwarnings("ignore")
def make_dataset(dir, file_ext=None):
    """Recursively collect paths of files under *dir* ending with one of *file_ext*.

    Args:
        dir: Root directory to walk; must exist and be a directory.
        file_ext: Iterable of filename suffixes (e.g. ``['.json']``). Defaults
            to no suffixes, which yields an empty list — same observable
            behaviour as the old ``file_ext=[]`` default, without the
            shared-mutable-default pitfall.

    Returns:
        List of matching file paths, in sorted ``os.walk`` order.

    Raises:
        AssertionError: if *dir* is not an existing directory.
    """
    if file_ext is None:
        file_ext = ()
    assert os.path.exists(dir) and os.path.isdir(dir), '{} is not a valid directory'.format(dir)
    paths = []
    for root, _, fnames in sorted(os.walk(dir)):
        for fname in fnames:
            # any() instead of the old bare inner loop: a file matching
            # several suffixes is now appended only once.
            if any(fname.endswith(ext) for ext in file_ext):
                paths.append(os.path.join(root, fname))
    return paths
def plotPBT(path):
    """Plot the score curve of every PBT trial found under *path* and save
    the figure as '<experiment name>.png'."""
    # The experiment name is the second-to-last path component, so *path* is
    # expected to end with a trailing slash (e.g. '.../test_feat/').
    name = path.split('/')[-2]
    result_files = sorted(make_dataset(path, ['result.json']))
    # One score series per trial; each result.json holds one JSON record per line.
    scores = []
    for result_file in result_files:
        trial_scores = []
        with open(result_file, 'r') as handle:
            for raw_line in handle:
                record = json.loads(raw_line.rstrip())
                trial_scores.append(record['score'])
        scores.append(trial_scores)
    max_iter = max(len(trial) for trial in scores)
    plt.figure()
    for trial_scores in scores:
        plt.plot(trial_scores)
    # Round the x extent up to the next multiple of 10 after adding 10% headroom.
    x = int(math.ceil(max_iter * 1.1 / 10.0)) * 10
    plt.plot(list(range(x)), [0.15] * x, 'r--')
    plt.legend([*['_nolegend_'] * len(scores), '15% error mark'])
    plt.xlabel("Steps")
    plt.ylabel("Mean Relative Error")
    plt.ylim(bottom=0)
    plt.savefig('%s.png' % name, format='png', bbox_inches='tight')
if __name__ == "__main__":
    # Script entry point; the trailing slash matters because plotPBT takes
    # the experiment name from the second-to-last path component.
    # NOTE(review): hard-coded absolute path — only works on the author's machine.
    plotPBT('/home/kreitnerl/mrs-gan/ray_results/test_feat/')
| 31.111111 | 96 | 0.594286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 202 | 0.144286 |
f8bec2e6574c370927ccaaf8971ce34b58a52c44 | 497 | py | Python | Cap_9/ex9.23/ex9.23.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | 4 | 2020-04-07T00:38:46.000Z | 2022-03-10T03:34:42.000Z | Cap_9/ex9.23/ex9.23.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | null | null | null | Cap_9/ex9.23/ex9.23.py | gguilherme42/Livro-de-Python | 465a509d50476fd1a87239c71ed741639d58418b | [
"MIT"
] | 1 | 2021-04-22T02:45:38.000Z | 2021-04-22T02:45:38.000Z | import agenda23
# Load the default agenda file, then run the interactive menu loop.
agenda23.le('Agenda.txt')

# Menu option -> handler; option 0 exits the loop, anything unknown is rejected.
acoes = {
    1: agenda23.novo,
    2: agenda23.altera,
    3: agenda23.apaga,
    4: agenda23.lista,
    5: agenda23.grava,
    6: agenda23.le,
    7: agenda23.ordena,
}
while True:
    opcao = agenda23.menu()
    if opcao == 0:
        break
    acao = acoes.get(opcao)
    if acao is None:
        print('Opção inválida! Digite novamente.')
    else:
        acao()
| 20.708333 | 50 | 0.539235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.1 |
f8bf2336876845664f66233eebc0fc4562e2c6e0 | 296 | py | Python | stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | [
"MIT"
] | 3 | 2020-04-18T09:03:21.000Z | 2020-04-18T09:03:25.000Z | stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | [
"MIT"
] | null | null | null | stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | [
"MIT"
def maxProfit(self, prices: 'List[int]') -> int:
    """Return the maximum profit from unlimited buy/sell transactions.

    Classic two-state DP: at each price we either hold a share
    (``with_stock``) or hold cash (``without_stock``).

    Args:
        self: unused; kept only for the LeetCode method-style signature.
        prices: daily stock prices (may be empty).

    Returns:
        The best achievable profit (0 for an empty price list).
    """
    # String annotation above avoids the NameError the eager ``List[int]``
    # raised at definition time (typing.List was never imported here).
    with_stock = float('-inf')  # robust sentinel instead of the magic -2147483647
    without_stock = 0
    for price in prices:
        with_stock = max(with_stock, without_stock - price)
        without_stock = max(without_stock, with_stock + price)
    return without_stock
| 37 | 66 | 0.628378 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f8c2cf6aa69e132e2490580fb1e86b3b369033e8 | 192 | py | Python | src/qualtrutils/__init__.py | emanuele-albini/qualtrutils | 7f1fb08221edb220738445bf3e8a92a1a037bb76 | [
"MIT"
] | null | null | null | src/qualtrutils/__init__.py | emanuele-albini/qualtrutils | 7f1fb08221edb220738445bf3e8a92a1a037bb76 | [
"MIT"
] | null | null | null | src/qualtrutils/__init__.py | emanuele-albini/qualtrutils | 7f1fb08221edb220738445bf3e8a92a1a037bb76 | [
"MIT"
# Best-effort package version lookup; missing distribution metadata (e.g. a
# plain source checkout) simply leaves __version__ unset.
try:
    import os
    import pkg_resources  # part of setuptools
    __version__ = pkg_resources.get_distribution(os.path.dirname(__file__)).version
except Exception:
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
    # no longer swallowed; the lookup stays best-effort otherwise.
    pass

from .qualtrics import *
f8c3a6ea72bf6a5606bd1c07a86991ac84f55edc | 97 | py | Python | image_web/image_app/apps.py | datasciencee/Image_web | 28d0d8a3006e64da05bb6ede03b037daaa2897b2 | [
"MIT"
] | null | null | null | image_web/image_app/apps.py | datasciencee/Image_web | 28d0d8a3006e64da05bb6ede03b037daaa2897b2 | [
"MIT"
] | null | null | null | image_web/image_app/apps.py | datasciencee/Image_web | 28d0d8a3006e64da05bb6ede03b037daaa2897b2 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class ImageAppConfig(AppConfig):
    # Django app configuration for image_app; *name* is the Python path of
    # the app and must match the INSTALLED_APPS entry.
    name = 'image_app'
| 16.166667 | 34 | 0.721649 | 56 | 0.57732 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 0.113402 |
f8c595e3b1f5711087075fef4510b16d73d51d2b | 9,650 | py | Python | pyramboia/tasks/views.py | bicofino/Pyramboia | 1c291b8fdc71f057a99e7ffbfaa8ba4e713346fd | [
"MIT"
] | 1 | 2016-03-09T13:40:06.000Z | 2016-03-09T13:40:06.000Z | pyramboia/tasks/views.py | bicofino/Pyramboia | 1c291b8fdc71f057a99e7ffbfaa8ba4e713346fd | [
"MIT"
] | null | null | null | pyramboia/tasks/views.py | bicofino/Pyramboia | 1c291b8fdc71f057a99e7ffbfaa8ba4e713346fd | [
"MIT"
] | 1 | 2018-03-24T18:03:09.000Z | 2018-03-24T18:03:09.000Z | import time
import requests
import xml.dom.minidom
from lxml import etree
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.template import RequestContext, loader
from .models import Project, Target, Header, Argument, Task, History
from django.shortcuts import render, render_to_response, RequestContext, HttpResponseRedirect
from .forms import TaskForm, ProjectForm, ArgumentForm, HeaderForm, TargetForm
from django.views.generic.edit import FormView
from django.db.models import Count
from jsonrpc import jsonrpc_method
from django.views.generic.edit import CreateView, UpdateView, DeleteView
# Create your views here.
def home(request):
    """Render the landing page (base.html).

    NOTE(review): ``render_to_response``/``RequestContext`` are removed in
    modern Django; ``render(request, 'base.html')`` is the replacement.
    Passing ``locals()`` also exposes the raw request object to the template.
    """
    return render_to_response('base.html', locals(), context_instance=RequestContext(request))
@jsonrpc_method('tasks.queueTask')
def queueTask(id=None, arguments=None):
    """Run the task ("Roda a task") identified by *id* over JSON-RPC.

    Runs the task's prerequisite first (recursively), then POSTs the task's
    SOAP request to its target and returns the pretty-printed XML response.

    Args:
        id: primary key of the Task to run; a falsy id skips the main branch.
        arguments: mapping interpolated into the stored request template
            (``tasks.request % arguments``). Defaults to an empty dict; the
            old mutable default ``arguments={}`` is replaced by a None
            sentinel so calls can no longer share (and mutate) one dict.

    Returns:
        Tuple (result_xml, elapsed_seconds, status_text, headers, task, args).
    """
    if arguments is None:
        arguments = {}
    if id:
        argument = None
        tasks = Task.objects.get(id=id)
        # Run the prerequisite task first; its output arguments feed this one.
        if tasks.requires:
            result, time_total, text, headers_full, required_tasks, arguments = queueTask(
                tasks.requires.id, arguments)
        start = time.time()
        headers = Header.objects.get(task=tasks.id)
        target = Target.objects.get(task=tasks.id)
        headers_full = {'Content-Type': '0', 'SOAPAction': '0'}
        headers_full['Content-Type'] = '%s;%s' % (
            headers.contenttype, headers.charset,)
        headers_full['SOAPAction'] = tasks.header.soapaction
        # Interpolate the stored SOAP body template with the argument map.
        data = tasks.request % arguments
        response = requests.post(
            str(target.url), data=data, headers=headers_full).content
        #tree = etree.XML(response.decode('utf-8', 'ignore'))
        tree = etree.XML(response)
        result = (etree.tostring(tree, pretty_print=True, encoding='utf-8'))
        end = time.time()
        time_total = end - start
        # Flag runs that exceed the task's latency threshold.
        if time_total > tasks.threshold:
            text = 'Alert'
        else:
            text = 'OK'
        args = arguments
        # NOTE(review): ``argument`` is never reassigned in this branch, so
        # this condition is always true and ``args`` is always [''] — looks
        # like a latent bug; left untouched to preserve behaviour.
        if argument is None:
            args = ['']
        return result, time_total, text, headers_full, tasks, args
    # NOTE(review): this branch references ``tasks``/``response``/``args``
    # that are only bound in the branch above, and ``and not None`` is a
    # no-op — it raises NameError if ever reached (id falsy). Left as-is
    # pending clarification of intent.
    elif len(Argument.objects.filter(argument=tasks.arguments.argument)) > 0 and not None:
        for argument in Argument.objects.filter(argument=tasks.arguments.argument):
            valor = ''
            try:
                tree = etree.XML(response)
                valor = tree.xpath("//*[local-name() = '{0}']".format(tasks.arguments.argument))[0].text
            except Exception:
                # Narrowed from a bare except; missing node keeps valor = ''.
                valor = ''
            args.setdefault(argument.value, valor)
        return result, time_total, text, headers_full, tasks, args
@jsonrpc_method('tasks.run')
def runTask(request, id):
    """Run task *id* synchronously and render its result page.

    Unpacks queueTask's result tuple into locals so the template can use
    result/time_total/text/headers_full/tasks/args via ``locals()``.
    """
    result, time_total, text, headers_full, tasks, args = queueTask(id, {})
    return render_to_response('task_result.html', locals(), context_instance=RequestContext(request))
class ProjectDeleteView(DeleteView):
    """Delete a Project and flash a success message.

    Fix: ``super().delete`` was called with an explicit ``self`` on an
    already-bound method, shifting every argument (``request`` received the
    view instance). It only worked because DeleteView.delete happens to
    ignore its ``request`` parameter.
    """
    model = Project
    success_url = reverse_lazy('project-list')

    def delete(self, request, *args, **kwargs):
        # Flash before deleting so the object is still loadable/printable.
        messages.success(
            self.request, 'Project {} Successfully Deleted'.format(self.get_object()))
        return super(ProjectDeleteView, self).delete(request, *args, **kwargs)
class ProjectListView(ListView):
    # Read-only list of all Projects (renders project_list.html).
    # '_list' is also ListView's default suffix, so the line is redundant
    # but harmless.
    model = Project
    template_name_suffix = '_list'
class ProjectCreateView(CreateView):
    """Creation form for Project (renders project_create.html)."""
    template_name_suffix = '_create'
    model = Project
    form_class = ProjectForm

    def form_valid(self, form):
        # Fix: the message was queued *before* super().form_valid() saved the
        # form, when self.object is still None — so the flash read
        # 'Project None Successfully Created'. Save first, then flash.
        response = super(ProjectCreateView, self).form_valid(form)
        messages.success(
            self.request, 'Project {} Successfully Created'.format(self.object))
        return response
class ProjectDetailView(DetailView):
    # Single-Project detail page (project_detail.html by default).
    model = Project
class ProjectUpdateView(UpdateView):
    # Edit form for an existing Project; reuses the creation template
    # (project_create.html) via the '_create' suffix.
    template_name_suffix = '_create'
    model = Project
    form_class = ProjectForm

    def form_valid(self, form):
        # Queue the flash message, then let UpdateView save and redirect.
        messages.success(self.request, u'Project updated.')
        return super(ProjectUpdateView, self).form_valid(form)
def all_projects(request):
    """List every Project for the all_projects.html template.

    NOTE(review): passes ``locals()`` as context (exposes request too);
    render_to_response is removed in modern Django.
    """
    projects = Project.objects.all()
    return render_to_response('all_projects.html', locals(), context_instance=RequestContext(request))
def addTask(request):
    """Create a Task from POSTed form data, flash a message, redirect home.

    Fix: the view referenced undefined ``TasksForm`` — the class imported at
    the top of this module is ``TaskForm`` — so every request raised
    NameError before the fix.
    """
    form = TaskForm(request.POST or None)
    if form.is_valid():
        save_it = form.save(commit=False)
        save_it.save()
        # ``message`` is only consumed by the template through locals().
        message = 'Add a new task'
        messages.success(request, 'Your task has been added.')
        return HttpResponseRedirect('/')
    return render_to_response('addtask.html', locals(), context_instance=RequestContext(request))
def addArguments(request):
    """Create an Argument from POSTed form data, flash a message, redirect home.

    Fix: the view referenced undefined ``ArgumentsForm`` — the class imported
    at the top of this module is ``ArgumentForm`` — so every request raised
    NameError before the fix.
    """
    form = ArgumentForm(request.POST or None)
    if form.is_valid():
        save_it = form.save(commit=False)
        save_it.save()
        # ``message`` is only consumed by the template through locals().
        message = 'Add a new argument'
        messages.success(request, 'Your argument has been added.')
        return HttpResponseRedirect('/')
    return render_to_response('addargs.html', locals(), context_instance=RequestContext(request))
class HeaderDeleteView(DeleteView):
    """Delete a Header and flash a success message.

    Fix: removed the stray explicit ``self`` from the bound
    ``super().delete`` call (it shifted every argument by one).
    """
    model = Header
    success_url = reverse_lazy('header-list')

    def delete(self, request, *args, **kwargs):
        messages.success(
            self.request, 'Header {} Successfully Deleted'.format(self.get_object()))
        return super(HeaderDeleteView, self).delete(request, *args, **kwargs)
class HeaderListView(ListView):
    # Read-only list of Headers (renders header_list.html).
    model = Header
    template_name_suffix = '_list'
class HeaderCreateView(CreateView):
    """Creation form for Header (renders header_create.html)."""
    template_name_suffix = '_create'
    model = Header
    form_class = HeaderForm

    def form_valid(self, form):
        # Fixes: (1) message said 'Project' for a Header (copy-paste);
        # (2) it was queued before super() saved the form, when self.object
        # is still None. Save first, then flash the correct entity name.
        response = super(HeaderCreateView, self).form_valid(form)
        messages.success(
            self.request, 'Header {} Successfully Created'.format(self.object))
        return response
class HeaderDetailView(DetailView):
    # Single-Header detail page (header_detail.html by default).
    model = Header
class HeaderUpdateView(UpdateView):
    """Edit form for an existing Header; reuses the creation template."""
    template_name_suffix = '_create'
    model = Header
    form_class = HeaderForm

    def form_valid(self, form):
        # Fix: the success message said 'Project updated.' (copy-paste from
        # the Project views); it now names the entity actually updated.
        messages.success(self.request, u'Header updated.')
        return super(HeaderUpdateView, self).form_valid(form)
# Target Views
class TargetDeleteView(DeleteView):
    """Delete a Target and flash a success message.

    Fix: removed the stray explicit ``self`` from the bound
    ``super().delete`` call (it shifted every argument by one).
    """
    model = Target
    success_url = reverse_lazy('target-list')

    def delete(self, request, *args, **kwargs):
        messages.success(
            self.request, 'Target {} Successfully Deleted'.format(self.get_object()))
        return super(TargetDeleteView, self).delete(request, *args, **kwargs)
class TargetListView(ListView):
    # Read-only list of Targets (renders target_list.html).
    model = Target
    template_name_suffix = '_list'
class TargetCreateView(CreateView):
    """Creation form for Target (renders target_create.html)."""
    template_name_suffix = '_create'
    model = Target
    form_class = TargetForm

    def form_valid(self, form):
        # Fix: the message was queued before super().form_valid() saved the
        # form, when self.object is still None ('Target None ...').
        response = super(TargetCreateView, self).form_valid(form)
        messages.success(
            self.request, 'Target {} Successfully Created'.format(self.object))
        return response
class TargetDetailView(DetailView):
    # Single-Target detail page (target_detail.html by default).
    model = Target
class TargetUpdateView(UpdateView):
    """Edit form for an existing Target; reuses the creation template."""
    template_name_suffix = '_create'
    model = Target
    form_class = TargetForm

    def form_valid(self, form):
        # Fix: the success message said 'Project updated.' (copy-paste from
        # the Project views); it now names the entity actually updated.
        messages.success(self.request, u'Target updated.')
        return super(TargetUpdateView, self).form_valid(form)
# Argument Views
class ArgumentDeleteView(DeleteView):
    """Delete an Argument and flash a success message.

    Fix: removed the stray explicit ``self`` from the bound
    ``super().delete`` call (it shifted every argument by one).
    """
    model = Argument
    success_url = reverse_lazy('argument-list')

    def delete(self, request, *args, **kwargs):
        messages.success(
            self.request, 'Argument {} Successfully Deleted'.format(self.get_object()))
        return super(ArgumentDeleteView, self).delete(request, *args, **kwargs)
class ArgumentListView(ListView):
    # Read-only list of Arguments (renders argument_list.html).
    model = Argument
    template_name_suffix = '_list'
class ArgumentCreateView(CreateView):
    """Creation form for Argument (renders argument_create.html)."""
    template_name_suffix = '_create'
    model = Argument
    form_class = ArgumentForm

    def form_valid(self, form):
        # Fix: the message was queued before super().form_valid() saved the
        # form, when self.object is still None ('Argument None ...').
        response = super(ArgumentCreateView, self).form_valid(form)
        messages.success(
            self.request, 'Argument {} Successfully Created'.format(self.object))
        return response
class ArgumentDetailView(DetailView):
    # Single-Argument detail page (argument_detail.html by default).
    model = Argument
class ArgumentUpdateView(UpdateView):
    # Edit form for an existing Argument; reuses the creation template
    # (argument_create.html) via the '_create' suffix.
    template_name_suffix = '_create'
    model = Argument
    form_class = ArgumentForm

    def form_valid(self, form):
        # Queue the flash message, then let UpdateView save and redirect.
        messages.success(self.request, u'Argument updated.')
        return super(ArgumentUpdateView, self).form_valid(form)
# Task Views
class TaskDeleteView(DeleteView):
    """Delete a Task and flash a success message.

    Fix: removed the stray explicit ``self`` from the bound
    ``super().delete`` call (it shifted every argument by one).
    """
    model = Task
    success_url = reverse_lazy('task-list')

    def delete(self, request, *args, **kwargs):
        messages.success(
            self.request, 'Task {} Successfully Deleted'.format(self.get_object()))
        return super(TaskDeleteView, self).delete(request, *args, **kwargs)
class TaskListView(ListView):
    # Read-only list of Tasks (renders task_list.html).
    model = Task
    template_name_suffix = '_list'
class TaskCreateView(CreateView):
    """Creation form for Task (renders task_create.html)."""
    template_name_suffix = '_create'
    model = Task
    form_class = TaskForm

    def form_valid(self, form):
        # Fix: the message was queued before super().form_valid() saved the
        # form, when self.object is still None ('Task None ...').
        response = super(TaskCreateView, self).form_valid(form)
        messages.success(
            self.request, 'Task {} Successfully Created'.format(self.object))
        return response
class TaskDetailView(DetailView):
    # Single-Task detail page (task_detail.html by default).
    model = Task
class TaskUpdateView(UpdateView):
    """Edit form for an existing Task; reuses the creation template."""
    template_name_suffix = '_create'
    model = Task
    form_class = TaskForm

    def form_valid(self, form):
        # Fix: the success message said 'Argument updated.' (copy-paste from
        # the Argument views); it now names the entity actually updated.
        messages.success(self.request, u'Task updated.')
        return super(TaskUpdateView, self).form_valid(form)
| 31.129032 | 108 | 0.684249 | 5,411 | 0.560725 | 0 | 0 | 2,151 | 0.222902 | 0 | 0 | 1,090 | 0.112953 |
f8c5dfc6d0cdbf14f1da548855e712503b3de0e5 | 230 | py | Python | frameworks/MLNet/__init__.py | Ennosigaeon/automlbenchmark | bd3e529d641b64300a075d59408203d537311b7e | [
"MIT"
] | 282 | 2018-09-19T09:45:46.000Z | 2022-03-30T04:05:51.000Z | frameworks/MLNet/__init__.py | Ennosigaeon/automlbenchmark | bd3e529d641b64300a075d59408203d537311b7e | [
"MIT"
] | 267 | 2018-11-02T11:43:11.000Z | 2022-03-31T08:58:16.000Z | frameworks/MLNet/__init__.py | Ennosigaeon/automlbenchmark | bd3e529d641b64300a075d59408203d537311b7e | [
"MIT"
] | 104 | 2018-10-17T19:32:36.000Z | 2022-03-19T22:47:59.000Z | from amlb.utils import call_script_in_same_dir
def setup(*args, **kwargs):
    """Install the MLNet framework by running setup.sh from this directory."""
    call_script_in_same_dir(__file__, "setup.sh", *args, **kwargs)
def run(dataset, config):
    """Run the MLNet benchmark on *dataset* with *config*.

    The import is local (and deliberately shadows this function's name inside
    its scope) so the heavy exec module is only loaded when a run happens.
    """
    from .exec import run
    return run(dataset, config)
| 20.909091 | 66 | 0.721739 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.043478 |
f8c656a1fcea7a9ee14e5a204d4f559a5949f3d3 | 189 | py | Python | generate_pentagons.py | YessOn/ProjectEuler-Toolkits | cc196c1c6faca32f7613ae40ddb3c7f7b86e071e | [
"MIT"
] | null | null | null | generate_pentagons.py | YessOn/ProjectEuler-Toolkits | cc196c1c6faca32f7613ae40ddb3c7f7b86e071e | [
"MIT"
] | null | null | null | generate_pentagons.py | YessOn/ProjectEuler-Toolkits | cc196c1c6faca32f7613ae40ddb3c7f7b86e071e | [
"MIT"
def generate_pentagons(n_of_pentagons):
    """Yield the first ``n_of_pentagons - 1`` pentagonal numbers.

    P(k) = k * (3k - 1) / 2 for k = 1, 2, ... (1, 5, 12, 22, 35, ...).
    """
    for k in range(1, n_of_pentagons):
        yield k * (3 * k - 1) // 2
| 37.8 | 78 | 0.661376 | 0 | 0 | 188 | 0.994709 | 0 | 0 | 0 | 0 | 0 | 0 |
f8c6b59947b8e1e01fbc267420d89e101ab3f722 | 932 | py | Python | util_test.py | svennickel/itunes-app-scraper | 14b857bd40a237825cb6bd93be388e6bcd083c01 | [
"MIT"
] | 10 | 2020-08-12T06:47:04.000Z | 2021-12-04T03:06:19.000Z | util_test.py | svennickel/itunes-app-scraper | 14b857bd40a237825cb6bd93be388e6bcd083c01 | [
"MIT"
] | 5 | 2020-11-19T07:53:19.000Z | 2022-03-16T15:06:37.000Z | util_test.py | iaine/itunes-app-scraper | de60c8c0b369e78d4c87a0cb11284b2ef576c090 | [
"MIT"
] | 11 | 2020-08-12T06:47:31.000Z | 2022-03-19T23:36:18.000Z | from itunes_app_scraper.util import AppStoreException, AppStoreCollections, AppStoreCategories, AppStoreUtils
import json
import pytest
import os
def test_category_exists():
    # Known category IDs are exposed as attributes on AppStoreCategories.
    category = AppStoreCategories()
    assert category.BOOKS == 6018
def test_category_does_not_exist():
    # Accessing an undefined category must raise AttributeError with the
    # standard Python message text.
    category = AppStoreCategories()
    with pytest.raises(AttributeError, match="'AppStoreCategories' object has no attribute 'METHOD'"):
        category.METHOD
def test_collection_exists():
    # Collection constants map symbolic names to App Store collection slugs.
    collection = AppStoreCollections()
    assert collection.NEW_IOS == 'newapplications'
def test_collection_does_not_exist():
    # Accessing an undefined collection must raise AttributeError with the
    # standard Python message text.
    collection = AppStoreCollections()
    with pytest.raises(AttributeError, match="'AppStoreCollections' object has no attribute 'NOTHING'"):
        collection.NOTHING
def test_app_utils():
utils = AppStoreUtils()
json_object = json.loads(utils.get_entries(AppStoreCollections()))
assert "names" in json_object | 33.285714 | 109 | 0.769313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.145923 |
f8c6f95465da9e6fd5b7017053c85eda97db68b6 | 802 | py | Python | natasha/span.py | baltachev/natasha | b326631c510384b1ce3ac198bce8ed11818ec784 | [
"MIT"
] | 822 | 2017-09-05T08:38:42.000Z | 2022-03-31T16:08:48.000Z | natasha/span.py | baltachev/natasha | b326631c510384b1ce3ac198bce8ed11818ec784 | [
"MIT"
] | 81 | 2017-09-12T12:49:00.000Z | 2022-03-25T18:21:12.000Z | natasha/span.py | baltachev/natasha | b326631c510384b1ce3ac198bce8ed11818ec784 | [
"MIT"
] | 90 | 2017-09-05T08:38:49.000Z | 2022-03-29T12:09:22.000Z |
from .record import Record
class Span(Record):
    # Value record for a [start, stop) text span with a type label.
    # Record (see record.py) presumably derives __init__/__eq__/__repr__
    # from __attributes__ — confirm there.
    __attributes__ = ['start', 'stop', 'type']
def adapt_spans(spans):
    """Lazily re-wrap arbitrary span-like objects as Span records."""
    for source in spans:
        yield Span(source.start, source.stop, source.type)
def offset_spans(spans, offset):
    """Lazily shift every span's start/stop by *offset*, keeping its type."""
    for source in spans:
        yield Span(source.start + offset, source.stop + offset, source.type)
def envelop_spans(spans, envelopes):
    """Group spans by the envelope that contains them.

    Yields one list ("chunk") per envelope, holding the spans fully inside
    it. Assumes both *spans* and *envelopes* are sorted by position — TODO
    confirm against callers; the single shared cursor depends on it.
    """
    index = 0  # cursor into spans, shared across all envelopes
    for envelope in envelopes:
        chunk = []
        while index < len(spans):
            span = spans[index]
            index += 1
            if span.start < envelope.start:
                # Span lies before this envelope: skipped permanently.
                continue
            elif span.stop <= envelope.stop:
                # Fully inside the current envelope.
                chunk.append(span)
            else:
                # Extends past the envelope: push back for the next one.
                index -= 1
                break
        yield chunk
| 21.105263 | 52 | 0.516209 | 66 | 0.082294 | 696 | 0.86783 | 0 | 0 | 0 | 0 | 19 | 0.023691 |
f8c7ce0b20cdca0b81d121ae696bffeb609cd523 | 7,297 | py | Python | bingads/v13/bulk/entities/bulk_offline_conversion.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 86 | 2016-02-29T03:24:28.000Z | 2022-03-29T09:30:21.000Z | bingads/v13/bulk/entities/bulk_offline_conversion.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 135 | 2016-04-12T13:31:28.000Z | 2022-03-29T02:18:51.000Z | bingads/v13/bulk/entities/bulk_offline_conversion.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 154 | 2016-04-08T04:11:27.000Z | 2022-03-29T21:21:07.000Z | from __future__ import print_function
from bingads.service_client import _CAMPAIGN_OBJECT_FACTORY_V13
from bingads.v13.internal.bulk.string_table import _StringTable
from bingads.v13.internal.bulk.entities.single_record_bulk_entity import _SingleRecordBulkEntity
from bingads.v13.internal.bulk.mappings import _SimpleBulkMapping
from bingads.v13.internal.extensions import *
class BulkOfflineConversion(_SingleRecordBulkEntity):
""" Represents an offline conversion that can be read or written in a bulk file.
This class exposes the :attr:`offline_conversion` property that can be read and written as fields of the Keyword record in a bulk file.
Properties of this class and of classes that it is derived from, correspond to fields of the Keyword record in a bulk file.
For more information, see Keyword at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self, offline_conversion=None):
super(BulkOfflineConversion, self).__init__()
self._offline_conversion = offline_conversion
self._adjustment_value = None
self._adjustment_time = None
self._adjustment_type = None
self._adjustment_currency_code = None
self._external_attribution_model = None
self._external_attribution_credit = None
@property
def adjustment_value(self):
return self._adjustment_value;
@adjustment_value.setter
def adjustment_value(self, value):
self._adjustment_value = value
@property
def adjustment_time(self):
return self._adjustment_time;
@adjustment_time.setter
def adjustment_time(self, value):
self._adjustment_time = value
@property
def adjustment_type(self):
return self._adjustment_type;
@adjustment_type.setter
def adjustment_type(self, value):
self._adjustment_type = value
@property
def adjustment_currency_code(self):
return self._adjustment_currency_code;
@adjustment_currency_code.setter
def adjustment_currency_code(self, value):
self._adjustment_currency_code = value
@property
def external_attribution_model(self):
return self._external_attribution_model;
@external_attribution_model.setter
def external_attribution_model(self, value):
self._external_attribution_model = value
@property
def external_attribution_credit(self):
return self._external_attribution_credit;
@external_attribution_credit.setter
def external_attribution_credit(self, value):
self._external_attribution_credit = value
@property
def offline_conversion(self):
""" The offline conversion Data Object of the Campaign Management Service.
"""
return self._offline_conversion
@offline_conversion.setter
def offline_conversion(self, value):
self._offline_conversion = value
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.ConversionCurrencyCode,
field_to_csv=lambda c: c.offline_conversion.ConversionCurrencyCode,
csv_to_field=lambda c, v: setattr(
c.offline_conversion,
'ConversionCurrencyCode',
v
)
),
_SimpleBulkMapping(
header=_StringTable.ConversionName,
field_to_csv=lambda c: c.offline_conversion.ConversionName,
csv_to_field=lambda c, v: setattr(
c.offline_conversion,
'ConversionName',
v
)
),
_SimpleBulkMapping(
header=_StringTable.MicrosoftClickId,
field_to_csv=lambda c: c.offline_conversion.MicrosoftClickId,
csv_to_field=lambda c, v: setattr(
c.offline_conversion,
'MicrosoftClickId',
v
)
),
_SimpleBulkMapping(
header=_StringTable.ConversionValue,
field_to_csv=lambda c: c.offline_conversion.ConversionValue,
csv_to_field=lambda c, v: setattr(
c.offline_conversion,
'ConversionValue',
float(v) if v else None
)
),
_SimpleBulkMapping(
header=_StringTable.ConversionTime,
field_to_csv=lambda c: bulk_datetime_str(c.offline_conversion.ConversionTime),
csv_to_field=lambda c, v: setattr(
c.offline_conversion,
'ConversionTime',
parse_datetime(v) if v else None
)
),
_SimpleBulkMapping(
header=_StringTable.AdjustmentValue,
field_to_csv=lambda c: c.adjustment_value,
csv_to_field=lambda c, v: setattr(
c,
'adjustment_value',
float(v) if v else None
)
),
_SimpleBulkMapping(
header=_StringTable.AdjustmentType,
field_to_csv=lambda c: c.adjustment_type,
csv_to_field=lambda c, v: setattr(
c,
'adjustment_type',
v
)
),
_SimpleBulkMapping(
header=_StringTable.AdjustmentCurrencyCode,
field_to_csv=lambda c: c.adjustment_currency_code,
csv_to_field=lambda c, v: setattr(
c,
'adjustment_currency_code',
v
)
),
_SimpleBulkMapping(
header=_StringTable.ExternalAttributionModel,
field_to_csv=lambda c: c.external_attribution_model,
csv_to_field=lambda c, v: setattr(
c,
'external_attribution_model',
v
)
),
_SimpleBulkMapping(
header=_StringTable.ExternalAttributionCredit,
field_to_csv=lambda c: c.external_attribution_credit,
csv_to_field=lambda c, v: setattr(
c,
'external_attribution_credit',
float(v) if v else None
)
),
_SimpleBulkMapping(
header=_StringTable.AdjustmentTime,
field_to_csv=lambda c: bulk_datetime_str(c.adjustment_time),
csv_to_field=lambda c, v: setattr(
c,
'adjustment_time',
parse_datetime(v) if v else None
)
),
]
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self._offline_conversion, 'offline_conversion')
self.convert_to_values(row_values, BulkOfflineConversion._MAPPINGS)
def process_mappings_from_row_values(self, row_values):
self._offline_conversion = _CAMPAIGN_OBJECT_FACTORY_V13.create('OfflineConversion')
row_values.convert_to_entity(self, BulkOfflineConversion._MAPPINGS)
def read_additional_data(self, stream_reader):
super(BulkOfflineConversion, self).read_additional_data(stream_reader)
| 34.582938 | 139 | 0.628752 | 6,919 | 0.948198 | 0 | 0 | 1,515 | 0.20762 | 0 | 0 | 943 | 0.129231 |
f8c98cbdffeb6bc1eca9320791dd78a1cefdb9cd | 4,320 | py | Python | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/lti_provider/tests/test_tasks.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 3 | 2021-12-15T04:58:18.000Z | 2022-02-06T12:15:37.000Z | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/lti_provider/tests/test_tasks.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | null | null | null | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/lti_provider/tests/test_tasks.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 1 | 2019-01-02T14:38:50.000Z | 2019-01-02T14:38:50.000Z | """
Tests for the LTI outcome service handlers, both in outcomes.py and in tasks.py
"""
from unittest.mock import MagicMock, patch
import ddt
from django.test import TestCase
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
import lms.djangoapps.lti_provider.tasks as tasks
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.lti_provider.models import GradedAssignment, LtiConsumer, OutcomeService
class BaseOutcomeTest(TestCase):
"""
Super type for tests of both the leaf and composite outcome celery tasks.
"""
def setUp(self):
super().setUp()
self.course_key = CourseLocator(
org='some_org',
course='some_course',
run='some_run'
)
self.usage_key = BlockUsageLocator(
course_key=self.course_key,
block_type='problem',
block_id='block_id'
)
self.user = UserFactory.create()
self.consumer = LtiConsumer(
consumer_name='Lti Consumer Name',
consumer_key='consumer_key',
consumer_secret='consumer_secret',
instance_guid='tool_instance_guid'
)
self.consumer.save()
outcome = OutcomeService(
lis_outcome_service_url='http://example.com/service_url',
lti_consumer=self.consumer
)
outcome.save()
self.assignment = GradedAssignment(
user=self.user,
course_key=self.course_key,
usage_key=self.usage_key,
outcome_service=outcome,
lis_result_sourcedid='sourcedid',
version_number=1,
)
self.assignment.save()
self.send_score_update_mock = self.setup_patch(
'lms.djangoapps.lti_provider.outcomes.send_score_update', None
)
def setup_patch(self, function_name, return_value):
"""
Patch a method with a given return value, and return the mock
"""
mock = MagicMock(return_value=return_value)
new_patch = patch(function_name, new=mock)
new_patch.start()
self.addCleanup(new_patch.stop)
return mock
@ddt.ddt
class SendLeafOutcomeTest(BaseOutcomeTest):
    """
    Tests for the send_leaf_outcome method in tasks.py
    """
    # Each tuple is (points earned, points possible, expected normalized score).
    @ddt.data(
        (2.0, 2.0, 1.0),
        (2.0, 0.0, 0.0),
        (1, 2, 0.5),
    )
    @ddt.unpack
    def test_outcome_with_score(self, earned, possible, expected):
        # The task should normalize earned/possible and forward the result
        # to the (mocked) send_score_update exactly once.
        # NOTE(review): the (2.0, 0.0, 0.0) row implies a divide-by-zero
        # guard in send_leaf_outcome — confirm in tasks.py.
        tasks.send_leaf_outcome(
            self.assignment.id,
            earned,
            possible
        )
        self.send_score_update_mock.assert_called_once_with(self.assignment, expected)
@ddt.ddt
class SendCompositeOutcomeTest(BaseOutcomeTest):
"""
Tests for the send_composite_outcome method in tasks.py
"""
def setUp(self):
super().setUp()
self.descriptor = MagicMock()
self.descriptor.location = BlockUsageLocator(
course_key=self.course_key,
block_type='problem',
block_id='problem',
)
self.course_grade = MagicMock()
self.course_grade_mock = self.setup_patch(
'lms.djangoapps.lti_provider.tasks.CourseGradeFactory.read', self.course_grade
)
self.module_store = MagicMock()
self.module_store.get_item = MagicMock(return_value=self.descriptor)
self.check_result_mock = self.setup_patch(
'lms.djangoapps.lti_provider.tasks.modulestore',
self.module_store
)
@ddt.data(
(2.0, 2.0, 1.0),
(2.0, 0.0, 0.0),
(1, 2, 0.5),
)
@ddt.unpack
def test_outcome_with_score_score(self, earned, possible, expected):
self.course_grade.score_for_module = MagicMock(return_value=(earned, possible))
tasks.send_composite_outcome(
self.user.id, str(self.course_key), self.assignment.id, 1
)
self.send_score_update_mock.assert_called_once_with(self.assignment, expected)
def test_outcome_with_outdated_version(self):
self.assignment.version_number = 2
self.assignment.save()
tasks.send_composite_outcome(
self.user.id, str(self.course_key), self.assignment.id, 1
)
assert self.course_grade_mock.call_count == 0
| 31.532847 | 92 | 0.634491 | 3,836 | 0.887963 | 0 | 0 | 2,122 | 0.491204 | 0 | 0 | 743 | 0.171991 |
f8c9d560d993e370d3b1363238c43807ccc5dfd5 | 1,954 | py | Python | agents/dumbagent.py | dbelliss/Starcraft2AI | a3044f0eb3c1bb18084fa59265a430ddcdfab80b | [
"MIT"
] | 2 | 2018-04-17T00:37:40.000Z | 2018-04-30T03:04:20.000Z | agents/dumbagent.py | dbelliss/Starcraft2AI | a3044f0eb3c1bb18084fa59265a430ddcdfab80b | [
"MIT"
] | null | null | null | agents/dumbagent.py | dbelliss/Starcraft2AI | a3044f0eb3c1bb18084fa59265a430ddcdfab80b | [
"MIT"
] | null | null | null | from loser_agent import *
class DumbAgent(LoserAgent):
    """Minimal SC2 Zerg agent: only builds drones/overlords unless a
    strategy number is supplied, in which case the inherited
    perform_strategy is invoked."""

    def __init__(self, is_logging = False, is_printing_to_console = False, isMainAgent = False, fileName = ""):
        # NOTE(review): ``fileName`` is accepted but never used — confirm
        # whether it should be forwarded to LoserAgent.
        super().__init__(is_logging, is_printing_to_console, isMainAgent)
        # For debugging
        self.is_logging = is_logging # Setting this to true to write information to log files in the agents/logs directory
        self.is_printing_to_console = is_printing_to_console # Setting this to true causes all logs to be printed to the console
        #ZerglingBanelingRushAgent.mainAgent = self

    async def on_step(self, iteration, strategy_num = -1):
        """Per-game-step hook: always run the basic build, then (optionally)
        the numbered strategy."""
        # self.log("Step: %s Overlord: %s" % (str(iteration), str(self.units(OVERLORD).amount)))
        # self.log("Step: " + str(iteration))
        # TEMP: Until strategy is given by Q table
        #strategy_num = (int)(iteration / 75) % 8
        # Build lings, queen, overlords, drones, and meleeattack1
        await self.basic_build(iteration)
        # Perform actions based on given strategy
        if strategy_num == -1:
            # self.mainAgent.log("No given strategy")
            pass
        else:
            await self.perform_strategy(iteration, strategy_num)

    async def basic_build(self, iteration):
        """Train a drone while supply is available; train an overlord when
        supply is capped (both require a free larva and resources)."""
        larvae = self.mainAgent.units(LARVA)
        if larvae.exists and self.mainAgent.can_afford(DRONE) and self.mainAgent.supply_left > 0:
            await self.mainAgent.do(larvae.random.train(DRONE))
        if larvae.exists and self.mainAgent.can_afford(OVERLORD) and self.mainAgent.supply_left == 0:
            await self.mainAgent.do(larvae.random.train(OVERLORD))
def main():
    """Launch one game: DumbAgent as the Zerg bot versus a medium Protoss AI."""
    players = [
        Bot(Race.Zerg, DumbAgent(True, False, True)),
        Computer(Race.Protoss, Difficulty.Medium),
    ]
    sc2.run_game(sc2.maps.get("Abyssal Reef LE"), players, realtime=False)


if __name__ == '__main__':
    main()
| 41.574468 | 129 | 0.666837 | 1,631 | 0.834698 | 0 | 0 | 0 | 0 | 1,056 | 0.54043 | 644 | 0.32958 |
f8caa3e778c29557bd6611746d149fdf5e4f18a9 | 113 | py | Python | juno/server/http/handler/api.py | DSciLab/juno | 1d572c8d3fd06a6c1fcc51b42a6539dd3ae0927e | [
"MIT"
] | null | null | null | juno/server/http/handler/api.py | DSciLab/juno | 1d572c8d3fd06a6c1fcc51b42a6539dd3ae0927e | [
"MIT"
] | null | null | null | juno/server/http/handler/api.py | DSciLab/juno | 1d572c8d3fd06a6c1fcc51b42a6539dd3ae0927e | [
"MIT"
] | null | null | null | from .base.api_handler import APIBaseHandler
class APIHandler(APIBaseHandler):
    """Concrete HTTP handler for the API route (routing is configured elsewhere)."""

    def get(self):
        """Handle GET requests; currently an intentional no-op stub."""
        pass
| 16.142857 | 44 | 0.725664 | 65 | 0.575221 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f8cc12080c230a16858bbc18a05bcd5b93430fe7 | 317 | py | Python | Python/mathematics/find_missing_number.py | RCubedClub/cp_algo | ec254055ef745224b0a1c766ef16709a3eea7087 | [
"MIT"
] | null | null | null | Python/mathematics/find_missing_number.py | RCubedClub/cp_algo | ec254055ef745224b0a1c766ef16709a3eea7087 | [
"MIT"
] | null | null | null | Python/mathematics/find_missing_number.py | RCubedClub/cp_algo | ec254055ef745224b0a1c766ef16709a3eea7087 | [
"MIT"
] | null | null | null | import random
def find(array):
    """Return the single value missing from a 0..n sequence held in *array*.

    With len(array) == n, the complete run 0..n sums to n*(n+1)/2; the gap
    between that total and sum(array) is exactly the absent number.
    """
    n = len(array)
    return n * (n + 1) // 2 - sum(array)
def main():
    """Demo: build the complete run 0..98 and report the flagged missing value."""
    numbers = list(range(99))
    print(numbers)
    answer = find(numbers)
    print("The missing number is-", answer)


if __name__ == '__main__':
    main()
| 14.409091 | 43 | 0.577287 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.107256 |
f8cda283a32c2452d1728b137c6f236a6921d3ec | 1,099 | py | Python | 091. Decode Ways.py | joshlyman/Josh-LeetCode | cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed | [
"MIT"
] | null | null | null | 091. Decode Ways.py | joshlyman/Josh-LeetCode | cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed | [
"MIT"
] | null | null | null | 091. Decode Ways.py | joshlyman/Josh-LeetCode | cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed | [
"MIT"
] | null | null | null | # Problem Reduction: variation of n-th staircase with n = [1, 2] steps.
# Approach: We generate a bottom up DP table.
# The tricky part is handling the corner cases (e.g. s = "30").
# Most elegant way to deal with those error/corner cases, is to allocate an extra space, dp[0].
# Let dp[ i ] = the number of ways to parse the string s[1: i + 1]
# For example:
# s = "231"
# index 0: extra base offset. dp[0] = 1
# index 1: # of ways to parse "2" => dp[1] = 1
# index 2: # of ways to parse "23" => "2" and "23", dp[2] = 2
# index 3: # of ways to parse "231" => "2 3 1" and "23 1" => dp[3] = 2
def numDecodings(self, s: str) -> int:
    """Count the ways to decode digit string *s* where '1'->A ... '26'->Z.

    Bottom-up DP with two rolling counters instead of a full table:
    prev1 = ways to decode s[:i], prev2 = ways to decode s[:i-1].
    A leading '0' (or empty string) is undecodable and yields 0.
    """
    if not s or s[0] == '0':
        return 0
    prev2, prev1 = 1, 1  # dp[0] (base offset) and dp[1] (first digit)
    for i in range(2, len(s) + 1):
        ways = 0
        # Single-digit step: valid only for a non-zero digit.
        if int(s[i - 1]) > 0:
            ways += prev1
        # Two-digit step: valid only for values 10..26.
        if 10 <= int(s[i - 2:i]) <= 26:
            ways += prev2
        prev2, prev1 = prev1, ways
    return prev1
| 28.921053 | 95 | 0.497725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 648 | 0.589627 |
f8cddb9ef6bd722c93b8a3657d1eaf9e8803d45f | 4,525 | py | Python | Scripts/Cutter.py | rhong3/CPTAC-UCEC | ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9 | [
"MIT"
] | 4 | 2019-01-04T21:11:03.000Z | 2020-12-11T16:56:15.000Z | Scripts/Cutter.py | rhong3/CPTAC-UCEC | ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9 | [
"MIT"
] | null | null | null | Scripts/Cutter.py | rhong3/CPTAC-UCEC | ec83fbee234b5ad3df6524cdd960b5f0f3da9ea9 | [
"MIT"
] | null | null | null | """
Tile svs/scn files
Created on 11/01/2018
@author: RH
"""
import time
import matplotlib
import os
import shutil
import pandas as pd
matplotlib.use('Agg')
import Slicer
import staintools
import re
# Get all images in the root directory
def image_ids_in(root_dir, mode, ignore=['.DS_Store', 'dict.csv']):
    """List slide files under *root_dir* as (filename, case_name, slide_number).

    mode selects the filename convention: 'CPTAC' names are underscore-
    delimited with the case as the third-from-last field; 'TCGA' names are
    split at the -01Z/-02Z marker for the case and dash-indexed for the
    slide.  Entries in *ignore* are reported and skipped.
    """
    collected = []
    for entry in os.listdir(root_dir):
        if entry in ignore:
            print('Skipping ID:', entry)
            continue
        if mode == 'CPTAC':
            parts = entry.split('_')
            case_name = parts[-3]
            slide_num = parts[-2].split('-')[-1]
            collected.append((entry, case_name, slide_num))
        if mode == 'TCGA':
            case_name = re.split('-01Z|-02Z', entry)[0]
            slide_num = entry.split('-')[5].split('.')[0]
            collected.append((entry, case_name, slide_num))
    return collected
# cut; each level is 2 times difference (20x, 10x, 5x)
def cut():
    """Tile every referenced CPTAC/TCGA slide into four magnification variants.

    Slides whose case name appears in ../dummy_His_MUT_joined.csv are cut
    with Slicer.tile after Luminosity standardization against a reference
    image; tiles land under ../tiles/<case>/level<0-3>.  Near-empty output
    directories are removed, and case folders absent from the reference
    table are printed at the end.
    """
    # load standard image for normalization
    std = staintools.read_image("../colorstandard.png")
    std = staintools.LuminosityStandardizer.standardize(std)
    CPTACpath = '../images/CPTAC/'
    TCGApath = '../images/TCGA/'
    # Reference table: only slides whose case name appears here are tiled.
    ref = pd.read_csv('../dummy_His_MUT_joined.csv', header=0)
    refls = ref['name'].tolist()
    # cut tiles with coordinates in the name (exclude white)
    start_time = time.time()
    CPTAClist = image_ids_in(CPTACpath, 'CPTAC')
    TCGAlist = image_ids_in(TCGApath, 'TCGA')
    # Report any case that has more than one slide file.
    CPTACpp = pd.DataFrame(CPTAClist, columns=['id', 'dir', 'sld'])
    CPTACcc = CPTACpp['dir'].value_counts()
    CPTACcc = CPTACcc[CPTACcc > 1].index.tolist()
    print(CPTACcc)
    TCGApp = pd.DataFrame(TCGAlist, columns=['id', 'dir', 'sld'])
    TCGAcc = TCGApp['dir'].value_counts()
    TCGAcc = TCGAcc[TCGAcc > 1].index.tolist()
    print(TCGAcc)
    # CPTAC: pyramid levels 0/1 combined with tile factors 1/2.
    for i in CPTAClist:
        matchrow = ref.loc[ref['name'] == i[1]]
        if matchrow.empty:
            continue
        try:
            os.mkdir("../tiles/{}".format(i[1]))
        except(FileExistsError):
            pass
        for m in range(4):
            # Map output level m -> (ft, pyramid level).
            # NOTE(review): exact meaning of ft/level depends on
            # Slicer.tile — confirm against that module.
            if m == 0:
                tff = 1
                level = 0
            elif m == 1:
                tff = 2
                level = 0
            elif m == 2:
                tff = 1
                level = 1
            elif m == 3:
                tff = 2
                level = 1
            otdir = "../tiles/{}/level{}".format(i[1], str(m))
            try:
                os.mkdir(otdir)
            except(FileExistsError):
                pass
            try:
                n_x, n_y, raw_img, ct = Slicer.tile(image_file='CPTAC/'+i[0], outdir=otdir,
                                                    level=level, std_img=std, dp=i[2], ft=tff)
            except(IndexError):
                pass
            # Drop output dirs where tiling produced fewer than 2 files.
            if len(os.listdir(otdir)) < 2:
                shutil.rmtree(otdir, ignore_errors=True)
    # else:
    #     print("pass: {}".format(str(i)))
    # TCGA: deeper pyramid, so levels 0-2 are used with factors 2/1.
    for i in TCGAlist:
        matchrow = ref.loc[ref['name'] == i[1]]
        if matchrow.empty:
            continue
        try:
            os.mkdir("../tiles/{}".format(i[1]))
        except(FileExistsError):
            pass
        for m in range(4):
            if m == 0:
                tff = 2
                level = 0
            elif m == 1:
                tff = 1
                level = 1
            elif m == 2:
                tff = 2
                level = 1
            elif m == 3:
                tff = 1
                level = 2
            otdir = "../tiles/{}/level{}".format(i[1], str(m))
            try:
                os.mkdir(otdir)
            except(FileExistsError):
                pass
            try:
                n_x, n_y, raw_img, ct = Slicer.tile(image_file='TCGA/'+i[0], outdir=otdir,
                                                    level=level, std_img=std, dp=i[2], ft=tff)
            except Exception as e:
                # Tiling failures on TCGA slides are logged and skipped.
                print('Error!')
                pass
            if len(os.listdir(otdir)) < 2:
                shutil.rmtree(otdir, ignore_errors=True)
    print("--- %s seconds ---" % (time.time() - start_time))
    # List any tiled case folder that is missing from the reference table.
    subfolders = [f.name for f in os.scandir('../tiles/') if f.is_dir()]
    for w in subfolders:
        if w not in refls:
            print(w)
# # Time measure tool
# start_time = time.time()
# print("--- %s seconds ---" % (time.time() - start_time))
# Run as main
if __name__ == "__main__":
if not os.path.isdir('../tiles'):
os.mkdir('../tiles')
cut()
| 29.769737 | 106 | 0.478232 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 811 | 0.179227 |
f8cde62d3add298d347b197159cd3ef0fad71443 | 2,850 | py | Python | brake.py | tensorpro/AutonomousBraking | 9861e5c0423d8ca1a2f3f640003b3581a3074459 | [
"MIT"
] | 8 | 2017-05-04T22:04:48.000Z | 2020-03-27T13:06:39.000Z | brake.py | tensorpro/AutonomousBraking | 9861e5c0423d8ca1a2f3f640003b3581a3074459 | [
"MIT"
] | null | null | null | brake.py | tensorpro/AutonomousBraking | 9861e5c0423d8ca1a2f3f640003b3581a3074459 | [
"MIT"
] | 2 | 2019-07-22T02:19:57.000Z | 2020-09-29T21:00:00.000Z | from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
# Default braking-zone geometry in normalized image coordinates:
# the left/right boundaries are lines y = m*x + b applied from each image
# edge, and sh is the horizon height below which braking is considered.
m = 4
b = -.2
bl = -.1  # NOTE(review): appears unused in this module — confirm before removing
br = -.1  # NOTE(review): appears unused in this module — confirm before removing
sh = .13


def show_ped(image, bb):
    """Display a binary mask of bounding box *bb* on a canvas sized like *image*.

    bb is [ymin, xmin, ymax, xmax] in pixel coordinates.
    """
    mask = np.zeros(image.shape[:2])
    ymin, xmin, ymax, xmax = bb
    mask[ymin:ymax, xmin:xmax] = 1
    plt.imshow(mask)
    plt.show()


def in_region(x, y, m=0, b=0, above=True, from_left=False):
    """Return True when (x, y) lies on the requested side of y = m*x + b.

    above=True tests y >= m*x + b; above=False tests the opposite.  With
    from_left=True, x is mirrored (x -> 1 - x) so the same line can be
    applied from the opposite image edge.
    """
    if from_left:
        x = 1 - x
    return ((m * x + b) <= y) == above


def brakezone(x, y, m=m, b=b, sh=.3):
    """Return True when the normalized point (x, y) lies inside the braking
    zone: below both edge trajectory lines and below the horizon *sh*."""
    left = in_region(x, y, m, b, above=False)
    right = in_region(x, y, m, b, above=False, from_left=True)
    # BUG FIX: the horizon cut-off previously hard-coded b=.3 here and
    # silently ignored the `sh` parameter, so brake_policy(sh=...) and the
    # interactive horizon entry in find_horizon had no effect.  The default
    # sh=.3 keeps the original default behavior.
    top = in_region(x, y, b=sh, above=False)
    return left and right and top


def brake_policy(m=m, b=b, sh=sh):
    """Build an (x, y) -> bool braking predicate with the given geometry."""
    def policy(x, y):
        return brakezone(x, y, m=m, b=b, sh=sh)
    return policy
def to_bb(res, img):
    """Normalize a detection dict with 'topleft'/'bottomright' pixel coords
    into [ymin, xmin, ymax, xmax] fractions of the image size."""
    height, width = img.shape[:2]
    top_left = res['topleft']
    bottom_right = res['bottomright']
    return [
        top_left['y'] / height,
        top_left['x'] / width,
        bottom_right['y'] / height,
        bottom_right['x'] / width,
    ]
def res_policy(brake_policy):
    """Wrap an (x, y) -> bool braking predicate into a handler that takes a
    list of detection dicts and brakes iff any 'person' stands in the zone."""
    def should_brake(res, in_trajectory=brake_policy):
        votes = []
        for detection in res:
            if detection['label'] == 'person':
                print("Person found")
                px, py = feet(detection)
                votes.append(in_trajectory(px, py))
        return any(votes)
    return should_brake


def feet(res):
    """Return the normalized (x, y) foot position of a detection: the
    horizontal box center paired with the box bottom."""
    box = res['box']
    return (box.xmax + box.xmin) / 2, box.ymax
def show_brakezone(img, brake_fn=None, saveas=None, show=False):
    """Overlay the braking zone on a copy of *img* and return that copy.

    For every pixel, the normalized coordinate (x from the left, y upward
    from the bottom) is passed to *brake_fn*; pixels inside the zone get
    +35 on the red channel.  Optionally displays and/or saves the figure.

    brake_fn defaults to the module-level `brakezone` (bound lazily at call
    time rather than at def time, so the block is importable on its own and
    monkey-patched policies are respected).
    Raises ValueError if img is None.
    """
    if brake_fn is None:
        brake_fn = brakezone
    if img is None:
        # BUG FIX: the original fell through to `np.zeros(size)` with
        # `size` still undefined, guaranteeing a NameError; fail fast with
        # a clear message instead.
        raise ValueError("show_brakezone requires an image")
    out = img.copy()
    img_h, img_w = img.shape[:2]
    zone = np.zeros((img_h, img_w))
    for y_ in range(img_h):
        for x_ in range(img_w):
            # Convert array indices to normalized coords; y grows upward.
            y = 1 - y_ / img_h
            x = x_ / img_w
            brake = brake_fn(x, y)
            zone[y_, x_] = brake
            if brake:
                # NOTE(review): for uint8 images repeated calls could wrap
                # past 255; a single overlay adds at most 35.
                out[y_, x_, 0] += 35
    if show:
        plt.imshow(out)
        plt.show()
    if saveas:
        plt.savefig(saveas)
    return out
from visualizations import show_bboxes
def find_horizon(img, save="horizon", detect=None, res=None,sh=sh,b=b,m=m):
if detect:
res = detect(img)
sh_in = (raw_input("Enter horizon: "))
b_in = ( raw_input("Enter Intc: "))
m_in = ( raw_input("Enter Slope: "))
update = lambda x, default: float(x) if x is not '' else float(default)
b = update(b_in, b)
m = update(m_in, m)
sh = update(sh_in, sh)
print('(b,m,sh)',b,m,sh)
brake_fn=brake_policy(sh=sh, m=m, b=b)
masked=show_brakezone(img, show=False, brake_fn=brake_fn)
if detect:
plt.close()
res = detect(img)
if res:
print(res)
show_bboxes(masked, res)
print(res_policy(brake_fn)(res))
plt.show()
| 26.635514 | 75 | 0.561404 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 214 | 0.075088 |
f8d06ccac9f1d3cb709d0653c37332224ffae6f4 | 20,941 | py | Python | pysnmp/DPS-MIB-V38.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/DPS-MIB-V38.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/DPS-MIB-V38.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module DPS-MIB-V38 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DPS-MIB-V38
# Produced by pysmi-0.3.4 at Mon Apr 29 18:39:21 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, ModuleIdentity, Unsigned32, Counter64, Gauge32, ObjectIdentity, IpAddress, enterprises, NotificationType, Integer32, Bits, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "ModuleIdentity", "Unsigned32", "Counter64", "Gauge32", "ObjectIdentity", "IpAddress", "enterprises", "NotificationType", "Integer32", "Bits", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
dpsInc = MibIdentifier((1, 3, 6, 1, 4, 1, 2682))
dpsAlarmControl = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1))
tmonXM = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 1))
tmonIdent = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 1, 1))
tmonIdentManufacturer = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonIdentManufacturer.setStatus('mandatory')
tmonIdentModel = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonIdentModel.setStatus('mandatory')
tmonIdentSoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonIdentSoftwareVersion.setStatus('mandatory')
tmonAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2), )
if mibBuilder.loadTexts: tmonAlarmTable.setStatus('mandatory')
tmonAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1), ).setIndexNames((0, "DPS-MIB-V38", "tmonAIndex"))
if mibBuilder.loadTexts: tmonAlarmEntry.setStatus('mandatory')
tmonAIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAIndex.setStatus('mandatory')
tmonASite = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(30, 30)).setFixedLength(30)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonASite.setStatus('mandatory')
tmonADesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonADesc.setStatus('mandatory')
tmonAState = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAState.setStatus('mandatory')
tmonASeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonASeverity.setStatus('mandatory')
tmonAChgDate = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAChgDate.setStatus('mandatory')
tmonAChgTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAChgTime.setStatus('mandatory')
tmonAAuxDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(30, 30)).setFixedLength(30)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAAuxDesc.setStatus('mandatory')
tmonADispDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(14, 14)).setFixedLength(14)).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonADispDesc.setStatus('mandatory')
tmonAPntType = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAPntType.setStatus('mandatory')
tmonAPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAPort.setStatus('mandatory')
tmonAAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAAddress.setStatus('mandatory')
tmonADisplay = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonADisplay.setStatus('mandatory')
tmonAPoint = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 1, 2, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonAPoint.setStatus('mandatory')
tmonCommandGrid = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3))
tmonCPType = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCPType.setStatus('mandatory')
tmonCPort = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCPort.setStatus('mandatory')
tmonCAddress = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCAddress.setStatus('mandatory')
tmonCDisplay = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCDisplay.setStatus('mandatory')
tmonCPoint = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCPoint.setStatus('mandatory')
tmonCEvent = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCEvent.setStatus('mandatory')
tmonCAction = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 17, 18, 19))).clone(namedValues=NamedValues(("latch", 1), ("release", 2), ("momentary", 3), ("ack", 17), ("tag", 18), ("untag", 19)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCAction.setStatus('mandatory')
tmonCAuxText = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(30, 30)).setFixedLength(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmonCAuxText.setStatus('mandatory')
tmonCResult = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 1, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("success", 1), ("failure", 2), ("pending", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmonCResult.setStatus('mandatory')
dpsRTU = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 2))
dpsRTUIdent = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1))
dpsRTUManufacturer = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(30, 30)).setFixedLength(30)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUManufacturer.setStatus('mandatory')
dpsRTUModel = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(30, 30)).setFixedLength(30)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUModel.setStatus('mandatory')
dpsRTUFirmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUFirmwareVersion.setStatus('mandatory')
dpsRTUDateTime = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(23, 23)).setFixedLength(23)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUDateTime.setStatus('mandatory')
dpsRTUSyncReq = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("sync", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUSyncReq.setStatus('mandatory')
dpsRTUDisplayGrid = MibTable((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2), )
if mibBuilder.loadTexts: dpsRTUDisplayGrid.setStatus('mandatory')
dpsRTUDisplayEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1), ).setIndexNames((0, "DPS-MIB-V38", "dpsRTUPort"), (0, "DPS-MIB-V38", "dpsRTUAddress"), (0, "DPS-MIB-V38", "dpsRTUDisplay"))
if mibBuilder.loadTexts: dpsRTUDisplayEntry.setStatus('mandatory')
dpsRTUPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUPort.setStatus('mandatory')
dpsRTUAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAddress.setStatus('mandatory')
dpsRTUDisplay = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUDisplay.setStatus('mandatory')
dpsRTUDispDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(20, 20)).setFixedLength(20)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUDispDesc.setStatus('mandatory')
dpsRTUPntMap = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(71, 71)).setFixedLength(71)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUPntMap.setStatus('mandatory')
dpsRTUControlGrid = MibIdentifier((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3))
dpsRTUCPort = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUCPort.setStatus('mandatory')
dpsRTUCAddress = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUCAddress.setStatus('mandatory')
dpsRTUCDisplay = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUCDisplay.setStatus('mandatory')
dpsRTUCPoint = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUCPoint.setStatus('mandatory')
dpsRTUCAction = MibScalar((1, 3, 6, 1, 4, 1, 2682, 1, 2, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("latch", 1), ("release", 2), ("momentary", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dpsRTUCAction.setStatus('mandatory')
dpsRTUAlarmGrid = MibTable((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5), )
if mibBuilder.loadTexts: dpsRTUAlarmGrid.setStatus('mandatory')
dpsRTUAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1), ).setIndexNames((0, "DPS-MIB-V38", "dpsRTUAPort"), (0, "DPS-MIB-V38", "dpsRTUAAddress"), (0, "DPS-MIB-V38", "dpsRTUADisplay"), (0, "DPS-MIB-V38", "dpsRTUAPoint"))
if mibBuilder.loadTexts: dpsRTUAlarmEntry.setStatus('mandatory')
dpsRTUAPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAPort.setStatus('mandatory')
dpsRTUAAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAAddress.setStatus('mandatory')
dpsRTUADisplay = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUADisplay.setStatus('mandatory')
dpsRTUAPoint = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAPoint.setStatus('mandatory')
dpsRTUAPntDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(21, 21)).setFixedLength(21)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAPntDesc.setStatus('mandatory')
dpsRTUAState = MibTableColumn((1, 3, 6, 1, 4, 1, 2682, 1, 2, 5, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dpsRTUAState.setStatus('mandatory')
tmonCRalarmSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,10)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonCRalarmClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,11)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonMJalarmSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,12)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonMJalarmClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,13)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonMNalarmSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,14)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonMNalarmClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,15)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonSTalarmSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,16)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
tmonSTalarmClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 1) + (0,17)).setObjects(("DPS-MIB-V38", "tmonASite"), ("DPS-MIB-V38", "tmonADesc"), ("DPS-MIB-V38", "tmonAState"), ("DPS-MIB-V38", "tmonASeverity"), ("DPS-MIB-V38", "tmonAChgDate"), ("DPS-MIB-V38", "tmonAChgTime"), ("DPS-MIB-V38", "tmonAAuxDesc"), ("DPS-MIB-V38", "tmonADispDesc"), ("DPS-MIB-V38", "tmonAPntType"), ("DPS-MIB-V38", "tmonAPort"), ("DPS-MIB-V38", "tmonAAddress"), ("DPS-MIB-V38", "tmonADisplay"), ("DPS-MIB-V38", "tmonAPoint"), ("DPS-MIB-V38", "tmonCEvent"))
dpsRTUPointSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,20)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"), ("DPS-MIB-V38", "dpsRTUAPort"), ("DPS-MIB-V38", "dpsRTUAAddress"), ("DPS-MIB-V38", "dpsRTUADisplay"), ("DPS-MIB-V38", "dpsRTUAPoint"), ("DPS-MIB-V38", "dpsRTUAPntDesc"), ("DPS-MIB-V38", "dpsRTUAState"))
dpsRTUPointClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,21)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"), ("DPS-MIB-V38", "dpsRTUAPort"), ("DPS-MIB-V38", "dpsRTUCAddress"), ("DPS-MIB-V38", "dpsRTUADisplay"), ("DPS-MIB-V38", "dpsRTUAPoint"), ("DPS-MIB-V38", "dpsRTUAPntDesc"), ("DPS-MIB-V38", "dpsRTUAState"))
dpsRTUsumPSet = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,101)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"))
dpsRTUsumPClr = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,102)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"))
dpsRTUcomFailed = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,103)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"))
dpsRTUcomRestored = NotificationType((1, 3, 6, 1, 4, 1, 2682, 1, 2) + (0,104)).setObjects(("DPS-MIB-V38", "sysDescr"), ("DPS-MIB-V38", "sysLocation"), ("DPS-MIB-V38", "dpsRTUDateTime"))
mibBuilder.exportSymbols("DPS-MIB-V38", tmonAlarmEntry=tmonAlarmEntry, tmonAIndex=tmonAIndex, tmonAChgDate=tmonAChgDate, tmonCAddress=tmonCAddress, tmonSTalarmClr=tmonSTalarmClr, dpsRTUFirmwareVersion=dpsRTUFirmwareVersion, tmonAChgTime=tmonAChgTime, dpsRTUCPoint=dpsRTUCPoint, dpsRTU=dpsRTU, dpsRTUPntMap=dpsRTUPntMap, dpsRTUsumPSet=dpsRTUsumPSet, tmonASite=tmonASite, tmonAlarmTable=tmonAlarmTable, dpsRTUcomFailed=dpsRTUcomFailed, tmonCRalarmSet=tmonCRalarmSet, dpsRTUDisplayEntry=dpsRTUDisplayEntry, dpsRTUAlarmGrid=dpsRTUAlarmGrid, tmonAAddress=tmonAAddress, dpsRTUADisplay=dpsRTUADisplay, dpsRTUDisplayGrid=dpsRTUDisplayGrid, tmonADispDesc=tmonADispDesc, dpsRTUManufacturer=dpsRTUManufacturer, dpsRTUModel=dpsRTUModel, dpsRTUDispDesc=dpsRTUDispDesc, tmonCPoint=tmonCPoint, tmonMJalarmSet=tmonMJalarmSet, dpsRTUAPntDesc=dpsRTUAPntDesc, dpsInc=dpsInc, dpsAlarmControl=dpsAlarmControl, tmonAPort=tmonAPort, dpsRTUAlarmEntry=dpsRTUAlarmEntry, dpsRTUSyncReq=dpsRTUSyncReq, tmonIdent=tmonIdent, tmonASeverity=tmonASeverity, tmonMNalarmClr=tmonMNalarmClr, dpsRTUcomRestored=dpsRTUcomRestored, tmonCAction=tmonCAction, tmonIdentSoftwareVersion=tmonIdentSoftwareVersion, tmonIdentModel=tmonIdentModel, dpsRTUCAction=dpsRTUCAction, tmonMNalarmSet=tmonMNalarmSet, tmonADesc=tmonADesc, tmonCEvent=tmonCEvent, tmonSTalarmSet=tmonSTalarmSet, tmonADisplay=tmonADisplay, dpsRTUIdent=dpsRTUIdent, dpsRTUAPort=dpsRTUAPort, dpsRTUAAddress=dpsRTUAAddress, dpsRTUAddress=dpsRTUAddress, dpsRTUCPort=dpsRTUCPort, tmonAPntType=tmonAPntType, dpsRTUCAddress=dpsRTUCAddress, dpsRTUCDisplay=dpsRTUCDisplay, dpsRTUAState=dpsRTUAState, tmonCResult=tmonCResult, tmonXM=tmonXM, dpsRTUDateTime=dpsRTUDateTime, dpsRTUAPoint=dpsRTUAPoint, dpsRTUsumPClr=dpsRTUsumPClr, tmonCommandGrid=tmonCommandGrid, tmonCPType=tmonCPType, tmonAState=tmonAState, dpsRTUPort=dpsRTUPort, tmonMJalarmClr=tmonMJalarmClr, dpsRTUDisplay=dpsRTUDisplay, dpsRTUPointSet=dpsRTUPointSet, dpsRTUPointClr=dpsRTUPointClr, tmonAPoint=tmonAPoint, 
tmonCRalarmClr=tmonCRalarmClr, tmonIdentManufacturer=tmonIdentManufacturer, tmonCAuxText=tmonCAuxText, dpsRTUControlGrid=dpsRTUControlGrid, tmonAAuxDesc=tmonAAuxDesc, tmonCPort=tmonCPort, tmonCDisplay=tmonCDisplay)
| 146.440559 | 2,200 | 0.719641 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,974 | 0.285278 |
f8d0c7ea7f201118a072a6fce98f54b42edb4e97 | 524 | py | Python | Lab5/load_graph.py | YuryMalyshev/CAD-with-Python | ecbb82b8efb436e7089b0895dc898cf956351046 | [
"MIT"
] | null | null | null | Lab5/load_graph.py | YuryMalyshev/CAD-with-Python | ecbb82b8efb436e7089b0895dc898cf956351046 | [
"MIT"
] | null | null | null | Lab5/load_graph.py | YuryMalyshev/CAD-with-Python | ecbb82b8efb436e7089b0895dc898cf956351046 | [
"MIT"
] | null | null | null | import numpy as np
def load_graph(path):
    """Load a graph edge list from a CSV file.

    The file is expected to have a header line (skipped) followed by rows of
    three comma-separated integers.  Rows that cannot be parsed as three
    integers are silently skipped, matching the original best-effort behavior.

    :param path: path of the CSV file to read
    :return: tuple ``(min, max, edges)`` where *edges* is an (n, 3) float
             array and *min*/*max* are taken over ALL values in it (node ids
             and weights alike, as in the original implementation)
    :raises ValueError: if the file contains no parsable edge rows
    """
    rows = []
    with open(path, 'r', encoding='utf-8', errors='ignore') as g_file:
        next(g_file)  # skip the header line
        for line in g_file:
            fields = line.split(",")
            try:
                rows.append([int(fields[0]), int(fields[1]), int(fields[2])])
            except (ValueError, IndexError):
                # Malformed line: skip it, as before (but no longer swallows
                # unrelated exceptions the way ``except Exception`` did).
                pass
    if not rows:
        # The original np.min on an empty array also raised ValueError,
        # only with a less helpful message.
        raise ValueError("no valid edges found in %s" % path)
    # Single allocation instead of the original per-line np.append/reshape,
    # which was O(n**2) in the number of edges.
    edges = np.array(rows, dtype=float)
    return np.min(edges), np.max(edges), edges
| 32.75 | 101 | 0.532443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.082061 |
f8d0d6ecca8d12cee0a53f9628644c363e8839b3 | 1,055 | py | Python | python/smqtk/utils/simple_timer.py | jbeezley/SMQTK | e6b00f94be95f39bbca52a7983ac3d6d1f86f847 | [
"BSD-3-Clause"
] | 82 | 2015-01-07T15:33:29.000Z | 2021-08-11T18:34:05.000Z | python/smqtk/utils/simple_timer.py | jbeezley/SMQTK | e6b00f94be95f39bbca52a7983ac3d6d1f86f847 | [
"BSD-3-Clause"
] | 230 | 2015-04-08T14:36:51.000Z | 2022-03-14T17:55:30.000Z | python/smqtk/utils/simple_timer.py | DigitalCompanion/SMQTK | fc9404b69150ef44f24423844bc80735c0c2b669 | [
"BSD-3-Clause"
] | 65 | 2015-01-04T15:00:16.000Z | 2021-11-19T18:09:11.000Z | import time
from smqtk.utils import SmqtkObject
class SimpleTimer (SmqtkObject):
    """
    Context manager that logs a message when a block is entered and the
    elapsed wall-clock time when it is left.  Intended for use with the
    ``with`` statement.
    """

    def __init__(self, msg, log_func=None, *args):
        """
        :param msg: Message (printf-style format string) to emit.
        :param log_func: Optional logging callable to use instead of this
            object's own logger.
        :param args: Positional arguments interpolated into ``msg``.
        """
        self._log_func = log_func
        self._msg = msg
        self._msg_args = args
        self._s = 0.0

    def __enter__(self):
        if self._log_func:
            self._log_func(self._msg, *self._msg_args)
        else:
            self._log.info(self._msg % self._msg_args)
        self._s = time.time()

    def __exit__(self, *_):
        elapsed = time.time() - self._s
        if self._log_func:
            self._log_func("%s -> %f s", self._msg % self._msg_args, elapsed)
        else:
            self._log.info("%s -> %f s" % (self._msg % self._msg_args, elapsed))
| 27.051282 | 74 | 0.525118 | 1,003 | 0.950711 | 0 | 0 | 0 | 0 | 0 | 0 | 285 | 0.270142 |
f8d1533d26da78b1c9ff1203760b4a5ae33a69c7 | 206 | py | Python | 1546.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
] | 1 | 2022-01-14T08:45:32.000Z | 2022-01-14T08:45:32.000Z | 1546.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
] | null | null | null | 1546.py | ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python | 9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da | [
"MIT"
] | null | null | null | arr = ['', 'Rolien', 'Naej', 'Elehcim', 'Odranoel']
# URI 1546 "Feedback": for each of n test cases read k queries; each query is
# an index into the fixed answer table ``arr`` defined above.
n = int(input())
while n != 0:
    n -= 1
    k = int(input())  # number of queries in this test case
    while k != 0:
        k -= 1
        num = int(input())  # index into arr; the matching name is printed below
print(arr[num]) | 22.888889 | 52 | 0.417476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.169903 |
f8d1e3f53857745560685cc9254effe945b354f9 | 3,314 | py | Python | portl.py | blackc8/portl | 8be36d67db2041071d5169204902ec9fff6aabe9 | [
"MIT"
] | null | null | null | portl.py | blackc8/portl | 8be36d67db2041071d5169204902ec9fff6aabe9 | [
"MIT"
] | 1 | 2020-10-31T15:32:31.000Z | 2020-10-31T15:33:11.000Z | portl.py | blackc8/portl | 8be36d67db2041071d5169204902ec9fff6aabe9 | [
"MIT"
] | null | null | null | import socket, time, sys
import argparse
__version__="0.1"
# Port range scanned by scan(): [min_port, max_port).
min_port=0
#max_port=65535
max_port=10000
# Command-line interface definition.
parser = argparse.ArgumentParser(description="a simple python port scanner",epilog="author: blackc8")
parser.add_argument("hostname",metavar="<hostname>",help="host to scan")
parser.add_argument("-dp","--ddport",help="do not display port",action="store_true")
parser.add_argument("-sF","--show_filtered",help="show filtered ports",action="store_true")
parser.add_argument("-b","--banner",help="grab the banners of ports",action="store_true")
parser.add_argument("-v","--version",help="dispaly version",action="version",version="%(prog)s ("+__version__+")")
args=parser.parse_args()
def w_log(msg):
    """Emit one log line to stdout; single choke point for all scanner output."""
    sys.stdout.write("{}\n".format(msg))
def _exit(error):
    # Report a fatal error and terminate the program (sys.exit raises SystemExit).
    w_log("[-] {}".format(error))
    w_log("exited")
    sys.exit()
def resolve_hostname(hostname):
    """Resolve *hostname* to an IPv4 address string via DNS.

    Returns 0 (a falsy sentinel the callers test with ``== 0``) when the
    lookup fails.
    """
    try:
        return socket.gethostbyname(hostname)
    except socket.error:
        return 0
def validIP(address):
    """Return True if *address* is a valid dotted-quad IPv4 address string.

    Bug fix: the original called int() on each dot-separated part without
    checking it was numeric, so inputs such as ``my.host.name.com`` (four
    non-numeric labels) raised ValueError instead of returning False.
    """
    parts = address.split(".")
    if len(parts) != 4:
        return False
    for item in parts:
        if not item.isdigit():
            # Non-numeric (or empty) octet: definitely not an IP address.
            return False
        if not 0 <= int(item) <= 255:
            return False
    return True
def is_open(host,port):
    """Probe one TCP port.

    Returns 0 when the connection is accepted (port open), otherwise the
    errno reported by ``connect_ex`` (0.5 s timeout).
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.settimeout(0.5)
    try:
        return probe.connect_ex((host, port))
    finally:
        probe.close()
def grab_banner(host,port):
    """Connect to host:port and return the service banner string.

    Returns the first 1024 bytes sent by the service, decoded and stripped of
    trailing newlines, or ``"<no banner>"`` when the connection, read or
    decode fails.

    Fixes over the original: the socket is now always closed (it was leaked
    on every call), and the bare ``except:`` is narrowed to the errors this
    function can actually produce.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((host,port))
        # Timeout applies to the banner read (set after connect, as before).
        sock.settimeout(3)
        return sock.recv(1024).decode().strip('\n')
    except (socket.error, UnicodeDecodeError):
        return "<no banner>"
    finally:
        sock.close()
def scan(hostname,ddport=False,gbanner=False,show_filtered=False):
    """Scan TCP ports [min_port, max_port) on *hostname*.

    :param hostname: IP address or DNS name of the target host
    :param ddport: when True, do not print each open port as it is found
    :param gbanner: when True, also grab and record service banners
    :param show_filtered: when True, print filtered (no-response) ports too
    :return: tuple ``(True, open_ports, banners, time_taken)``
    """
    open_ports=[]
    filtered_ports=[]
    banners=[]
    if not validIP(hostname):
        hostIP=resolve_hostname(hostname)
        if hostIP == 0:
            # Bug fix: the hostname was never interpolated into the message
            # (the original printed a literal "{}").
            _exit("Unable to resolve hostname ({})".format(hostname))
        else:
            host_info="{} ({})".format(hostname,hostIP)
    else:
        hostIP=hostname
        host_info=hostname
    if not validIP(hostIP):
        _exit("Invalid IP address {}".format(hostIP))  # typo fixed ("adddress")
    w_log("[i] Scan started at: {}".format(time.asctime()))
    start_time=time.time()
    w_log("[+] Scanning host {}".format(host_info))  # typo fixed ("Scaning")
    for port in range(min_port,max_port):
        port_stat=is_open(hostIP,port)
        if port_stat == 0:  # connection accepted: port is open
            open_ports.append(port)
            if not ddport:
                w_log("port: {}".format(port))
                w_log("  state: open")
            if gbanner:
                banner=grab_banner(hostname,port)
                banners.append([port, banner])
                w_log("  banner: {}".format(banner))
        elif port_stat == 11:  # errno 11 (EAGAIN on Linux): treat as filtered
            filtered_ports.append(port)
            if show_filtered:
                w_log("port: {}".format(port))
                w_log("  state: filtered")
    stop_time=time.time()
    time_taken=stop_time-start_time
    w_log("[i] {} open, {} filtered ports are discovered.".format(len(open_ports),len(filtered_ports)))
    w_log("[i] Scan completed in {:.2f} seconds.".format(time_taken))
    return True,open_ports,banners,time_taken
if __name__ == "__main__":
    # CLI entry point: run a scan with the parsed command-line options.
    scan(args.hostname,args.ddport,args.banner,args.show_filtered)
| 30.971963 | 114 | 0.624925 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 654 | 0.197345 |
f8d25c456ce1d78680f761522a288c787f746b68 | 4,730 | py | Python | Python/MachineLearning_Ng/examples/ex2.py | Ritetsu/lizhe_Notes | 4c465b5e23c1e520f9508314cfda7f26517d6dd3 | [
"MIT"
] | null | null | null | Python/MachineLearning_Ng/examples/ex2.py | Ritetsu/lizhe_Notes | 4c465b5e23c1e520f9508314cfda7f26517d6dd3 | [
"MIT"
] | null | null | null | Python/MachineLearning_Ng/examples/ex2.py | Ritetsu/lizhe_Notes | 4c465b5e23c1e520f9508314cfda7f26517d6dd3 | [
"MIT"
] | 1 | 2021-07-07T12:01:42.000Z | 2021-07-07T12:01:42.000Z | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 16 20:15:55 2019
@author: Shinelon
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
path='ex2data1.txt'
data=pd.read_csv(path,header=None,names=['Exam1','Exam2','Admitted'])
data.head()
# Scatter plot of the two exam scores, color-coded by admission outcome.
positive=data[data['Admitted'].isin([1])]
negative=data[data['Admitted'].isin([0])]
fig,ax=plt.subplots(figsize=(12,8))
# c = marker color; s is the marker size (default 20).
ax.scatter(positive['Exam1'],positive['Exam2'],c='b',marker='o',label='Admitted')
ax.scatter(negative['Exam1'],negative['Exam2'],c='r',marker='o',label='Unadimitted')
ax.legend(loc=4)
ax.set_xlabel('Exam1 Score');ax.set_ylabel('Exam2 Score')
plt.show()# the two classes are separated by a fairly clear linear boundary
#sigmoid函数
def sigmoid(z):
    """Element-wise logistic function: sigma(z) = 1 / (1 + e**(-z))."""
    exp_neg = np.exp(-z)
    return 1 / (1 + exp_neg)
#检验/观察一下sigmoid函数
nums=np.arange(-10,10,1)
fig,ax=plt.subplots(figsize=(12,8))
ax.plot(nums,sigmoid(nums),'r')
plt.show()
def cost(theta,X,y):
    """Unregularized logistic-regression cost (mean cross-entropy).

    theta, X and y are coerced to np.matrix so ``*`` is matrix
    multiplication, as in the rest of this script.
    """
    theta, X, y = np.matrix(theta), np.matrix(X), np.matrix(y)
    h = sigmoid(X * theta.T)                 # hypothesis per sample
    pos = np.multiply(-y, np.log(h))         # -y * log(h)
    neg = np.multiply(1 - y, np.log(1 - h))  # (1-y) * log(1-h)
    return np.sum(pos - neg) / len(X)
data.insert(0,'ones',1)#在0列和1列间插入1
cols=data.shape[1]
X=data.iloc[:,0:cols-1]
y=data.iloc[:,cols-1:cols]
X=np.array(X.values)
y=np.array(y.values)
theta=np.zeros(3)
cost(theta,X,y)
def gradientDescent(theta,X,y):
    """Compute the gradient of the logistic cost at *theta*.

    Despite its name this performs no descent loop; it returns a single
    gradient vector (used as ``fprime`` by scipy's fmin_tnc below).
    """
    theta, X, y = np.matrix(theta), np.matrix(X), np.matrix(y)
    n_params = int(theta.ravel().shape[1])
    grad = np.zeros(n_params)
    error = sigmoid(X * theta.T) - y
    for j in range(n_params):
        grad[j] = np.sum(np.multiply(error, X[:, j])) / len(X)
    return grad
gradientDescent(theta,X,y)#仅求了一次更新的theta
#用SciPy的TruncatedNewton寻找最优参数
import scipy.optimize as opt
result=opt.fmin_tnc(func=cost,x0=theta,fprime=gradientDescent,args=(X,y))
result#输出theta和代价
cost(result[0],X,y)
#建立预测函数
def predict(theta,X):
    """Return 0/1 class predictions using a 0.5 probability threshold."""
    probs = sigmoid(X * theta.T)
    return [1 if p >= 0.5 else 0 for p in probs]
theta_min=np.matrix(result[0])#theta_min是1x3数组
predictions=predict(theta_min,X)
correct=[1 if((a==1 and b==1) or (a==0 and b==0))\
else 0 for (a,b) in zip(predictions,y)]
accuracy=(sum(map(int,correct))/len(correct))
print('accuracy={}'.format(accuracy))#要注意这是训练集的精确度
path2='ex2data2.txt'
data2=pd.read_csv(path2,header=None,names=['Test1','Test2','Accepted'])
data2.head()
positive=data2[data2['Accepted'].isin([1])]
negative=data2[data2['Accepted'].isin([0])]
fig,ax=plt.subplots(figsize=(12,8))
ax.scatter(positive['Test1'],positive['Test2'],s=50,c='b',\
marker='o',label='Accepted')
ax.scatter(negative['Test1'],negative['Test2'],s=50,c='r',\
marker='x',label='Rejected')
ax.legend()
ax.set_xlabel('Test1 Score')
ax.set_ylabel('Test2 Score')
plt.show()
#非常复杂,没有线性界限;通过线性构造原始特征的多项式中的特征
degree=5
x1=data2['Test1']
x2=data2['Test2']
data2.insert(3,'ones',1)
for i in range(1,degree):
for j in range(0,i):
data2['F'+str(i)+str(j)]=np.power(x1,i-j)*np.power(x2,j)
data2.drop('Test1',axis=1,inplace=True)#axis=0为行,1为列;TRUE为在原数据上改动
data2.drop('Test2',axis=1,inplace=True)
data2.head()
#正则化代价函数
def costReg(theta,X,y,learningRate):
    """Logistic-regression cost with an L2 penalty.

    The bias term theta[0] is excluded from the penalty.  NOTE: despite its
    name, ``learningRate`` is the regularization strength (lambda).
    """
    theta, X, y = np.matrix(theta), np.matrix(X), np.matrix(y)
    h = sigmoid(X * theta.T)
    pos = np.multiply(-y, np.log(h))
    neg = np.multiply(1 - y, np.log(1 - h))
    penalty = (learningRate / (2 * len(X))) * np.sum(np.power(theta[:, 1:theta.shape[1]], 2))
    return np.sum(pos - neg) / len(X) + penalty
#通过正则化参数加大对theta的惩罚
def gradientReg(theta,X,y,learningRate):
    """Gradient of the regularized logistic cost.

    The bias component (index 0) carries no regularization term; the other
    components add ``(lambda / m) * theta_j``.
    """
    theta, X, y = np.matrix(theta), np.matrix(X), np.matrix(y)
    n_params = int(theta.ravel().shape[1])
    grad = np.zeros(n_params)
    error = sigmoid(X * theta.T) - y
    for j in range(n_params):
        base = np.sum(np.multiply(error, X[:, j])) / len(X)
        if j == 0:
            grad[j] = base  # bias term: unregularized
        else:
            grad[j] = base + (learningRate / len(X)) * theta[:, j]
    return grad
cols=data2.shape[1]
X2=data2.iloc[:,1:cols]
y2=data2.iloc[:,0:1]
X2=np.array(X2.values)
y2=np.array(y2.values)
theta2=np.zeros(11)
learningRate=1
costReg(theta2,X2,y2,learningRate)
gradientReg(theta2,X2,y2,learningRate)
result2=opt.fmin_tnc(func=costReg,x0=theta2,fprime=gradientReg,\
args=(X2,y2,learningRate))
result2
#查看杂训练数据上的准确度
theta_min=np.matrix(result2[0])
predictions=predict(theta_min,X2)
correct=[1 if ((a==1 and b==1) or (a==0 and b==0))\
else 0 for (a,b) in zip(predictions,y2)]
accuracy=(sum(map(int,correct))/len(correct))
print('accuracy2={}%'.format(accuracy*100))
#用sklearn直接实现
from sklearn import linear_model
model=linear_model.LogisticRegression(penalty='l2',\
C=1.0,solver='liblinear')
model.fit(X2,y2.ravel())
model.score(X2,y2) | 31.533333 | 84 | 0.679281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,152 | 0.225882 |
6ef1130a48a6b7d1320ed14916f6226e73d226a4 | 855 | py | Python | questioning/trade/migrations/0003_auto_20190901_1710.py | PythonerKK/questioning | c30fb2d31a221398df392f4b8faab539d6921ac0 | [
"MIT"
] | 6 | 2019-12-31T05:23:27.000Z | 2021-03-12T19:23:34.000Z | questioning/trade/migrations/0003_auto_20190901_1710.py | PythonerKK/questioning | c30fb2d31a221398df392f4b8faab539d6921ac0 | [
"MIT"
] | 4 | 2020-05-12T13:39:40.000Z | 2021-02-08T20:35:18.000Z | questioning/trade/migrations/0003_auto_20190901_1710.py | PythonerKK/questioning | c30fb2d31a221398df392f4b8faab539d6921ac0 | [
"MIT"
] | 1 | 2019-09-02T07:15:16.000Z | 2019-09-02T07:15:16.000Z | # Generated by Django 2.1.9 on 2019-09-01 09:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: attaches a ``user`` foreign key to the
    # OrderInfo and AccountDetail models of the ``trade`` app.
    initial = True
    dependencies = [
        ('trade', '0002_accountdetail_orderinfo'),
        # Depend on whichever model AUTH_USER_MODEL is swapped to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AddField(
            model_name='orderinfo',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户'),
        ),
        migrations.AddField(
            model_name='accountdetail',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户'),
        ),
    ]
| 29.482759 | 129 | 0.661988 | 704 | 0.815759 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.159907 |
6ef13886b158bd50cda282a2108b24f47033b23c | 3,557 | py | Python | Ch5/rbf.py | jason-168/MLCode | 429c17e004fb41ba16c371416c8f73833ab8fc1d | [
"Xnet",
"X11"
] | 146 | 2016-05-24T02:55:53.000Z | 2022-03-23T14:54:42.000Z | Ch5/rbf.py | coky/MarslandMLAlgo | 4277b24db88c4cb70d6b249921c5d21bc8f86eb4 | [
"Xnet",
"X11"
] | 1 | 2017-08-17T23:07:39.000Z | 2017-08-18T08:27:19.000Z | Ch5/rbf.py | coky/MarslandMLAlgo | 4277b24db88c4cb70d6b249921c5d21bc8f86eb4 | [
"Xnet",
"X11"
] | 94 | 2016-05-06T12:34:33.000Z | 2022-03-30T03:31:04.000Z |
# Code from Chapter 5 of Machine Learning: An Algorithmic Perspective (2nd Edition)
# by Stephen Marsland (http://stephenmonika.net)
# You are free to use, change, or redistribute the code in any way you wish for
# non-commercial purposes, but please maintain the name of the original author.
# This code comes with no warranty of any kind.
# Stephen Marsland, 2008, 2014
import numpy as np
import pcn
import kmeans
class rbf:
    """The Radial Basis Function network.

    Parameters are the number of RBFs and their width (sigma), how the RBF
    centres are chosen (datapoints or k-means) and whether the hidden-layer
    activations are normalised.  A perceptron maps the hidden layer to the
    outputs.
    """

    def __init__(self,inputs,targets,nRBF,sigma=0,usekmeans=0,normalise=0):
        """Set up network dimensions, RBF widths and the output perceptron.

        :param inputs: (ndata, nin) training inputs (used for sizing/widths)
        :param targets: (ndata, nout) training targets
        :param nRBF: number of radial basis functions
        :param sigma: Gaussian width; 0 selects a heuristic from the data
        :param usekmeans: non-zero -> position RBF centres with k-means
        :param normalise: non-zero -> normalise hidden activations
        """
        self.nin = np.shape(inputs)[1]
        self.nout = np.shape(targets)[1]
        self.ndata = np.shape(inputs)[0]
        self.nRBF = nRBF
        self.usekmeans = usekmeans
        self.normalise = normalise
        if usekmeans:
            self.kmeansnet = kmeans.kmeans(self.nRBF,inputs)
        self.hidden = np.zeros((self.ndata,self.nRBF+1))
        if sigma==0:
            # Heuristic width of the Gaussians: data spread over the centres.
            d = (inputs.max(axis=0)-inputs.min(axis=0)).max()
            self.sigma = d/np.sqrt(2*nRBF)
        else:
            self.sigma = sigma
        self.perceptron = pcn.pcn(self.hidden[:,:-1],targets)
        # Initialise network weights (input -> RBF centres).
        self.weights1 = np.zeros((self.nin,self.nRBF))

    def rbftrain(self,inputs,targets,eta=0.25,niterations=100):
        """Position the RBF centres, compute activations, train the output layer."""
        if self.usekmeans==0:
            # Version 1: set RBFs to be randomly chosen datapoints.
            # list() is required so np.random.shuffle can shuffle in place
            # (range objects are immutable on Python 3).
            indices = list(range(self.ndata))
            np.random.shuffle(indices)
            for i in range(self.nRBF):
                self.weights1[:,i] = inputs[indices[i],:]
        else:
            # Version 2: use k-means cluster centres.
            self.weights1 = np.transpose(self.kmeansnet.kmeanstrain(inputs))
        for i in range(self.nRBF):
            self.hidden[:,i] = np.exp(-np.sum((inputs - np.ones((1,self.nin))*self.weights1[:,i])**2,axis=1)/(2*self.sigma**2))
        if self.normalise:
            self.hidden[:,:-1] /= np.transpose(np.ones((1,np.shape(self.hidden)[0]))*self.hidden[:,:-1].sum(axis=1))
        # Call Perceptron without bias node (since it adds its own)
        self.perceptron.pcntrain(self.hidden[:,:-1],targets,eta,niterations)

    def rbffwd(self,inputs):
        """Forward pass: return the network outputs for *inputs*."""
        hidden = np.zeros((np.shape(inputs)[0],self.nRBF+1))
        for i in range(self.nRBF):
            hidden[:,i] = np.exp(-np.sum((inputs - np.ones((1,self.nin))*self.weights1[:,i])**2,axis=1)/(2*self.sigma**2))
        if self.normalise:
            # Bug fix: 'ones' was missing the np. prefix here and raised
            # NameError whenever normalise was set.
            hidden[:,:-1] /= np.transpose(np.ones((1,np.shape(hidden)[0]))*hidden[:,:-1].sum(axis=1))
        # Add the bias
        hidden[:,-1] = -1
        outputs = self.perceptron.pcnfwd(hidden)
        return outputs

    def confmat(self,inputs,targets):
        """Print the confusion matrix and the classification accuracy."""
        outputs = self.rbffwd(inputs)
        nClasses = np.shape(targets)[1]
        if nClasses==1:
            # Single output column: threshold at 0 for two classes.
            nClasses = 2
            outputs = np.where(outputs>0,1,0)
        else:
            # 1-of-N encoding: pick the most active output.
            outputs = np.argmax(outputs,1)
            targets = np.argmax(targets,1)
        cm = np.zeros((nClasses,nClasses))
        for i in range(nClasses):
            for j in range(nClasses):
                cm[i,j] = np.sum(np.where(outputs==i,1,0)*np.where(targets==j,1,0))
        # Parenthesised so the code runs on both Python 2 and Python 3.
        print(cm)
        print(np.trace(cm)/np.sum(cm))
| 34.872549 | 127 | 0.576047 | 3,135 | 0.881361 | 0 | 0 | 0 | 0 | 0 | 0 | 769 | 0.216193 |
6ef1936814cf84ad0a8d2c89da28ee0ee1c74c44 | 929 | py | Python | _correlation_grad.py | jgorgenucsd/corr_tf | ad777821283f7d18d8bdd04d584e12df9a3fba69 | [
"BSD-2-Clause"
] | 22 | 2017-12-27T07:37:14.000Z | 2021-11-15T05:51:44.000Z | _correlation_grad.py | jgorgenucsd/corr_tf | ad777821283f7d18d8bdd04d584e12df9a3fba69 | [
"BSD-2-Clause"
] | 4 | 2018-04-03T17:08:43.000Z | 2019-08-07T08:55:24.000Z | _correlation_grad.py | jgorgenucsd/corr_tf | ad777821283f7d18d8bdd04d584e12df9a3fba69 | [
"BSD-2-Clause"
] | 12 | 2018-02-06T02:35:12.000Z | 2022-03-02T07:18:19.000Z | #!/usr/bin/env python3
"""
Gradients for inner product.
"""
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
correlation_grad_module = tf.load_op_library('./build/libcorrelation_grad.so')
@ops.RegisterGradient("Correlation")
def _correlation_grad_cc(op, grad):
    """
    Gradient for the custom `correlation` op, delegating to the C++ kernel.

    :param op: the forward `correlation` Operation being differentiated;
        provides access to its inputs and attributes.
    :param grad: gradient with respect to the output of the `correlation` op.
    :return: gradients with respect to both inputs of `correlation`.
    """
    stride = op.get_attr('stride')
    max_displacement = op.get_attr('max_displacement')
    return correlation_grad_module.correlation_grad(
        grad, op.inputs[0], op.inputs[1],
        stride=stride, max_displacement=max_displacement)
| 38.708333 | 163 | 0.750269 | 0 | 0 | 0 | 0 | 628 | 0.675996 | 0 | 0 | 511 | 0.550054 |
6ef32ce891baec71eb1386e2c2b81b8e89a8b9a4 | 123 | py | Python | src/hcrystalball/metrics/__init__.py | betatim/hcrystalball | 693b9b406f05afa23cfc4647c43260166a7076fe | [
"MIT"
] | 1 | 2021-04-12T17:08:17.000Z | 2021-04-12T17:08:17.000Z | src/hcrystalball/metrics/__init__.py | betatim/hcrystalball | 693b9b406f05afa23cfc4647c43260166a7076fe | [
"MIT"
] | null | null | null | src/hcrystalball/metrics/__init__.py | betatim/hcrystalball | 693b9b406f05afa23cfc4647c43260166a7076fe | [
"MIT"
] | 1 | 2022-01-03T16:02:35.000Z | 2022-01-03T16:02:35.000Z | from ._scorer import make_ts_scorer
from ._scorer import get_scorer
# Names exported by ``from hcrystalball.metrics import *``.
__all__ = [
    "get_scorer",
    "make_ts_scorer",
]
| 15.375 | 35 | 0.723577 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.227642 |
6ef9b4082cb1779ade1e3f88552ad789562c6383 | 2,776 | py | Python | tests/selenium/auth/test_user.py | bodik/sner4-web | cb054d79c587b2f8468c73a88754b7c0d5cd5a95 | [
"MIT"
] | 9 | 2019-05-15T11:33:43.000Z | 2022-02-17T04:05:28.000Z | tests/selenium/auth/test_user.py | bodik/sner4 | cb054d79c587b2f8468c73a88754b7c0d5cd5a95 | [
"MIT"
] | 1 | 2019-03-01T11:48:13.000Z | 2019-03-01T11:48:13.000Z | tests/selenium/auth/test_user.py | bodik/sner4-web | cb054d79c587b2f8468c73a88754b7c0d5cd5a95 | [
"MIT"
] | 3 | 2020-03-03T21:06:37.000Z | 2021-01-11T14:40:56.000Z | # This file is part of sner4 project governed by MIT license, see the LICENSE.txt file.
"""
auth.views.user selenium tests
"""
from flask import url_for
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from sner.server.auth.models import User
from sner.server.extensions import db
from tests.selenium import dt_inrow_delete, dt_rendered, webdriver_waituntil
def test_user_list_route(live_server, sl_admin, user):  # pylint: disable=unused-argument
    """simple test ajaxed datatable rendering"""
    sl_admin.get(url_for('auth.user_list_route', _external=True))
    # The table loads asynchronously; wait until the fixture user appears.
    dt_rendered(sl_admin, 'user_list_table', user.username)
def test_user_list_route_inrow_delete(live_server, sl_admin, user):  # pylint: disable=unused-argument
    """delete user inrow button"""
    user_id = user.id
    # Detach the instance so the final query re-reads state from the database.
    db.session.expunge(user)
    sl_admin.get(url_for('auth.user_list_route', _external=True))
    # in this test-case there are multiple items in the table (current_user, test_user), hence index which to delete has to be used
    dt_inrow_delete(sl_admin, 'user_list_table', 1)
    assert not User.query.get(user_id)
def test_user_apikey_route(live_server, sl_admin, user):  # pylint: disable=unused-argument
    """apikey generation/revoking feature tests"""
    sl_admin.get(url_for('auth.user_list_route', _external=True))
    dt_rendered(sl_admin, 'user_list_table', user.username)
    # disable fade, the timing interferes with the test
    sl_admin.execute_script('$("div#modal-global").toggleClass("fade")')
    # Generate an apikey via the in-row action, then dismiss the result modal.
    sl_admin.find_element_by_xpath('//a[@data-url="%s"]' % url_for('auth.user_apikey_route', user_id=user.id, action='generate')).click()
    webdriver_waituntil(sl_admin, EC.visibility_of_element_located((By.XPATH, '//h4[@class="modal-title" and text()="Apikey operation"]')))
    sl_admin.find_element_by_xpath('//div[@id="modal-global"]//button[@class="close"]').click()
    webdriver_waituntil(sl_admin, EC.invisibility_of_element_located((By.XPATH, '//div[@class="modal-global"')))
    dt_rendered(sl_admin, 'user_list_table', user.username)
    # Re-read the user row: the key must now be present.
    db.session.refresh(user)
    assert user.apikey
    # Revoke the apikey through the same modal flow and verify it is gone.
    sl_admin.find_element_by_xpath('//a[@data-url="%s"]' % url_for('auth.user_apikey_route', user_id=user.id, action='revoke')).click()
    webdriver_waituntil(sl_admin, EC.visibility_of_element_located((By.XPATH, '//h4[@class="modal-title" and text()="Apikey operation"]')))
    sl_admin.find_element_by_xpath('//div[@id="modal-global"]//button[@class="close"]').click()
    webdriver_waituntil(sl_admin, EC.invisibility_of_element_located((By.XPATH, '//div[@class="modal-global"')))
    dt_rendered(sl_admin, 'user_list_table', user.username)
    db.session.refresh(user)
    assert not user.apikey
| 46.266667 | 139 | 0.747839 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,100 | 0.396254 |
6efaa56371bdc91af714b2ef343d987547b208e3 | 936 | py | Python | isobmff/media_file.py | kentoku24/isobmff | 6877505a75915caf440bbb80b6024ba6bf9f3baa | [
"MIT"
] | 6 | 2017-08-31T01:55:37.000Z | 2018-12-26T03:03:24.000Z | isobmff/media_file.py | kentoku24/isobmff | 6877505a75915caf440bbb80b6024ba6bf9f3baa | [
"MIT"
] | 4 | 2017-08-29T03:47:16.000Z | 2017-09-05T09:00:17.000Z | isobmff/media_file.py | m-hiki/isbmff | 0724b9892884ae35bdd0796a97a9506098c4cd25 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .box import indent
from .box import read_box
class MediaFile(object):
    """In-memory representation of an ISO base media file.

    Holds the top-level boxes parsed from a file: ``ftyp``, ``meta`` and
    ``moov`` (one each) plus any number of ``mdat`` boxes.
    """

    def __init__(self):
        self.ftyp = None   # file-type box
        self.mdats = []    # media-data boxes (a file may contain several)
        self.meta = None   # metadata box
        self.moov = None   # movie box

    def __repr__(self):
        rep = self.ftyp.__repr__() + '\n'
        rep += self.meta.__repr__() + '\n'
        rep += self.moov.__repr__() + '\n'
        for mdat in self.mdats:
            rep += mdat.__repr__() + '\n'
        return 'ISOBaseMediaFile\n' + indent(rep)

    def read(self, file_name):
        """Parse all boxes from *file_name* into this object.

        'mdat' boxes are collected into ``self.mdats``; every other box is
        stored as an attribute named after its box type.
        """
        # 'with' closes the handle on every path (same effect as the original
        # try/finally) and avoids shadowing the Python 2 builtin ``file``.
        with open(file_name, 'rb') as fp:
            while True:
                box = read_box(fp)
                if not box:
                    break
                if box.box_type == 'mdat':
                    self.mdats.append(box)
                else:
                    setattr(self, box.box_type, box)
| 26.742857 | 53 | 0.459402 | 853 | 0.911325 | 0 | 0 | 0 | 0 | 0 | 0 | 70 | 0.074786 |
6efc120e05e2c5cbca1587bd26026c11c811582c | 2,257 | py | Python | python homework 2.py | pkpatricia/python34 | 5ee4f864444b8835f8ee6cf416bc9fd1d969595b | [
"bzip2-1.0.6"
] | null | null | null | python homework 2.py | pkpatricia/python34 | 5ee4f864444b8835f8ee6cf416bc9fd1d969595b | [
"bzip2-1.0.6"
] | null | null | null | python homework 2.py | pkpatricia/python34 | 5ee4f864444b8835f8ee6cf416bc9fd1d969595b | [
"bzip2-1.0.6"
] | null | null | null | Python 3.4.1 (v3.4.1:c0e311e010fc, May 18 2014, 10:38:22) [MSC v.1600 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> ================================ RESTART ================================
>>>
>>> ================================ RESTART ================================
>>>
What is your favorite color?blue
I like that color too
>>> ================================ RESTART ================================
>>>
What is your favorite color?black
I do not care too much for that color
>>> ================================ RESTART ================================
>>>
What is your favorite color?green
That is my favorite color.
>>> ================================ RESTART ================================
>>>
What is your favorite color?green
That is my 2nd favorite color.
>>> ================================ RESTART ================================
>>>
What is your favorite color?violet
Traceback (most recent call last):
File "C:/Users/P/Desktop/two_b.py", line 7, in <module>
rank = color.index(pick) + 1
ValueError: 'violet' is not in list
>>> ================================ RESTART ================================
>>>
What is your favorite color?violet
I do not care too much for that color
>>> ================================ RESTART ================================
>>>
What is your favorite color?yello
I do not care too much for that color
>>> ================================ RESTART ================================
>>>
What is your favorite color?yellow
That is my 6th favorite color.
>>> ================================ RESTART ================================
>>>
Bach
Antheil
Chopin
Mozart
Handel
>>> ================================ RESTART ================================
>>>
Please enter a lower bound: 4
Please enter an upper bound: 23
2**4=16
2**5=32
2**6=64
2**7=128
2**8=256
2**9=512
2**10=1024
2**11=2048
2**12=4096
2**13=8192
2**14=16384
2**15=32768
2**16=65536
2**17=131072
2**18=262144
2**19=524288
2**20=1048576
2**21=2097152
2**22=4194304
>>> ================================ RESTART ================================
>>>
Please enter a lower bound: 0
Please enter an upper bound: 6
2**0 = 1
2**1 = 2
2**2 = 4
2**3 = 8
2**4 = 16
2**5 = 32
2**6 = 64
>>>
| 27.52439 | 94 | 0.438635 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.030128 |
6efc25feb8365613f08bcea149b9338afcb635e2 | 3,690 | py | Python | mlw/build_database.py | imjoseangel/hacktheplanet2021 | bffc4f9a4f821fcfe2215244f5b563effe6982e5 | [
"MIT"
] | 1 | 2021-02-24T12:05:06.000Z | 2021-02-24T12:05:06.000Z | mlw/build_database.py | imjoseangel/hacktheplanet2021 | bffc4f9a4f821fcfe2215244f5b563effe6982e5 | [
"MIT"
] | null | null | null | mlw/build_database.py | imjoseangel/hacktheplanet2021 | bffc4f9a4f821fcfe2215244f5b563effe6982e5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from glob import glob
import logging
import os
from os.path import abspath, dirname, normpath
import re
from shutil import rmtree
import sqlite3
import sys
import folium
from folium.plugins import FastMarkerCluster
from zipfile import ZipFile
import pandas as pd
import requests
from config import db
from models import MarinaLitterWatch
CLEAN_FILES = ('./CSV_1', './CSV_2')
ZIP_FILE = 'fme.zip'
DB_FILE = 'mlw.db'
MAP_FILE = 'locations.html'
# Set Logging
logging.basicConfig(format="%(asctime)s %(levelname)s: %(message)s",
datefmt="%d-%b-%y %H:%M:%S", stream=sys.stdout, level=logging.INFO)
# Set local path
here = normpath(abspath(dirname(__file__)))
# Download data
logging.info("Downloading data...")
response = requests.get(
'http://fme.discomap.eea.europa.eu/fmedatadownload/MarineLitter/MLWPivotExport.fmw'
'?CommunityCode=&FromDate=2010-01-01&ToDate=2022-12-31'
'&opt_showresult=false&opt_servicemode=sync')
downloadlink = re.search(
r"<a\s+(?:[^>]*?\s+)?href=([\"'])(.*?)\1>", response.content.decode()).group(2)
logging.info("Saving data...")
zipfile = requests.get(downloadlink)
# Context managers guarantee the handles are closed even on error
# (the original open(...).write(...) leaked the file object).
with open(f'{here}/{ZIP_FILE}', 'wb') as archive:
    archive.write(zipfile.content)
logging.info("Unzipping data...")  # message typo fixed ("Uzipping")
with ZipFile(f'{here}/{ZIP_FILE}', 'r') as zipObject:
    zipObject.extractall(path=here)
logging.info("Loading data...")
# Data to initialize database with
data = pd.read_csv(
f'{here}/CSV_1/MLW_PivotExport/MLW_Data.csv', encoding="ISO-8859-1")
# Delete database file if it exists currently
if os.path.exists(f'{here}/{DB_FILE}'):
os.remove(f'{here}/{DB_FILE}')
# Create the database
db.create_all()
# populate the database
conn = sqlite3.connect(f'{here}/{DB_FILE}')
data.to_sql('mlw', conn, if_exists='append')
db.session.commit()
# Create Map
folium_map = folium.Map(location=[40.416729, -3.703339],
zoom_start=3, min_zoom=3,
tiles='Stamen Terrain')
callback = ('function (row) {'
'var marker = L.marker(new L.LatLng(row[0], row[1]), {color: "red"});'
'var icon = L.AwesomeMarkers.icon({'
"icon: 'info-sign',"
"iconColor: 'white',"
"markerColor: 'red',"
"prefix: 'glyphicon',"
"extraClasses: 'fa-rotate-0'"
'});'
'marker.setIcon(icon);'
"var popup = L.popup({maxWidth: '300'});"
"const display_text = {text: row[2]};"
"var mytext = $(`<div id='mytext' class='display_text' style='width: 100.0%; height: 100.0%;'> ${display_text.text}</div>`)[0];"
"popup.setContent(mytext);"
"marker.bindPopup(popup);"
'return marker};')
FastMarkerCluster(data=list(
zip(data['lat_y1'].values, data['lon_x1'].values, data['BeachName'].values)), callback=callback).add_to(folium_map)
folium.LayerControl().add_to(folium_map)
folium_map.save(f'{here}/templates/{MAP_FILE}')
# Clean files
logging.info("Cleaning files...")
for path_spec in CLEAN_FILES:
# Make paths absolute and relative to this path
abs_paths = glob(os.path.normpath(
os.path.join(here, path_spec)))
for path in [str(p) for p in abs_paths]:
if not path.startswith(here):
# Die if path in CLEAN_FILES is absolute + outside this directory
raise ValueError(
"%s is not a path inside %s" % (path, here))
logging.info(f'removing {os.path.relpath(path)}')
rmtree(path)
logging.info(f'removing {ZIP_FILE}')
os.remove(f'{here}/{ZIP_FILE}')
| 31.810345 | 140 | 0.644986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,666 | 0.451491 |
6efcad9f388b05b3d7f79c0c4ad5c784bb1826e5 | 3,486 | py | Python | domotica/configuration.py | jjmartinr01/gauss3 | 1c71c44430e0f15fb2f3f83d32ad66bb1b7e3e94 | [
"MIT"
] | null | null | null | domotica/configuration.py | jjmartinr01/gauss3 | 1c71c44430e0f15fb2f3f83d32ad66bb1b7e3e94 | [
"MIT"
] | null | null | null | domotica/configuration.py | jjmartinr01/gauss3 | 1c71c44430e0f15fb2f3f83d32ad66bb1b7e3e94 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
TIPO = 'selectable'  # 'basic' or 'selectable'. 'basic': required for the program to work
# 'selectable': not required; adds extra functionality to the program.
# For example "autenticar" is 'basic', while "actas" is dispensable.
# Each code_menu must be unique and is registered as a system permission.
MENU_DEFAULT = [
{'code_menu': 'acceso_domotica',
'texto_menu': 'Domótica',
'href': '',
'nivel': 1,
'tipo': 'Accesible',
'pos': 1,
},
{'code_menu': 'acceso_grupos_domotica',
'texto_menu': 'Agrupaciones de dispositivos',
'href': 'grupos_domotica',
'nivel': 2,
'tipo': 'Accesible',
'pos': 1,
'parent': 'acceso_domotica'
},
{'code_menu': 'acceso_configura_domotica',
'texto_menu': 'Configurar domótica',
'href': 'configura_domotica',
'nivel': 2,
'tipo': 'Accesible',
'pos': 2,
'parent': 'acceso_domotica'
}
]
# Se añaden otros permisos para el usuario
PERMISOS = [{'code_nombre': 'crea_grupos_domotica',
'nombre': 'Permiso para crear un grupo de dispositivos domóticos',
'menu': 'acceso_grupos_domotica'
},
{'code_nombre': 'borra_grupos_domotica',
'nombre': 'Permiso para borrar cualquier grupo que contiene domótica',
'menu': 'acceso_grupos_domotica'
},
{'code_nombre': 'edita_grupos_domotica',
'nombre': 'Permiso para modificar cualquier grupo que contiene domótica',
'menu': 'acceso_grupos_domotica'
},
{'code_nombre': 'crea_dispositivos_domotica',
'nombre': 'Permiso para crear un dispositivo domótico',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'borra_dispositivos_domotica',
'nombre': 'Permiso para borrar cualquier dispositivo domótico',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'edita_dispositivos_domotica',
'nombre': 'Permiso para editar cualquier dispositivo domótico',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'crea_secuencias_domotica',
'nombre': 'Permiso para crear una secuencia domótica',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'borra_secuencias_domotica',
'nombre': 'Permiso para borrar cualquier secuencia domótica',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'edita_secuencias_domotica',
'nombre': 'Permiso para modificar cualquier secuencia domótica',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'crea_conjuntos_domotica',
'nombre': 'Permiso para crear un conjunto de dispositivos domóticos',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'borra_conjuntos_domotica',
'nombre': 'Permiso para borrar cualquier conjunto de dispositivos domóticos',
'menu': 'acceso_configura_domotica'
},
{'code_nombre': 'edita_conjuntos_domotica',
'nombre': 'Permiso para modificar cualquier conjunto de dispositivos domóticos',
'menu': 'acceso_configura_domotica'
}
]
| 41.011765 | 103 | 0.592943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,415 | 0.689212 |
6efceaaf9fe7bf6e6a3d8409b3f03d38e6342a11 | 5,944 | py | Python | eval.py | itisianlee/hawk-facedet | 55774ac5619f9a4c76a3a872ff11940a874b32d1 | [
"Apache-2.0"
] | null | null | null | eval.py | itisianlee/hawk-facedet | 55774ac5619f9a4c76a3a872ff11940a874b32d1 | [
"Apache-2.0"
] | null | null | null | eval.py | itisianlee/hawk-facedet | 55774ac5619f9a4c76a3a872ff11940a874b32d1 | [
"Apache-2.0"
] | null | null | null | import os
import cv2
import fire
import time
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
from configs.common import config as cfg
from hawkdet.models.build import build_detor
from hawkdet.lib.numpy_nms import np_nms
from hawkdet.lib.box_utils import decode, decode_landm
from hawkdet.lib.prior_box import PriorBox
class Timer(object):
    """Accumulating stopwatch: tic() starts a lap, toc() finishes it.

    Tracks the running total, the lap count (`calls`), the most recent lap
    (`diff`) and the mean lap time (`average_time`).
    """
    def __init__(self):
        self.clear()

    def tic(self):
        # time.time (not time.clock) so timings stay wall-clock under threads
        self.start_time = time.time()

    def toc(self, average=True):
        """Finish the current lap; return the mean lap time, or the raw lap
        duration when `average` is False."""
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        return self.average_time if average else self.diff

    def clear(self):
        """Reset all counters to their initial state."""
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.
def run(
    model_path,
    top_k=5000,
    keep_top_k=750,
    nms_threshold=0.4,
    origin_size=True,
    confidence_threshold=0.02,
    save_folder='./eval_results',
    testset_dir='/root/paddlejob/workspace/hawk-facedet/data/widerface/val',
):
    """Run the face detector over the WIDER-val image list and dump results.

    For every image named in <testset_dir>/wider_val.txt the detector is run
    once and a WIDER-evaluation-format .txt file (face count, then one
    `x y w h confidence` line per box) is written under `save_folder`.

    `model_path` is a checkpoint whose 'model' entry is the state dict;
    `nms_threshold` and `confidence_threshold` filter raw detections;
    `origin_size` forces resize factor 1 (native resolution).
    NOTE(review): `top_k` and `keep_top_k` are currently unused -- the lines
    that would apply them are commented out below.
    """
    torch.set_grad_enabled(False)
    cudnn.benchmark = True
    device = torch.cuda.current_device()
    # restore the detector weights and switch to inference mode
    net = build_detor(cfg.Detector)
    state_dict = torch.load(model_path)['model']
    net.load_state_dict(state_dict)
    net.eval()
    net = net.to(device)
    testset_folder = os.path.join(testset_dir, 'images')
    testset_list = os.path.join(testset_dir, 'wider_val.txt')
    with open(testset_list, 'r') as fr:
        test_dataset = fr.read().split()
    num_images = len(test_dataset)
    _t = {'forward_pass': Timer(), 'misc': Timer()}
    # testing begin
    for i, img_name in enumerate(test_dataset):
        image_path = testset_folder + img_name
        img_raw = cv2.imread(image_path, cv2.IMREAD_COLOR)
        img = np.float32(img_raw)
        # testing scale
        target_size = 1600
        max_size = 2150
        im_shape = img.shape
        im_size_min = np.min(im_shape[0:2])
        im_size_max = np.max(im_shape[0:2])
        resize = float(target_size) / float(im_size_min)
        # prevent bigger axis from being more than max_size:
        if np.round(resize * im_size_max) > max_size:
            resize = float(max_size) / float(im_size_max)
        if origin_size:
            resize = 1
        if resize != 1:
            img = cv2.resize(img, None, None, fx=resize, fy=resize, interpolation=cv2.INTER_LINEAR)
        im_height, im_width, _ = img.shape
        # maps normalized box coords back to (resized) pixel coords
        scale = torch.Tensor([img.shape[1], img.shape[0], img.shape[1], img.shape[0]])
        # BGR mean subtraction, then HWC -> CHW plus a batch dimension
        img -= (104, 117, 123)
        img = img.transpose(2, 0, 1)
        img = torch.from_numpy(img).unsqueeze(0)
        img = img.to(device)
        scale = scale.to(device)
        _t['forward_pass'].tic()
        loc, conf, landms = net(img)  # forward pass
        conf = F.softmax(conf, dim=-1)
        _t['forward_pass'].toc()
        _t['misc'].tic()
        # decode box/landmark regressions against the prior (anchor) boxes
        priors = PriorBox(cfg.min_sizes, cfg.steps, cfg.clip, image_size=(im_height, im_width)).forward()
        priors = priors.to(device)
        prior_data = priors.data
        boxes = decode(loc.data.squeeze(0), prior_data, cfg.variance)
        boxes = boxes * scale / resize
        boxes = boxes.cpu().numpy()
        scores = conf.squeeze(0).data.cpu().numpy()[:, 1]
        landms = decode_landm(landms.data.squeeze(0), prior_data, cfg.variance)
        scale1 = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                               img.shape[3], img.shape[2]])
        scale1 = scale1.to(device)
        landms = landms * scale1 / resize
        landms = landms.cpu().numpy()
        # ignore low scores
        inds = np.where(scores > confidence_threshold)[0]
        boxes = boxes[inds]
        landms = landms[inds]
        scores = scores[inds]
        # keep top-K before NMS
        order = scores.argsort()[::-1]
        # order = scores.argsort()[::-1][:top_k]
        boxes = boxes[order]
        landms = landms[order]
        scores = scores[order]
        # do NMS
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False)
        keep = np_nms(dets, nms_threshold)
        dets = dets[keep, :]
        landms = landms[keep]
        # keep top-K faster NMS
        # dets = dets[:keep_top_k, :]
        # landms = landms[:keep_top_k, :]
        dets = np.concatenate((dets, landms), axis=1)
        _t['misc'].toc()
        # write one WIDER-format result file per image
        save_name = save_folder + img_name[:-4] + ".txt"
        dirname = os.path.dirname(save_name)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        with open(save_name, "w") as fd:
            bboxs = dets
            file_name = os.path.basename(save_name)[:-4] + "\n"
            bboxs_num = str(len(bboxs)) + "\n"
            fd.write(file_name)
            fd.write(bboxs_num)
            for box in bboxs:
                x = int(box[0])
                y = int(box[1])
                w = int(box[2]) - int(box[0])
                h = int(box[3]) - int(box[1])
                confidence = str(box[4])
                line = str(x) + " " + str(y) + " " + str(w) + " " + str(h) + " " + confidence + " \n"
                fd.write(line)
        print('im_detect: {:d}/{:d} forward_pass_time: {:.4f}s misc: {:.4f}s'.format(
            i + 1, num_images, _t['forward_pass'].average_time, _t['misc'].average_time))
# CLI entry point: `python eval.py run --model_path ...` via python-fire.
if __name__ == '__main__':
    fire.Fire({"run": run})
    # explicit interpreter exit once the command finishes
    exit()
| 33.206704 | 105 | 0.57924 | 842 | 0.141655 | 0 | 0 | 0 | 0 | 0 | 0 | 694 | 0.116756 |
6efda603db469ce3236855097b257374262f3443 | 178 | py | Python | Python/Samples/Flyweight/Flyweight_models.py | plasroom46/DesignPattern.Sample | 86c05c5ae356cb01f3d075f248c45da3e6534d07 | [
"MIT"
] | 9 | 2019-03-14T01:54:31.000Z | 2021-11-26T13:00:32.000Z | Python/Samples/Flyweight/Flyweight_models.py | plasroom46/DesignPattern.Sample | 86c05c5ae356cb01f3d075f248c45da3e6534d07 | [
"MIT"
] | null | null | null | Python/Samples/Flyweight/Flyweight_models.py | plasroom46/DesignPattern.Sample | 86c05c5ae356cb01f3d075f248c45da3e6534d07 | [
"MIT"
] | 2 | 2019-08-19T06:00:04.000Z | 2021-07-15T01:23:52.000Z | class Content:
def __init__(self, id="", value=""):
if(id == ""):
raise TypeError
else:
self.id = id
self.value = value
| 17.8 | 40 | 0.438202 | 175 | 0.983146 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.033708 |
6efe244e5a0524f99c737e6f3d3da045c5866cd7 | 101 | py | Python | hello.py | zhuiyue568/test27 | f96438c29711b62000eb363ff32c059529a0e142 | [
"MIT"
] | null | null | null | hello.py | zhuiyue568/test27 | f96438c29711b62000eb363ff32c059529a0e142 | [
"MIT"
] | null | null | null | hello.py | zhuiyue568/test27 | f96438c29711b62000eb363ff32c059529a0e142 | [
"MIT"
] | null | null | null | name="zhuiyue"
# Scratch assignments. Note that `num` and `num5` are immediately rebound
# (the earlier values are discarded) and `str` shadows the builtin str type
# for the remainder of this module.
num="123456"
num=111
num3=333
str="keep going"
num4=666
num5=888
num5=777
num6=999
| 7.769231 | 16 | 0.722772 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.287129 |
3e00ea020dca2ee0cd420f43a2015391aba2eabc | 2,491 | py | Python | src/keydra/providers/contentful.py | jangroth/keydra | 9bab1b21e025ceb6ae074ea936d693e36efae5a4 | [
"MIT"
] | 12 | 2021-05-04T10:47:02.000Z | 2022-03-10T13:25:04.000Z | src/keydra/providers/contentful.py | jangroth/keydra | 9bab1b21e025ceb6ae074ea936d693e36efae5a4 | [
"MIT"
] | 17 | 2021-05-04T00:53:49.000Z | 2022-01-18T10:01:49.000Z | src/keydra/providers/contentful.py | jangroth/keydra | 9bab1b21e025ceb6ae074ea936d693e36efae5a4 | [
"MIT"
] | 9 | 2021-05-04T00:46:38.000Z | 2022-02-16T02:55:50.000Z | from keydra.clients.contentful import ContentfulClient
from keydra.providers.base import BaseProvider
from keydra.providers.base import exponential_backoff_retry
from keydra.exceptions import DistributionException
from keydra.exceptions import RotationException
from keydra.logging import get_logger
LOGGER = get_logger()
PW_FIELD = 'secret'
class Client(BaseProvider):
    """Keydra provider that rotates Contentful management tokens."""
    def __init__(self, session=None, credentials=None, region_name=None):
        # `credentials` must carry 'key' (the token name) and 'secret'
        # (the current token value used to authenticate the client)
        self._secret_key = credentials['key']
        self._cfclient = ContentfulClient(token=credentials[PW_FIELD])
    def _rotate_secret(self, secret):
        """Create a replacement token, revoke the pre-existing ones, and
        return the new secret in keydra's provider/key/secret shape.

        `secret` is unused here; rotation state comes from the instance.
        Raises RotationException if creation or revocation fails.
        """
        try:
            curr_tokens = self._cfclient.get_tokens()
            new_token = self._cfclient.create_token(
                name=self._secret_key,
                readonly=False
            )
        except Exception as error:
            LOGGER.error(
                "Failed to rotate Contentful token '{}' - {}".format(
                    self._secret_key,
                    error
                )
            )
            raise RotationException(
                'Error rotating token {} on Contentful - '
                'error : {}'.format(
                    self._secret_key,
                    error
                )
            )
        try:
            # Revoke all existing tokens, just leaving our new one
            for token in curr_tokens:
                self._cfclient.revoke_token(token_id=token.id)
        except Exception as error:
            LOGGER.error(
                'Failed to revoke Contentful token'
            )
            raise RotationException(
                'Error revoking token on Contentful - '
                'error : {}'.format(
                    error
                )
            )
        LOGGER.info(
            "Contentful token '{}' successfully rotated.".format(
                self._secret_key
            )
        )
        return {
            'provider': 'contentful',
            'key': self._secret_key,
            f'{PW_FIELD}': new_token.token,
        }
    @exponential_backoff_retry(3)
    def rotate(self, secret):
        # retried up to 3 times with exponential backoff
        return self._rotate_secret(secret)
    def distribute(self, secret, destination):
        # distribution is deliberately unsupported for this provider
        raise DistributionException('Contentful does not support distribution')
    @classmethod
    def redact_result(cls, result, spec=None):
        """Mask the secret value before the result is logged/returned."""
        if 'value' in result and PW_FIELD in result['value']:
            result['value'][PW_FIELD] = '***'
        return result
| 28.965116 | 79 | 0.566439 | 2,141 | 0.859494 | 0 | 0 | 292 | 0.117222 | 0 | 0 | 405 | 0.162585 |
3e017ed1492cc6fe4bfc5ac25bc91b6acc5c2bd6 | 1,266 | py | Python | numbas_lti/migrations/0063_auto_20210211_1307.py | jhoobergs/numbas-lti-provider | 9d673e0ec8dcb085bd783e949c3ee179e507be5c | [
"Apache-2.0"
] | 6 | 2016-12-12T14:41:33.000Z | 2021-04-18T01:04:23.000Z | numbas_lti/migrations/0063_auto_20210211_1307.py | jhoobergs/numbas-lti-provider | 9d673e0ec8dcb085bd783e949c3ee179e507be5c | [
"Apache-2.0"
] | 206 | 2016-08-24T13:53:07.000Z | 2022-03-31T09:14:43.000Z | numbas_lti/migrations/0063_auto_20210211_1307.py | jhoobergs/numbas-lti-provider | 9d673e0ec8dcb085bd783e949c3ee179e507be5c | [
"Apache-2.0"
] | 13 | 2016-10-23T04:53:30.000Z | 2022-02-17T09:25:00.000Z | # Generated by Django 2.2.13 on 2021-02-11 13:07
from django.db import migrations, models
import django.db.models.deletion
def set_exam_resources(apps, schema_editor):
    """Data migration: point each exam at the resource that owns it.

    First every resource's main exam is linked back to that resource; then
    any attempt whose exam is still unlinked inherits the attempt's resource.
    """
    Resource = apps.get_model('numbas_lti', 'Resource')
    Attempt = apps.get_model('numbas_lti', 'Attempt')
    for resource in Resource.objects.exclude(exam=None):
        resource.exam.resource = resource
        resource.exam.save()
    for attempt in Attempt.objects.exclude(exam=None):
        exam = attempt.exam
        if exam.resource is None:
            exam.resource = attempt.resource
            exam.save()
class Migration(migrations.Migration):
    # Applies after migration 0062 of the same app.
    dependencies = [
        ('numbas_lti', '0062_scormelementdiff'),
    ]
    operations = [
        # New nullable Exam.resource FK (reverse accessor 'exams'); SET_NULL
        # so deleting a resource keeps its exams.
        migrations.AddField(
            model_name='exam',
            name='resource',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='exams', to='numbas_lti.Resource'),
        ),
        # Resource.exam keeps its type; reverse accessor becomes 'main_exam_of'.
        migrations.AlterField(
            model_name='resource',
            name='exam',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='main_exam_of', to='numbas_lti.Exam'),
        ),
        # Backfill existing rows; no-op when reversing the migration.
        migrations.RunPython(set_exam_resources,migrations.RunPython.noop),
    ]
| 33.315789 | 156 | 0.650869 | 730 | 0.576619 | 0 | 0 | 0 | 0 | 0 | 0 | 217 | 0.171406 |
3e035da887a72ca05d47f4e04f4fd021e19671d0 | 1,356 | py | Python | sahyun_bot/utils_session.py | TheGoodlike13/sahyun-bot | 8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb | [
"MIT"
] | 1 | 2022-02-21T18:55:34.000Z | 2022-02-21T18:55:34.000Z | sahyun_bot/utils_session.py | TheGoodlike13/sahyun-bot | 8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb | [
"MIT"
] | null | null | null | sahyun_bot/utils_session.py | TheGoodlike13/sahyun-bot | 8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb | [
"MIT"
] | null | null | null | from requests import Session
from requests.adapters import HTTPAdapter
from urllib3 import Retry
from sahyun_bot.utils_logging import HttpDump
DEFAULT_RETRY_COUNT = 3
RETRY_ON_METHOD = frozenset([
'HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'
])
RETRY_ON_STATUS = frozenset([
403, 429, 500, 502, 503, 504
])
class SessionFactory:
    """
    Builds `requests` Session objects for the application.

    Sessions produced here log HTTP traffic through HttpDump and retry failed
    requests. The retry count is configurable; any additional keyword
    arguments are forwarded to HttpDump.
    """
    def __init__(self, retry_count: int = DEFAULT_RETRY_COUNT, **dump_kwargs):
        self.__dump = HttpDump(**dump_kwargs)
        # non-positive counts fall back to the default
        self.__retry_count = max(0, retry_count) or DEFAULT_RETRY_COUNT

    def with_retry(self, session: Session = None) -> Session:
        """Attach logging and retry behaviour to `session` (or a new one)."""
        session = session or Session()
        session.hooks['response'] = [self.__dump.all]
        # NOTE(review): `method_whitelist` was renamed `allowed_methods` in
        # urllib3 1.26 and removed in 2.0 -- confirm the pinned urllib3 version.
        retry_policy = Retry(
            total=self.__retry_count,
            connect=self.__retry_count,
            read=self.__retry_count,
            method_whitelist=RETRY_ON_METHOD,
            status_forcelist=RETRY_ON_STATUS,
            backoff_factor=1,
        )
        adapter = HTTPAdapter(max_retries=retry_policy)
        for prefix in ('http://', 'https://'):
            session.mount(prefix, adapter)
        return session
| 30.133333 | 117 | 0.668142 | 1,020 | 0.752212 | 0 | 0 | 0 | 0 | 0 | 0 | 289 | 0.213127 |
3e03fc65e12b6935503f8e6630624fed1809bd0e | 5,763 | py | Python | EzLibrarianApplication/DAO/BookCirculationDAO.py | coregameHD/SmartLib_Librarian | 31b58a4aab648ee9110ba6a78d5fcab942267380 | [
"MIT"
] | null | null | null | EzLibrarianApplication/DAO/BookCirculationDAO.py | coregameHD/SmartLib_Librarian | 31b58a4aab648ee9110ba6a78d5fcab942267380 | [
"MIT"
] | null | null | null | EzLibrarianApplication/DAO/BookCirculationDAO.py | coregameHD/SmartLib_Librarian | 31b58a4aab648ee9110ba6a78d5fcab942267380 | [
"MIT"
] | 2 | 2018-10-01T14:08:25.000Z | 2020-09-30T03:02:15.000Z | import json
import requests
from datetime import datetime, timedelta
from BookCirculation import BookCirculation
from DAO.AbstractDAO import AbstractDAO
from DAO.BookDAO import BookDAO
from DAO.UserDAO import UserDAO
from constant import *
from datetime import datetime
class BookCirculationDAO(AbstractDAO):
    """Data-access object for borrow/return/history records on the REST backend.

    Network failures are handled best-effort: errors are printed and None is
    returned, matching the original console-driven workflow.
    """
    def __init__(self, parent = None):
        # `parent` (optional UI controller) receives borrowBookCallback(due_time)
        # after a successful borrow.
        AbstractDAO.__init__(self)
        self.parent = parent

    def _fetch_circulations(self, path):
        """GET `path` and build BookCirculation objects from the JSON body.

        Returns a list on HTTP 200, otherwise None (including on timeouts).
        """
        try:
            response = requests.get(self.server_ip + path, timeout=self.timeout,
                                    headers=self.get_authentication_header(path))
            if response.status_code == 200:
                return [self.construct_book_circulation(raw) for raw in response.json()]
            print("Request failed")
        except requests.exceptions.ConnectTimeout:  # offline / unreachable server
            print("Timeout")
        return None

    def borrow(self, user, books):
        """POST a borrow request for each book; notify the parent of the due time."""
        borrow_list = [{"user": {"user_id": user.user_id}, "book": {"book_id": book.book_id}}
                       for book in books]
        path = '/borrow'
        response = requests.post(self.server_ip + path, json=borrow_list, timeout=self.timeout,
                                 headers=self.get_authentication_header(path))
        if response.status_code == 200:  # Success
            # BUGFIX: this previously called self.construct_book_ciruclation
            # (misspelled), which raised AttributeError on every successful borrow.
            book_circulations = [self.construct_book_circulation(raw)
                                 for raw in response.json()]
            if self.parent is not None:
                due_time = book_circulations[0].due_time
                print(str(due_time))
                self.parent.borrowBookCallback(due_time)

    def getAllCirculations(self):
        """Return every circulation record (past and present), or None on failure."""
        return self._fetch_circulations('/history')

    def getAllOnBorrowCirculation(self):
        """Return the circulations currently out on loan, or None on failure."""
        return self._fetch_circulations('/borrow')

    @staticmethod
    def construct_book_circulation(arguments):
        """Build a BookCirculation from a raw JSON dict (mutates `arguments`):
        RFC-822 time strings are parsed and nested book/user dicts are built."""
        time_args = ["borrow_time", "due_time", "return_time"]
        for time_arg in time_args:
            if time_arg in arguments.keys() and arguments[time_arg] is not None:
                arguments[time_arg] = datetime.strptime(arguments[time_arg], rfc_822_format)
        arguments["book"] = BookDAO.constructBook(arguments["book"])
        arguments["user"] = UserDAO.constructUser(arguments["user"])
        return BookCirculation(**arguments)

    def getBorrowIDFromBookID(self, bookID):
        """Return the borrow id of the on-loan circulation for `bookID`, else None."""
        # `or []` guards against a failed fetch returning None
        for circulation in (self.getAllOnBorrowCirculation() or []):
            if str(circulation.book.book_id) == str(bookID):
                return circulation.borrow_id
        return None

    def returnBook(self, borrowID):
        """DELETE the borrow record `borrowID`, i.e. return the book."""
        path = '/return/' + str(borrowID)
        response = requests.delete(self.server_ip + path, timeout=self.timeout,
                                   headers=self.get_authentication_header(path))
        if response.status_code == 200:  # Success
            print(response.text)
        else:
            print("Failed")

    def searchHistory(self, keyword):
        """Search all history; a blank or space-prefixed keyword returns everything."""
        if keyword == "" or keyword.startswith(' '):
            return self.getAllCirculations()
        return self._fetch_circulations('/history/search/' + keyword)

    def searchOnBorrow(self, keyword):
        """Search on-loan records; a blank or space-prefixed keyword returns everything."""
        if keyword == "" or keyword.startswith(' '):
            return self.getAllOnBorrowCirculation()
        return self._fetch_circulations('/borrow/search/' + keyword)

    def getOverdueCirculation(self):
        """Return the on-loan circulations whose (naive) due time has passed."""
        overdue = []
        for circulation in (self.getAllOnBorrowCirculation() or []):
            if circulation.due_time.replace(tzinfo=None) < datetime.now():
                overdue.append(circulation)
        return overdue
if __name__ == "__main__":
bookCirculationDAO = BookCirculationDAO()
for circulation in bookCirculationDAO.getAllCirculations():
print(str(circulation)) | 39.472603 | 143 | 0.622245 | 5,322 | 0.923477 | 0 | 0 | 517 | 0.08971 | 0 | 0 | 494 | 0.085719 |
3e063c3a08ca1b49f1f08adcb5b79cf09de3aefe | 4,128 | py | Python | flask_mm/managers/__init__.py | szkkteam/flask_mm | ea96899a41a0573e51792f1554550c6d77f22a07 | [
"MIT"
] | 1 | 2021-03-21T18:46:36.000Z | 2021-03-21T18:46:36.000Z | flask_mm/managers/__init__.py | szkkteam/flask_mm | ea96899a41a0573e51792f1554550c6d77f22a07 | [
"MIT"
] | null | null | null | flask_mm/managers/__init__.py | szkkteam/flask_mm | ea96899a41a0573e51792f1554550c6d77f22a07 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Common Python library imports
import os
# Pip package imports
from six.moves.urllib.parse import urljoin
from flask import url_for, request, abort
from werkzeug import secure_filename, FileStorage, cached_property
# Internal package imports
from flask_mm.utils import UuidNameGen
from flask_mm.files import extension, lower_extension
from flask_mm.storages import BaseStorage
DEFAULT_MANAGER = 'file'
class BaseManager(object):
    """Common interface over a storage backend (a BaseStorage subclass).

    Handles filename policy (allowed extensions, name generation) and URL
    construction, delegating actual persistence to `self.storage`.
    """
    def __init__(self, app, name, storage, *args, **kwargs):
        self.name = name
        assert isinstance(storage, BaseStorage), "Storage object must be a subclass of BaseStorage"
        self.storage = storage
        # Optional parameters
        self.allowed_extensions = kwargs.get('extensions', None)
        self.namegen = kwargs.get('name_gen', UuidNameGen)
    def _clean_url(self, url):
        """Ensure `url` has a scheme (matching the request) and a trailing slash."""
        if not url.startswith('http://') and not url.startswith('https://'):
            url = ('https://' if request.is_secure else 'http://') + url
        if not url.endswith('/'):
            url += '/'
        return url
    def url(self, filename, external=False):
        """Return the public URL for `filename` (FileStorage objects accepted)."""
        if isinstance(filename, FileStorage):
            filename = filename.filename
        if filename.startswith('/'):
            filename = filename[1:]
        if self.storage.has_url:
            # TODO: Clean url or not?
            return urljoin(self._clean_url(self.storage.base_url), self.storage.path(filename))
        else:
            return url_for('mm.get_file', mm=self.name, filename=filename, _external=external)
    def is_file_allowed(self, filename):
        """True when no extension filter is set or the extension is allowed."""
        if not self.allowed_extensions:
            return True
        return (extension(filename) in self.allowed_extensions)
    def generate_name(self, filename_or_wfs):
        """Generate a storage name via the configured name generator."""
        if isinstance(filename_or_wfs, FileStorage):
            return self.namegen.generate_name(filename_or_wfs.filename)
        return self.namegen.generate_name(filename_or_wfs)
    def path(self, filename):
        """Filesystem path of `filename`; raises if the backend has no paths."""
        if not hasattr(self.storage, 'path'):
            raise RuntimeError("Direct file access is not supported by " + self.storage.__class__.__name__)
        return self.storage.path(filename)
    def archive_files(self, out_filename, files, *args, **kwargs):
        return self.storage.archive_files(out_filename, files, *args, **kwargs)
    def exists(self, filename):
        return self.storage.exists(filename)
    def is_allowed(self, filename):
        return self.is_file_allowed(filename)
    def read(self, filename):
        """Read `filename`; raises FileNotFoundError when missing."""
        if not self.exists(filename):
            raise FileNotFoundError(filename)
        return self.storage.read(filename)
    def open(self, filename, mode='r', **kwargs):
        """Open `filename`; read modes require the file to exist."""
        if 'r' in mode and not self.storage.exists(filename):
            raise FileNotFoundError(filename)
        return self.storage.open(filename, mode, **kwargs)
    def write(self, filename, content, overwrite=False):
        """Write `content`; refuses to clobber unless `overwrite` is True."""
        if not overwrite and self.exists(filename):
            raise FileExistsError(filename)
        return self.storage.write(filename, content)
    def delete(self, filename):
        return self.storage.delete(filename)
    def save(self, file_or_wfs, filename=None, **kwargs):
        """Persist an uploaded file; returns the filename actually used.

        Raises ValueError when no filename can be determined or the file
        type is not allowed.
        """
        if not filename and isinstance(file_or_wfs, FileStorage):
            filename = lower_extension(secure_filename(file_or_wfs.filename))
        if not filename:
            raise ValueError('filename is required')
        if not self.is_allowed(filename):
            raise ValueError('File type is not allowed.')
        self.storage.save(file_or_wfs, filename, **kwargs)
        return filename
    def list_files(self):
        return self.storage.list_file()
    def metadata(self, filename):
        """Return the storage metadata for `filename` with its basename added."""
        metadata = self.storage.metadata(filename)
        metadata['filename'] = os.path.basename(filename)
        # TODO: Implement url getter
        #metadata['url'] = self.url
        # BUGFIX: the dict was built but never returned, so callers got None.
        return metadata
    def serve(self, filename):
        '''Serve a file given its filename'''
        if not self.exists(filename):
            abort(404)
        return self.storage.serve(filename)
3e07225d9f986640eeceeb3fecfcd08a0bbf84a5 | 1,627 | py | Python | web/api/user/core.py | cclrobotics/ARTBot | a0bffabebbc09361bf7748741fe3d30c78af8fbd | [
"MIT"
] | 5 | 2020-12-04T19:28:42.000Z | 2021-12-07T16:14:28.000Z | web/api/user/core.py | cclrobotics/ARTBot | a0bffabebbc09361bf7748741fe3d30c78af8fbd | [
"MIT"
] | 50 | 2019-10-08T19:47:24.000Z | 2021-07-26T05:43:37.000Z | web/api/user/core.py | cclrobotics/ARTBot | a0bffabebbc09361bf7748741fe3d30c78af8fbd | [
"MIT"
] | 4 | 2019-10-23T04:14:49.000Z | 2021-08-01T01:22:37.000Z | from functools import partial
from marshmallow import ValidationError
from web.extensions import db
from .validators import validate_user_token
from .serializers import SuperUserSchema
from .exceptions import InvalidUsage
from .user import SuperUser
def validate_and_extract_user_data(json_data, skipped_fields: tuple = (), new_user: bool = False):
    """Deserialize superuser JSON via SuperUserSchema.

    Fields listed in `skipped_fields` may be omitted (partial load); schema
    validation errors are re-raised as InvalidUsage.
    """
    schema = SuperUserSchema(new_user)
    try:
        return schema.load(json_data, partial=skipped_fields)
    except ValidationError as err:
        raise InvalidUsage.from_validation_error(err)
# NOTE(review): the default `role` is evaluated once at import time (the
# usual Python default-argument gotcha) -- confirm default_role() is constant.
def create_superuser(email, password, role = SuperUser.default_role()):
    """Create a SuperUser with the given credentials; returns (s_user.id, True)."""
    s_user = SuperUser.from_email(email, role=role)
    s_user.set_password(password)
    db.session.commit()
    return s_user.id, True
def update_superuser_role(email, new_role):
    """Change a superuser's role; returns (email, old role value, new role value)."""
    account = SuperUser.get_by_email(email)
    previous_role = account.role
    account.set_role(new_role)
    db.session.commit()
    return account.email, previous_role.value, account.role.value
def update_superuser_password(email, new_password, created_at_timestamp):
    """Set a new password after token validation; returns (email, True).

    `created_at_timestamp` must match the user's creation time (token check).
    """
    account = SuperUser.get_by_email(email)
    validate_user_token(account, created_at_timestamp)
    account.set_password(new_password)
    db.session.commit()
    return account.email, True
def delete_superuser(id, created_at_timestamp):
    """
    Delete a user record from the SuperUser table.

    For added security, the exact creation datetime of the user (in timestamp
    format) must be supplied; it is verified before deletion.
    """
    account = SuperUser.get_by_id(id)
    validate_user_token(account, created_at_timestamp)
    account.delete()
    db.session.commit()
    return account.email, True
3e07f9fff3837dd41ad8b264e8c09d1d22e6939d | 12,853 | py | Python | digsby/src/util/auxencodings.py | ifwe/digsby | f5fe00244744aa131e07f09348d10563f3d8fa99 | [
"Python-2.0"
] | 35 | 2015-08-15T14:32:38.000Z | 2021-12-09T16:21:26.000Z | digsby/src/util/auxencodings.py | niterain/digsby | 16a62c7df1018a49eaa8151c0f8b881c7e252949 | [
"Python-2.0"
] | 4 | 2015-09-12T10:42:57.000Z | 2017-02-27T04:05:51.000Z | digsby/src/util/auxencodings.py | niterain/digsby | 16a62c7df1018a49eaa8151c0f8b881c7e252949 | [
"Python-2.0"
] | 15 | 2015-07-10T23:58:07.000Z | 2022-01-23T22:16:33.000Z | '''
Registers auxillary encodings in the codecs module.
>>> 'x\x9cK\xc9L/N\xaa\x04\x00\x08\x9d\x02\x83'.decode('zip')
'digsby'
'''
from peak.util.imports import lazyModule
sys = lazyModule('sys')
warnings = lazyModule('warnings')
locale = lazyModule('locale')
collections = lazyModule('collections')
urllib = lazyModule('urllib')
urllib2 = lazyModule('urllib2')
codecs = lazyModule('codecs')
StringIO = lazyModule('StringIO')
zipfile = lazyModule('zipfile')
gzip = lazyModule('gzip')
htmlentitydefs = lazyModule('htmlentitydefs')
base64 = lazyModule('base64')
#pylzma = lazyModule('pylzma')
HAVE_LZMA = False #until proven otherwise
ENCODE_LZMA = False
# Minimal XML escape table: codepoint -> entity name (apostrophe not emitted).
__simplechars_enc = {
    ord('<') : 'lt',
    ord('>') : 'gt',
    #ord("'") : 'apos',
    ord('"') : 'quot',
    ord('&') : 'amp',
}
# Inverse table for decoding; 'apos' is accepted even though never emitted.
__simplechars_dec = dict((v, unichr(k)) for k,v in __simplechars_enc.items())
__simplechars_dec['apos'] = unichr(ord("'"))
# Fallback encodings tried by fuzzydecode, in order; wrapped in lambdas so the
# lazily-imported modules are only touched on first use.
_encodings = [
    lambda: locale.getpreferredencoding(),
    lambda: sys.getfilesystemencoding(),
    lambda: sys.getdefaultencoding(),
]
# Codec search functions queued by register_codec until install() runs.
_to_register = [
]
def register_codec(name, encode, decode):
    '''Queue a codec under `name`; install() later registers it with codecs.'''
    global _to_register
    def _search(requested):
        if requested == name:
            return codecs.CodecInfo(name=name, encode=encode, decode=decode)
        return None
    _to_register.append(_search)
def install():
    '''Register every queued codec search function with the codecs module.'''
    global _to_register
    pending = _to_register[:]
    del _to_register[:]
    for search_fn in pending:
        codecs.register(search_fn)
def fuzzydecode(s, encoding = None, errors = 'strict'):
'''
Try decoding the string using several encodings, in this order.
- the one(s) you give as "encoding"
- the system's "preferred" encoding
'''
if isinstance(s, unicode):
import warnings; warnings.warn('decoding unicode is not supported!')
return s
encodings = [enc() for enc in _encodings]
if isinstance(encoding, basestring):
encodings.insert(0, encoding)
elif encoding is None:
pass
else:
encodings = list(encoding) + encodings
assert all(isinstance(e, basestring) for e in encodings)
for e in encodings:
try:
res = s.decode(e, errors)
except (UnicodeDecodeError, LookupError), _ex:
# LookupError will catch missing encodings
import warnings; warnings.warn("Exception when fuzzydecoding %r: %r" % (s, _ex))
else:
return res
return s.decode(encoding, 'replace')
def fuzzyencode(s, errors='strict'):
    'Encoding counterpart of fuzzydecode; intentionally not implemented.'
    raise NotImplementedError
def _xml_encode(input, errors='simple'):
    'Escape markup-significant characters as XML entity references.'
    # 'simple' in errors selects the minimal table; otherwise the full
    # htmlentitydefs codepoint table is used
    if 'simple' in errors:
        table = __simplechars_enc
    else:
        table = htmlentitydefs.codepoint2name
    pieces = []
    for ch in input:
        codepoint = ord(ch)
        if codepoint in table:
            pieces.append('&%s;' % table[codepoint])
        else:
            pieces.append(ch)
    # preserve the input's type (str or unicode)
    return type(input)(''.join(pieces)), len(input)
def _xml_decode(input, errors='strict'):
    '''Expand XML/HTML entity references (&amp;, &#65;, &#x41;, ...) in `input`.

    Unknown named entities are passed through verbatim; numeric entities that
    fail to parse are dropped. Returns (decoded unicode string, len(input)).
    '''
    data = collections.deque(input)
    res = []
    append = res.append
    popleft = data.popleft
    extendleft = data.extendleft
    name2codepoint = htmlentitydefs.name2codepoint
    while data:
        ch = popleft()
        if ch == '&':
            # scan ahead for a ';'-terminated entity token (bounded length)
            curtoken = ''
            is_ref = False
            is_num = False
            is_hex = False
            while len(curtoken) < 10 and data: # so we don't loop to the end of the input
                nch = popleft()
                if nch == '#':
                    is_num = True
                if is_num and len(curtoken) == 1 and nch == 'x':
                    is_hex = True
                if nch == ';':
                    is_ref = True
                    break
                curtoken += nch
                if nch == '&':
                    break
            if not is_ref:
                # no terminating ';' seen: not an entity after all
                extendleft(reversed(curtoken)) # put it back
                append('&') # this should not have been here, but we're nice so we'll put it back
                continue
            else:
                if is_num:
                    try:
                        curtoken = curtoken[1:] # chop the #
                        if is_hex:
                            curtoken = curtoken[1:] # chop the x
                            och = int(curtoken, 16)
                        else:
                            och = int(curtoken, 10)
                        append(unichr(och))
                    except (UnicodeError, ValueError, TypeError):
                        pass
                    else:
                        continue
                if curtoken in name2codepoint:
                    append(unichr(name2codepoint[curtoken]))
                elif curtoken in __simplechars_dec:
                    append(__simplechars_dec[curtoken])
                else:
                    # unknown named entity: emit it unchanged
                    append('&%s;' % (curtoken))
        else:
            append(ch)
    return u''.join(res), len(input)
register_codec('xml', _xml_encode, _xml_decode)
def _pk_decode(input, errors='strict'):
    "Unzip a PK (zip) archive byte string; returns (first member's bytes, input length)."
    li = len(input)
    input = StringIO.StringIO(input)
    # NOTE(review): the ZipFile is never closed -- presumably fine for an
    # in-memory buffer, but confirm.
    z = zipfile.ZipFile(input, mode='r')
    zi = z.filelist[0]
    return z.read(zi.filename), li
def _pk_encode(input, errors='strict'):
    '''Zip-archive `input` as a single member named 'file'.

    Returns (zip archive bytes, len(input)).
    '''
    li = len(input)
    s = StringIO.StringIO()
    # BUGFIX: mode was 'wb', which zipfile.ZipFile rejects (only 'r'/'w'/'a'
    # are valid), so every encode raised instead of producing an archive.
    z = zipfile.ZipFile(s, mode='w', compression=zipfile.ZIP_DEFLATED)
    z.writestr('file', input)
    z.close()
    return s.getvalue(), li
def _gzip_decode(input, errors='strict'):
    # gunzip a byte string; returns (decompressed bytes, input length)
    li = len(input)
    input = StringIO.StringIO(input)
    g = gzip.GzipFile(mode='rb', fileobj=input)
    return g.read(), li
def _gzip_encode(input, errors='strict'):
    # gzip-compress a byte string; returns (compressed bytes, input length)
    li = len(input)
    s = StringIO.StringIO()
    g = gzip.GzipFile(mode='wb', fileobj=s)
    g.write(input)
    g.close()
    return s.getvalue(), li
def search(name):
    '''Codec search hook: answer only for the 'gzip' codec name.'''
    if name == 'gzip':
        return codecs.CodecInfo(name = name, encode = _gzip_encode, decode = _gzip_decode)
    # BUGFIX: this hook previously returned a gzip CodecInfo for *every*
    # name, hijacking lookups of any unknown/unrelated encoding. Search
    # functions must return None for names they do not handle.
    return None
_to_register.append(search)
del search
def _fuzzyzip_decode(input, errors='strict'):
    '''Decompress `input`, sniffing the format from its magic number:
    PK -> zip archive, BZ -> bz2, "\\x1f\\x8b" -> gzip, otherwise lzma
    (when available) and finally raw zlib ('zip' codec).'''
    magic_num = input[:2]
    if magic_num == 'PK':
        return _pk_decode(input, errors=errors)
    elif magic_num == 'BZ':
        return input.decode('bz2'), len(input)
    elif magic_num == '\x1f\x8b':
        return _gzip_decode(input, errors=errors)
    else:
        global HAVE_LZMA
        if HAVE_LZMA:
            try:
                return pylzma.decompress(input), len(input)
            except ImportError:
                # lazy pylzma import failed: remember that and fall through
                HAVE_LZMA = False
            except Exception:
                pass
        return input.decode('zip'), len(input)
def _fuzzyzip_encode(input, errors='strict'):
    'Compress `input` with several codecs and keep the smallest result.'
    original_length = len(input)
    candidates = [
        # lambda: _pk_encode(input)[0],
        # lambda: _gzip_encode(input)[0],
        input.encode('bz2'),
        input.encode('zlib'),
    ]
    if HAVE_LZMA and ENCODE_LZMA:
        candidates.append(pylzma.compress(input))
    for candidate in candidates:
        assert len(candidate) > 0
    # ties go to the earlier candidate, exactly like the original linear scan
    return min(candidates, key=len), original_length
def search(name):
    # codec search hook: answer for 'z'/'fuzzyzip' with the smallest-output codec
    if name == 'z' or name == 'fuzzyzip':
        name = 'fuzzyzip'
        return codecs.CodecInfo(name = 'fuzzyzip', encode = _fuzzyzip_encode, decode = _fuzzyzip_decode)
_to_register.append(search)
del search
def search(name):
    # codec search hook for 'fuzzy<encodings>' / '<encodings>?' names:
    # decoding runs fuzzydecode over the whitespace-separated encoding list
    # (or the system defaults when no encodings are named)
    if name.startswith('fuzzy'):
        if name == 'fuzzyzip': return None
        encoding = name[len('fuzzy'):] or None
    elif name.endswith('?'):
        encoding = name[:-1] or None
    else:
        return None
    name = 'fuzzy'
    encode = fuzzyencode
    def decode(s, errors='strict'):
        if encoding:
            encs = filter(bool, encoding.split())
        else:
            encs = None
        return fuzzydecode(s, encs, errors), len(s)
    return codecs.CodecInfo(name=name, encode=encode, decode=decode)
_to_register.append(search)
del search
# 'locale' codec: str <-> the user's preferred locale encoding
__locale_encoding = lambda: locale.getpreferredencoding()
register_codec('locale',
               lambda s, errors = 'strict': (s.encode(__locale_encoding()), len(s)),
               lambda s, errors = 'strict': (s.decode(__locale_encoding()), len(s)))
# 'filesys' codec: convert to/from the filesystem encoding, passing through
# values that are already of the target type
__filesysencoding = lambda: sys.getfilesystemencoding()
def _filesys_encode(s, errors = 'strict'):
    if isinstance(s, str):
        return s, len(s)
    else:
        return s.encode(__filesysencoding()), len(s)
def _filesys_decode(s, errors = 'strict'):
    if isinstance(s, unicode):
        return s, len(s)
    else:
        return s.decode(__filesysencoding()), len(s)
register_codec('filesys',
               _filesys_encode,
               _filesys_decode)
del _filesys_encode
del _filesys_decode
# 'url' codec: percent-quote on encode; unquote_plus ('+' -> space) on decode
def _url_encode(input, errors='strict'):
    return urllib2.quote(input), len(input)
def _url_decode(input, errors='strict'):
    return urllib.unquote_plus(input), len(input)
register_codec('url', _url_encode, _url_decode)
# codec: utf8url
# encode = utf8 encode -> url encode
# decode = url decode -> utf8 decode
def _utf8url_encode(input, errors='strict'):
    output = input.encode('utf-8', errors)
    output = urllib2.quote(output)
    return output, len(input)
def _utf8url_decode(input, errors='strict'):
    output = input.encode('ascii', errors)
    output = urllib.unquote_plus(output)
    output = output.decode('utf-8', errors)
    return output, len(input)
register_codec('utf8url', _utf8url_encode, _utf8url_decode)
# Cache of lazily-built base-N CodecInfo objects, keyed by codec name.
b64_codecs = {}
b64_names = frozenset(('b64', 'b32', 'b16'))
def make_funcs(encode, decode):
    """Wrap two raw transform callables in the codec (output, consumed) protocol."""
    def _wrap(transform):
        def _codec_call(input, errors='strict'):
            # `errors` is accepted for protocol compatibility but unused.
            return transform(input), len(input)
        return _codec_call
    return {'encode': _wrap(encode), 'decode': _wrap(decode)}
def search_base64(name):
    # Codec-registry hook: lazily build and memoize the base-N codecs.
    if name not in b64_names:
        return
    if name not in b64_codecs:
        transforms = {
            'b64': (base64.b64encode, base64.b64decode),
            'b32': (base64.b32encode, base64.b32decode),
            'b16': (base64.b16encode, base64.b16decode),
        }
        enc, dec = transforms[name]
        b64_codecs[name] = codecs.CodecInfo(name=name, **make_funcs(enc, dec))
    return b64_codecs[name]
_to_register.append(search_base64)
del search_base64
def _binary_encode(input, errors='strict'):
def align(s):
return '0'*(8-len(s)) + s
output = ''.join((align(bin(ord(x))[2:]) for x in input))
return output, len(input)
def _binary_decode(input, errors='strict'):
assert not (len(input) % 8)
#if using for serious work, fix the eval
output = ''.join((chr(eval('0b' + input[x:x+8])) for x in range(0, len(input), 8)))
return output, len(input)
register_codec('binary', _binary_encode, _binary_decode)
__all__ = []  # nothing exported; codecs are made available via the registry
if __name__ == '__main__':
    # Ad-hoc benchmark comparing the 'xml' and 'xml2' codecs on random
    # entity-laden strings.  NOTE: Python 2 only (print statement, xrange,
    # time.clock) — will not run under Python 3.
    install()
    def gen_rand_str(length = 5000):
        # Mix named entities, decimal/hex character references and plain
        # ASCII letters in random proportions.
        from random import randint, choice as randchoice
        from string import ascii_letters
        ents = list(__simplechars_dec)
        data = []
        append = data.append
        for x in xrange(length):
            r = randint(0, 10)
            if r == 0:
                append('&%s;' % randchoice(ents))
            elif r == 1:
                append('&%d;' % randint(0, 65535))
            elif r == 2:
                append('&%x;' % randint(0, 65535))
            if r > 3:
                append(randchoice(ascii_letters))
        return ''.join(data)
    strings = [gen_rand_str() for x in xrange(100)]
    results1 = []
    results2 = []
    from time import clock
    def timeit(func):
        # Time a single call (time.clock was removed in Python 3.8).
        before = clock()
        func()
        return clock() - before
    def foo(encoding, res):
        for s in strings:
            res.append(s.decode(encoding))
    print 'xml', timeit(lambda: foo('xml', results1))
    print 'xml2', timeit(lambda: foo('xml2', results2))
    # Both codecs must agree on every decoded string.
    assert results1 == results2
| 29.821346 | 119 | 0.561503 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,634 | 0.12713 |
3e08ccba7d47176de06f3bb412445c1550a56baf | 463 | py | Python | jaxfg/core/__init__.py | AvanDavad/jaxfg | 6d1559126ba872b452eca6a13c2688349f1c5f7e | [
"MIT"
] | 120 | 2020-11-28T19:43:31.000Z | 2022-03-29T02:35:46.000Z | jaxfg/core/__init__.py | AvanDavad/jaxfg | 6d1559126ba872b452eca6a13c2688349f1c5f7e | [
"MIT"
] | 12 | 2021-05-24T09:02:12.000Z | 2022-03-30T19:51:40.000Z | jaxfg/core/__init__.py | AvanDavad/jaxfg | 6d1559126ba872b452eca6a13c2688349f1c5f7e | [
"MIT"
] | 9 | 2021-05-06T15:31:23.000Z | 2022-03-23T12:06:44.000Z | from ._factor_base import FactorBase
from ._factor_stack import FactorStack
from ._stacked_factor_graph import StackedFactorGraph
from ._storage_metadata import StorageMetadata
from ._variable_assignments import VariableAssignments
from ._variables import RealVectorVariable, VariableBase
# Explicit public API for star-imports of this package.
__all__ = [
    "FactorStack",
    "FactorBase",
    "StackedFactorGraph",
    "StorageMetadata",
    "VariableAssignments",
    "RealVectorVariable",
    "VariableBase",
]
| 27.235294 | 56 | 0.792657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 117 | 0.2527 |
3e09b3a92c71458b7e09905c1beec58ee515ed7a | 106 | py | Python | cython/wrap_c/test_cython_wrapper.py | tleonhardt/Python_Interface_Cpp | 398eab0c6e7f5e0358edb6644c71b5fdc6b2606a | [
"MIT"
] | 64 | 2017-03-10T09:32:22.000Z | 2022-01-25T08:44:06.000Z | cython/wrap_c/test_cython_wrapper.py | tleonhardt/Python_Interface_Cpp | 398eab0c6e7f5e0358edb6644c71b5fdc6b2606a | [
"MIT"
] | null | null | null | cython/wrap_c/test_cython_wrapper.py | tleonhardt/Python_Interface_Cpp | 398eab0c6e7f5e0358edb6644c71b5fdc6b2606a | [
"MIT"
] | 13 | 2017-03-13T23:28:56.000Z | 2021-06-07T08:37:03.000Z | # coding=utf-8
import cyfib
def test_valid():
    """The Cython wrapper must reproduce the known reference value for input 20."""
    expected = 17711
    assert cyfib.compute_fibonacci_wrapper(20) == expected
| 15.142857 | 57 | 0.726415 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.132075 |
3e0aba9a6fd99c2588436a872d706b50b1c4f2cd | 1,612 | py | Python | Server/server.py | mjbogusz/CCVR | 65b11d39c1412134f8a695b30955368eb43c2518 | [
"MIT"
] | null | null | null | Server/server.py | mjbogusz/CCVR | 65b11d39c1412134f8a695b30955368eb43c2518 | [
"MIT"
] | null | null | null | Server/server.py | mjbogusz/CCVR | 65b11d39c1412134f8a695b30955368eb43c2518 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from http.server import SimpleHTTPRequestHandler, HTTPServer
from urllib.parse import parse_qs
import time
class CCVRRequestHandler(SimpleHTTPRequestHandler):
    """Serves static content from the 'files/' subdirectory and accepts
    'map'/'sensors' uploads via POST (form-encoded `type` and `content`)."""

    def do_GET(self):
        # All content lives under 'files/'; prepend it before delegating.
        self.path = '/files' + self.path
        super().do_GET()

    def do_HEAD(self):
        self.path = '/files' + self.path
        # Bug fix: previously delegated to do_GET(), sending a body on HEAD.
        super().do_HEAD()

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        data = parse_qs(self.rfile.read(content_length).decode('utf-8'))
        if not data.get('type') or not data.get('content'):
            self.send_response(400, 'Bad request')
            self.end_headers()
            return
        filename = 'files/'
        if data.get('type')[0] == 'map':
            filename += 'map.txt'
        elif data.get('type')[0] == 'sensors':
            filename += 'sensors.txt'
        else:
            self.send_response(400, 'Bad type')
            self.end_headers()
            # Bug fix: previously fell through and tried to open 'files/'.
            return
        try:
            # `with` guarantees the file is closed even if write() fails.
            with open(filename, 'w') as data_file:
                data_file.write(data.get('content')[0])
            self.send_response(200, 'OK')
            self.end_headers()
        except Exception as e:
            print('Error writing file:', e)
            self.send_response(500, 'Error writing file')
            self.end_headers()
def run(port = 8080, hostName = ''):
    """Bind, announce, serve until Ctrl-C, then shut down cleanly."""
    server = HTTPServer((hostName, port), CCVRRequestHandler)
    print(time.asctime(), "Server Starts - %s:%s" % (hostName, port))
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop; fall through to cleanup.
        pass
    server.server_close()
    print(time.asctime(), "Server Stops - %s:%s" % (hostName, port))
if __name__ == "__main__":
    # Optional CLI arguments: [port [host]].
    from sys import argv
    cli_args = argv[1:]
    if len(cli_args) == 2:
        run(port = int(cli_args[0]), hostName = str(cli_args[1]))
    elif len(cli_args) == 1:
        run(port = int(cli_args[0]))
    else:
        run()
| 25.587302 | 66 | 0.673077 | 941 | 0.583747 | 0 | 0 | 0 | 0 | 0 | 0 | 309 | 0.191687 |
3e0adca23e72763263f72a46a3ff5aad270ff8c2 | 4,907 | py | Python | dags/dag_update.py | alyildiz/btc_forecast | b1e70431c9f18bee0afda71b96805f6194072548 | [
"MIT"
] | 5 | 2021-09-06T08:42:02.000Z | 2021-11-15T15:04:57.000Z | dags/dag_update.py | alyildiz/sncf_forecast | b1e70431c9f18bee0afda71b96805f6194072548 | [
"MIT"
] | null | null | null | dags/dag_update.py | alyildiz/sncf_forecast | b1e70431c9f18bee0afda71b96805f6194072548 | [
"MIT"
] | null | null | null | import os
from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.docker_operator import DockerOperator
from docker.types import Mount
# Defaults applied to every task in the DAG defined below.
default_args = {
    "owner": "airflow",
    "description": "Use of the DockerOperator",
    "depend_on_past": False,  # NOTE(review): Airflow's key is 'depends_on_past' — confirm this typo.
    "start_date": datetime(2021, 5, 1),
    "email_on_failure": False,
    "email_on_retry": False,
    "retries": 1,
    "retry_delay": timedelta(minutes=5),
}
# Absolute project root on the host machine; bind-mounted into containers.
BASE_DIR = "/home/baris/PROJECTS/sncf_forecast/"
# Credentials and connection settings forwarded into every task container.
# A missing variable raises KeyError at import time, exactly as the original
# per-key os.environ lookups did.
_ENV_KEYS = (
    "API_KEY",
    "API_KEY_SECRET",
    "ACCESS_TOKEN",
    "ACCESS_TOKEN_SECRET",
    "MONGODB_HOST",
    "MONGODB_PORT",
    "MONGO_INITDB_ROOT_USERNAME",
    "MONGO_INITDB_ROOT_PASSWORD",
)
dic_env = {key: os.environ[key] for key in _ENV_KEYS}
with DAG("daily_update_new", default_args=default_args, schedule_interval="0 2 * * *", catchup=False) as dag:

    def _docker_task(task_name, image, command, mounts):
        """Build a DockerOperator with the settings shared by every task.

        Factors out the four previously duplicated operator definitions;
        each field is identical to the original copies.
        """
        return DockerOperator(
            task_id=task_name,
            image=image,
            environment=dic_env,
            container_name=task_name,
            api_version="auto",
            auto_remove=True,
            command=command,
            docker_url="unix://var/run/docker.sock",
            working_dir="/workdir",
            mount_tmp_dir=False,
            mounts=mounts,
            network_mode="sncf_forecast_default",
        )

    def _modeling_mounts():
        # Fresh Mount objects per task, mirroring the original per-operator
        # literals (the three modeling tasks used identical mounts).
        return [
            Mount(source=BASE_DIR + "backend/modeling/bin", target="/workdir/bin", type="bind"),
            Mount(source=BASE_DIR + "backend/modeling/src", target="/workdir/src", type="bind"),
            Mount(source=BASE_DIR + "shared", target="/workdir/shared", type="bind"),
            Mount(source=BASE_DIR + "mlflow/db", target="/workdir/data", type="bind"),
            Mount(source=BASE_DIR + "mlflow/artifacts", target="/workdir/artifacts", type="bind"),
        ]

    # Refresh the Mongo database first ...
    update_db = _docker_task(
        "task_____daily_update_dbmongo",
        "sncf_forecast_update",
        "python3 /workdir/update.py",
        [
            Mount(source=BASE_DIR + "shared", target="/workdir/shared", type="bind"),
            Mount(source=BASE_DIR + "backend/modeling/src", target="/workdir/src", type="bind"),
            Mount(source=BASE_DIR + "backend/update", target="/workdir", type="bind"),
        ],
    )

    # ... then retrain the three models in parallel.
    update_lstm = _docker_task(
        "task_____daily_update_lstm",
        "sncf_forecast_modeling",
        "python3 /workdir/bin/train_model.py -m lstm",
        _modeling_mounts(),
    )
    update_baseline = _docker_task(
        "task_____daily_update_baseline",
        "sncf_forecast_modeling",
        "python3 /workdir/bin/train_model.py -m baseline",
        _modeling_mounts(),
    )
    update_autoencoder = _docker_task(
        "task_____daily_update_autoencoder",
        "sncf_forecast_modeling",
        "python3 /workdir/bin/train_model.py -m autoencoder",
        _modeling_mounts(),
    )

    update_db >> update_lstm
    update_db >> update_baseline
    update_db >> update_autoencoder
| 41.584746 | 109 | 0.651314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,960 | 0.399429 |
3e0b03ec64f84131a309427f748ab4fc729497d0 | 1,723 | py | Python | sustainableCityManagement/main_project/Population_API/views_population.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | null | null | null | sustainableCityManagement/main_project/Population_API/views_population.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | null | null | null | sustainableCityManagement/main_project/Population_API/views_population.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | 1 | 2021-05-13T16:33:18.000Z | 2021-05-13T16:33:18.000Z | from django.http import JsonResponse
from django.http import HttpResponse
from rest_framework.views import APIView
from .store_population import StorePopulation
import time as processTiming
import uuid
# API to fetch Ireland population used by frontend. The result consist of population estimate and year.
class IrelandPopulationView(APIView):
    """Returns the Ireland population estimate (value and year) as JSON."""

    @classmethod
    def get(cls, request, fetch_population=StorePopulation()):
        # `cls` replaces the misnamed `self` — this is a @classmethod, so the
        # first argument is the class.  NOTE(review): the StorePopulation()
        # default is evaluated once at import time and shared across
        # requests — presumably intentional caching / test injection; confirm.
        startTime = processTiming.time()
        call_uuid = uuid.uuid4()
        ID = "IRELAND_POPULATION_INFO"
        result = fetch_population.fetch_irish_population()
        # (Removed a stale copy-pasted comment about query params — this
        # endpoint takes none.)
        return JsonResponse(
            {
                "API_ID": ID,
                "CALL_UUID": call_uuid,
                "DATA": {
                    "RESULT": result
                },
                "TIMESTAMP": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
        )
# API to fetch Dublin population used by frontend. The result consist of population estimate and year.
class DublinPopulationView(APIView):
    """Returns the Dublin population estimate (value and year) as JSON."""

    @classmethod
    def get(cls, request, fetch_population=StorePopulation()):
        # `cls` replaces the misnamed `self` — this is a @classmethod.
        # NOTE(review): the shared StorePopulation() default is evaluated
        # once at import time; confirm this caching is intended.
        startTime = processTiming.time()
        call_uuid = uuid.uuid4()
        ID = "DUBLIN_POPULATION_INFO"
        result = fetch_population.fetch_dublin_population()
        return JsonResponse(
            {
                "API_ID": ID,
                "CALL_UUID": call_uuid,
                "DATA": {
                    "RESULT": result
                },
                "TIMESTAMP": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
        )
3e105c7bee23ddd23731ff6b0bc65a97faa40678 | 2,536 | py | Python | examples/tutorial7.py | fangj99/gifmaze | fd0f7fbf592537a26b13359ccf87dab836d9b1b3 | [
"MIT"
] | 7 | 2018-04-28T17:25:25.000Z | 2021-08-15T17:52:11.000Z | examples/tutorial7.py | fangj99/gifmaze | fd0f7fbf592537a26b13359ccf87dab836d9b1b3 | [
"MIT"
] | null | null | null | examples/tutorial7.py | fangj99/gifmaze | fd0f7fbf592537a26b13359ccf87dab836d9b1b3 | [
"MIT"
] | 2 | 2019-10-30T03:40:50.000Z | 2022-01-02T05:44:33.000Z | # -*- coding: utf-8 -*-
"""
This script shows how to embed the animation into a
background image (it's also possible to embed the animation
into another animation, but that's too complicated to implement
in a simple program ...)
"""
from colorsys import hls_to_rgb
import gifmaze as gm
from gifmaze.algorithms import wilson, bfs
from gifmaze.utils import generate_text_mask
# firstly define the size and color_depth of the image.
# Canvas size and GIF color depth (2**8 = 256-color palette).
width, height = 600, 400
color_depth = 8
# define a surface to draw on.
surface = gm.GIFSurface.from_image('teacher.png', color_depth)
# set the 0-th color to be the same with the blackboard's.
palette = [52, 51, 50, 200, 200, 200, 255, 0, 255]
# Fill the remaining slots with a hue wheel at fixed lightness/saturation.
for i in range(256):
    rgb = hls_to_rgb((i / 360.0) % 1, 0.5, 1.0)
    palette += [int(round(255 * x)) for x in rgb]
surface.set_palette(palette)
# next define an animation environment to run the algorithm.
anim = gm.Animation(surface)
# set the speed, delay, and transparent color we want.
anim.set_control(speed=50, delay=2, trans_index=3)
# add a maze instance.
mask = generate_text_mask(surface.size, 'UST', 'ubuntu.ttf', 350)
# specify the region that where the animation is embedded.
left, top, right, bottom = 66, 47, 540, 343
maze = anim.create_maze_in_region(cell_size=4,
                                  region=(left, top, right, bottom),
                                  mask=mask)
anim.pad_delay_frame(100)
# paint the blackboard
surface.rectangle(left, top, right - left + 1, bottom - top + 1, 0)
# in the first algorithm only 4 colors occur in the image, so we can use
# a smaller minimum code length, this can help reduce the file size significantly.
surface.set_lzw_compress(2)
# pad one second delay, get ready!
anim.pad_delay_frame(100)
# the animation runs here.
wilson(maze, root=(0, 0))
# pad three seconds delay to see the result clearly.
anim.pad_delay_frame(300)
# now we run the maze solving algorithm.
# this time we use full 256 colors, hence the minimum code length is 8.
surface.set_lzw_compress(8)
# the tree and wall are unchanged throughout the maze solving algorithm hence
# it's safe to use 0 as the transparent color and color the wall and tree transparent.
anim.set_colormap({0: 0, 1: 0, 2: 2, 3: 3})
anim.set_control(speed=30, delay=5, trans_index=0)
# run the maze solving algorithm.
bfs(maze,
    start=(0, 0),
    end=(maze.size[0] - 1, maze.size[1] - 1))
# pad five seconds delay to see the path clearly.
anim.pad_delay_frame(500)
# save the result.
surface.save('wilson_bfs.gif')
surface.close()
| 31.308642 | 86 | 0.714117 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,276 | 0.503155 |
3e105caf515da97595cf131c9228511ab5a47c2b | 313 | py | Python | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | [
"Apache-2.0"
] | 4 | 2021-02-01T10:28:11.000Z | 2021-02-01T10:34:40.000Z | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | [
"Apache-2.0"
] | null | null | null | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | [
"Apache-2.0"
] | null | null | null | from socket import *
a = input("请输入IP地址:")
b = input("请输入端口:")
# Bug fix: the two prompts above were collected but then ignored and the
# address was hard-coded.  Use the user's input, falling back to the old
# defaults when a field is left blank.
ADDR = (a if a else "176.17.12.178", int(b) if b else 31414)
giao = socket(AF_INET, SOCK_DGRAM)
while True:
    m = input(":")
    if not m:  # an empty line ends the chat
        break
    giao.sendto(m.encode(), ADDR)
    data, _peer = giao.recvfrom(1024)
    print("意思是", data.decode())
giao.close()
| 18.411765 | 37 | 0.5623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.206997 |
3e11bd4f5fe50f533d78b84480d62520eb696807 | 151 | py | Python | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | [
"Apache-2.0"
] | null | null | null | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | [
"Apache-2.0"
] | null | null | null | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | [
"Apache-2.0"
] | 1 | 2018-11-01T11:10:58.000Z | 2018-11-01T11:10:58.000Z | from django.conf.urls import url
from django.conf import settings
from panel.views import *
# Single route: the panel index page.  `index` comes from the star import
# of panel.views above.
urlpatterns = [
    url(r'^$', index, name='index'),
]
| 13.727273 | 36 | 0.688742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.07947 |
3e11beb96e30d1e453934e9af1acf5d6478cd742 | 244 | py | Python | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | [
"MIT"
] | null | null | null | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | [
"MIT"
] | null | null | null | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | [
"MIT"
] | null | null | null | import turtle
# Spiral palette ('pruple' typo fixed — turtle.color() raises on an unknown
# color name the moment it is used).
color = ["green", "yellow", 'orange', 'blue', 'purple', 'red', 'pink']
x = 10
y = 270
i = 0
turtle.bgcolor("black")
while True:
    # Bug fix: the palette and the counter `i` were built but never used —
    # every segment was drawn with color[0].  Cycle through the palette.
    turtle.color(color[i % len(color)])
    turtle.forward(x)
    turtle.left(y)
    x += 10
    y -= 1
    i += 1
turtle.mainloop()  # NOTE(review): unreachable — the loop above never breaks.
| 16.266667 | 63 | 0.614754 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55 | 0.22541 |
3e1247da76756de4876b84765ac8609022ba7513 | 2,446 | py | Python | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | [
"MIT"
] | 189 | 2017-07-20T22:16:22.000Z | 2022-02-21T17:57:41.000Z | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | [
"MIT"
] | 16 | 2019-05-09T14:47:44.000Z | 2021-09-19T00:25:59.000Z | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | [
"MIT"
] | 93 | 2017-07-20T22:55:41.000Z | 2022-03-12T19:42:14.000Z | """Model definitions."""
# Authors: Afshine Amidi <lastname@mit.edu>
# Shervine Amidi <firstname@stanford.edu>
# MIT License
import numpy as np
from enzynet import constants
from keras import initializers
from keras import layers
from keras.layers import advanced_activations
from keras import models
from keras import regularizers
def enzynet(input_v_size: int, n_channels: int) -> models.Sequential:
    """Returns EnzyNet as a Keras model."""
    # Scale for the 3D convolution initializers; each layer additionally
    # divides by kernel_size ** (3/2).
    stddev_conv3d = np.sqrt(2.0/n_channels)

    def _conv_initializer(kernel_size):
        # Fresh initializer per layer, as in the original per-layer objects.
        return initializers.RandomNormal(
            mean=0.0,
            stddev=stddev_conv3d * kernel_size ** (-3 / 2))

    def _dense_initializer():
        return initializers.RandomNormal(mean=0.0, stddev=0.01)

    def _l2():
        # New regularizer object per layer, identical coefficient.
        return regularizers.l2(0.001)

    model = models.Sequential()

    # Conv block 1: 32 filters, 9x9x9 kernel, stride 2.
    model.add(
        layers.Conv3D(
            filters=32,
            kernel_size=9,
            strides=2,
            padding='valid',
            kernel_initializer=_conv_initializer(9),
            bias_initializer='zeros',
            kernel_regularizer=_l2(),
            bias_regularizer=None,
            input_shape=(input_v_size,)*constants.N_DIMENSIONS + (n_channels,)))
    model.add(advanced_activations.LeakyReLU(alpha=0.1))
    model.add(layers.Dropout(rate=0.2))

    # Conv block 2: 64 filters, 5x5x5 kernel, stride 1, then max pooling.
    model.add(
        layers.Conv3D(
            filters=64,
            kernel_size=5,
            strides=1,
            padding='valid',
            kernel_initializer=_conv_initializer(5),
            bias_initializer='zeros',
            kernel_regularizer=_l2(),
            bias_regularizer=None))
    model.add(advanced_activations.LeakyReLU(alpha=0.1))
    model.add(layers.MaxPooling3D(pool_size=(2, 2, 2)))
    model.add(layers.Dropout(rate=0.3))

    # Dense classification head.
    model.add(layers.Flatten())
    model.add(
        layers.Dense(
            units=128,
            kernel_initializer=_dense_initializer(),
            bias_initializer='zeros',
            kernel_regularizer=_l2(),
            bias_regularizer=None))
    model.add(layers.Dropout(rate=0.4))
    model.add(
        layers.Dense(
            units=constants.N_CLASSES,
            kernel_initializer=_dense_initializer(),
            bias_initializer='zeros',
            kernel_regularizer=_l2(),
            bias_regularizer=None))
    model.add(layers.Activation('softmax'))
    return model
| 28.114943 | 80 | 0.618561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 263 | 0.107522 |
3e13f1a614fdcd99556bcda63d31e15a470031fa | 998 | py | Python | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | [
"MIT"
] | 1 | 2019-09-23T20:00:52.000Z | 2019-09-23T20:00:52.000Z | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | [
"MIT"
] | null | null | null | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | [
"MIT"
] | null | null | null | from db import db
class AddressEntity(db.Model):
    """SQLAlchemy model for a postal address."""
    __tablename__ = "address"

    id = db.Column(db.Integer, primary_key=True)
    int_number = db.Column(db.String(15), nullable=False)
    ext_number = db.Column(db.String(15), nullable=False)
    block = db.Column(db.String(15), nullable=False)
    number = db.Column(db.String(15), nullable=False)
    street = db.Column(db.String(75), nullable=False)
    colony = db.Column(db.String(75), nullable=False)
    municipality = db.Column(db.String(50), nullable=False)
    state = db.Column(db.String(50), nullable=False)
    country = db.Column(db.String(50), nullable=False)
    created_at = db.Column(db.DateTime, nullable=True, server_default=db.func.current_timestamp())
    updated_at = db.Column(db.DateTime, nullable=True, server_onupdate=db.func.current_timestamp())
    # NOTE(review): declared DateTime but defaulted to the boolean False —
    # this looks like it should be db.Boolean; confirm before changing the
    # schema.
    deleted = db.Column(db.DateTime, nullable=False, default=False)

    def save(self) -> None:
        """Persist this instance and commit.

        Bug fix: this was a @classmethod that passed the *class object*
        to session.add() instead of an instance; instance calls like
        `address.save()` are unchanged.
        """
        db.session.add(self)
        db.session.commit()
| 39.92 | 99 | 0.700401 | 977 | 0.978958 | 0 | 0 | 95 | 0.09519 | 0 | 0 | 9 | 0.009018 |
3e140c63bd33992dd5d90e07a79edb1db5f260ce | 10,357 | py | Python | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | [
"Apache-2.0"
] | null | null | null | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | [
"Apache-2.0"
] | null | null | null | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | [
"Apache-2.0"
] | null | null | null | import os
import click
import requests
from FeatureCloud.api.imp.exceptions import FCException
from FeatureCloud.api.imp.test import commands
from FeatureCloud.api.cli.test.workflow.commands import workflow
@click.group("test")
def test() -> None:
    """Testbed related commands"""
# Mount the workflow sub-commands under `featurecloud test workflow ...`.
test.add_command(workflow)
@test.command('help')
def help():
    # Delegate to the implementation layer; the first tuple element is
    # ignored, exactly as before.
    _ignored, message = commands.help()
    click.echo(message)
# The try/except below converts controller/connection problems into friendly
# messages instead of tracebacks; this pattern is shared by every command in
# this module.
@test.command('start')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance (e.g. featurecloud test start --controller-host=http://localhost:8000).',
              required=True)
@click.option('--client-dirs', default='.,.',
              help='Client directories separated by comma. The number of clients is based on the number of directories supplied here (e.g. `featurecloud test start --client-dirs=.,.,.,.` command will start 4 clients).',
              required=True)
@click.option('--generic-dir', default='.',
              help='Generic directory available for all clients. Content will be copied to the input folder of all '
                   'instances (e.g. featurecloud test start --generic-dir=.).',
              required=True)
@click.option('--app-image', default='test_app',
              help='The repository url of the app image (e.g. featurecloud test start --app-image=featurecloud.ai/test_app).',
              required=True)
@click.option('--channel', default='local',
              help='The communication channel to be used. Possible values: "local" or "internet" (e.g. featurecloud test start --channel=local).',
              required=True)
@click.option('--query-interval', default=2,
              help='The interval after how many seconds the status call will be performed (e.g. featurecloud test start --query-interval=2).',
              required=True)
@click.option('--download-results',
              help='A directory name where to download results. This will be created into /data/tests directory (e.g. featurecloud test start --download-results=./results).',
              default='')
def start(controller_host: str, client_dirs: str, generic_dir: str, app_image: str, channel: str, query_interval,
          download_results: str):
    '''Starts testbed run with the specified parameters'''
    try:
        # commands.start returns the id of the newly created test run.
        result = commands.start(controller_host, client_dirs, generic_dir, app_image, channel, query_interval,
                                download_results)
        click.echo(f"Test id={result} started")
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
@test.command('stop')
@click.option('--controller-host', default='http://localhost:8000',
              help='Http address of your running controller instance (e.g. featurecloud test stop --controller-host=http://localhost:8000).',
              required=True)
@click.option('--test-id', help='The test id of the test to be stopped. The test id is returned by the start command (e.g.featurecloud test stop --test-id=1).')
def stop(controller_host: str, test_id: str or int):
    '''Stops test with specified test id'''
    # NOTE(review): the annotation `str or int` evaluates to just `str`;
    # the intended type is probably Union[str, int] — confirm (same pattern
    # appears in the other commands below).
    try:
        result = commands.stop(controller_host, test_id)
        click.echo(f"Test id={result} stopped")
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
@test.command('delete')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance. (e.g. featurecloud test delete all --controller-host=http://localhost:8000)',)
@click.option('--test-id', help='The test id of the test to be deleted. The test id is returned by the start command.'
                                'To delete all tests omit this option and use "delete all".')
@click.argument('all', type=str, nargs=1, required=False)
def delete(controller_host: str, test_id: str or int, all: str):
    '''
    Deletes test with specified id or alternatively, deletes all tests
    ALL - delete all tests
    Examples:
    featurecloud test delete --test-id=1
    featurecloud test delete all
    '''
    # NOTE(review): commands.delete is invoked *before* the `all` argument
    # is validated, so an invalid value (e.g. "al") still reaches the
    # backend; confirm the implementation layer rejects it safely.
    try:
        result = commands.delete(controller_host, test_id, all)
        if all is not None:
            if all.lower() == 'all':
                click.echo(f"All tests deleted")
            else:
                click.echo(f'Wrong parameter {all}')
        else:
            click.echo(f"Test id={result} deleted")
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
# Note: this function shadows the builtin `list` inside this module; it is
# only ever called by click, so no other code here is affected.
@test.command('list')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance (e.g. featurecloud test list --controller-host=http://localhost:8000).',
              required=True)
@click.option('--format', help='Format of the test list. Possible options: json or dataframe (e.g. featurecloud test list --format=dataframe).', required=True, default='dataframe')
def list(controller_host: str, format: str):
    '''List all tests'''
    try:
        result = commands.list(controller_host, format)
        if len(result) == 0:
            click.echo('No tests available')
        else:
            click.echo(result)
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
@test.command('info')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance (e.g. featurecloud test info --controller-host=http://localhost:8000).',
              required=True)
@click.option('--test-id', help='Test id to get info about (e.g. featurecloud test info --test-id=1).', required=True)
@click.option('--format', help='Format of the test info. Possible values: json or dataframe (e.g. featurecloud test info --format=dataframe).', required=True, default='dataframe')
def info(controller_host: str, test_id: str or int, format: str):
    '''Get information about a running test'''
    try:
        # The result is already formatted (json/dataframe) by the
        # implementation layer; just print it.
        result = commands.info(controller_host, test_id, format)
        click.echo(result)
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
@test.command('traffic')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance (e.g. featurecloud test traffic --controller-host=http://localhost:8000).',
              required=True)
@click.option('--test-id', help='The test id to get traffic info about (e.g. featurecloud test traffic --test-id=1).')
@click.option('--format', help='Format of the test traffic. Possible values: json or dataframe (e.g. featurecloud test traffic --format=dataframe).e', required=True, default='dataframe')
def traffic(controller_host: str, test_id: str or int, format: str):
    '''Displays traffic information inside tests'''
    # NOTE(review): the --format help text above ends with a stray ".e" —
    # user-visible typo; left untouched here because help strings are
    # runtime output.
    try:
        result = commands.traffic(controller_host, test_id, format)
        click.echo(result)
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
@test.command('logs')
@click.option('--controller-host', default='http://localhost:8000',
              help='Address of your running controller instance (e.g. featurecloud test logs --controller-host=http://localhost:8000).',
              required=True)
@click.option('--test-id', help='The test id to get logs about (e.g. featurecloud test logs --test-id=1).', required=True)
@click.option('--instance-id', help='The instance id of the test client. Instance ids can be obtained by running the info command (e.g. featurecloud test logs --test-id=1 --instance-id=0).', required=True)
@click.option('--from-row', help='Get logs from a certain row number (e.g. featurecloud test logs --test-id=1 --instance-id=0 --from-row=0).', default='', required=True)
def logs(controller_host: str, test_id: str or int, instance_id: str or int, from_row: str):
    '''Get logs from test client'''
    try:
        result = commands.logs(controller_host, test_id, instance_id, from_row)
        # Single join instead of the previous quadratic `+=` concatenation;
        # output is identical: every line, including the last, is followed
        # by os.linesep.
        click.echo("".join(str(line) + os.linesep for line in result))
    except requests.exceptions.InvalidSchema:
        click.echo(f'No connection adapters were found for {controller_host}')
    except requests.exceptions.MissingSchema:
        click.echo(f' Invalid URL {controller_host}: No scheme supplied. Perhaps you meant http://{controller_host}?')
    except FCException as e:
        click.echo(f'Error: {e}')
# Allow running this module directly for ad-hoc testing of the CLI group.
if __name__ == "__main__":
    test()
| 51.527363 | 220 | 0.663609 | 0 | 0 | 0 | 0 | 10,008 | 0.966303 | 0 | 0 | 5,251 | 0.507 |
3e14c4fe464f76c3e655c88c87bd66bc84933f25 | 4,188 | py | Python | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | [
"MIT"
] | null | null | null | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | [
"MIT"
] | null | null | null | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | [
"MIT"
] | null | null | null | import subprocess
import logging
import os, time
from pathlib import Path
from shutil import copyfile
import pandas as pd
from datetime import datetime
def estimate_time(filename, config, layer=None):
    """Run ``axicli`` in preview mode (``-vTC``) and return its stdout.

    The stdout contains the plot-time estimate.  When *layer* is given,
    only that layer of the SVG is previewed.
    """
    cmd = ['axicli', filename, '--config', config]
    if layer is not None:
        cmd += ['--mode', 'layers', '--layer', str(layer)]
    cmd.append('-vTC')
    completed = subprocess.run(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    return completed.stdout
def plot(filename, config, checkpoint_file, layer=None):
    """Plot *filename* on the AxiDraw via ``axicli``.

    Progress is written to *checkpoint_file* (``-o``) so an interrupted
    plot can be resumed.  When *layer* is given, only that layer is
    plotted.  Returns the process stdout.
    """
    cmd = ['axicli', filename, '--config', config]
    if layer is not None:
        cmd += ['--mode', 'layers', '--layer', str(layer)]
    cmd += ['-o', checkpoint_file]
    completed = subprocess.run(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    return completed.stdout
def res_plot(filename, config, checkpoint_file):
    """Resume a plot from *checkpoint_file* (currently disabled).

    The intended implementation would invoke ``axicli`` with
    ``--mode res_plot`` and ``-o checkpoint_file``; until that is wired
    up this function always raises.

    Raises:
        NotImplementedError: always; resume-plotting is not available.
    """
    # Raise a specific, descriptive exception instead of a bare
    # ``Exception()`` (which hid the intended implementation inside a
    # docstring).  ``NotImplementedError`` subclasses ``Exception``, so
    # existing ``except Exception`` handlers keep working.
    raise NotImplementedError('res_plot (axicli --mode res_plot) is not supported yet')
def toggle_pen(config):
    """Toggle the AxiDraw pen up/down (``axicli -mtoggle``); return stdout."""
    completed = subprocess.run(
        ['axicli', '-mtoggle', '--config', config],
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    return completed.stdout
def return_home(filename):
    """Send the AxiDraw back home (``axicli --mode res_home``); return stdout."""
    completed = subprocess.run(
        ['axicli', filename, '--mode', 'res_home'],
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    return completed.stdout
def backup_drawing(file):
    """Copy *file* into the ``$PLOTTER_BACKUP`` directory and log the print.

    If the ``PLOTTER_BACKUP`` environment variable is set, the drawing is
    copied there (unless a file of the same name already exists) and a row
    ``(name, time_printed)`` is appended to ``print_logs.csv`` in that
    directory.  If the variable is unset, the backup is skipped.
    """
    # Guard clause: nothing to do without a configured backup directory.
    if 'PLOTTER_BACKUP' not in os.environ:
        logging.info("Skipping backup for {}, no $PLOTTER_BACKUP path given".format(file))
        return
    logging.info("backing up {}".format(file))
    filename = os.path.basename(file)
    backup_dir = os.environ['PLOTTER_BACKUP']
    backup_path = os.path.join(backup_dir, filename)
    # Only copy the drawing once; repeated prints of the same file reuse it.
    if not os.path.exists(backup_path):
        copyfile(file, backup_path)
    print_logs = os.path.join(backup_dir, "print_logs.csv")
    if os.path.exists(print_logs):
        logs = pd.read_csv(print_logs)
    else:
        logs = pd.DataFrame({})
    row = pd.DataFrame(
        [{'name': filename, 'time_printed': datetime.now().strftime('%Y-%m-%d %H:%M')}],
        columns=['name', 'time_printed'])
    # ``DataFrame.append`` was deprecated in pandas 1.4 and removed in 2.0;
    # ``pd.concat`` is the supported replacement.
    logs = pd.concat([logs, row], ignore_index=True, sort=False)
    logs.to_csv(print_logs, index=False)
def get_checkpoint_file(file, tmp_folder="tmp"):
    """Return the checkpoint path for *file* inside ``<cwd>/<tmp_folder>``.

    Ensures the temp directory exists and, as housekeeping, removes any
    entries there that have not been modified for over a week.
    """
    tmp_dir = os.path.join(os.getcwd(), tmp_folder)
    Path(tmp_dir).mkdir(parents=True, exist_ok=True)
    checkpoint = os.path.join(tmp_dir, os.path.basename(file))
    logging.info("making tempfile {}".format(checkpoint))
    # Prune stale checkpoints: anything older than 7 days.
    cutoff = time.time() - 7 * 86400
    for entry in os.listdir(tmp_dir):
        entry_path = os.path.join(tmp_dir, entry)
        if os.stat(entry_path).st_mtime < cutoff:
            os.remove(entry_path)
    return checkpoint
def get_checkpoint_and_new_checkpoint(file, tmp_folder="tmp"):
    """Rename the current checkpoint to ``<name>-active`` and return both paths.

    Returns a ``(active_checkpoint, new_checkpoint)`` tuple, where
    ``active_checkpoint`` is the renamed existing checkpoint and
    ``new_checkpoint`` is the (now free) canonical checkpoint path.
    """
    new_checkpoint = get_checkpoint_file(file, tmp_folder)
    active_checkpoint = "{}-active".format(new_checkpoint)
    os.rename(new_checkpoint, active_checkpoint)
    return active_checkpoint, new_checkpoint
def clean_tmp_file(file):
    """Delete *file*, logging a warning instead of raising when it fails."""
    try:
        os.remove(file)
    # Catch only OS-level removal failures (missing file, permissions, ...)
    # rather than a bare ``except:``, which would also swallow
    # KeyboardInterrupt and SystemExit.
    except OSError:
        logging.warning("Could not delete temp file {}".format(file))
def get_config_names(config_folder = 'configs'):
    """List the file names found in ``<cwd>/<config_folder>``."""
    config_dir = os.path.join(os.getcwd(), config_folder)
    # ``os.listdir`` already yields bare names; ``basename`` keeps parity
    # with the original implementation.
    return [os.path.basename(entry) for entry in os.listdir(config_dir)]
def get_full_config_path(config, config_folder = 'configs'):
    """Return the absolute path of *config* inside ``<cwd>/<config_folder>``."""
    return os.path.join(os.getcwd(), config_folder, config)
| 36.417391 | 138 | 0.680755 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 907 | 0.216571 |
3e14f76f2adf0f315a94c191c5946f1de65d9fa9 | 5,258 | py | Python | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | [
"BSD-3-Clause"
] | 1 | 2017-07-10T00:06:36.000Z | 2017-07-10T00:06:36.000Z | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | [
"BSD-3-Clause"
] | null | null | null | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | [
"BSD-3-Clause"
] | 5 | 2016-06-11T09:48:16.000Z | 2019-08-07T19:52:41.000Z | #!/usr/bin/env python
import argparse
parser = argparse.ArgumentParser(prog="region_optimize.py", description="Find the kernel parameters for Gaussian region zones.")
parser.add_argument("spectrum", help="JSON file containing the data, model, and residual.")
parser.add_argument("--sigma0", type=float, default=2, help="(AA) to use in fitting")
# Parse the command line before the heavy scientific imports below, so that
# ``--help`` and argument errors respond quickly.
args = parser.parse_args()
import json
import numpy as np
from scipy.optimize import fmin
from scipy.linalg import cho_factor, cho_solve
from numpy.linalg import slogdet
import Starfish
from Starfish.model import PhiParam
from Starfish.covariance import get_dense_C, make_k_func
from Starfish import constants as C
# Load the spectrum and then take the data products.
f = open(args.spectrum, "r")
read = json.load(f) # read is a dictionary
f.close()
wl = np.array(read["wl"])
# data_full = np.array(read["data"])
# model = np.array(read["model"])
resid = np.array(read["resid"])
sigma = np.array(read["sigma"])
spectrum_id = read["spectrum_id"]
order = read["order"]
# Load the previously-identified region centers (mus) for this spectrum/order.
fname = Starfish.specfmt.format(spectrum_id, order) + "regions.json"
f = open(fname, "r")
read = json.load(f) # read is a dictionary
f.close()
mus = np.array(read["mus"])
# Guard against pairing a regions file from a different spectrum or order.
assert spectrum_id == read["spectrum_id"], "Spectrum/Order mismatch"
assert order == read["order"], "Spectrum/Order mismatch"
# Load the guesses for the global parameters from the .json
# If the file exists, optionally initialize to the chebyshev values
fname = Starfish.specfmt.format(spectrum_id, order) + "phi.json"
try:
    phi = PhiParam.load(fname)
except FileNotFoundError:
    print("No order parameter file found (e.g. sX_oXXphi.json), please run `star.py --initPhi` first.")
    raise
# Purposely set phi.regions to None for this exercise, since we don't care about existing regions, and likely we want to overwrite them.
phi.regions = None
def optimize_region_residual(wl, residuals, sigma, mu):
    '''
    Determine the optimal parameters for the line kernels by fitting a Gaussian directly to the residuals.

    Uses the module-global ``args.sigma0`` (AA) window, the global ``phi``
    parameters, and the Starfish config for the starting guess.

    :param wl: wavelength array for the order
    :param residuals: data - model residual spectrum on the same grid
    :param sigma: per-pixel noise array
    :param mu: central wavelength (AA) of the region to fit
    :returns: ``[amp, sigma]`` of the best-fit Gaussian, or the initial
        guess if the Cholesky factorization fails.
    '''
    # Using sigma0, truncate the wavelength vector and residuals to include
    # only those portions that fall in the range [mu - sigma0, mu + sigma0]
    ind = (wl > mu - args.sigma0) & (wl < mu + args.sigma0)
    wl = wl[ind]
    R = residuals[ind]
    sigma = sigma[ind]
    # Diagonal data (noise) covariance, scaled by the global sigAmp parameter.
    sigma_mat = phi.sigAmp * sigma**2 * np.eye(len(wl))
    max_r = 6.0 * phi.l # [km/s]
    k_func = make_k_func(phi)
    # Use the full covariance matrix when doing the likelihood eval
    CC = get_dense_C(wl, k_func=k_func, max_r=max_r) + sigma_mat
    # The covariance is fixed for this region, so factor it once up front.
    factor, flag = cho_factor(CC)
    logdet = np.sum(2 * np.log((np.diag(factor))))
    # Distance of each pixel from the line center, converted to velocity.
    rr = C.c_kms/mu * np.abs(mu - wl) # Km/s
    def fprob(p):
        # The likelihood function
        # Requires sign about amplitude, so we can't use log.
        amp, sig = p
        gauss = amp * np.exp(-0.5 * rr**2/sig**2)
        r = R - gauss
        # Create a Gaussian using these parameters, and re-evaluate the residual
        lnprob = -0.5 * (np.dot(r, cho_solve((factor, flag), r)) + logdet)
        return lnprob
    par = Starfish.config["region_params"]
    p0 = np.array([10**par["logAmp"], par["sigma"]])
    # fmin minimizes, so negate the log-likelihood.
    f = lambda x: -fprob(x)
    try:
        p = fmin(f, p0, maxiter=10000, maxfun=10000, disp=False)
        # print(p)
        return p
    except np.linalg.linalg.LinAlgError:
        # Fall back to the starting guess when the factorization blows up.
        return p0
def optimize_region_covariance(wl, residuals, sigma, mu):
    '''
    Determine the optimal parameters for the line kernels by actually using a chunk of the covariance matrix.
    Note this actually uses the assumed global parameters.

    :param wl: wavelength array for the order
    :param residuals: data - model residual spectrum on the same grid
    :param sigma: per-pixel noise array
    :param mu: central wavelength (AA) of the region to fit
    :returns: ``[logAmp, sigma]`` of the best-fit region kernel, or the
        initial guess if the Cholesky factorization fails.

    .. warning:: ``fprob`` mutates the module-global ``phi.regions`` on
        every evaluation; ``phi.regions`` holds the last trial after return.
    '''
    # Using sigma0, truncate the wavelength vector and residuals to include
    # only those portions that fall in the range [mu - sigma0, mu + sigma0]
    ind = (wl > mu - args.sigma0) & (wl < mu + args.sigma0)
    wl = wl[ind]
    R = residuals[ind]
    sigma = sigma[ind]
    # Diagonal data (noise) covariance, scaled by the global sigAmp parameter.
    sigma_mat = phi.sigAmp * sigma**2 * np.eye(len(wl))
    max_rl = 6.0 * phi.l # [km/s]
    # Define a probability function for the residuals
    def fprob(p):
        logAmp, sigma = p
        # set phi.regions = p
        phi.regions = np.array([logAmp, mu, sigma])[np.newaxis, :]
        # Kernel support must cover both the global and the region kernel.
        max_rr = 4.0 * sigma
        max_r = max(max_rl, max_rr)
        k_func = make_k_func(phi)
        # Unlike optimize_region_residual, the covariance depends on the
        # trial parameters, so it is rebuilt and re-factored every call.
        CC = get_dense_C(wl, k_func=k_func, max_r=max_r) + sigma_mat
        factor, flag = cho_factor(CC)
        logdet = np.sum(2 * np.log((np.diag(factor))))
        lnprob = -0.5 * (np.dot(R, cho_solve((factor, flag), R)) + logdet)
        # print(p, lnprob)
        return lnprob
    par = Starfish.config["region_params"]
    p0 = np.array([par["logAmp"], par["sigma"]])
    # fmin minimizes, so negate the log-likelihood.
    f = lambda x: -fprob(x)
    try:
        p = fmin(f, p0, maxiter=10000, maxfun=10000)
        print(p)
        return p
    except np.linalg.linalg.LinAlgError:
        # Fall back to the starting guess when the factorization blows up.
        return p0
# Regions will be a 2D array with shape (nregions, 3): [logAmp, mu, sigma]
regions = []
for mu in mus:
    # Alternative approach: fit a Gaussian directly to the residuals.
    # amp, sig = optimize_region_residual(wl, resid, sigma, mu)
    # regions.append([np.log10(np.abs(amp)), mu, sig])
    logAmp, sig = optimize_region_covariance(wl, resid, sigma, mu)
    regions.append([logAmp, mu, sig])
# Add these values back to the phi parameter file and save
phi.regions = np.array(regions)
phi.save()
| 30.929412 | 135 | 0.659947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,046 | 0.389121 |
3e15b565f2c5c8e4188c7106981c4468935c3719 | 2,261 | py | Python | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | [
"MIT"
] | null | null | null | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | [
"MIT"
] | null | null | null | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | [
"MIT"
] | null | null | null | import requests
from urllib.request import urlopen
from bs4 import BeautifulSoup
def get_urls_and_last_updates(): # Fetch the URL and last-update date of each dataset available on OpenDataSUS
    urls = list()
    last_ups = list()
    try:
        # Scrape the dataset listing page (1s timeout per request).
        html = BeautifulSoup(urlopen('https://opendatasus.saude.gov.br/dataset/casos-nacionais', timeout=1).read(), 'html.parser')
        p = 0
        anchor_data = html.select('a.resource-url-analytics')
        anchor_last = html.select('a.heading')
        for url_data, last_up in zip(anchor_data, anchor_last):
            # Skip the "pretty"-printed preview links; keep only raw downloads.
            if 'pretty' not in url_data['href']:
                urls.append(url_data['href'])
                # Open the resource detail page and take the first <td> as
                # its last-update date.
                html = BeautifulSoup(urlopen(f'https://opendatasus.saude.gov.br{last_up["href"]}', timeout=1).read(), 'html.parser')
                last_ups.append(html.select('td')[0].text)
            p += 1
            # Text progress bar over the anchors processed so far.
            print('\r[', u'\u2588' * p, ' ' * (len(anchor_data) - p), f'] - {p*100/len(anchor_data):.2f}%', end='')
        print()
    except Exception as e:
        # Best-effort scraping: report the failure and fall through with
        # whatever was collected so far.
        print(e)
    finally:
        # NOTE(review): ``return`` inside ``finally`` also suppresses
        # BaseExceptions such as KeyboardInterrupt — confirm this is intended.
        return urls, last_ups
def download(url_base): # Download the dataset at url_base into the ./Bases folder
    """Stream-download *url_base* into ``./Bases/``, showing a progress bar.

    The response is read in 1 KiB chunks.  If the server omits the
    ``content-length`` header the progress bar is skipped instead of
    crashing on ``int(None)``.

    Raises:
        requests.HTTPError: when the response status is not OK.
    """
    r = requests.get(url_base, stream=True)
    if r.status_code == requests.codes.OK:
        # File name: everything after the last '/' of the URL.
        arq = url_base[url_base.rfind("/") + 1:]
        with open(f'./Bases/{arq}', 'wb') as file:
            # ``content-length`` may be absent on chunked responses; fall
            # back to 0 and suppress the progress bar in that case.
            file_len = int(r.headers.get('content-length') or 0)
            p = 0
            for data in r.iter_content(chunk_size=1024):
                p += len(data)
                if file_len > 0:
                    filled = int(30 * p / file_len)
                    print('\r[', u'\u2588' * filled, ' ' * (30 - filled), end='] - ')
                    print(f'{p * 100 / file_len:.2f}%', end='')
                file.write(data)
            print()
    else:
        r.raise_for_status()
# Script entry: discover the available datasets, then download each in turn.
print('Pegando informações para download das bases...')
urls_bases, last_updates = get_urls_and_last_updates()
if len(urls_bases) > 0:
    print('Iniciando Downloads...')
    progress = 0
    for url in urls_bases:
        # Show the file name, its last-update date and a (current/total) counter.
        print(f'Baixando {url[url.rfind("/") + 1:]} - {last_updates[progress]} - ({progress + 1:0>2}/{len(urls_bases)})')
        download(url)
        progress += 1
else:
    print('Problema ao resgatar as URLs das bases!')
| 39.666667 | 132 | 0.587793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 722 | 0.318483 |
3e16ddbf593ddf87a424ef3546058ed337f938d3 | 10,699 | py | Python | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | [
"Apache-2.0"
] | 19 | 2022-01-25T12:37:51.000Z | 2022-03-30T17:12:45.000Z | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | [
"Apache-2.0"
] | 1 | 2022-02-08T23:02:42.000Z | 2022-02-08T23:02:42.000Z | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pytype: skip-file
"""Tests for rax._src.utils."""
import doctest
from absl.testing import absltest
import jax
import jax.numpy as jnp
import numpy as np
import rax
from rax._src import utils
class NormalizeProbabilitiesTest(absltest.TestCase):
  """Tests for ``utils.normalize_probabilities``."""

  def test_sums_to_one_for_given_axis(self):
    """Normalizing along an explicit axis makes that axis sum to 1."""
    arr = jnp.asarray([[0., 1., 2.], [3., 4., 5.]])
    result1 = utils.normalize_probabilities(arr, axis=0)
    result2 = utils.normalize_probabilities(arr, axis=1)
    np.testing.assert_array_equal(
        result1, jnp.asarray([[0., 1. / 5., 2. / 7.], [1., 4. / 5., 5. / 7.]]))
    np.testing.assert_array_equal(
        result2,
        jnp.asarray([[0., 1. / 3., 2. / 3.], [3. / 12., 4. / 12., 5. / 12.]]))

  def test_sums_to_one_for_default_axis(self):
    """Without an axis argument, the last axis is normalized."""
    arr = jnp.asarray([[0., 1., 2.], [3., 4., 5.]])
    result = utils.normalize_probabilities(arr)
    np.testing.assert_array_equal(
        result,
        jnp.asarray([[0., 1. / 3., 2. / 3.], [3. / 12., 4. / 12., 5. / 12.]]))

  def test_handles_where(self):
    """Masked entries are excluded; unmasked entries still sum to 1."""
    arr = jnp.asarray([[0., 1., 2.], [3., 4., 5.]])
    where = jnp.asarray([[True, False, True], [True, True, True]])
    result = utils.normalize_probabilities(arr, where, axis=1)
    np.testing.assert_array_equal(
        jnp.sum(result, axis=1, where=where), jnp.asarray([1., 1.]))

  def test_correctly_sets_all_zeros(self):
    """An all-zero input still yields a valid (uniform-summing) distribution."""
    arr = jnp.asarray([[0., 0., 0.], [0., 0., 0.]])
    result1 = utils.normalize_probabilities(arr, axis=0)
    result2 = utils.normalize_probabilities(arr, axis=1)
    np.testing.assert_array_equal(
        jnp.sum(result1, axis=0), jnp.asarray([1., 1., 1.]))
    np.testing.assert_array_equal(
        jnp.sum(result2, axis=1), jnp.asarray([1., 1.]))

  def test_correctly_handles_all_masked(self):
    """A fully-masked axis still sums to 1 rather than producing NaNs."""
    arr = jnp.asarray([[2., 1., 3.], [1., 1., 1.]])
    where = jnp.asarray([[False, False, False], [False, False, False]])
    result1 = utils.normalize_probabilities(arr, where, axis=0)
    result2 = utils.normalize_probabilities(arr, where, axis=1)
    np.testing.assert_array_equal(
        jnp.sum(result1, axis=0), jnp.asarray([1., 1., 1.]))
    np.testing.assert_array_equal(
        jnp.sum(result2, axis=1), jnp.asarray([1., 1.]))
class LogCumsumExp(absltest.TestCase):
  """Tests for ``utils.logcumsumexp``."""

  def test_computes_logcumsumexp(self):
    """Each output element is log of the cumulative sum of exponentials."""
    x = jnp.asarray([-4., 5., 2.3, 0.])
    result = utils.logcumsumexp(x)
    np.testing.assert_array_equal(
        result,
        jnp.asarray([
            jnp.log(jnp.exp(-4.)),
            jnp.log(jnp.exp(-4.) + jnp.exp(5.)),
            jnp.log(jnp.exp(-4.) + jnp.exp(5.) + jnp.exp(2.3)),
            jnp.log(jnp.exp(-4.) + jnp.exp(5.) + jnp.exp(2.3) + jnp.exp(0.))
        ]))

  def test_computes_over_specified_axis(self):
    """The axis argument selects rows (axis=-1) or columns (axis=0)."""
    x = jnp.asarray([[-4., 2.3, 0.], [2.2, -1.2, 1.1]])
    result = utils.logcumsumexp(x, axis=-1)
    np.testing.assert_array_equal(result[0, :], utils.logcumsumexp(x[0, :]))
    np.testing.assert_array_equal(result[1, :], utils.logcumsumexp(x[1, :]))
    result = utils.logcumsumexp(x, axis=0)
    np.testing.assert_array_equal(result[:, 0], utils.logcumsumexp(x[:, 0]))
    np.testing.assert_array_equal(result[:, 1], utils.logcumsumexp(x[:, 1]))
    np.testing.assert_array_equal(result[:, 2], utils.logcumsumexp(x[:, 2]))

  def test_computes_reversed(self):
    """reverse=True equals flipping the input, accumulating, and flipping back."""
    x = jnp.asarray([-4., 5., 2.3, 0.])
    x_flipped = jnp.asarray([0., 2.3, 5., -4.])
    result_reverse = utils.logcumsumexp(x, reverse=True)
    result_flipped = jnp.flip(utils.logcumsumexp(x_flipped))
    np.testing.assert_array_equal(result_reverse, result_flipped)

  def test_computes_with_where_mask(self):
    """Masked-out elements do not contribute to the cumulative sum."""
    x = jnp.asarray([-4., 5., 2.3, 0.])
    where = jnp.asarray([True, False, True, True])
    x_masked = jnp.asarray([-4., 2.3, 0.])
    result_where = utils.logcumsumexp(x, where=where)
    result_masked = utils.logcumsumexp(x_masked)
    np.testing.assert_array_equal(result_where[0], result_masked[0])
    np.testing.assert_array_equal(result_where[2], result_masked[1])
    np.testing.assert_array_equal(result_where[3], result_masked[2])

  def test_handles_extreme_values(self):
    """Very large/small magnitudes do not overflow or produce NaNs."""
    x = jnp.asarray([-4., -2.1e26, 5., 3.4e38, 10., -2.99e26])
    result = utils.logcumsumexp(x)
    np.testing.assert_array_equal(
        result, jnp.asarray([-4., -4., 5.0001235, 3.4e38, 3.4e38, 3.4e38]))
class SortByTest(absltest.TestCase):
  """Tests for ``utils.sort_by``."""

  def test_sorts_by_scores(self):
    """Tensors are reordered by descending score."""
    scores = jnp.asarray([0., 3., 1., 2.])
    tensors_to_sort = [jnp.asarray([10., 13., 11., 12.])]
    result = utils.sort_by(scores, tensors_to_sort)[0]
    np.testing.assert_array_equal(result, jnp.asarray([13., 12., 11., 10.]))

  def test_sorts_by_given_axis(self):
    """Sorting respects the requested axis."""
    scores = jnp.asarray([[3., 1., 2.], [1., 5., 3.]])
    tensors_to_sort = [jnp.asarray([[0., 1., 2.], [3., 4., 5.]])]
    result_0 = utils.sort_by(scores, tensors_to_sort, axis=0)[0]
    result_1 = utils.sort_by(scores, tensors_to_sort, axis=1)[0]
    np.testing.assert_array_equal(result_0,
                                  jnp.asarray([[0., 4., 5.], [3., 1., 2.]]))
    np.testing.assert_array_equal(result_1,
                                  jnp.asarray([[0., 2., 1.], [4., 5., 3.]]))

  def test_sorts_multiple_tensors(self):
    """All tensors in the list are permuted by the same score order."""
    scores = jnp.asarray([0., 3., 1., 2.])
    tensors_to_sort = [
        jnp.asarray([10., 13., 11., 12.]),
        jnp.asarray([50., 56., 52., 54.]),
        jnp.asarray([75., 78., 76., 77.])
    ]
    result = utils.sort_by(scores, tensors_to_sort)
    np.testing.assert_array_equal(result[0], jnp.asarray([13., 12., 11., 10.]))
    np.testing.assert_array_equal(result[1], jnp.asarray([56., 54., 52., 50.]))
    np.testing.assert_array_equal(result[2], jnp.asarray([78., 77., 76., 75.]))

  def test_places_masked_values_last(self):
    """Entries masked out by ``where`` are pushed to the end."""
    scores = jnp.asarray([0., 3., 1., 2.])
    tensors_to_sort = [jnp.asarray([10., 13., 11., 12.])]
    where = jnp.asarray([True, True, False, False])
    result = utils.sort_by(scores, tensors_to_sort, where=where)[0]
    np.testing.assert_array_equal(result, jnp.asarray([13., 10., 12., 11.]))

  def test_breaks_ties_randomly_when_key_is_provided(self):
    """With a PRNG key, tied scores are ordered randomly per key."""
    scores = jnp.asarray([0., 1., 1., 2.])
    tensors_to_sort = [jnp.asarray([10., 11.1, 11.2, 12.])]
    key = jax.random.PRNGKey(4242)
    key1, key2 = jax.random.split(key)
    result1 = utils.sort_by(scores, tensors_to_sort, key=key1)[0]
    result2 = utils.sort_by(scores, tensors_to_sort, key=key2)[0]
    np.testing.assert_array_equal(result1, jnp.asarray([12., 11.2, 11.1, 10.]))
    np.testing.assert_array_equal(result2, jnp.asarray([12., 11.1, 11.2, 10.]))
class RanksTest(absltest.TestCase):
  """Tests for ``utils.ranks``."""

  def test_ranks_by_sorting_scores(self):
    """Rank 1 is assigned to the highest score in each row."""
    scores = jnp.asarray([[0., 1., 2.], [2., 1., 3.]])
    ranks = utils.ranks(scores)
    np.testing.assert_array_equal(ranks, jnp.asarray([[3, 2, 1], [2, 3, 1]]))

  def test_ranks_along_given_axis(self):
    """Ranking respects the requested axis."""
    scores = jnp.asarray([[0., 1., 2.], [1., 2., 0.]])
    ranks = utils.ranks(scores, axis=0)
    np.testing.assert_array_equal(ranks, jnp.asarray([[2, 2, 1], [1, 1, 2]]))

  def test_ranks_with_ties_broken_randomly(self):
    """With a PRNG key, tied scores receive random (key-dependent) ranks."""
    scores = jnp.asarray([2., 1., 1.])
    key = jax.random.PRNGKey(1)
    key1, key2 = jax.random.split(key)
    ranks1 = utils.ranks(scores, key=key1)
    ranks2 = utils.ranks(scores, key=key2)
    np.testing.assert_array_equal(ranks1, jnp.asarray([1, 2, 3]))
    np.testing.assert_array_equal(ranks2, jnp.asarray([1, 3, 2]))
class ApproxRanksTest(absltest.TestCase):
  """Tests for ``utils.approx_ranks``."""

  def test_computes_approx_ranks(self):
    """Approximate rank is 1 plus the sum of pairwise sigmoids."""
    scores = jnp.asarray([-3., 1., 2.])
    ranks = utils.approx_ranks(scores)
    sigmoid = jax.nn.sigmoid
    np.testing.assert_array_equal(
        ranks,
        jnp.asarray([
            sigmoid(3. + 1.) + sigmoid(3. + 2.) + 1.0,
            sigmoid(-1. - 3.) + sigmoid(-1. + 2.) + 1.0,
            sigmoid(-2. - 3.) + sigmoid(-2. + 1.) + 1.0
        ]))

  def test_maintains_order(self):
    """Approximate ranks sort in the same order as exact ranks."""
    scores = jnp.asarray([-4., 1., -3., 2.])
    ranks = utils.approx_ranks(scores)
    true_ranks = utils.ranks(scores)
    np.testing.assert_array_equal(jnp.argsort(ranks), jnp.argsort(true_ranks))

  def test_computes_approx_ranks_with_where(self):
    """Masked entries do not influence the ranks of unmasked entries."""
    scores_without_where = jnp.asarray([3.33, 1.125])
    scores = jnp.asarray([3.33, 2.5, 1.125])
    where = jnp.asarray([True, False, True])
    ranks = utils.approx_ranks(scores_without_where)
    ranks_with_where = utils.approx_ranks(scores, where=where)
    np.testing.assert_array_equal(
        ranks, jnp.asarray([ranks_with_where[0], ranks_with_where[2]]))
class SafeReduceTest(absltest.TestCase):
  """Tests for ``utils.safe_reduce``."""

  def test_reduces_values_according_to_fn(self):
    """mean/sum reduce as expected; reduce_fn=None returns the input."""
    a = jnp.array([[3., 2.], [4.5, 1.2]])
    res_mean = utils.safe_reduce(a, reduce_fn=jnp.mean)
    res_sum = utils.safe_reduce(a, reduce_fn=jnp.sum)
    res_none = utils.safe_reduce(a, reduce_fn=None)
    np.testing.assert_allclose(res_mean, jnp.mean(a))
    np.testing.assert_allclose(res_sum, jnp.sum(a))
    np.testing.assert_allclose(res_none, a)

  def test_reduces_values_with_mask(self):
    """With ``where``, reductions ignore masked entries; None zeroes them."""
    a = jnp.array([[3., 2., 0.01], [4.5, 1.2, 0.9]])
    where = jnp.array([[True, False, True], [True, True, False]])
    res_mean = utils.safe_reduce(a, where=where, reduce_fn=jnp.mean)
    res_sum = utils.safe_reduce(a, where=where, reduce_fn=jnp.sum)
    res_none = utils.safe_reduce(a, where=where, reduce_fn=None)
    np.testing.assert_allclose(res_mean, jnp.mean(a, where=where))
    np.testing.assert_allclose(res_sum, jnp.sum(a, where=where))
    np.testing.assert_allclose(res_none, jnp.where(where, a, 0.))

  def test_reduces_mean_with_all_masked(self):
    """A fully-masked mean reduces to 0 instead of NaN."""
    a = jnp.array([[3., 2., 0.01], [4.5, 1.2, 0.9]])
    where = jnp.array([[False, False, False], [False, False, False]])
    res_mean = utils.safe_reduce(a, where=where, reduce_fn=jnp.mean)
    np.testing.assert_allclose(res_mean, jnp.array(0.))
def load_tests(loader, tests, ignore):
  """unittest ``load_tests`` hook: also run the doctests in ``utils``."""
  del loader, ignore  # Unused.
  tests.addTests(
      doctest.DocTestSuite(
          utils, extraglobs={
              "jax": jax,
              "jnp": jnp,
              "rax": rax
          }))
  return tests

if __name__ == "__main__":
  absltest.main()
| 34.291667 | 79 | 0.639873 | 9,608 | 0.898028 | 0 | 0 | 0 | 0 | 0 | 0 | 646 | 0.060379 |
3e182689577a11bad1e8f7437a3d622ced715f94 | 427 | py | Python | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | [
"MIT"
] | 5 | 2022-03-05T23:29:33.000Z | 2022-03-20T07:44:20.000Z | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | [
"MIT"
] | null | null | null | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | [
"MIT"
] | null | null | null | import asyncio
from fusionsid import Decorators
# The decorator class can be used directly, via an alias, or via a bound
# attribute — all three styles are demonstrated below.
deco = Decorators
do_roast = deco.roast
@deco.compliment() # will give you a compliment before the function is run
@Decorators.fact() # you can just put the class name and use that instead of setting it to a var
@do_roast() # you can set it to a variable and use that
async def main():
    print("Wassup")
# Drive the decorated coroutine on a fresh event loop.
loop = asyncio.new_event_loop()
loop.run_until_complete(main())
| 23.722222 | 97 | 0.744731 | 0 | 0 | 0 | 0 | 268 | 0.627635 | 37 | 0.086651 | 183 | 0.428571 |
3e188c93ed7a3552c4548ac6fc5970107dcdbcdb | 2,303 | py | Python | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | [
"Apache-2.0"
] | null | null | null | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | [
"Apache-2.0"
] | null | null | null | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | [
"Apache-2.0"
] | null | null | null | # Basiskonfigurationsfile
# Base configuration: CentripetalNet with an Hourglass-104 backbone.
_base_ = '../centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py'
model = dict(
    type='CornerNet',
    backbone=dict(
        type='HourglassNet',
        downsample_times=5,
        num_stacks=2,
        stage_channels=[256, 256, 384, 384, 384, 512],
        stage_blocks=[2, 2, 2, 2, 2, 4],
        norm_cfg=dict(type='BN', requires_grad=True)),
    neck=None,
    bbox_head=dict(
        type='CentripetalHead',
        num_classes=3,  # matches the 3 custom classes below
        in_channels=256,
        num_feat_levels=2,
        corner_emb_channels=0,
        loss_heatmap=dict(
            type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1),
        loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1),
        loss_guiding_shift=dict(
            type='SmoothL1Loss', beta=1.0, loss_weight=0.05),
        loss_centripetal_shift=dict(
            type='SmoothL1Loss', beta=1.0, loss_weight=1))
)
# Custom COCO-format dataset: lynx, red fox, wolf.
dataset_type = 'COCODataset'
classes = ('luchs', 'rotfuchs', 'wolf')
data = dict(
    samples_per_gpu=3, # default 6
    workers_per_gpu=1, # default 3
    train=dict(
        img_prefix='customData/train/',
        classes=classes,
        ann_file='customData/train/_annotations.coco.json'),
    val=dict(
        img_prefix='customData/valid/',
        classes=classes,
        ann_file='customData/valid/_annotations.coco.json'),
    test=dict(
        img_prefix='customData/test/',
        classes=classes,
        ann_file='customData/test/_annotations.coco.json'))
# Learning rate scaled linearly with the total batch size (lr = 0.02 * 6/48).
#optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) #8 GPUs => 8*6=48
optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001) #2 GPUs => 2*3=6 => 6/48= 1/8 cheetah
#optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001) #(1x6=6)
evaluation = dict(classwise=True, interval=4, metric='bbox')
# Fine-tune from the published COCO checkpoint (URL at bottom of file).
load_from = 'checkpoints/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth'
work_dir = '/media/storage1/projects/WilLiCam/checkpoint_workdir/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu'
#http://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth
| 37.145161 | 191 | 0.691706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,065 | 0.46244 |
3e1c92be5d3fa432577c6a625de6487e656413d6 | 3,175 | py | Python | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | [
"MIT"
] | 12 | 2016-01-30T02:28:35.000Z | 2019-05-29T15:49:56.000Z | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | [
"MIT"
] | 455 | 2015-07-27T20:21:56.000Z | 2022-03-11T23:26:20.000Z | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | [
"MIT"
] | 14 | 2015-07-29T09:45:53.000Z | 2020-10-21T20:03:17.000Z | import json
import mock
import os
from django.contrib.auth import get_user_model
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import Client
from firecares.firestation.models import FireDepartment, FireStation, DataFeedback
from firecares.firecares_core.tests.base import BaseFirecaresTestcase
User = get_user_model()  # Resolve the active Django user model.
class FeedbackTests(BaseFirecaresTestcase):
    """Tests for the data-feedback form on fire department pages."""

    @mock.patch('geopy.geocoders.base.urllib_urlopen')
    def test_feedback_form(self, urllib_urlopen):
        """
        Test the feedback form submission.

        Covers: login required, GET rejected (POST-only), successful
        submission with and without a fire station (sending notification
        email), and validation errors for missing fields.
        """
        # Stub the geocoder's HTTP call with a canned response so station
        # creation does not hit the network.
        c = urllib_urlopen.return_value
        c.read.return_value = open(os.path.join(os.path.dirname(__file__), 'mock/geocode.json')).read()
        c.headers.getparam.return_value = 'utf-8'
        c = Client()
        with self.settings(DATA_FEEDBACK_EMAILS=(('Test Admin', 'admin@example.com'),)):
            # Create fire department and fire station
            fd = FireDepartment.objects.create(name='Fire Department 1')
            fs = FireStation.create_station(department=fd, address_string='1', name='Fire Station 1')
            feedback_url = reverse('firedepartment_data_feedback_slug', kwargs={'pk': fd.id, 'slug': fd.slug})
            # Anonymous users are redirected to the login page.
            response = c.get(feedback_url)
            self.assert_redirect_to_login(response)
            # Test only post allowed
            c.login(**self.non_admin_creds)
            get_response = c.get(feedback_url)
            self.assertEqual(get_response.status_code, 405)
            # Test email sent
            response = c.post(feedback_url, {
                'department': fd.id,
                'firestation': fs.id,
                'user': self.non_admin_user.id,
                'message': 'This is a test'
            })
            self.assertEqual(response.status_code, 201)
            self.assertEqual(DataFeedback.objects.filter(department=fd, firestation=fs).count(), 1)
            self.assertEqual(len(mail.outbox), 1)
            self.assert_email_appears_valid(mail.outbox[0])
            self.assertListEqual(mail.outbox[0].reply_to, ['non_admin@example.com'])
            # The notification email must identify the department, station,
            # submitting user and the message itself.
            mail_body = mail.outbox[0].body
            self.assertTrue(fd.name in mail_body)
            self.assertTrue(fs.name in mail_body)
            self.assertTrue(self.non_admin_user.username in mail_body)
            self.assertTrue(self.non_admin_user.email in mail_body)
            self.assertTrue('This is a test' in mail_body)
            # Test without fire station
            response = c.post(feedback_url, {
                'department': fd.id,
                'user': self.non_admin_user.id,
                'message': 'This is a test'
            })
            self.assertEqual(len(mail.outbox), 2)
            self.assert_email_appears_valid(mail.outbox[1])
            self.assertTrue('Fire Station:' not in mail.outbox[1].body)
            # Test invalid data
            response = c.post(feedback_url, {
                'department': fd.id,
                'message': 'This is a test'
            })
            self.assertEqual(response.status_code, 400)
            self.assertTrue('user' in json.loads(response.content))
| 42.333333 | 110 | 0.627402 | 2,808 | 0.884409 | 0 | 0 | 2,760 | 0.869291 | 0 | 0 | 558 | 0.175748 |
3e1d0a3af98bd0674017ebccc343c54e9d715f7e | 325 | py | Python | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | [
"MIT"
] | null | null | null | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | [
"MIT"
] | null | null | null | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | [
"MIT"
] | null | null | null | from .gtkscheduler import GtkScheduler
from .pygamescheduler import PyGameScheduler
from .qtscheduler import QtScheduler
from .tkinterscheduler import TkinterScheduler
from .wxscheduler import WxScheduler

# Explicit public API of the mainloop scheduler subpackage.
__all__ = [
    "GtkScheduler",
    "PyGameScheduler",
    "QtScheduler",
    "TkinterScheduler",
    "WxScheduler",
]
| 23.214286 | 46 | 0.775385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.230769 |
3e1d352c7922c834041d16e3fdaa531ed2d9b63c | 75 | py | Python | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | [
"MIT"
] | 3 | 2016-10-10T20:18:51.000Z | 2018-05-01T19:42:10.000Z | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | [
"MIT"
] | 38 | 2016-08-22T03:20:25.000Z | 2018-06-11T19:13:05.000Z | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | [
"MIT"
] | 7 | 2016-08-29T17:37:42.000Z | 2022-01-28T00:30:10.000Z | import map2annotation
if __name__ == '__main__':
map2annotation.main() | 18.75 | 26 | 0.746667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.133333 |
3e1f67673a3fc56ac1f18b117525630eebc9c6b7 | 207 | py | Python | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | [
"MIT"
] | null | null | null | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | [
"MIT"
] | null | null | null | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | [
"MIT"
] | null | null | null | import dataclasses
from dataclasses import dataclass
@dataclass
class RulePattern:
    """A single matcher rule: an identifier, a label and a pattern string."""
    id: str
    label: str
    pattern: str

    @property
    def as_dict(self):
        """Return the rule pattern as a plain ``dict`` of its fields."""
        return {f.name: getattr(self, f.name) for f in dataclasses.fields(self)}
| 14.785714 | 39 | 0.695652 | 140 | 0.676329 | 0 | 0 | 151 | 0.729469 | 0 | 0 | 0 | 0 |
3e1ff4f3721e11c343d5fecb39e8c79bd6aafa15 | 1,014 | py | Python | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | [
"MIT"
] | 7 | 2015-05-19T07:54:48.000Z | 2021-12-17T06:01:24.000Z | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | [
"MIT"
] | null | null | null | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | [
"MIT"
] | 4 | 2015-11-24T06:27:16.000Z | 2021-05-29T10:50:44.000Z | #!/usr/bin/python
# ----------------------------------------------------------------------------
# cocos2d "install" plugin
#
# Authr: Luis Parravicini
#
# License: MIT
# ----------------------------------------------------------------------------
'''
"run" plugin for cocos2d command line tool
'''
__docformat__ = 'restructuredtext'
import sys
import os
import cocos2d
class CCPluginRun(cocos2d.CCPlugin):
    """Plugin that launches an already-compiled-and-installed app on a device."""

    @staticmethod
    def depends_on():
        # Launching only makes sense after the project is built and installed.
        return ('compile', 'install')

    @staticmethod
    def plugin_name():
        return "run"

    @staticmethod
    def brief_description():
        return "compiles a project and install the files on a device"

    def run(self, argv, dependencies):
        """Start the installed activity on the connected device via adb."""
        cocos2d.Logging.info("starting application")
        self.parse_args(argv)
        install_dep = dependencies['install']
        # Package/activity come from the "install" plugin that ran before us.
        launch_cmd = "adb shell am start -n %s/%s" % (
            install_dep.package, install_dep.activity)
        self._run_cmd(launch_cmd)
| 24.142857 | 78 | 0.554241 | 640 | 0.631164 | 0 | 0 | 240 | 0.236686 | 0 | 0 | 506 | 0.499014 |
3e22427e89b56fa4293c96f943f7ce0b77c3a1a7 | 2,759 | py | Python | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | [
"MIT"
] | 14 | 2019-01-08T14:32:31.000Z | 2021-11-17T21:07:10.000Z | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | [
"MIT"
] | 2 | 2019-05-14T08:56:36.000Z | 2020-12-22T16:44:30.000Z | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | [
"MIT"
] | 4 | 2019-03-12T20:03:54.000Z | 2022-03-08T14:24:46.000Z | import configparser
class Configuration():
    """Holds the application's default settings and persists them as an INI file."""

    # Nested mapping of <INI section> -> {<option name>: <default value>}.
    # configparser.read_dict() converts every value to a string on write.
    DefaultDic = {
        'windowDefault': {
            'HS': 0,
            'VS': 0,
            'energy': 20,
            'azimuth': 0,
            'scaleBarLength': 5,
            'chiRange': 60,
            'width': 0.4,
            'widthSliderScale': 100,
            'radius': 5,
            'radiusMaximum': 20,
            'radiusSliderScale': 10,
            'tiltAngle': 0,
            'tiltAngleSliderScale': 10,
        },
        'propertiesDefault': {
            'sensitivity': 361.13,
            'electronEnergy': 20,
            'azimuth': 0,
            'scaleBarLength': 5,
            'brightness': 20,
            'brightnessMinimum': 0,
            'brightnessMaximum': 100,
            'blackLevel': 50,
            'blackLevelMinimum': 0,
            'blackLevelMaximum': 500,
            'integralHalfWidth': 0.4,
            'widthMinimum': 0,
            'widthMaximum': 1,
            'widthSliderScale': 100,
            'chiRange': 60,
            'chiRangeMinimum': 0,
            'chiRangeMaximum': 180,
            'radius': 5,
            'radiusMinimum': 0,
            'radiusMaximum': 20,
            'radiusSliderScale': 10,
            'tiltAngle': 0,
            'tiltAngleMinimum': -15,
            'tiltAngleMaximum': 15,
            'tiltAngleSliderScale': 10,
        },
        'canvasDefault': {
            'widthInAngstrom': 0.4,
            'radiusMaximum': 20,
            'span': 60,
            'tilt': 0,
            'max_zoom_factor': 21,
        },
        'chartDefault': {'theme': 1},
    }

    def save_defaults(self, Dic=DefaultDic, path='./configuration.ini'):
        """Write *Dic* to *path* in INI format.

        :param Dic: nested section/option mapping (defaults to ``DefaultDic``).
        :param path: destination file.  Defaults to './configuration.ini' to
            stay backward compatible with the previously hard-coded location.
        """
        config = configparser.ConfigParser()
        config.read_dict(Dic)
        with open(path, 'w') as configfile:
            config.write(configfile)
| 51.092593 | 66 | 0.303733 | 2,737 | 0.992026 | 0 | 0 | 0 | 0 | 0 | 0 | 695 | 0.251903 |
3e24e04ad5a6a1e6faafb25c71a578a2c2c42a6c | 4,772 | py | Python | api/api/endpoints/sensor_info.py | andschneider/ss_api | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | [
"MIT"
] | null | null | null | api/api/endpoints/sensor_info.py | andschneider/ss_api | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | [
"MIT"
] | 2 | 2019-12-26T17:31:56.000Z | 2020-01-06T19:45:05.000Z | api/api/endpoints/sensor_info.py | andschneider/soil_sense | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | [
"MIT"
] | null | null | null | import datetime
import json
from flask import Response, request, Blueprint
from flask_jwt_extended import jwt_required
from flask_restplus import Api, Namespace, Resource, reqparse
from sqlalchemy.exc import IntegrityError
from api.core.db_execptions import bad_db_response
from api.core.models import SensorInfoModel, SensorDataModel
from api import db
# Namespace groups the /sensor_info endpoints in the generated Swagger docs.
api = Namespace(
    "sensor_info",
    description="Sensor information: sensor id, plant name, and moisture alert level.",
)
# Shared request parser for POST bodies: both fields are mandatory.
post_args = reqparse.RequestParser()
post_args.add_argument("plant", type=str, required=True, help="Plant name.")
post_args.add_argument(
    "alert_level", type=int, required=True, help="Alert level for moisture."
)
@api.route("/sensor_info/<int:sensor_id>")
class SensorInfo(Resource):
    """CRUD endpoints for per-sensor metadata (plant name and alert level)."""

    @jwt_required
    def get(self, sensor_id):
        """Get sensor info for a given sensor_id."""
        try:
            sensor_info = SensorInfoModel.query.filter_by(sensor_id=sensor_id).first()
            response = {
                "message": "success",
                "data": {
                    "sensor_id": sensor_info.sensor_id,
                    "plant_name": sensor_info.plant,
                    "alert_level": sensor_info.alert_level,
                },
            }
            return Response(
                response=json.dumps(response), status=200, mimetype="application/json"
            )
        except Exception as e:
            return bad_db_response(e.args)

    @jwt_required
    @api.expect(post_args)
    def post(self, sensor_id):
        """Creates a new sensor info entry."""
        args = post_args.parse_args()
        try:
            sensor_info = SensorInfoModel(
                sensor_id=sensor_id,
                plant=args["plant"],
                alert_level=args["alert_level"],
            )
            db.session.add(sensor_info)
            db.session.commit()
            response = {"message": "success"}
        except IntegrityError:
            # Primary-key collision: this sensor already has an info row.
            response = {
                "message": f"Sensor id {sensor_id} already exists in database. Try updating or deleting first."
            }
            return Response(
                response=json.dumps(response), status=409, mimetype="application/json"
            )
        except Exception as e:
            return bad_db_response(e.args)
        return Response(
            response=json.dumps(response), status=201, mimetype="application/json"
        )

    @jwt_required
    @api.doc(
        params={"plant": "Plant name.", "alert_level": "Alert level for moisture."}
    )
    def put(self, sensor_id):
        """Updates a sensor info entry.

        One or both of 'plant' and 'alert_level' must be supplied.
        """
        parser = reqparse.RequestParser()
        parser.add_argument("plant", type=str)
        parser.add_argument("alert_level", type=int)
        args = parser.parse_args()

        if not any(list(args.values())):
            return Response(
                response=json.dumps(
                    {
                        "message": "Both arguments are empty. Try checking your parameter names."
                    }
                ),
                status=400,
                mimetype="application/json",
            )

        now = datetime.datetime.utcnow()
        sensor_info = SensorInfoModel.query.filter_by(sensor_id=sensor_id).first()
        if sensor_info:
            try:
                # Only overwrite the fields that were actually supplied.
                if args["plant"]:
                    sensor_info.plant = args["plant"]
                if args["alert_level"]:
                    sensor_info.alert_level = args["alert_level"]
                sensor_info.updated = now
                db.session.commit()
                response = {"message": f"Sensor id {sensor_id} successfully updated"}
                return Response(
                    response=json.dumps(response),
                    status=200,
                    mimetype="application/json",
                )
            except Exception as e:
                return bad_db_response(e.args)
        # Previously a missing sensor fell through and implicitly returned
        # None, which Flask surfaces as a 500; report an explicit 404 instead.
        response = {"message": f"Sensor id {sensor_id} does not exist."}
        return Response(
            response=json.dumps(response), status=404, mimetype="application/json"
        )

    @jwt_required
    def delete(self, sensor_id):
        """Deletes a sensor info entry."""
        try:
            sensor_info = (
                db.session.query(SensorInfoModel).filter_by(sensor_id=sensor_id).first()
            )
            if sensor_info is None:
                # Deleting a non-existent row used to blow up inside
                # SQLAlchemy and come back as a generic DB error; 404 instead.
                response = {"message": f"Sensor id {sensor_id} does not exist."}
                return Response(
                    response=json.dumps(response), status=404, mimetype="application/json"
                )
            db.session.delete(sensor_info)
            db.session.commit()
            response = {"message": f"Sensor id {sensor_id} successfully deleted"}
            return Response(
                response=json.dumps(response), status=200, mimetype="application/json"
            )
        except Exception as e:
            return bad_db_response(e.args)
| 33.843972 | 111 | 0.573135 | 4,023 | 0.843043 | 0 | 0 | 4,066 | 0.852054 | 0 | 0 | 1,103 | 0.23114 |
3e24efcd76f37df58a59baf6375172236bdbc8f9 | 116 | py | Python | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | [
"BSD-3-Clause"
] | 1 | 2019-05-28T14:55:58.000Z | 2019-05-28T14:55:58.000Z | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | [
"BSD-3-Clause"
] | null | null | null | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | [
"BSD-3-Clause"
] | 1 | 2020-03-29T10:13:28.000Z | 2020-03-29T10:13:28.000Z | # -*- coding: utf-8 -*-
def slug2id(slug):
    """Convert a notification slug back to its database primary key.

    ``long`` existed only in Python 2; ``int`` has arbitrary precision on
    both interpreters, so it is a drop-in portable replacement.
    """
    return int(slug) - 110909
def id2slug(id):
return id + 110909
| 14.5 | 30 | 0.603448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.198276 |
3e263e2d36efcfc4b3135f0a65636317114a2c8d | 995 | py | Python | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | [
"MIT"
] | 1 | 2021-10-02T12:48:25.000Z | 2021-10-02T12:48:25.000Z | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | [
"MIT"
] | null | null | null | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | [
"MIT"
] | 1 | 2021-10-18T12:34:26.000Z | 2021-10-18T12:34:26.000Z | import tkinter, hashlib
root = tkinter.Tk()
root.title("Hash Calculator")
label = tkinter.Label(text="Write the string to hash")
label.pack()
option = tkinter.StringVar()
option.set("sha224")
string = tkinter.StringVar()
entry = tkinter.Entry(root, textvariable=string, width=150, justify="center")
entry.pack()
hexdigest = tkinter.StringVar()
label = tkinter.Entry(text="", textvariable=hexdigest, width=150, justify="center", state="readonly")
label.pack()
def callback(*args):
encoded_string = string.get().encode()
command = "hashlib." + option.get() + "(encoded_string)"
result = eval(command)
hexdigest.set(result.hexdigest())
string.trace_add("write", callback)
option.trace_add("write", callback)
algorithms = ["sha224", "sha1", "blake2s", "sha3_384", "sha256", "blake2b", "sha384", "sha3_256", "sha3_512", "md5", "sha512", "sha3_224"]
menu = tkinter.OptionMenu(root, option, *algorithms)
menu.pack()
callback()
root.mainloop()
| 28.428571 | 139 | 0.684422 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 222 | 0.223116 |
3e28aa85ecfaa56c22716b2abd9f954c4b0ab246 | 136 | py | Python | tensorflow/__init__.py | vsilyaev/tensorflow | f41959ccb2d9d4c722fe8fc3351401d53bcf4900 | [
"Apache-2.0"
] | 4 | 2015-11-10T14:11:39.000Z | 2021-11-17T11:11:25.000Z | tensorflow/__init__.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | [
"Apache-2.0"
] | null | null | null | tensorflow/__init__.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | [
"Apache-2.0"
] | 2 | 2015-11-13T21:11:49.000Z | 2015-11-29T04:13:49.000Z | # Bring in all of the public TensorFlow interface into this
# module.
# pylint: disable=wildcard-import
from tensorflow.python import *
| 27.2 | 59 | 0.786765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 101 | 0.742647 |
3e28b00c8be476ae7052942943b00801e357b52e | 4,337 | py | Python | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | [
"Apache-2.0"
] | 16 | 2016-12-07T02:45:31.000Z | 2022-01-20T11:46:24.000Z | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | [
"Apache-2.0"
] | 2 | 2017-07-20T21:12:27.000Z | 2021-09-09T14:57:01.000Z | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | [
"Apache-2.0"
] | 5 | 2017-07-28T14:06:25.000Z | 2021-09-06T12:01:18.000Z | # -*- coding: utf-8 -*-
# Copyright 2016 Dana James Traversie and Check Point Software Technologies, Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# cpauto.objects.threat
# ~~~~~~~~~~~~~~~~~~~~~
"""This module contains the classes needed to manage threat prevention objects."""
from ._common import _CommonClient
class ThreatProfile:
    """Manage threat profiles via the Check Point management API."""

    def __init__(self, core_client):
        self.__common_client = _CommonClient(core_client)

    def add(self, name='', params=None):
        """Adds a threat profile.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/add-threat-profile

        :param name: A name for the new threat profile.
        :param params: (optional) A dictionary of additional, supported
            parameter names and values.
        :rtype: CoreClientResult
        """
        # None instead of a shared mutable default dict.
        if params is None:
            params = {}
        return self.__common_client._add('add-threat-profile', name, params)

    def show(self, name='', uid='', details_level=''):
        """Shows details of a threat profile with the specified name or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-threat-profile

        :param name: (optional) The name of an existing threat profile.
        :param uid: (optional) The unique identifier of an existing threat profile.
        :param details_level: (optional) The level of detail to show. Default
            value is 'standard' and the other options are: 'uid' or 'full'
        :rtype: CoreClientResult
        """
        return self.__common_client._show('show-threat-profile', name=name,
            uid=uid, details_level=details_level)

    def set(self, name='', uid='', params=None):
        """Sets new values for an existing threat profile with the specified
        name or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/set-threat-profile

        :param name: (optional) The name of an existing threat profile.
        :param uid: (optional) The unique identifier of an existing threat profile.
        :param params: (optional) A dictionary of additional, supported
            parameter names and values.
        :rtype: CoreClientResult
        """
        if params is None:
            params = {}
        return self.__common_client._set('set-threat-profile', name=name,
            uid=uid, params=params)

    def delete(self, name='', uid='', params=None):
        """Deletes an existing threat profile with the specified name or uid.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/delete-threat-profile

        :param name: (optional) The name of an existing threat profile.
        :param uid: (optional) The unique identifier of an existing threat profile.
        :param params: (optional) A dictionary of additional, supported
            parameter names and values.
        :rtype: CoreClientResult
        """
        if params is None:
            params = {}
        return self.__common_client._delete('delete-threat-profile', name=name,
            uid=uid, params=params)

    def show_all(self, limit=50, offset=0, order=None, details_level=''):
        """Shows all threat profiles with some reasonable limitations.

        https://sc1.checkpoint.com/documents/R80/APIs/#web/show-threat-profiles

        :param limit: (optional) Limit the total number of results shown.
            The default value is 50 and allowed values are in the range 1 to 500.
        :param offset: (optional) Skip a number of items in the results
            before they are shown. Default value is 0.
        :param order: (optional) Sort the results by the specified field. The
            default is a random order.
        :param details_level: (optional) The level of detail to show. Default
            value is 'standard' and the other options are: 'uid' or 'full'
        :rtype: CoreClientResult
        """
        if order is None:
            order = []
        return self.__common_client._show_all('show-threat-profiles', limit=limit,
            offset=offset, order=order, details_level=details_level)
| 44.255102 | 113 | 0.679733 | 3,491 | 0.804934 | 0 | 0 | 0 | 0 | 0 | 0 | 3,424 | 0.789486 |
3e28e0f9797870a68b28678349b8f468bf2771ae | 387 | py | Python | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | [
"Apache-2.0"
] | null | null | null | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | [
"Apache-2.0"
] | null | null | null | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from channels.staticfiles import StaticFilesConsumer
from tandlr.notifications import consumers
# Channels v1-style routing table mapping channel names to consumer callables.
channel_routing = {
    # Plain HTTP requests go through the static-files consumer.
    'http.request': StaticFilesConsumer(),
    # Wire up websocket channels to our consumers:
    'websocket.connect': consumers.ws_connect,
    'websocket.receive': consumers.ws_receive,
    'websocket.disconnect': consumers.ws_disconnect,
}
| 25.8 | 52 | 0.74677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 143 | 0.369509 |
3e297317547f88cd2d57145599c9dcd9b0299b5a | 646 | py | Python | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | [
"MIT"
] | null | null | null | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | [
"MIT"
] | null | null | null | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | [
"MIT"
] | null | null | null | from collections import defaultdict, Counter
from itertools import product
import re
# Advent of Code 2018, day 3: overlapping fabric claims.
# Each input line looks like "#1 @ 3,2: 5x4" -> id, left, top, width, height.
with open('03.txt') as fd:
    claims_spec = [list(map(int, re.findall(r'\d+', line))) for line in fd]

# Count how many claims cover each square inch of fabric.
cover = defaultdict(int)
for claim_id, left, top, width, height in claims_spec:
    for y in range(top, top + height):
        for x in range(left, left + width):
            cover[(x, y)] += 1

# Part 1: number of square inches claimed more than once.
print(sum(1 for n in cover.values() if n > 1))

# Part 2: the claim whose every square inch is covered exactly once.
for claim_id, left, top, width, height in claims_spec:
    if all(cover[(x, y)] == 1
           for y in range(top, top + height)
           for x in range(left, left + width)):
        print(claim_id)
| 20.1875 | 45 | 0.547988 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.021672 |
3e2a44b8d417cc833a2bb62cb532d7fa7ff0e6b8 | 2,591 | py | Python | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | [
"MIT"
] | 1 | 2019-12-24T18:53:34.000Z | 2019-12-24T18:53:34.000Z | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | [
"MIT"
] | null | null | null | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | [
"MIT"
] | null | null | null | import os
import sys
import logging
import boto3
def handler(event, context):
  """Assign the lowest free 'Count' tag to a new instance in the spot cluster.

  Expects ``event['instance_id']`` and the ``cluster_name`` environment
  variable; returns a dict with the cluster name, chosen count, and the
  instance id.
  """
  logger = setup_logging(context.aws_request_id)
  logger.setLevel(logging.INFO)
  logger.info('## ENVIRONMENT VARIABLES')
  logger.info(os.environ)
  logger.info('## EVENT')
  logger.info(event)
  count = '1'
  CLUSTER_NAME = os.environ['cluster_name']
  asg = boto3.client('autoscaling')
  ec2 = boto3.client('ec2')
  asg_response = asg.describe_auto_scaling_groups(
    AutoScalingGroupNames=[
      CLUSTER_NAME
    ]
  )
  # Collect the ids of instances that are (or are about to be) serving.
  instances = []
  try:
    for i in asg_response['AutoScalingGroups'][0]['Instances']:
      if i['LifecycleState'] == 'InService' or i['LifecycleState'] == 'Pending':
        instances.append(i['InstanceId'])
  except IndexError :
    logger.error('IndexError on autoscaling')
    count = '1'
  logger.info('## INSTANCE(S) FOUND ON THE ASG')
  logger.info('instances=['+','.join(instances)+']')
  # Restrict to live instances that already carry a 'Count' tag.
  ec2_response = ec2.describe_instances(
    Filters=[
      {
        'Name': 'instance-state-name',
        'Values': [
          'pending',
          'running',
          'stopping',
          'stopped',
        ]
      },
      {
        'Name': 'tag-key',
        'Values': [
          'Count',
        ]
      }
    ],
    InstanceIds = instances
  )
  logger.info('## ACTIVE INSTANCE(S) FOUND ON THE ASG')
  logger.info('ec2_response='+str(ec2_response))
  # Gather every 'Count' tag value already in use (strings like '1', '2', ...).
  counts = []
  try :
    for i in ec2_response['Reservations']:
      for j in i['Instances']:
        for z in j['Tags']:
          if z['Key'] == 'Count':
            counts.append(z['Value'])
  except IndexError :
    logger.error('IndexError on ec2')
    count = '1'
  #counts.sort()
  # Walk upward from '1' until we land on a count no instance uses yet;
  # len(counts) increments are always enough to clear all taken values.
  for i in counts :
    if count in counts:
      count = str(int(count)+1)
    else:
      break
  # Tag the new instance with the chosen count.
  ec2.create_tags(
    Resources = [
      event['instance_id']
    ],
    Tags=[
      {
        'Key': 'Count',
        'Value': count
      }
    ]
  )
  response = {
    'cluster_name': CLUSTER_NAME,
    'count': count,
    'instance_id': event['instance_id']
  }
  logger.info('## RESPONSE')
  logger.info('response' + str(response))
  return response
def setup_logging(uuid):
  """Reset the root logger to a single stdout handler tagged with *uuid*.

  Returns the configured root logger with level DEBUG.
  """
  logger = logging.getLogger()
  # Iterate over a copy: removing entries from the live ``handlers`` list
  # while iterating it skips every other handler and leaves stale ones
  # attached (the original code had exactly that bug).
  for handler in list(logger.handlers):
    logger.removeHandler(handler)
  handler = logging.StreamHandler(sys.stdout)
  formatter = f"[%(asctime)s] [Bastion] [{uuid}] [%(levelname)s] %(message)s"
  handler.setFormatter(logging.Formatter(formatter))
  logger.addHandler(handler)
  logger.setLevel(logging.DEBUG)
  return logger
3e2c2255a47604390ebe0475399a05f36907ad33 | 4,478 | py | Python | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | [
"MIT"
] | null | null | null | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | [
"MIT"
] | null | null | null | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | [
"MIT"
] | null | null | null | #
# Copyright John Reid 2010
#
"""
Code to deal with the Bergman curated set of fly motifs.
"""
import os, biopsy.data as D, numpy as N
import xml.etree.ElementTree as ET
def xms_filename():
    "@return: The filename of the XMS file where the motifs are stored."
    # D is biopsy.data; the curated motif set ships in its data directory.
    return os.path.join(D.data_dir(), "Bergman-Fly-Motifs", "SelexConsensus1.1.xms")
def parse_xms(f):
    """Parse an XMS motif file.

    @param f: Filename or file-like object containing XMS XML.
    @return: Yields (name, alphabet, matrix, properties, threshold) tuples,
        where matrix is a (columns x 4) array of weights in A,C,G,T order.
    """
    # Column index for each nucleotide name used by the XMS format.
    base_indices = {'adenine': 0, 'cytosine': 1, 'guanine': 2, 'thymine': 3}
    tree = ET.parse(f)
    root = tree.getroot()
    for motif in root.findall('motif'):
        name = motif.find('name').text
        weightmatrix = motif.find('weightmatrix')
        columns = int(weightmatrix.get('columns'))
        alphabet = weightmatrix.get('alphabet')
        if 'DNA' != alphabet:
            # The original code silently fell through here and crashed later
            # with a NameError on alphabet_size; fail fast with a clear error.
            raise RuntimeError('Unsupported alphabet: %s' % alphabet)
        alphabet_size = 4
        matrix = N.zeros((columns, alphabet_size))
        for column in weightmatrix.findall('column'):
            pos = int(column.get('pos'))
            for weight in column.findall('weight'):
                symbol = weight.get('symbol')
                try:
                    b = base_indices[symbol]
                except KeyError:
                    raise RuntimeError('Unrecognized symbol: ' + symbol)
                matrix[pos, b] = float(weight.text)
        properties = dict()
        for prop in motif.findall('prop'):
            key = prop.find('key')
            value = prop.find('value')
            assert None != key
            assert None != value
            properties[key.text] = value.text
        threshold = float(motif.find('threshold').text)
        yield name, alphabet, matrix, properties, threshold
def write_as_custom_pssm(f, id_, name, matrix, comments=None, url=None, field_width=3, scale=1):
    """
    Write the motif as a custom PSSM to the file, f.

    Output format example::

        #
        # Drosophila Hunchback from JASPAR
        #
        ID DN-000001
        NA D$Hunchback
        WI 10
        PO 01 02 03 04 05 06 07 08 09 10
        CA 01 06 09 04 13 16 16 14 15 09
        CC 05 08 03 03 01 00 00 00 01 02
        CG 08 02 04 01 00 00 00 02 00 02
        CT 02 00 00 08 02 00 00 00 00 03
        UR None

    @param matrix: (columns x 4) array of column weights in A,C,G,T order.
    @param scale: Multiply weights by this before truncating to integers.
    """
    # ``print >> f`` is Python-2-only syntax; write explicit lines so the
    # module also runs under Python 3.  The emitted bytes are unchanged.
    def _line(text):
        f.write('%s\n' % text)
    if comments is not None:
        _line('#')
        for comment in comments:
            _line('# %s' % comment)
        _line('#')
    _line('ID %s' % id_)
    _line('NA %s' % name)
    _line('WI %s' % len(matrix))
    _line('PO %s' % ' '.join('%*d' % (field_width, i + 1) for i in range(len(matrix))))
    for b, tag in enumerate(('CA', 'CC', 'CG', 'CT')):
        _line('%s %s' % (tag, ' '.join('%*d' % (field_width, int(v)) for v in matrix[:, b] * scale)))
    _line('UR %s' % (url or 'None'))
def normalise_matrix(matrix):
    "@return: A copy of the argument whose rows each sum to one."
    row_totals = matrix.sum(axis=1)
    # Broadcasting against a (rows, 1) column vector divides each row by
    # its own total - equivalent to the transpose-divide-transpose trick.
    return matrix / row_totals[:, N.newaxis]
def smooth_matrix_with_pseudo_count(matrix, pseudo_count):
    "@return: The matrix with pseudo_count added to every entry, renormalised by row."
    return normalise_matrix(matrix + pseudo_count)
def write_matrix_to_file(f, id_, name, alphabet, matrix, properties, threshold, scale=1):
    """Write the matrix to the file in the custom PSSM format.

    The motif's properties are rendered into the comment header.
    """
    comments = [
        'PSSM parsed from set of fly TFs curated by Bergman.'
    ]
    # dict.iteritems() was removed in Python 3; items() behaves identically
    # here (the list-vs-iterator distinction is irrelevant for extend()).
    comments.extend('%20s : %s' % (k, v) for k, v in properties.items())
    write_as_custom_pssm(f, id_, name, matrix, comments=comments, scale=scale)
if '__main__' == __name__:
    import sys
    # Destination directory and naming scheme for the generated PSSM files.
    output_dir = '/home/john/Data/custom-pssms'
    pssm_set_tag = 'BG'
    # Scale frequencies as if each motif had this many observations.
    scale = 30
    pssm_set_f = open(os.path.join(output_dir, 'bergman-fly.pssm_set'), 'w')
    print >> pssm_set_f, '#'
    print >> pssm_set_f, '# Set of fly TFs curated by Bergman.'
    print >> pssm_set_f, '# PSSMs were scaled as if there were %d observations.' % scale
    print >> pssm_set_f, '#'
    # Write one .pssm file per motif plus an index file listing every id.
    for i, (name, alphabet, matrix, properties, threshold) in enumerate(parse_xms(open(xms_filename()))):
        id_ = '%s-%06d' % (pssm_set_tag, i+1)
        print id_, name
        print >> pssm_set_f, id_
        f = open(os.path.join(output_dir, '%s.pssm' % id_), 'w')
        properties['Equivalent # observations'] = str(scale)
        write_matrix_to_file(f, id_, name, alphabet, matrix, properties, threshold, scale=scale)
        f.close()
    pssm_set_f.close()
| 33.41791 | 105 | 0.585306 | 0 | 0 | 1,468 | 0.327825 | 0 | 0 | 0 | 0 | 1,327 | 0.296338 |
3e2c4ce8c6ded9f25bc03ff3e20ecd6211356ad1 | 7,950 | py | Python | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | [
"Apache-2.0"
] | null | null | null | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | [
"Apache-2.0"
] | null | null | null | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | [
"Apache-2.0"
] | null | null | null | """Holds the HTTP handlers for the addressbook app."""
from django import db
from django import http
from django.views import generic
import json
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from addressbook import models
JSON_XSSI_PREFIX = ")]}'\n"
def json_response(data, status_code=200):
    """Serialize *data* to JSON in an HttpResponse hardened against XSSI."""
    body = JSON_XSSI_PREFIX + json.dumps(data)
    response = http.HttpResponse()
    response.status_code = status_code
    response['Content-Type'] = 'application/javascript'
    # The two headers below plus the prefix defeat XSSI attacks.
    response['X-Content-Type-Options'] = 'nosniff'
    response['Content-Disposition'] = 'attachment'
    response.content = body
    return response
def _update_contact_details(has_contact_details, update_dict):
    """Copy the shared contact fields from a request dict onto a model."""
    # (model attribute, camelCase request key) pairs, applied in the same
    # order as the original explicit assignments.
    field_map = (
        ('email', 'email'),
        ('phone', 'phone'),
        ('street_address', 'streetAddress'),
        ('city', 'city'),
        ('postal_code', 'postalCode'),
    )
    for attr_name, dict_key in field_map:
        setattr(has_contact_details, attr_name, update_dict[dict_key])
@method_decorator(login_required, name='get')
class IndexView(generic.base.TemplateView):
    """Renders the base index file."""
    template_name = 'index.html'  # single-page shell; data arrives via the REST handlers
class LoginRequiredRESTHandler(generic.View):
    """Base view that rejects unauthenticated REST requests with a 401."""

    def dispatch(self, *args, **kwargs):
        """Require authenticated user for all REST requests."""
        user = self.request.user
        if user.is_authenticated():
            # Cache the user for the handler methods, then proceed normally.
            self.user = user
            return super(LoginRequiredRESTHandler, self).dispatch(*args, **kwargs)
        return json_response({'status': 'Unauthorized'}, status_code=401)
class OrganizationListRESTHandler(LoginRequiredRESTHandler):
    """REST handler for multiple organization requests."""

    def get(self, request):
        """Return every organization owned by the current user as JSON."""
        payload = []
        owned = models.Organization.objects.filter(
            owner=self.user).order_by('name')
        for organization in owned:
            member_rows = []
            members = organization.members.filter(
                owner=self.user).order_by('last_name', 'first_name')
            for person in members:
                member_rows.append({
                    'id': person.id,
                    'firstName': person.first_name,
                    'lastName': person.last_name,
                })
            payload.append({
                'id': organization.id,
                'name': organization.name,
                'email': organization.email,
                'phone': organization.phone,
                'streetAddress': organization.street_address,
                'city': organization.city,
                'postalCode': organization.postal_code,
                'members': member_rows,
            })
        return json_response(payload)
class OrganizationMembershipRESTHandler(LoginRequiredRESTHandler):
    """REST handler to manage membership of an organization."""

    @db.transaction.atomic
    def put(self, request, organization_id, person_id):
        """Add a member to an organization."""
        return self._modify_membership(organization_id, person_id, 'added')

    @db.transaction.atomic
    def delete(self, request, organization_id, person_id):
        """Remove a member from an organization."""
        return self._modify_membership(organization_id, person_id, 'deleted')

    def _modify_membership(self, organization_id, person_id, action):
        """Shared add/remove implementation; *action* is 'added' or 'deleted'.

        The put/delete bodies were near-duplicates; this keeps the lookup and
        response shape in one place.
        """
        # TODO(john): Better error handling - get() raises if not found, but
        # there's no messaging back to the client yet
        organization = models.Organization.objects.get(
            owner=self.user, id=organization_id)
        person = models.Person.objects.get(owner=self.user, id=person_id)
        if action == 'added':
            organization.members.add(person)
        else:
            organization.members.remove(person)
        organization.save()
        return json_response({
            'type': 'membership',
            'organization_id': organization_id,
            'person_id': person_id,
            'action': action})
class OrganizationRESTHandler(LoginRequiredRESTHandler):
    """REST handler for single organization requests."""
    def get(self, request, organization_id):
        # Single-organization reads are served by the list handler for now.
        raise NotImplementedError()
    @db.transaction.atomic
    def post(self, request):
        """Adds a new organization."""
        organization = models.Organization(owner=self.user)
        # TODO(john): Server-side data validation before blindly copying the data
        # into the target object
        self._update_organization(organization, json.loads(request.body))
        return json_response(
            {'type': 'organization', 'id': organization.id, 'action': 'added'})
    @db.transaction.atomic
    def put(self, request, organization_id):
        """Receives updates to an existing organization."""
        # TODO(john): Better error handling - get() raises if not found, but there's
        # no messaging back to the client yet
        organization = models.Organization.objects.get(
            owner=self.user, id=organization_id)
        # TODO(john): Server-side data validation before blindly copying the data
        # into the target object
        self._update_organization(organization, json.loads(request.body))
        return json_response(
            {'type': 'organization', 'id': organization_id, 'action': 'updated'})
    @db.transaction.atomic
    def delete(self, request, organization_id):
        """Delete an organization."""
        organization = models.Organization.objects.get(
            owner=self.user, id=organization_id)
        organization.delete()
        return json_response(
            {'type': 'organization', 'id': organization_id, 'action': 'deleted'})
    def _update_organization(self, organization, update_dict):
        """Copy name + contact fields from the request dict and persist."""
        organization.name = update_dict['name']
        _update_contact_details(organization, update_dict)
        organization.save()
class PersonListRESTHandler(LoginRequiredRESTHandler):
    """REST handler for multiple person requests."""

    def get(self, request):
        """Return every person owned by the current user as JSON."""
        payload = []
        for person in models.Person.objects.filter(owner=self.user):
            payload.append({
                'id': person.id,
                'firstName': person.first_name,
                'lastName': person.last_name,
                'email': person.email,
                'phone': person.phone,
                'streetAddress': person.street_address,
                'city': person.city,
                'postalCode': person.postal_code,
            })
        return json_response(payload)
class PersonRESTHandler(LoginRequiredRESTHandler):
    """REST handler for single person requests."""
    def get(self, request, person_id):
        # Single-person reads are served by the list handler for now.
        raise NotImplementedError()
    @db.transaction.atomic
    def post(self, request):
        """Adds a new person."""
        person = models.Person(owner=self.user)
        # TODO(john): Server-side data validation before blindly copying the data
        # into the target object
        self._update_person(person, json.loads(request.body))
        return json_response(
            {'type': 'person', 'id': person.id, 'action': 'added'})
    @db.transaction.atomic
    def put(self, request, person_id):
        """Receives updates to an existing person."""
        # TODO(john): Better error handling - get() raises if not found, but there's
        # no messaging back to the client yet
        person = models.Person.objects.get(owner=self.user, id=person_id)
        # TODO(john): Server-side data validation before blindly copying the data
        # into the target object
        self._update_person(person, json.loads(request.body))
        return json_response(
            {'type': 'person', 'id': person_id, 'action': 'updated'})
    @db.transaction.atomic
    def delete(self, request, person_id):
        """Delete a person."""
        person = models.Person.objects.get(owner=self.user, id=person_id)
        person.delete()
        return json_response(
            {'type': 'person', 'id': person_id, 'action': 'deleted'})
    def _update_person(self, person, update_dict):
        """Copy name + contact fields from the request dict and persist."""
        person.first_name = update_dict['firstName']
        person.last_name = update_dict['lastName']
        _update_contact_details(person, update_dict)
        person.save()
| 33.544304 | 80 | 0.684528 | 6,823 | 0.858239 | 0 | 0 | 4,024 | 0.506164 | 0 | 0 | 2,338 | 0.294088 |
3e2dc345629e84a8ce9faa979c3f69774ad29ef7 | 132 | py | Python | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | [
"Apache-2.0"
] | null | null | null | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | [
"Apache-2.0"
] | null | null | null | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | [
"Apache-2.0"
] | null | null | null | from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
| 33 | 54 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
3e2de9f463b88672a9f0881711bb0f7f45018e12 | 1,124 | py | Python | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | [
"MIT"
] | null | null | null | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | [
"MIT"
] | null | null | null | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | [
"MIT"
] | null | null | null |
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import tensorflow as tf
import keras
import os
print(os.listdir("../input"))
print("Success")
# Any results you write to the current directory are saved as output.
# importing models/layers
from keras.models import Sequential
from keras.layers import Dense
print("Success")
my_data = pd.read_csv('../input/kc_house_data.csv')
my_data.head()
#Splitting Data Up
predictors = my_data.drop(columns=["price","date"])
output = my_data['price']
print("Success")
model = Sequential()
n_cols = predictors.shape[1]
print("Success")
#Dense Layers
model.add(Dense(5,activation ="relu", input_shape=(n_cols,)))
model.add(Dense(5,activation ="relu"))
model.add(Dense(1))
print("Success")
#Optimizer
model.compile(optimizer="adam", loss ="mean_squared_error")
print("Success")
#fitting
from keras.callbacks import EarlyStopping
early_stopping_monitor = EarlyStopping(patience=3)
model.fit(predictors,output,validation_split=0.2, epochs=30, callbacks=[early_stopping_monitor])
#prediction
prediction = model.predict()
| 22.039216 | 96 | 0.758897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 370 | 0.329181 |
3e2e001920079b806a3731784374226e2f26379a | 1,194 | py | Python | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | [
"MIT"
] | null | null | null | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | [
"MIT"
] | 2 | 2021-11-11T19:04:10.000Z | 2021-11-11T19:08:42.000Z | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | [
"MIT"
] | 1 | 2021-09-09T13:44:26.000Z | 2021-09-09T13:44:26.000Z | """remove unique constraint from user table
Revision ID: 29e48091912e
Revises: f73df8de1f1f
Create Date: 2021-12-22 22:26:20.918461
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '29e48091912e'  # this migration's unique identifier
down_revision = 'f73df8de1f1f'  # the migration this one follows
branch_labels = None
depends_on = None
def upgrade():
    """Apply: recreate the user.email and user.name indexes without the
    unique constraint, so duplicate names/emails become allowed."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        # Drop each unique index and recreate it with unique=False.
        batch_op.drop_index('ix_user_email')
        batch_op.create_index(batch_op.f('ix_user_email'), ['email'], unique=False)
        batch_op.drop_index('ix_user_name')
        batch_op.create_index(batch_op.f('ix_user_name'), ['name'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert: restore the unique constraint on the user.name and
    user.email indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        # Recreate each index with unique enabled (unique=1) again.
        batch_op.drop_index(batch_op.f('ix_user_name'))
        batch_op.create_index('ix_user_name', ['name'], unique=1)
        batch_op.drop_index(batch_op.f('ix_user_email'))
        batch_op.create_index('ix_user_email', ['email'], unique=1)
    # ### end Alembic commands ###
| 30.615385 | 83 | 0.69598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.453099 |
3e2e6a8e43d315af581125fc3cb4dc17b915f7a7 | 6,065 | py | Python | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | [
"Apache-2.0"
] | 145 | 2020-02-13T09:08:59.000Z | 2022-03-28T02:05:38.000Z | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | [
"Apache-2.0"
] | 39 | 2021-01-12T02:49:37.000Z | 2022-02-17T18:49:54.000Z | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | [
"Apache-2.0"
] | 44 | 2020-02-13T03:57:35.000Z | 2022-03-31T07:05:09.000Z | '''ResNet in PyTorch.
For Pre-activation ResNet, see 'preact_resnet.py'.
Reference:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
class BasicBlock(nn.Module):
    """Two 3x3 convolutions with a residual (identity or projection) shortcut."""
    expansion = 1

    def __init__(self, in_planes, planes, stride=1, reduction=16):
        super(BasicBlock, self).__init__()
        out_planes = planes * self.expansion
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Projection shortcut only when spatial size or channel count changes;
        # otherwise an empty Sequential acts as the identity.
        if stride != 1 or in_planes != out_planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, out_planes, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(out_planes),
            )
        else:
            self.shortcut = nn.Sequential()

    def forward(self, x):
        branch = F.relu(self.bn1(self.conv1(x)))
        branch = self.bn2(self.conv2(branch))
        branch = branch + self.shortcut(x)
        return F.relu(branch)
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block with 4x channel expansion."""
    expansion = 4

    def __init__(self, in_planes, planes, stride=1, reduction=16):
        super(Bottleneck, self).__init__()
        wide = planes * self.expansion
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, wide, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(wide)
        # Projection shortcut whenever shape or channel count changes.
        if stride != 1 or in_planes != wide:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, wide, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(wide),
            )
        else:
            self.shortcut = nn.Sequential()

    def forward(self, x):
        y = F.relu(self.bn1(self.conv1(x)))
        y = F.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        y = y + self.shortcut(x)
        return F.relu(y)
class SELayer(nn.Module):
    """Squeeze-and-Excitation channel gating: global-average-pool, a small
    bottleneck MLP with sigmoid output, then per-channel rescaling."""

    def __init__(self, channel, reduction=16):
        super(SELayer, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        squeezed = channel // reduction
        self.fc = nn.Sequential(
            nn.Linear(channel, squeezed, bias=False),
            nn.ReLU(inplace=True),
            nn.Linear(squeezed, channel, bias=False),
            nn.Sigmoid(),
        )

    def forward(self, x):
        batch, channels = x.size(0), x.size(1)
        gate = self.avg_pool(x).view(batch, channels)
        gate = self.fc(gate).view(batch, channels, 1, 1)
        # Sigmoid gate in (0, 1) scales each channel of the input.
        return x * gate.expand_as(x)
class ResNet(nn.Module):
    """Speaker-embedding ResNet operating on (batch, freq, time) features.

    The input is treated as a one-channel image. After four residual stages,
    mean/std statistics pooling over the time axis feeds a linear layer that
    produces a fixed-size embedding.

    Args:
        block: residual block class (``BasicBlock`` or ``Bottleneck``).
        num_blocks: number of blocks per stage (length-4 sequence).
        m_channels: base channel width; later stages use multiples of it.
        feat_dim: input feature (frequency) dimension.
        embed_dim: size of the output embedding vector.
        squeeze_excitation: stored but not used by this class directly.
    """

    def __init__(self, block, num_blocks, m_channels=32, feat_dim=40, embed_dim=128, squeeze_excitation=False):
        super(ResNet, self).__init__()
        self.in_planes = m_channels
        self.feat_dim = feat_dim
        self.embed_dim = embed_dim
        self.squeeze_excitation = squeeze_excitation
        if block is BasicBlock:
            self.conv1 = nn.Conv2d(1, m_channels, kernel_size=3, stride=1, padding=1, bias=False)
            self.bn1 = nn.BatchNorm2d(m_channels)
            self.layer1 = self._make_layer(block, m_channels, num_blocks[0], stride=1)
            self.layer2 = self._make_layer(block, m_channels*2, num_blocks[1], stride=2)
            # Track how each stride-2 stage halves the frequency axis so the
            # embedding layer's input size can be computed exactly.
            current_freq_dim = int((feat_dim - 1) / 2) + 1
            self.layer3 = self._make_layer(block, m_channels*4, num_blocks[2], stride=2)
            current_freq_dim = int((current_freq_dim - 1) / 2) + 1
            self.layer4 = self._make_layer(block, m_channels*8, num_blocks[3], stride=2)
            current_freq_dim = int((current_freq_dim - 1) / 2) + 1
            # * 2 because stats pooling concatenates mean and std.
            self.embedding = nn.Linear(m_channels * 8 * 2 * current_freq_dim, embed_dim)
        elif block is Bottleneck:
            self.conv1 = nn.Conv2d(1, m_channels, kernel_size=3, stride=1, padding=1, bias=False)
            self.bn1 = nn.BatchNorm2d(m_channels)
            self.layer1 = self._make_layer(block, m_channels, num_blocks[0], stride=1)
            self.layer2 = self._make_layer(block, m_channels*2, num_blocks[1], stride=2)
            self.layer3 = self._make_layer(block, m_channels*4, num_blocks[2], stride=2)
            self.layer4 = self._make_layer(block, m_channels*8, num_blocks[3], stride=2)
            self.embedding = nn.Linear(int(feat_dim/8) * m_channels * 16 * block.expansion, embed_dim)
        else:
            # BUG FIX: the original formatted type(block), which for a class
            # argument is always <class 'type'>; report the class itself.
            raise ValueError(f'Unexpected block class {block}.')

    def _make_layer(self, block, planes, num_blocks, stride):
        """Stack *num_blocks* blocks; only the first uses *stride*."""
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        """Map (batch, freq, time) features to (batch, embed_dim) embeddings."""
        x = x.unsqueeze_(1)  # add a channel axis: (batch, 1, freq, time)
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        # Statistics pooling over time: concatenate mean and std per channel.
        pooling_mean = torch.mean(out, dim=-1)
        meansq = torch.mean(out * out, dim=-1)
        # 1e-10 guards against a negative argument from rounding error.
        pooling_std = torch.sqrt(meansq - pooling_mean ** 2 + 1e-10)
        out = torch.cat((torch.flatten(pooling_mean, start_dim=1),
                         torch.flatten(pooling_std, start_dim=1)), 1)
        embedding = self.embedding(out)
        return embedding
def ResNet101(feat_dim, embed_dim, squeeze_excitation=False):
    """Build a 101-layer embedding ResNet (3-4-23-3 Bottleneck stages)."""
    stage_sizes = [3, 4, 23, 3]
    return ResNet(
        Bottleneck,
        stage_sizes,
        feat_dim=feat_dim,
        embed_dim=embed_dim,
        squeeze_excitation=squeeze_excitation,
    )
| 40.433333 | 123 | 0.622754 | 5,572 | 0.918714 | 0 | 0 | 0 | 0 | 0 | 0 | 360 | 0.059357 |
3e345a0575b803502ed9bfed61051d0d9fb3fa57 | 5,159 | py | Python | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | 1 | 2021-02-27T07:27:17.000Z | 2021-02-27T07:27:17.000Z | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | null | null | null | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | 1 | 2021-06-09T15:56:54.000Z | 2021-06-09T15:56:54.000Z | import json
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector
from django.core.exceptions import ValidationError
from django.db.models import F
from django.db.models.functions import ACos, Cos, Radians, Sin
import requests
from bc.recruitment.constants import JOB_FILTERS
from bc.recruitment.models import JobCategory, RecruitmentHomePage, TalentLinkJob
def is_recruitment_site(site):
    """True when *site*'s root page is a RecruitmentHomePage.

    A falsy *site* (e.g. None) is returned unchanged, mirroring the
    short-circuit behaviour of ``site and isinstance(...)``.
    """
    if not site:
        return site
    return isinstance(site.root_page.specific, RecruitmentHomePage)
def get_current_search(querydict):
    """
    Returns search query and filters in request.GET as json string
    """
    search = {}
    # Plain single-value parameters, kept only when non-empty.
    for simple_key in ("query", "postcode"):
        value = querydict.get(simple_key, None)
        if value:
            search[simple_key] = value
    # Loop through our known filters so arbitrary query params are not stored.
    for job_filter in JOB_FILTERS:
        chosen = querydict.getlist(job_filter["name"])
        if chosen:
            deduped = list(dict.fromkeys(chosen))  # Remove duplicate options
            search[job_filter["name"]] = sorted(deduped)  # Alphabetical order
    return json.dumps(search)
def get_job_search_results(querydict, homepage, queryset=None):
    """Return TalentLinkJob results for a search request.

    Applies, in order: full-text ranking (when a query is given), the
    schools/early-years exclusion, each JOB_FILTERS filter, and finally
    distance ordering when a postcode is supplied.
    """
    if queryset is None:
        queryset = TalentLinkJob.objects.all()
    # Only jobs belonging to this recruitment homepage.
    queryset = queryset.filter(homepage=homepage)

    search_query = querydict.get("query", None)
    if search_query:
        # Weighted full-text search: title/job number strongest, then
        # location fields, then the long description.
        vector = (
            SearchVector("title", weight="A")
            + SearchVector("job_number", weight="A")
            # + SearchVector("short_description", weight="A")
            + SearchVector("location_name", weight="B")
            + SearchVector("location_city", weight="B")
            + SearchVector("description", weight="C")
        )
        query = SearchQuery(search_query, search_type="phrase")
        # Drop weak matches (rank < 0.1), best matches first.
        search_results = (
            queryset.annotate(rank=SearchRank(vector, query))
            .filter(rank__gte=0.1)
            .order_by("-rank")
        )
    else:
        # NOTE(review): this sorts posting_start_date ascending (oldest
        # first), although the original comment said "newest job at top" --
        # confirm the intended direction.
        search_results = queryset.order_by("posting_start_date")

    # Process 'hide schools and early years job'
    if querydict.get("hide_schools_and_early_years", False):
        schools_and_early_years_categories = (
            JobCategory.get_school_and_early_years_categories()
        )
        search_results = search_results.exclude(
            subcategory__categories__slug__in=schools_and_early_years_categories
        )

    # Process filters
    for filter in JOB_FILTERS:
        # QueryDict.update() used in send_job_alerts.py adds the values as
        # a plain list instead of a multivalue dict, hence the type check.
        if isinstance(querydict.get(filter["name"]), list):
            selected = querydict.get(filter["name"])
        else:
            selected = querydict.getlist(
                filter["name"]
            )  # will return empty list if not found
        try:
            selected = [forms.CharField().clean(value) for value in selected]
        except ValidationError:
            # Abort any invalid string literals, e.g. SQL injection attempts
            continue

        if selected:
            search_results = search_results.filter(
                **{
                    filter["filter_key"] + "__in": selected
                }  # TODO: make case insensitive
            )

    # Process postcode search: geocode via postcodes.io, then sort by
    # distance (and rank as a tie-breaker when there was a text query).
    search_postcode = querydict.get("postcode", None)
    if search_postcode:
        postcode_response = requests.get(
            "https://api.postcodes.io/postcodes/" + search_postcode
        )
        # On lookup failure the existing ordering is silently kept.
        if postcode_response.status_code == 200:
            postcode_response_json = postcode_response.json()
            search_lon = postcode_response_json["result"]["longitude"]
            search_lat = postcode_response_json["result"]["latitude"]
            search_results = search_results.annotate(
                distance=GetDistance(search_lat, search_lon)
            ).order_by("distance")
            if search_query:
                # Rank is only used when there is a search query
                search_results = search_results.order_by("distance", "-rank")
    return search_results
def GetDistance(point_latitude, point_longitude):
    """Build an ORM expression for the great-circle distance (metres) from
    each row's (location_lat, location_lon) to the given point.

    Uses the spherical law of cosines over Django database functions. See
    https://www.thutat.com/web/en/programming-and-tech-stuff/
    web-programming/postgres-query-with-gps-distance-calculations-without-postgis/
    """
    central_angle = ACos(
        Sin(Radians(F("location_lat"))) * Sin(Radians(point_latitude))
        + Cos(Radians(F("location_lat")))
        * Cos(Radians(point_latitude))
        * Cos(Radians(F("location_lon") - point_longitude))
    )
    # Earth radius of 6371 km, expressed in metres.
    return central_angle * 6371 * 1000
def get_school_and_early_years_count(search_results):
    """Count results that fall in schools/early-years job categories.

    NOTE: when no such categories exist, the unfiltered result set is
    counted, matching the original behaviour.
    """
    category_slugs = JobCategory.get_school_and_early_years_categories()
    if category_slugs:
        search_results = search_results.filter(
            subcategory__categories__slug__in=category_slugs
        )
    return len(search_results)
| 34.393333 | 107 | 0.652064 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,203 | 0.233185 |
3e37452fb8273aa4b7fb354676b63c94081558fd | 7,264 | py | Python | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | [
"Apache-2.0"
] | null | null | null | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | [
"Apache-2.0"
] | null | null | null | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | [
"Apache-2.0"
] | null | null | null | import argparse
import os
import shutil
import time, math, datetime, re
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import numpy as np
from torch.autograd.variable import Variable
from shared.BaseModel import BaseModel
from shared.resnet_9x9 import resnet18
from shared import dataset_tools
'''
Vizualization version of the final model.
'''
class SmallNet(nn.Module):
    """Per-frame feature extractor: a ResNet-18 trunk with its FC head disabled."""

    def __init__(self):
        super(SmallNet, self).__init__()
        backbone = resnet18(pretrained=False)
        # Neutralise the classifier head: Threshold(-1e20, -1e20) passes every
        # realistic activation through unchanged, so features come out raw.
        backbone.fc = nn.Threshold(-1e20, -1e20)
        self.features = backbone

    def forward(self, pressure):
        # Upsample the pressure map 3x before running the backbone.
        upsampled = F.interpolate(pressure, scale_factor=3, mode='bilinear')
        return self.features(upsampled)
class TouchNet(nn.Module):
    """Classification network over 1..N pressure frames: a shared per-frame
    CNN, a 1x1 fusion convolution, global average pooling, and a linear head."""

    def __init__(self, num_classes=1000, nFrames=5):
        super(TouchNet, self).__init__()
        self.net = SmallNet()
        self.combination = nn.Conv2d(128 * nFrames, 128, kernel_size=1, padding=0)
        self.classifier = nn.Linear(128, num_classes)
        self.avgpool = nn.AdaptiveAvgPool2d(1)

    def forward(self, x):
        # Run the shared per-frame CNN on each input frame independently.
        frame_features = [self.net(x[:, i:i + 1, ...]) for i in range(x.size(1))]
        fused = torch.cat(frame_features, dim=1)
        # Combine the per-frame feature maps, pool, flatten, classify.
        fused = self.combination(fused)
        pooled = self.avgpool(fused)
        flat = pooled.view(pooled.size(0), -1)
        return self.classifier(flat)
class ClassificationModelLargeViz(BaseModel):
    '''
    This class encapsulates the network and handles I/O.
    This version uses 9x9 conv1 kernels and was only used for vizualization.
    '''

    @property
    def name(self):
        return 'ClassificationModelLargeViz'

    def initialize(self, numClasses, sequenceLength = 1, baseLr = 1e-3):
        """Build the TouchNet model, optimizer and loss; reset training state."""
        BaseModel.initialize(self)

        self.baseLr = baseLr
        self.numClasses = numClasses
        self.sequenceLength = sequenceLength

        self.model = TouchNet(num_classes = self.numClasses, nFrames = self.sequenceLength)
        self.model = torch.nn.DataParallel(self.model)
        self.model.cuda()
        cudnn.benchmark = True

        self.optimizer = torch.optim.Adam([
            {'params': self.model.module.parameters(), 'lr_mult': 1.0},
            ], self.baseLr)
        self.optimizers = [self.optimizer]

        self.criterion = nn.CrossEntropyLoss().cuda()

        self.epoch = 0
        self.error = 1e20  # last error
        self.bestPrec = 1e20  # best error

        self.dataProcessor = None

    def step(self, inputs, isTrain = True, params = {}):
        """Run one forward (and, when training, backward) pass.

        Returns (res, losses): res holds ground truth and top-1 predictions;
        losses is an OrderedDict of scalar metrics (empty when no labels).
        """
        if isTrain:
            self.model.train()
            assert not inputs['objectId'] is None
        else:
            self.model.eval()

        # BUG FIX: cuda(async=True) is a SyntaxError on Python >= 3.7 because
        # 'async' became a keyword; the modern spelling is non_blocking=True.
        # Note: 'image' is moved to the GPU but not used by this viz model.
        image = torch.autograd.Variable(inputs['image'].cuda(non_blocking=True), requires_grad = (isTrain))
        pressure = torch.autograd.Variable(inputs['pressure'].cuda(non_blocking=True), requires_grad = (isTrain))
        objectId = torch.autograd.Variable(inputs['objectId'].cuda(non_blocking=True), requires_grad=False) if 'objectId' in inputs else None

        if isTrain:
            output = self.model(pressure)
        else:
            with torch.no_grad():
                output = self.model(pressure)

        _, pred = output.data.topk(1, 1, True, True)
        res = {
            'gt': None if objectId is None else objectId.data,
            'pred': pred,
            }

        # Without labels there is nothing to score or optimise.
        if objectId is None:
            return res, {}

        loss = self.criterion(output, objectId.view(-1))

        (prec1, prec3) = self.accuracy(output, objectId, topk=(1, min(3, self.numClasses)))

        if isTrain:
            # compute gradient and do SGD step
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()

        losses = OrderedDict([
            ('Loss', loss.data.item()),
            ('Top1', prec1),
            ('Top3', prec3),
            ])
        return res, losses

    def accuracy(self, output, target, topk=(1,)):
        """Computes the precision@k for the specified values of k."""
        maxk = max(topk)
        batch_size = target.size(0)

        _, pred = output.data.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.data.view(1, -1).expand_as(pred))

        res = []
        for k in topk:
            correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size).item())
        # NOTE: callers always pass exactly two k values, so two are returned.
        return res[0], res[1]

    def importState(self, save):
        """Load model weights and training counters from a checkpoint dict."""
        params = save['state_dict']
        if hasattr(self.model, 'module'):
            # DataParallel wrapper: try loading on the wrapper first, then the
            # wrapped module (covers checkpoints saved either way).
            try:
                self.model.load_state_dict(params, strict=True)
            except:
                self.model.module.load_state_dict(params, strict=True)
        else:
            params = self._clearState(params)
            self.model.load_state_dict(params, strict=True)

        self.epoch = save['epoch'] if 'epoch' in save else 0
        self.bestPrec = save['best_prec1'] if 'best_prec1' in save else 1e20
        self.error = save['error'] if 'error' in save else 1e20
        print('Imported checkpoint for epoch %05d with loss = %.3f...' % (self.epoch, self.bestPrec))

    def _clearState(self, params):
        """Strip a leading 'module.' (DataParallel prefix) from state-dict keys."""
        res = dict()
        for k, v in params.items():
            # Raw string avoids the invalid '\.' escape-sequence warning.
            kNew = re.sub(r'^module\.', '', k)
            res[kNew] = v
        return res

    def exportState(self):
        """Serialise model weights (moved to CPU) plus training counters."""
        dt = datetime.datetime.now()
        state = self.model.state_dict()
        for k in state.keys():
            #state[k] = state[k].share_memory_()
            state[k] = state[k].cpu()
        return {
            'state_dict': state,
            'epoch': self.epoch,
            'error': self.error,
            'best_prec1': self.bestPrec,
            'datetime': dt.strftime("%Y-%m-%d %H:%M:%S")
            }

    def updateLearningRate(self, epoch):
        self.adjust_learning_rate_new(epoch, self.baseLr)

    def adjust_learning_rate_new(self, epoch, base_lr, period = 100): # train for 2x100 epochs
        """Exponential decay: the lr falls by 10x over each *period* epochs."""
        gamma = 0.1 ** (1.0/period)
        lr_default = base_lr * (gamma ** (epoch))
        print('New lr_default = %f' % lr_default)
        for optimizer in self.optimizers:
            for param_group in optimizer.param_groups:
                param_group['lr'] = param_group['lr_mult'] * lr_default
3e38f387d0ad96aa627dd060a7aa1188e154c4a3 | 10,017 | py | Python | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | [
"MIT"
] | 1 | 2022-02-10T07:07:35.000Z | 2022-02-10T07:07:35.000Z | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | [
"MIT"
] | null | null | null | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
from visHullTwoD import Scene, SegmentType
#%%
def doubleFaceTest(f):
    """Debug check: if any half-edge in face *f*'s loop reports a different
    left face than *f*, print the whole loop ("double face")."""
    doubleFace = False
    origHE = f.halfEdge
    he = f.halfEdge.next
    while he != origHE:
        if f.index != he.leftFace.index:
            doubleFace = True
            break
        he = he.next

    if doubleFace:
        print("Double face ({0}):".format(f.index))
        origHE = f.halfEdge
        he = f.halfEdge.next
        while he != origHE:
            fIndex = he.leftFace.index
            v0 = he.prev.headVertex.position
            v1 = he.headVertex.position
            print(" - F{0}, {1}->{2}".format(fIndex, v0, v1))
            he = he.next
        # BUG FIX: the final (origin) edge previously reused the *previous*
        # iteration's fIndex; look up its own left-face index instead. This
        # also avoids a NameError when the loop body never runs.
        fIndex = he.leftFace.index
        v0 = he.prev.headVertex.position
        v1 = he.headVertex.position
        print(" - F{0}, {1}->{2}".format(fIndex, v0, v1))
        print("-----")
def checkEventEquality(w0, w1):
    """Print a diff report comparing the event records of two worlds."""
    print("== Event check ==")
    count0, count1 = len(w0.eventsRecord), len(w1.eventsRecord)
    if count0 != count1:
        print("NUMBER OF EVENT RECORDS DIFFERENT! w0: {0}, w1: {1}".format(count0, count1))
    # Compare pairwise up to the shorter record (zip truncates).
    for idx, (ev0, ev1) in enumerate(zip(w0.eventsRecord, w1.eventsRecord)):
        comparison = ev0.debugEq(ev1)
        if not np.all(list(comparison.values())):
            print(" - DIFF AT {0}: {1}".format(idx, comparison))
    print("Done event check!\n")
#%%
def drawScene(scene):
    """Render *scene* with matplotlib: grey polygons, coloured active
    segments, visual-number-coloured faces; finally show the figure."""
    print("cwList:", scene.cwList)
    # Plot all polygons.
    for obj in scene.polygons:
        x,y = obj.getSeparateXYs()
        plt.fill(x,y, "#A0A0A0") # light grey fill
        plt.plot(x,y, "#505050") # dark grey edges/outline
    '''
    for ln in scene.lines:
        p0, p1 = scene.sceneBorderHitPoints(ln)
        plt.plot([p0[0], p1[0]], [p0[1], p1[1]], "k--")
    '''
    for ln in scene.activeSegments:
        # colString encodes the segment's active type but is currently unused
        # for drawing; colString2 is what is actually plotted below.
        colString = "g"
        if ln.activeType == SegmentType.A:
            colString = "r"
        elif ln.activeType == SegmentType.B:
            colString = "b"
        # Magenta if vn increase to right (for vert lines) or down
        # Cyan otherwise
        colString2 = "c"
        if ln.isVertical:
            if (ln.p1[1] > ln.p0[1] and ln.increasesToTheRight) or (ln.p1[1] < ln.p0[1] and not ln.increasesToTheRight):
                colString2 = "m"
        else:
            if (ln.p1[0] > ln.p0[0] and ln.increasesToTheRight) or (ln.p1[0] < ln.p0[0] and not ln.increasesToTheRight):
                colString2 = "m"
        plt.plot([ln.p0[0], ln.p1[0]], [ln.p0[1], ln.p1[1]], colString2)
    '''for halfEdge in partitionMesh.halfEdges:
        if halfEdge.headVertex is not None and halfEdge.pair.headVertex is not None:
            v0 = halfEdge.headVertex.position
            v1 = halfEdge.pair.headVertex.position
            plt.plot([v0[0], v1[0]], [v0[1], v1[1]], "r--")
        else:
            print("Some problem")'''
    # Colour each drawable face by its visual number, clamped to the palette.
    colours = ["k", "r", "g", "b", "y"]
    for f in scene.drawableFaces:
        #print("Visual number:", f.visualNumber)
        regionColour = colours[min(f.visualNumber, len(colours) - 1)]
        pts = f.getCoords()
        xs = pts[:, 0]
        ys = pts[:, 1]
        plt.fill(xs, ys, regionColour)
    # Split vertices into convex/concave groups (only used by the
    # commented-out scatter plots below).
    convex = []
    concave = []
    for i in range(scene.vertices.shape[0]):
        if scene.isVertexConcave(i):
            concave.append(scene.vertices[i])
        else:
            convex.append(scene.vertices[i])
    npConvex = np.array(convex)
    npConcave = np.array(concave)
    '''
    for maxSeg in self.activeSegments:
        for succSeg in self.activeSegments:
            succInt = maxSeg.intersection(succSeg)
            onFirstSegment = succInt.meetS > -EQUAL_THRESHOLD and succInt.meetS < maxSeg.length + EQUAL_THRESHOLD
            onSecondSegment = succInt.meetT > -EQUAL_THRESHOLD and succInt.meetT < succSeg.length + EQUAL_THRESHOLD
            if succInt.doMeet and onFirstSegment and onSecondSegment:
                plt.plot([succInt.meetPt[0]], [succInt.meetPt[1]], 'ko')
    '''
    '''if npConvex.shape[0] > 0:
        plt.plot(npConvex[:, 0], npConvex[:, 1], 'bo')
    if npConcave.shape[0] > 0:
        plt.plot(npConcave[:, 0], npConcave[:, 1], 'go')'''
    plt.show()
# Build the test scenes; each world exercises a different polygon layout.
world0 = Scene()
world1 = Scene()
world2 = Scene()
world3 = Scene()
world4 = Scene()
world5 = Scene()
world6 = Scene()
world7 = Scene()
world8 = Scene()
world9 = Scene()
world10 = Scene()
world11 = Scene()
world12 = Scene()

# These are the tris from Petitjean's diagram
polygon1 = [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 0.7), (4.85, 1.75), (4.85, 3.4)]

world0.addPolygon(polygon1)
world0.addPolygon(polygon2)
world0.addPolygon(polygon3)
#world0.addPolygon(polygon4)

# world1: a single U-shaped (concave) polygon.
polygon1 = [(0, 0), (5, 0), (5, 3), (4, 3), (4, 5), (1, 5), (1, 3), (0, 3)]
polygon2 = [(1, 7), (3, 7), (5, 9), (4, 11), (4, 9), (1, 8), (2, 10), (0, 10)]
world1.addPolygon(polygon1)

world2.addPolygon(polygon1)
world2.addPolygon(polygon2)

polygon1 = [(0, 2), (1,1), (2,2), (1,0)]
polygon2 = [(3,3), (4,2), (5,3)]
# polygon2 = [(p[0] - 3, p[1]) for p in polygon2]
# Horizontal flip for testing purposes.
polygon1 = [(-p[0], p[1]) for p in polygon1]
polygon2 = [(-p[0], p[1]) for p in polygon2]
world3.addPolygon(polygon1)
world3.addPolygon(polygon2)

polygon1 = [(0, 7), (2.25, 5), (1.25, 4), (5, 5)] # [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, -3.15), (4, -4), (2, -7), (0.9, -5.25)] #[(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 1), (3, 0.0), (4.85, 0.75), (4.85, 2.4), (5,4)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
polygon4 = [(-0.5, -1), (-0.5, 1.0), (0.5, 1), (0.5, -1)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]

world4.addPolygon(polygon1)
world4.addPolygon(polygon2)
world4.addPolygon(polygon3)
world4.addPolygon(polygon4)

polygon1 = [(0, 0.6), (1.5, 0), (2.5, 1.25), (1.25, 0.75), (1.125, 1.8)]
polygon2 = [(1.3, 2.25), (2.8, 2.8), (1.65, 3.125)]
polygon3 = [(2.8, 1.25), (4.125, 0.25), (3.5, 2.0)]
world5.addPolygon(polygon1)
world5.addPolygon(polygon2)
world5.addPolygon(polygon3)

polygon1 = [(0,0), (2.5, 0), (0, 1.5)]
polygon2 = [(0, 3.25), (5, 4.25), (0, 4.25)]
polygon3 = [(3.5, 0), (5, 0), (5, 2.75), (3.5, 2.75)]
world6.addPolygon(polygon1)
world6.addPolygon(polygon2)
world6.addPolygon(polygon3)

# world7: a single W-shaped polygon; world8: the same shape split into three.
polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1), (0, 0), (1, -1), (2, -1), (2, 1), (1, 1), (0, 2)]
world7.addPolygon(polygon1)

polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1)]
polygon2 = [(-1, -1), (0, 0), (1, -1), (1, 1), (0, 2), (-1, 1)]
polygon3 = [(1, -1), (2, -1), (2, 1), (1, 1)]
# polygon1 = [(p[0], 0.9*p[1]) for p in polygon1]
# polygon3 = [(p[0], 0.9*p[1]) for p in polygon3]
world8.addPolygon(polygon1)
world8.addPolygon(polygon2)
world8.addPolygon(polygon3)
# 0.9999995231628418

polygon1 = [(-1, -1), (1, -1), (1, 1), (-1, 1)]
polygon2 = [(1, 1), (2, -1), (3, 0), (2, 1)]
world9.addPolygon(polygon1)
world9.addPolygon(polygon2)

# world10-world12: coordinates captured from an external tool run.
polygon1 = [(0.734870970249176, 0.26040399074554443), (-0.045375000685453415, 0.8651400208473206), (-0.8234530091285706, 0.4177840054035187), (-0.14182999730110168, 0.21450699865818024)]
polygon2 = [(-1.0, 1.0108875036239624), (1.0, 1.010890007019043), (1.0, 1.3735400438308716), (-1.0, 1.373543620109558)]
world10.addPolygon(polygon2)
world10.addPolygon(polygon1)

polygon0 = [(0.734870970249176, -1.1526894569396973), (-0.045375000685453415, 1.1651400327682495), (-0.8234530091285706, -0.9953095316886902), (-0.14182999730110168, -1.1985864639282227)]
polygon1 = [(2.1045942306518555, -2.0704498291015625), (2.1045916080474854, 1.9576737880706787), (1.7419415712356567, 1.9576740264892578), (1.7419381141662598, -2.0704498291015625)]
polygon2 = [(-1.7419382333755493, -2.0704498291015625), (-1.741940975189209, 1.9576740264892578), (-2.10459041595459, 1.9576740264892578), (-2.1045944690704346, -2.0704495906829834)]
world11.addPolygon(polygon0)
world11.addPolygon(polygon1)
world11.addPolygon(polygon2)

polygon0 = [(0.7000000476837158, -1.2000000476837158), (-0.10000000149011612, 1.2000000476837158), (-0.800000011920929, -1.0), (-0.10000000149011612, -1.25)]
polygon1 = [(2.0999999046325684, -2.0999999046325684), (2.0999999046325684, 1.899999976158142), (1.7000000476837158, 1.899999976158142), (1.7000000476837158, -2.0999999046325684)]
polygon2 = [(-1.7000000476837158, -2.0999999046325684), (-1.7000000476837158, 1.899999976158142), (-2.1000001430511475, 1.899999976158142), (-2.1000001430511475, -2.0999999046325684)]
world12.addPolygon(polygon0)
world12.addPolygon(polygon1)
world12.addPolygon(polygon2)

#world.addLine((0, 2.5), (3, 2.5))

# Run the pipeline on each world: compute free lines, draw, and check each
# partition-mesh face for the "double face" inconsistency.
worlds = [world0, world1, world2, world3, world4, world5, world6, world7, world8, world9, world10]
worldIndex = 0
for w in worlds:
    print("\nWorld:", worldIndex)
    worldIndex += 1
    w.calcFreeLines()
    drawScene(w)
    faceList = w.partitionMesh.faces
    for k in faceList:
        doubleFaceTest(faceList[k])

# world12 is a rounded-coordinate variant of world11; their event records
# should agree.
checkEventEquality(world12, world11)
#%%
reminders = [
    "Is there a better way, using cos(), to handle parallelism in isLineInsideEdgeAngle()?",
    "Pruning of lines that intersect obj at CONTACT verts. (I sort of forget what this self-reminder meant...)",
    "Pruning of segments outside convex hull.",
    "Right now, swapDir() side effect in findIntersections(). Should this be changed?",
    "Just generally take a second look at how floating-point precision problems are handled.\nEspecially for the y-intercept of MyLine, since a very small difference in coordinates can lead to a larger difference in y-intercepts.\nSo instead of comparing y-intercepts, something else should maybe be compared!"
]

for reminder in reminders:
    sep = "==========="
    print("\n" + sep + "\n" + reminder + "\n" + sep + "\n")
3e3c50b123745c81d1f91068db3b602d8d3f128d | 5,966 | py | Python | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 236 | 2019-07-09T22:06:21.000Z | 2022-03-31T17:56:07.000Z | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 115 | 2019-07-12T19:06:21.000Z | 2022-03-31T17:34:18.000Z | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | [
"BSD-3-Clause"
] | 34 | 2019-07-10T03:34:04.000Z | 2022-03-22T12:44:22.000Z | import numpy as np
from scipy.sparse import issparse
from sklearn.utils import sparsefuncs
import anndata
from typing import Union
from ..dynamo_logger import LoggerManager, main_tqdm
from ..utils import copy_adata
def lambda_correction(
    adata: anndata.AnnData,
    lambda_key: str = "lambda",
    inplace: bool = True,
    copy: bool = False,
) -> Union[anndata.AnnData, None]:
    """Use lambda (cell-wise detection rate) to estimate the labeled RNA.

    Labeled layers are corrected via min(L / lambda, L + U) (the scNT-seq
    formula) and each matching unlabeled layer is recomputed as
    total - corrected labeled.

    Parameters
    ----------
    adata:
        adata object generated from dynast.
    lambda_key:
        The key in ``adata.obs`` holding the cell-wise detection rate.
    inplace:
        Whether to update the layers in place. If False, new layers with an
        ``_corrected`` suffix appended to the existing names are added
        instead.
    copy:
        Whether to deep-copy the adata object (and return the copy) or to
        update the passed-in object.

    Returns
    -------
    adata: :class:`~anndata.AnnData`
        The updated copy when ``copy`` is True, otherwise None (the input
        object is modified in place).
    """
    logger = LoggerManager.gen_logger("dynamo-lambda_correction")
    logger.log_time()

    adata = copy_adata(adata) if copy else adata

    logger.info("apply detection rate correction to adata...", indent_level=1)

    if lambda_key not in adata.obs.keys():
        raise ValueError(
            f"the lambda_key {lambda_key} is not included in adata.obs! Please ensure you have calculated "
            "per-cell detection rate!"
        )

    logger.info("retrieving the cell-wise detection rate..", indent_level=1)
    # Column vector so it broadcasts across genes (columns) of each layer.
    detection_rate = adata.obs[lambda_key].values[:, None]

    logger.info("identify the data type..", indent_level=1)
    all_layers = adata.layers.keys()

    # str has no `.contains` method (the original raised AttributeError);
    # substring membership must be tested with `in`.
    has_ul = np.any(["ul_" in i for i in all_layers])
    has_un = np.any(["un_" in i for i in all_layers])
    has_sl = np.any(["sl_" in i for i in all_layers])
    has_sn = np.any(["sn_" in i for i in all_layers])

    has_l = np.any(["_l_" in i for i in all_layers])
    has_n = np.any(["_n_" in i for i in all_layers])

    # `sum(bool + bool + ...)` summed a scalar (TypeError); count the flags
    # in a list instead.
    if sum([has_ul, has_un, has_sl, has_sn]) == 4:
        datatype = "splicing_labeling"
    elif has_l or has_n:
        datatype = "labeling"
    else:
        # Previously `datatype` stayed undefined and a NameError surfaced
        # later; fail early with a clear message instead.
        raise ValueError(
            "the adata object contains neither splicing+labeling layers (ul/un/sl/sn) "
            "nor labeling layers (_l_/_n_)."
        )

    logger.info(f"the data type identified is {datatype}", indent_level=2)
    logger.info("retrieve relevant layers for detection rate correction", indent_level=1)

    if datatype == "splicing_labeling":
        layers, match_tot_layer = [], []
        for layer in all_layers:
            # `list += str` extends character-by-character; append whole
            # layer names instead.
            if "ul_" in layer:
                layers.append(layer)
                match_tot_layer.append("unspliced")
            elif "un_" in layer:
                layers.append(layer)
                match_tot_layer.append("unspliced")
            elif "sl_" in layer:
                layers.append(layer)
                match_tot_layer.append("spliced")
            elif "sn_" in layer:
                layers.append(layer)
                match_tot_layer.append("spliced")
            elif "spliced" in layer:
                layers.append(layer)
            elif "unspliced" in layer:
                layers.append(layer)

        if len(layers) != 6:
            raise ValueError(
                "the adata object has to include ul, un, sl, sn, unspliced, spliced, "
                "six relevant layers for splicing and labeling quantified datasets."
            )
    elif datatype == "labeling":
        layers, match_tot_layer = [], []
        for layer in all_layers:
            if "_l_" in layer:
                layers.append(layer)
                match_tot_layer.append("total")
            elif "_n_" in layer:
                layers.append(layer)
                match_tot_layer.append("total")
            elif "total" in layer:
                layers.append(layer)

        if len(layers) != 3:
            raise ValueError(
                "the adata object has to include labeled, unlabeled, three relevant layers for labeling quantified "
                "datasets."
            )

    logger.info("detection rate correction starts", indent_level=1)
    # NOTE(review): the pairing below assumes labeled layers appear at even
    # indices and their unlabeled counterparts immediately after (this
    # mirrors the original iteration order of adata.layers) -- confirm.
    for i, layer in enumerate(main_tqdm(layers, desc="iterating all relevant layers")):
        if i < len(match_tot_layer):
            cur_layer = adata.layers[layer] if inplace else adata.layers[layer].copy()
            cur_total = adata.layers[match_tot_layer[i]]

            # even layers are labeled RNA and odd ones unlabeled RNA
            if i % 2 == 0:
                # formula: min(L / lambda, (L + U)) from scNT-seq
                if issparse(cur_layer):
                    # inplace_row_scale expects a 1-D scale vector, not the
                    # (n_cells, 1) column used for dense broadcasting.
                    sparsefuncs.inplace_row_scale(cur_layer, (1 / detection_rate).ravel())
                else:
                    cur_layer /= detection_rate
                if inplace:
                    adata.layers[layer] = sparse_mimmax(cur_layer, cur_total)
                else:
                    adata.layers[layer + "_corrected"] = sparse_mimmax(cur_layer, cur_total)
            else:
                # Unlabeled = total - corrected labeled. `layers[i - 1]` is
                # the labeled layer processed in the previous iteration (the
                # original indexed the layer *name* string: `layer[i - 1]`).
                labeled_key = layers[i - 1] if inplace else layers[i - 1] + "_corrected"
                if inplace:
                    adata.layers[layer] = cur_total - adata.layers[labeled_key]
                else:
                    adata.layers[layer + "_corrected"] = cur_total - adata.layers[labeled_key]

    logger.finish_progress(progress_name="lambda_correction")

    if copy:
        return adata
    return None
def sparse_mimmax(A, B, type="mim"):
    """Return the element-wise minimum/maximum of sparse matrices `A` and `B`.

    Parameters
    ----------
    A:
        The first sparse matrix.
    B:
        The second sparse matrix.
    type:
        The type of calculation: "min" (or the historical spelling "mim",
        kept as the default for interface compatibility) for the element-wise
        minimum; anything else (e.g. "max") for the element-wise maximum.

    Returns
    -------
    M:
        A sparse matrix containing the element-wise minimum or maximum of the
        two input matrices.
    """
    # Bug fix: the default "mim" never matched the check `type == "min"`, so
    # calling with the default silently computed the *maximum* even though the
    # caller (lambda_correction) wants min(L / lambda, L + U).  Accept both
    # spellings as "minimum".
    if type in ("min", "mim"):
        indicator = (A < B).astype(int)
    else:
        indicator = (A > B).astype(int)
    # indicator * (A - B) + B selects A where the indicator is 1, B elsewhere.
    M = indicator.multiply(A - B) + B
    return M
| 35.301775 | 120 | 0.578947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,217 | 0.371606 |
3e3cee6ba011350960f8e52993ae0b2666144798 | 4,095 | py | Python | tests/fullscale/poroelasticity/cryer/TestCryer.py | cehanagan/pylith | cf5c1c34040460a82f79b6eb54df894ed1b1ee93 | [
"MIT"
] | 93 | 2015-01-08T16:41:22.000Z | 2022-02-25T13:40:02.000Z | tests/fullscale/poroelasticity/cryer/TestCryer.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | [
"MIT"
] | 277 | 2015-02-20T16:27:35.000Z | 2022-03-30T21:13:09.000Z | tests/fullscale/poroelasticity/cryer/TestCryer.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | [
"MIT"
] | 71 | 2015-03-24T12:11:08.000Z | 2022-03-03T04:26:02.000Z | #!/usr/bin/env nemesis
#
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University at Buffalo
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2021 University of California, Davis
#
# See LICENSE.md for license information.
#
# ----------------------------------------------------------------------
#
# @file tests/fullscale/poroelasticity/cryer/TestCryer.py
#
# @brief Test suite for testing pylith with Cryer's problem.
import unittest
from pylith.testing.FullTestApp import (FullTestCase, Check, check_data)
import meshes
import cryer_soln
# We do not include trace_strain in the test of the solution fields, because of the
# poor convergence of the series solution.
SOLUTION_FIELDS = ["displacement", "pressure"]
SOLUTION_TOLERANCE = 0.5
# -------------------------------------------------------------------------------------------------
class TestCase(FullTestCase):
    """Base full-scale test case for Cryer's poroelasticity problem.

    Subclasses set ``self.name`` and ``self.mesh`` before calling setUp();
    this class builds the list of output checks comparing PyLith output
    against the analytical solution.
    """

    def setUp(self):
        # Defaults shared by every check: output filename pattern, the
        # analytical solution, and the mesh chosen by the subclass.
        defaults = {
            "filename": "output/{name}-{mesh_entity}.h5",
            "exact_soln": cryer_soln.AnalyticalSoln(),
            "mesh": self.mesh,
        }

        self.checks = [
            # Solution fields over the whole domain.
            Check(
                mesh_entities=["domain"],
                vertex_fields=SOLUTION_FIELDS,
                defaults=defaults,
                tolerance=SOLUTION_TOLERANCE,
            ),
            # Material property info fields for the poroelastic material.
            Check(
                mesh_entities=["poroelastic"],
                filename="output/{name}-{mesh_entity}_info.h5",
                cell_fields = [
                    "biot_coefficient",
                    "biot_modulus",
                    "drained_bulk_modulus",
                    "fluid_density",
                    "fluid_viscosity",
                    "isotropic_permeability",
                    "porosity",
                    "shear_modulus",
                    "solid_density",
                ],
                defaults=defaults,
            ),
            # Solution fields over the poroelastic material.
            Check(
                mesh_entities=["poroelastic"],
                vertex_fields = SOLUTION_FIELDS,
                defaults=defaults,
                tolerance=SOLUTION_TOLERANCE,
            ),
            # Boundary-condition info fields (initial amplitudes).
            Check(
                mesh_entities=["x_neg", "y_neg", "z_neg", "surface_pressure"],
                filename="output/{name}-{mesh_entity}_info.h5",
                vertex_fields=["initial_amplitude"],
                defaults=defaults,
            ),
            # Solution fields on the boundaries.
            Check(
                mesh_entities=["x_neg", "y_neg", "z_neg", "surface_pressure"],
                vertex_fields=SOLUTION_FIELDS,
                defaults=defaults,
                tolerance=SOLUTION_TOLERANCE,
            ),
        ]

    def run_pylith(self, testName, args):
        """Run PyLith for simulation `testName` with the given .cfg files."""
        FullTestCase.run_pylith(self, testName, args)
# -------------------------------------------------------------------------------------------------
class TestHex(TestCase):
    """Cryer's problem on a hexahedral mesh."""

    def setUp(self):
        self.name = "cryer_hex"
        self.mesh = meshes.Hex()
        # Build the checks (super().setUp() reads self.mesh), then run PyLith.
        super().setUp()

        TestCase.run_pylith(self, self.name, ["cryer.cfg", "cryer_hex.cfg"])
        return
# -------------------------------------------------------------------------------------------------
class TestTet(TestCase):
    """Cryer's problem on a tetrahedral mesh."""

    def setUp(self):
        self.name = "cryer_tet"
        self.mesh = meshes.Tet()
        # Build the checks (super().setUp() reads self.mesh), then run PyLith.
        super().setUp()

        TestCase.run_pylith(self, self.name, ["cryer.cfg", "cryer_tet.cfg"])
        return
# -------------------------------------------------------------------------------------------------
def test_cases():
    """Return the test-case classes that make up this suite."""
    cases = (TestHex, TestTet)
    return list(cases)
# -------------------------------------------------------------------------------------------------
# Run the suite directly (outside of a test runner).
if __name__ == '__main__':
    FullTestCase.parse_args()

    suite = unittest.TestSuite()
    for test in test_cases():
        # unittest.makeSuite() was deprecated and removed in Python 3.13;
        # the TestLoader API is the supported replacement.
        suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(test))
    unittest.TextTestRunner(verbosity=2).run(suite)
# End of file
| 31.022727 | 99 | 0.477656 | 2,313 | 0.564835 | 0 | 0 | 0 | 0 | 0 | 0 | 1,775 | 0.433455 |
3e3e8c87814094936e4351a80831e5bb8fce82f9 | 3,551 | py | Python | util/data.py | pinaryazgan/GDN | 469e63fa8c2dce596c6f7e99f2620ac6eec7dadf | [
"MIT"
] | 156 | 2021-03-01T12:49:25.000Z | 2022-03-28T08:27:33.000Z | util/data.py | pinaryazgan/GDN | 469e63fa8c2dce596c6f7e99f2620ac6eec7dadf | [
"MIT"
] | 24 | 2021-04-19T10:08:35.000Z | 2022-03-28T11:42:54.000Z | util/data.py | pinaryazgan/GDN | 469e63fa8c2dce596c6f7e99f2620ac6eec7dadf | [
"MIT"
] | 54 | 2021-04-16T17:26:30.000Z | 2022-03-28T06:08:43.000Z | # util functions about data
from scipy.stats import rankdata, iqr, trim_mean
from sklearn.metrics import f1_score, mean_squared_error
import numpy as np
from numpy import percentile
def get_attack_interval(attack):
    """Find the contiguous intervals of 1s in an attack indicator sequence.

    Parameters
    ----------
    attack : sequence of 0/1 ints
        Per-timestep attack labels.

    Returns
    -------
    list of (start, end) tuples
        One tuple per contiguous run of 1s, both indices inclusive.
    """
    heads = []
    tails = []
    for i in range(len(attack)):
        if attack[i] == 1:
            # A run starts at i when it is the first step or the previous
            # step was 0.  (The original test `attack[i-1] == 0` wrapped to
            # the *last* element at i == 0 and could miss a run starting at
            # index 0, mispairing all subsequent intervals.)
            if i == 0 or attack[i-1] == 0:
                heads.append(i)
            # A run ends at i when the next step is 0, or i is the last step.
            if i < len(attack)-1 and attack[i+1] == 0:
                tails.append(i)
            elif i == len(attack)-1:
                tails.append(i)
    # heads and tails are now guaranteed to pair up one-to-one.
    return list(zip(heads, tails))
# calculate F1 scores
def eval_scores(scores, true_scores, th_steps, return_thresold=False):
    """F1 scores of anomaly `scores` against binary `true_scores` labels at
    `th_steps` rank-based thresholds.

    `scores` is left-padded with zeros up to the length of the labels.  For
    each threshold step, the top fraction of ranks is predicted anomalous.
    Returns the list of F1 values and, when `return_thresold` (sic) is True,
    also the raw score threshold used at each step.
    """
    padding_list = [0]*(len(true_scores) - len(scores))
    # print(padding_list)

    if len(padding_list) > 0:
        scores = padding_list + scores

    # Ordinal rank of each score (1..n); thresholds are quantiles of ranks.
    scores_sorted = rankdata(scores, method='ordinal')
    th_steps = th_steps
    # th_steps = 500
    th_vals = np.array(range(th_steps)) * 1.0 / th_steps
    fmeas = [None] * th_steps
    thresholds = [None] * th_steps
    for i in range(th_steps):
        # Predict anomalous for ranks above the current quantile.
        cur_pred = scores_sorted > th_vals[i] * len(scores)

        fmeas[i] = f1_score(true_scores, cur_pred)

        # Raw score whose rank sits exactly at the threshold boundary.
        # NOTE(review): .index() assumes that exact rank value exists --
        # fragile; confirm inputs always make it so.
        score_index = scores_sorted.tolist().index(int(th_vals[i] * len(scores)+1))
        thresholds[i] = scores[score_index]

    if return_thresold:
        return fmeas, thresholds
    return fmeas
def eval_mseloss(predicted, ground_truth):
    """Mean squared error between `predicted` and `ground_truth`."""
    # Convert to arrays so sklearn receives uniform numeric input.
    predicted_arr = np.array(predicted)
    ground_truth_arr = np.array(ground_truth)
    return mean_squared_error(predicted_arr, ground_truth_arr)
def get_err_median_and_iqr(predicted, groundtruth):
    """Median and interquartile range of the absolute prediction errors."""
    abs_err = np.abs(np.array(predicted) - np.array(groundtruth))
    return np.median(abs_err), iqr(abs_err)
def get_err_median_and_quantile(predicted, groundtruth, percentage):
    """Median and inter-quantile spread of the absolute prediction errors.

    The spread is percentile(percentage*100) - percentile((1-percentage)*100).
    """
    abs_err = np.abs(np.array(predicted) - np.array(groundtruth))
    upper = percentile(abs_err, int(percentage * 100))
    lower = percentile(abs_err, int((1 - percentage) * 100))
    return np.median(abs_err), upper - lower
def get_err_mean_and_quantile(predicted, groundtruth, percentage):
    """Trimmed mean and inter-quantile spread of absolute prediction errors.

    `percentage` is both the trim proportion for the mean and the quantile
    used for the spread (percentile(p*100) - percentile((1-p)*100)).
    """
    abs_err = np.abs(np.array(predicted) - np.array(groundtruth))
    spread = percentile(abs_err, int(percentage * 100)) - percentile(abs_err, int((1 - percentage) * 100))
    return trim_mean(abs_err, percentage), spread
def get_err_mean_and_std(predicted, groundtruth):
    """Mean and standard deviation of the absolute prediction errors."""
    abs_err = np.abs(np.array(predicted) - np.array(groundtruth))
    return np.mean(abs_err), np.std(abs_err)
def get_f1_score(scores, gt, contamination):
    """F1 score of thresholded anomaly scores against ground-truth labels.

    The threshold is the (1 - contamination) percentile of the raw scores,
    computed *before* padding so the padding zeros do not shift it.
    """
    padding_list = [0]*(len(gt) - len(scores))
    # print(padding_list)
    threshold = percentile(scores, 100 * (1 - contamination))

    if len(padding_list) > 0:
        scores = padding_list + scores

    # Bug fix: after padding, `scores` is a plain Python list and
    # `list > float` raises TypeError on Python 3; compare as an array.
    pred_labels = (np.array(scores) > threshold).astype('int').ravel()
    return f1_score(gt, pred_labels)
3e41a3d23f1cd5e224926d0f23ef2a864d4c94cb | 5,654 | py | Python | rrl-sysadmin/sysadmin.py | HyeokjuJang/sr-drl | 01fa8264c7b36f34f721303f455f37545dbce1fe | [
"MIT"
] | 14 | 2020-10-02T17:14:04.000Z | 2022-02-26T19:26:58.000Z | rrl-sysadmin/sysadmin.py | HyeokjuJang/sr-drl | 01fa8264c7b36f34f721303f455f37545dbce1fe | [
"MIT"
] | 1 | 2022-02-26T08:23:13.000Z | 2022-02-26T08:23:13.000Z | rrl-sysadmin/sysadmin.py | jaromiru/sr-drl | 01fa8264c7b36f34f721303f455f37545dbce1fe | [
"MIT"
] | 6 | 2021-05-04T13:24:12.000Z | 2021-12-06T12:51:30.000Z | import gym, random, copy, string, uuid
import numpy as np
rddl_template = string.Template('''
non-fluents nf_sysadmin_inst_$uid {
domain = sysadmin_mdp;
objects {
computer : {$objects};
};
non-fluents {
REBOOT-PROB = $reboot_prob;
$connections
};
}
instance sysadmin_inst_$uid {
domain = sysadmin_mdp;
non-fluents = nf_sysadmin_inst_$uid;
init-state {
$running
};
max-nondef-actions = $maxactions;
horizon = $horizon;
discount = $discount;
}
''')
# ----------------------------------------------------------
class SysAdminEnv(gym.Env):
    """Gym environment for the SysAdmin planning problem.

    Each of `num_obj` computers is either running (1) or down (0).  A running
    machine stays up with a probability that grows with the fraction of its
    running neighbors; a down machine reboots on its own with probability
    REBOOT_PROB.  Each step the agent may reboot a set of machines, paying
    REBOOT_PENALTY per reboot.
    """

    # Probability that a down machine comes back up on its own each step.
    REBOOT_PROB = 0.04
    # Reward penalty per machine the agent chooses to reboot.
    REBOOT_PENALTY = 0.75 # IDEA: change?
    # Upper bound for sampling outgoing connections per node; note that
    # np.random.randint(1, MAX_CONNECTIONS) below is exclusive, so each node
    # draws 1..MAX_CONNECTIONS-1 targets -- TODO confirm this is intended.
    MAX_CONNECTIONS = 3

    def __init__(self, offset=0, save_domain=False, **kwargs):
        """`kwargs` must provide env_num_obj, env_max_steps and multi."""
        random.seed()
        np.random.seed()

        self.num_obj = kwargs["env_num_obj"]
        self.max_steps = kwargs["env_max_steps"]
        self.offset = offset # first-time initialize with random actions
        self.save_domain = save_domain
        self.multi = kwargs["multi"]

    def step(self, actions):
        """Advance one step; `actions` is a sequence of node ids to reboot.

        Returns (state, reward, done, info); automatically resets (and
        returns the fresh state) when the episode ends.
        """
        running_ = self.running.copy()

        # update the running nodes
        for c in range(self.num_obj):
            if self.running[c]:
                conns = self.connections[0, (self.connections[1] == c)] # connections to this node
                n_conns = len(conns)
                n_conns_running = np.sum(self.running[conns])

                # Stay-up probability grows with the fraction of running
                # neighbors.
                # up_prob = 0.45 + 0.5 * (1 + n_conns_running) / (1 + n_conns)
                up_prob = 0.9 * (1 + n_conns_running) / (1 + n_conns) # IDEA: change?
                running_[c] = np.random.binomial(1, up_prob)
            else:
                running_[c] = np.random.binomial(1, self.REBOOT_PROB)

        # restart the selected nodes
        if len(actions) != 0:
            running_[actions] = 1

        # Reward: machines running *before* the transition minus the reboot
        # penalty for each selected machine.
        reward = np.sum(self.running) - self.REBOOT_PENALTY * len(actions)
        self.reward_total += reward
        self.running = running_

        # compute stats
        self.steps += 1
        done = self.steps >= self.max_steps
        s_true = self._get_state()

        info = {
            'd_true': False,
            'done': done,
            'steps': self.steps,
            's_true': s_true,
            'num_obj': self.num_obj,
            'reward_total': self.reward_total
        }

        if done:
            s_ = self.reset()
        else:
            s_ = s_true

        return s_, reward, done, info

    def reset(self):
        """Start a new episode with a fresh random topology; return state."""
        self.steps = 0
        self.reward_total = 0.
        self.running = np.ones(self.num_obj)

        # generate random connections
        self.connections = []
        # IDEA: better graphs?
        for node_a in range(self.num_obj):
            possible_connections = np.delete( np.arange(self.num_obj), node_a )
            conns_ids = np.random.choice(possible_connections, np.random.randint(1, self.MAX_CONNECTIONS), replace=False)

            conns = np.stack([ np.full(len(conns_ids), node_a), conns_ids ])
            self.connections.append(conns)
            # self.connections.append(np.flip(conns, axis=0))

        # Shape (2, num_edges): row 0 = source node, row 1 = target node.
        self.connections = np.concatenate(self.connections, axis=1)
        self.connections = np.unique(self.connections, axis=1)

        # first-time init
        if self.offset > 0:
            offset = self.offset % self.max_steps
            self.offset = 0

            for i in range(offset):
                self.step([]) # noop

        if self.save_domain:
            uid = uuid.uuid4().hex
            fn = f"_plan/sysadmin_inst_{uid}.rddl"
            rddl = self._get_rddl(uid)

            with open(fn, 'wt') as f:
                f.write(rddl)

        return self._get_state()

    def _get_state(self):
        """Graph state: per-node features, edge features (none), edge index."""
        node_feats = self.running.reshape(-1, 1)
        edge_feats = None

        return node_feats, edge_feats, self.connections

    def _get_rddl(self, uid):
        """Render this instance as an RDDL problem via `rddl_template`."""
        objects = ",".join([f"c{i}" for i in range(self.num_obj)])
        connections = " ".join([f"CONNECTED(c{x[0]},c{x[1]});" for x in self.connections.T])
        # NOTE(review): every computer is emitted as running regardless of
        # the values in self.running (only the index i is used) -- confirm
        # this is intended (reset() sets all machines running).
        running = " ".join([f"running(c{i});" for i, x in enumerate(self.running)])
        max_actions = self.num_obj if self.multi else 1

        rddl = rddl_template.substitute(uid=uid, objects=objects, maxactions=max_actions, reboot_prob=self.REBOOT_PROB, connections=connections, running=running, horizon=self.max_steps, discount=1.0)
        return rddl
# ----------------------------------------------------------
import networkx as nx
import matplotlib.pyplot as plt
COLOR_RUNNING = "#cad5fa"
COLOR_DOWN = "#e33c30"
COLOR_SELECTED_R = "#1b3eb5"
COLOR_SELECTED_D = "#701812"
class GraphVisualization:
    """Render a SysAdminEnv topology with networkx/matplotlib.

    Node outline colors encode machine state (down/running, optionally
    "selected for reboot"); node fill and labels optionally encode per-node
    action probabilities.
    """

    def __init__(self, env):
        self.connections = env.connections.T
        self.G = nx.DiGraph()
        self.G.add_edges_from(self.connections)
        # Layout computed once so successive frames are visually comparable.
        self.pos = nx.kamada_kawai_layout(self.G)
        # self.pos = nx.spring_layout(self.G)
        # Index = state code: 0 down, 1 running, 2 down+selected, 3 running+selected.
        self.colors = [COLOR_DOWN, COLOR_RUNNING, COLOR_SELECTED_D, COLOR_SELECTED_R]
        self.update_state(env)

    def update_state(self, env, a=None, probs=None):
        """Refresh colors/labels from the env, selected actions `a` and
        optional per-node probabilities `probs`."""
        states = env.running.copy()
        if (a is not None):
            # Shift selected nodes into the "selected" color range (2/3).
            states[a] += 2

        self.edge_colors = np.array([self.colors[int(x)] for x in states])
        self.edge_colors = self.edge_colors[self.G.nodes] # re-order
        if probs is not None:
            # Label e.g. 0.4 as ".4"; darker fill = higher probability.
            self.node_labels = {i: f"{probs[i]:.1f}".lstrip("0") for i in self.G.nodes}
            self.node_colors = np.array([(1-x, 1-x, 1-x) for x in probs])
            self.node_colors = self.node_colors[self.G.nodes]
        else:
            self.node_labels = None
            self.node_colors = ['w'] * len(states)

    def plot(self):
        """Draw the current graph state and return the pyplot module."""
        plt.clf()
        nx.draw_networkx(self.G, pos=self.pos, labels=self.node_labels, node_color=self.node_colors, edgecolors=self.edge_colors, linewidths=3.0, arrows=True)
        return plt
# ----------------------------------------------------------
# Interactive demo: random actions on a small instance, re-plotted each step.
if __name__ == '__main__':
    NODES = 5

    # Bug fix: SysAdminEnv.__init__ reads kwargs["multi"] unconditionally,
    # so omitting it here raised a KeyError before the demo could start.
    env = SysAdminEnv(env_num_obj=NODES, env_max_steps=10, multi=False)
    s = env.reset()
    gvis = GraphVisualization(env)

    a = -1
    while(True):
        # a = np.random.randint(env.num_obj)
        # Random subset of nodes to reboot, plus fake per-node probabilities
        # purely for visualization.
        a = np.random.choice(NODES, np.random.randint(0, NODES), replace=False)
        probs = np.random.rand(NODES)
        print(a)
        print(probs)

        gvis.update_state(env, a, probs)
        gvis.plot().show()

        s, r, d, i = env.step(a)
        print(a, r)

        if d:
            # New episode means a new random topology; rebuild the layout.
            gvis = GraphVisualization(env)
| 25.241071 | 194 | 0.662363 | 4,342 | 0.767952 | 0 | 0 | 0 | 0 | 0 | 0 | 1,250 | 0.221082 |
3e43d8b9a039af747051e4f38665ccd61353394f | 3,974 | py | Python | core/language_modelling.py | lkwate/e-greedy-lm | 02e81fee93ee93faca0c1eb339b3c5ad55b4a639 | [
"MIT"
] | 1 | 2021-11-09T19:18:00.000Z | 2021-11-09T19:18:00.000Z | core/language_modelling.py | lkwate/e-greedy-lm | 02e81fee93ee93faca0c1eb339b3c5ad55b4a639 | [
"MIT"
] | null | null | null | core/language_modelling.py | lkwate/e-greedy-lm | 02e81fee93ee93faca0c1eb339b3c5ad55b4a639 | [
"MIT"
] | null | null | null | import torch
import torch.optim as optim
from transformers import AutoTokenizer
from .utils import epsilon_greedy_transform_label, uid_variance_fn, OPTIMIZER_DIC
import pytorch_lightning as pl
class RLLMLightningModule(pl.LightningModule):
    """Lightning module for epsilon-greedy language-model training.

    Wraps a seq2seq `model`: labels are perturbed by an epsilon-greedy
    substitution (`epsilon_greedy_transform_label`) before the loss is
    computed, and, when `add_variance` is set, a UID-variance regularizer
    weighted by `beta` is added to the likelihood loss.
    """

    def __init__(
        self,
        model,
        action_table: torch.LongTensor,
        tokenizer: AutoTokenizer,
        learning_rate: float,
        k: int,
        epsilon: int,
        beta: int,
        variance_type: str,
        lr_factor: float,
        lr_patience: int,
        optimizer_name: str,
        add_variance: bool,
    ):
        super(RLLMLightningModule, self).__init__()
        self.model = model
        self.epsilon = epsilon
        self.beta = beta
        # NOTE(review): at construction time self.device is typically CPU and
        # the table is not re-moved when the module later changes device --
        # confirm this is handled elsewhere.
        self.action_table = action_table.to(self.device)
        self.tokenizer = tokenizer
        self.k = k
        self.variance_type = variance_type
        self.learning_rate = learning_rate
        self.lr_factor = lr_factor
        self.lr_patience = lr_patience
        self.optimizer_name = optimizer_name
        self.add_variance = add_variance
        # Chosen once: whether the UID-variance term is added to the loss.
        self.output_transform = (
            self._add_uid_variance_fn
            if self.add_variance
            else self._skip_uid_variance_fn
        )

    def configure_optimizers(self):
        """Build the optimizer (looked up by name in OPTIMIZER_DIC) and a
        ReduceLROnPlateau scheduler monitoring `val_loss`."""
        optimizer = OPTIMIZER_DIC[self.optimizer_name](
            self.model.parameters(), lr=self.learning_rate
        )
        lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(
            optimizer, "min", factor=self.lr_factor, patience=self.lr_patience
        )
        output = {
            "optimizer": optimizer,
            "lr_scheduler": lr_scheduler,
            "monitor": "val_loss",
        }
        return output

    def _add_uid_variance_fn(self, loss, logits, labels, variance_type):
        """Add the beta-weighted UID variance to `loss`; return the combined
        loss and a dict of detached metrics for logging."""
        uid_variance = uid_variance_fn(logits, labels, variance_type=variance_type)
        output = {"likelihood": loss.detach(), "uid_variance": uid_variance.detach()}
        loss = loss + self.beta * uid_variance
        return loss, output

    def _skip_uid_variance_fn(self, loss, logits, labels, variance_type):
        """No-op counterpart of _add_uid_variance_fn (same signature)."""
        return loss, {}

    def _compute_loss(self, input_ids, attention_mask, decoder_attention_mask, labels):
        """Epsilon-greedy transform the labels, run the model and return
        (loss, metrics-dict) via the configured output transform."""
        labels = epsilon_greedy_transform_label(
            labels, self.action_table, self.tokenizer, epsilon=self.epsilon
        )
        output = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            labels=labels,
        )
        loss, logits = output.loss, output.logits
        return self.output_transform(
            loss, logits, labels, variance_type=self.variance_type
        )

    def _unpack_batch(self, batch):
        """Split a batch dict into (input_ids, attention_mask,
        decoder_attention_mask, labels)."""
        input_ids, attention_mask, decoder_attention_mask, labels = (
            batch["encoder_input_ids"],
            batch["encoder_attention_mask"],
            batch["decoder_attention_mask"],
            batch["decoder_input_ids"],
        )
        return input_ids, attention_mask, decoder_attention_mask, labels

    def training_step(self, batch, batch_idx):
        """One training step: compute the loss and log all metrics."""
        input_ids, attention_mask, decoder_attention_mask, labels = self._unpack_batch(
            batch
        )
        loss, output = self._compute_loss(
            input_ids, attention_mask, decoder_attention_mask, labels
        )
        output["loss"] = loss
        self.log_dict(output)
        return output

    def validation_step(self, batch, batch_idx):
        """One validation step; logs `val_loss` (monitored by the scheduler)."""
        input_ids, attention_mask, decoder_attention_mask, labels = self._unpack_batch(
            batch
        )
        loss, output = self._compute_loss(
            input_ids, attention_mask, decoder_attention_mask, labels
        )
        output["val_loss"] = loss
        self.log_dict(output, prog_bar=True)
        return output

    def test_step(self, batch, batch_idx):
        """Testing reuses the validation logic."""
        return self.validation_step(batch, batch_idx)

    def generate(self, input_ids):
        """Delegate generation to the wrapped model."""
        return self.model.generate(input_ids)
| 32.842975 | 87 | 0.638903 | 3,778 | 0.950679 | 0 | 0 | 0 | 0 | 0 | 0 | 177 | 0.04454 |
3e46f25a0298cc777cd9c283c93eaadaceb537e7 | 324 | py | Python | tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | [
"Apache-2.0"
] | null | null | null | tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | [
"Apache-2.0"
] | null | null | null | tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | [
"Apache-2.0"
] | null | null | null | from unittest import TestCase
from elastic_workplace_search.client import Client
class TestClient(TestCase):
    """Tests for constructing the Workplace Search Client."""

    dummy_authorization_token = 'authorization_token'

    def setUp(self):
        # Consistency fix: reuse the class-level token instead of repeating
        # the literal (same value, single source of truth).
        self.client = Client(self.dummy_authorization_token)

    def test_constructor(self):
        self.assertIsInstance(self.client, Client)
| 23.142857 | 53 | 0.756173 | 240 | 0.740741 | 0 | 0 | 0 | 0 | 0 | 0 | 42 | 0.12963 |
3e49611f7036088bee4b0176d2681701d3c8a29d | 4,437 | py | Python | test/hash_url.py | neotext/neotext-django-server | 7cfe98cd541ade9b26a1877f627e45a986b011e8 | [
"MIT"
] | null | null | null | test/hash_url.py | neotext/neotext-django-server | 7cfe98cd541ade9b26a1877f627e45a986b011e8 | [
"MIT"
] | 7 | 2015-11-30T02:59:23.000Z | 2016-10-06T15:52:52.000Z | test/hash_url.py | neotext/neotext-django-server | 7cfe98cd541ade9b26a1877f627e45a986b011e8 | [
"MIT"
] | null | null | null | from neotext.lib.neotext_quote_context.quote import Quote
t0 = Quote(
citing_quote="""<p>I am sick and tired of watching folks like Boris Johnson, Marine Le Pen, Donald Trump and others appeal to the worst racial instincts of our species, only to be shushed by folks telling me that it’s not <i>really</i> racism driving their popularity. It’s economic angst. It’s regular folks tired of being spurned by out-of-touch elites. It’s a natural anxiety over rapid cultural change.</p>
<p>Maybe it’s all those things. But at its core, it’s the last stand of old people who have been frightened to death by cynical right-wing media empires and the demagogues who enable them—all of whom have based their appeals on racism as overt as anything we’ve seen in decades. It’s loathsome beyond belief, and not something I thought I’d ever see in my lifetime. But that’s where we are.</p>""",
citing_url='http://www.neotext.net/www.interfluidity.com/v2/6602.html',
cited_url='http://www.motherjones.com/kevin-drum/2016/06/brexit-wins'
)
t0.hashkey()
t0.hash()
from neotext.lib.neotext_quote_context.quote import Quote
t2 = Quote(
citing_quote="""
<p>I am sick and tired of watching folks like Boris Johnson, Marine Le Pen, Donald Trump and others appeal to the worst racial instincts of our species, only to be shushed by folks telling me that it’s not <i>really</i> racism driving their popularity. It’s economic angst. It’s regular folks tired of being spurned by out-of-touch elites. It’s a natural anxiety over rapid cultural change.</p>
<p>Maybe it’s all those things. But at its core, it’s the last stand of old people who have been frightened to death by cynical right-wing media empires and the demagogues who enable them—all of whom have based their appeals on racism as overt as anything we’ve seen in decades. It’s loathsome beyond belief, and not something I thought I’d ever see in my lifetime. But that’s where we are.</p>
""",
citing_url='http://www.neotext.net/www.interfluidity.com/v2/6602.html',
cited_url='http://www.motherjones.com/kevin-drum/2016/06/brexit-wins'
)
t2.hashkey()
t2.hash()
t1 = Quote (
citing_quote="one does not live by bread alone, "
"but by every word that comes from the mouth of the Lord",
citing_url='http://www.neotext.net/demo/',
cited_url='https://www.biblegateway.com/passage/?search=Deuteronomy+8&version=NRSV'
)
# Bug fix: `t1.hash() = '...'` is a SyntaxError (cannot assign to a function
# call).  The line records the expected digest of t1:
# t1.hash() == '32b19d9333fff69d16d5bf89bc1eb76f6b39eb58'
t1.data()['citing_context_before'] = 'ted texts on biblegateway.com; and the Al Gore example referenced an article on the washingtonpost.com. Using Neotext allows the reader to more easily make the intertextual connections between the two verses, without having to leave the current page. How the Neotext Quote-Context Service Works The example I’ve given you is made possible through a WordPress Plugin that operates on the html <blockquote> tag: <blockquotecite=”https://www.biblegateway.com/passage/?search=Deuteronomy+8&version=NRSV”>'
t1.data()['citing_context_after'] = '</blockquote> As part of the wordpress saving process, the WordPress Plugin submits the url of the post to the Neotext Web Service, which looks up the surrounding context of each quote and creates json files for each citation. Each quote’s citation file is uploaded to Amazon S3 for later retreival by the client. On the client side, the Neotext custom jQuery library: uses the url from each blockquote “cite” attribute and the quote hashes the url and quote text looks up the previously generated json from the hash: http://read.neotext.net/quote/sha1/0.02/32/32b19d9333fff69d16d5bf89bc1eb76f6b39eb58.json injects the content from the json fields into hidden divs, which are made visible when the user clicks the arrows or link: Code Example: The code for displaying the looked-up information is part of a free open source jQuery plugin called neotext-quote-context.js, available as a wordpress or jQuery plugin. Get simplified html source of this page Download neotext-sample.html (view online) Sav'
t2 = Quote(
    citing_quote="I took the initiative in creating the Internet.",
    # Bug fix: the comma after citing_url was missing, which made the whole
    # file a SyntaxError.
    citing_url="http://www.neotext.net/demo/",
    cited_url="https://www.washingtonpost.com/news/fact-checker/wp/2013/11/04/a-cautionary-tale-for-politicians-al-gore-and-the-invention-of-the-internet/"
)
| 90.55102 | 1,038 | 0.771918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,979 | 0.893555 |
3e49ee4375c4fdbca12777a89f48b0e9f1e01d7a | 3,590 | py | Python | tests/imperative_vs_reactive/test_get_daily_average.py | BastiTee/bastis-python-toolbox | c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8 | [
"Apache-2.0"
] | 1 | 2016-04-06T14:09:43.000Z | 2016-04-06T14:09:43.000Z | tests/imperative_vs_reactive/test_get_daily_average.py | BastiTee/bastis-python-toolbox | c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8 | [
"Apache-2.0"
] | null | null | null | tests/imperative_vs_reactive/test_get_daily_average.py | BastiTee/bastis-python-toolbox | c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8 | [
"Apache-2.0"
] | 1 | 2022-03-19T04:21:40.000Z | 2022-03-19T04:21:40.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Test suite for the daily average Toggl API process."""
from random import random
from tempfile import NamedTemporaryFile
from time import sleep, time
from unittest import TestCase
from recipes.imperative_vs_reactive.get_daily_average_imp import \
get_avg_daily_working_hours as imp
from recipes.imperative_vs_reactive.get_daily_average_rx import \
get_avg_daily_working_hours as rx
class TestSuite(TestCase):
    """Integration test comparing the imperative and reactive pipelines."""

    def test_integration(self):
        """Integration test for get_daily_average."""
        # Between 23th of April and 4th of May we spend an average
        # of 3.981 simulated hours at work for the given 4-hour contract.
        from_day = '2018-04-23'
        to_day = '2018-05-04'
        expected_worktime_average = 3.98056125
        expected_workdays = 10

        # Run test for imperative implementation using a mocked client
        now = time()
        tmp_file_imp = NamedTemporaryFile()
        result_imp = imp(from_day, to_day, tmp_file_imp.name,
                         MockedTogglApiClient)
        time_imp = time() - now
        print('----')

        # Run test for reactive implementation using a mocked client
        now = time()
        tmp_file_rx = NamedTemporaryFile()
        result_rx = rx(from_day, to_day, tmp_file_rx.name,
                       MockedTogglApiClient)
        time_rx = time() - now
        print('----')

        # Check results.  (assertEqual: the assertEquals alias was deprecated
        # and removed in Python 3.12.)
        self.assertEqual(result_imp,
                         (expected_worktime_average, expected_workdays))
        self.assertEqual(result_rx,
                         (expected_worktime_average, expected_workdays))

        # Print results.  (Bug fix: the imp line previously printed
        # result_rx[1] for the day count instead of result_imp[1].)
        print(f'imp-result = {round(result_imp[0], 2)} h '
              + f'@{result_imp[1]} days (took: {round(time_imp, 4)} sec)')
        print(
            f'rx-result = {round(result_rx[0], 2)} h '
            + f'@{result_rx[1]} days (took: {round(time_rx, 4)} sec)')
        print(f'rx speed-up = {time_imp / time_rx}')
class MockedTogglApiClient():
    """A mocked Toggl API client.

    Simulates a 4-hour work contract: the canned daily totals correspond to
    roughly 3.8-4.2 working hours per weekday (weekends are 0), and every
    request sleeps 0.0-0.5 seconds to mimic the real API's latency.
    """

    # Canned per-day totals (milliseconds worked), 2018-04-23 .. 2018-05-04.
    _DAILY_TOTALS = {
        '2018-04-23': 14853641,  # ~4.126 h
        '2018-04-24': 13725371,
        '2018-04-25': 14209405,
        '2018-04-26': 13969792,
        '2018-04-27': 14591221,
        '2018-04-28': 0,  # weekend
        '2018-04-29': 0,  # weekend
        '2018-04-30': 14012216,
        '2018-05-01': 14802751,
        '2018-05-02': 14752767,
        '2018-05-03': 14601954,
        '2018-05-04': 13781087,
    }

    def __init__(self, credentials=None):
        """Build the canned range->total lookup (credentials are ignored)."""
        self.fake_values = {
            f'{day}T00:00:00>>{day}T23:59:59': total
            for day, total in self._DAILY_TOTALS.items()
        }

    def get_working_hours_for_range(self, range_from, range_to):
        """Return the canned total for the range after a simulated delay."""
        sleep(random() / 2)  # fake network latency: 0.0-0.5 seconds
        return self.fake_values.get(f'{range_from}>>{range_to}', 0)
# Allow running this integration test directly, without a test runner.
if __name__ == '__main__':
    TestSuite().test_integration()
| 39.450549 | 79 | 0.620334 | 3,078 | 0.857382 | 0 | 0 | 0 | 0 | 0 | 0 | 1,616 | 0.450139 |
3e4a37d31db8b27c20ff44c3b6b28b18b2dd20b1 | 4,077 | py | Python | pox/stats_monitor.py | nachtkatze/sdn-diagnosis | 22b187d276bf302ef5811abc946b1af125dd17bc | [
"Apache-2.0"
] | null | null | null | pox/stats_monitor.py | nachtkatze/sdn-diagnosis | 22b187d276bf302ef5811abc946b1af125dd17bc | [
"Apache-2.0"
] | null | null | null | pox/stats_monitor.py | nachtkatze/sdn-diagnosis | 22b187d276bf302ef5811abc946b1af125dd17bc | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Oscar Araque
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A skeleton POX component
You can customize this to do whatever you like. Don't forget to
adjust the Copyright above, and to delete the Apache license if you
don't want to release under Apache (but consider doing so!).
Rename this file to whatever you like, .e.g., mycomponent.py. You can
then invoke it with "./pox.py mycomponent" if you leave it in the
ext/ directory.
Implement a launch() function (as shown below) which accepts commandline
arguments and starts off your component (e.g., by listening to events).
Edit this docstring and your launch function's docstring. These will
show up when used with the help component ("./pox.py help --mycomponent").
"""
# Import some POX stuff
from pox.core import core # Main POX object
import pox.openflow.libopenflow_01 as of # OpenFlow 1.0 library
import pox.lib.packet as pkt # Packet parsing/construction
from pox.lib.addresses import EthAddr, IPAddr # Address types
import pox.lib.util as poxutil # Various util functions
import pox.lib.revent as revent # Event library
import pox.lib.recoco as recoco # Multitasking library
from pox.openflow.of_json import *
import multiprocessing
import json
# Create a logger for this component
log = core.getLogger("Monitor")
def _send_to_pipe(data):
    """Blocking write of *data* to the shared-memory FIFO read by an
    external consumer. Runs in a child process (see _to_pipe)."""
    pipe = open('/dev/shm/poxpipe', 'w')
    try:
        pipe.write(data)
    finally:
        pipe.close()
def _to_pipe(data):
    """Hand *data* to _send_to_pipe in a child process so that a blocking
    open() on the FIFO (no reader attached yet) cannot stall POX's event
    loop.

    NOTE(review): the child is never join()ed, so finished writers linger
    until the interpreter reaps them -- confirm this is acceptable here.
    """
    p = multiprocessing.Process(target=_send_to_pipe, args=(data,))
    p.start()
def _go_up (event):
  """Log readiness once POX reaches the 'up' state."""
  # Event handler called when POX goes into up state
  # (we actually listen to the event in launch() below)
  log.info("Monitor application ready.")
def _request_stats():
    """Ask every connected switch for its flow and port statistics.

    Replies arrive asynchronously as FlowStatsReceived / PortStatsReceived
    events (handled below). Invoked periodically by the recurring Timer
    set up in launch().
    """
    log.debug('Number of connections: {}'.format(len(core.openflow.connections)))
    log.info('Sending stats requests')
    for connection in core.openflow.connections:
        log.debug("Sending stats request")
        connection.send(of.ofp_stats_request(body=of.ofp_flow_stats_request()))
        connection.send(of.ofp_stats_request(body=of.ofp_port_stats_request()))
def _handle_flowstats(event):
    """Serialize a switch's flow-stats reply and forward it to the pipe."""
    stats = flow_stats_to_list(event.stats)
    dpid = poxutil.dpidToStr(event.connection.dpid)
    log.debug('Received flow stats from {}'.format(dpid))
    data = {'type': 'switch_flowstats', 'data': {'switch': dpid, 'stats': stats}}
    log.debug(data)
    data = json.dumps(data)
    # '#' terminates a message; presumably the pipe reader splits on it.
    data += '#'
    _to_pipe(data)
def _handle_portstats(event):
    """Serialize a switch's port-stats reply and forward it to the pipe."""
    switch_id = poxutil.dpidToStr(event.connection.dpid)
    port_stats = flow_stats_to_list(event.stats)
    log.debug('Received port stats from {}'.format(switch_id))
    message = {'type': "switch_portstats",
               "data": {'switch': switch_id, 'stats': port_stats}}
    # '#' acts as the message delimiter on the pipe.
    _to_pipe(json.dumps(message) + '#')
def _handle_LinkEvent(event):
    """Forward a topology-discovery link up/down notification to the pipe."""
    # A LinkEvent carries added/removed flags; the link counts as "up"
    # exactly when it was just added and not removed.
    is_up = event.added is True and event.removed is False
    # NOTE(review): confirm event.link.end is JSON-serializable --
    # json.dumps() below raises TypeError on non-primitive objects.
    link = event.link.end
    data = {'type': 'linkstats', 'data': {'link':link, 'up': is_up}}
    data = json.dumps(data)
    data += '#'
    _to_pipe(data)
@poxutil.eval_args
def launch (bar = False):
  """
  Component entry point: wires up readiness, topology-discovery and
  statistics handlers, then polls every switch for stats every 7 seconds.
  """
  log.warn("Bar: %s (%s)", bar, type(bar))
  core.addListenerByName("UpEvent", _go_up)
  core.openflow_discovery.addListenerByName("LinkEvent", _handle_LinkEvent)
  core.openflow.addListenerByName("FlowStatsReceived", _handle_flowstats)
  core.openflow.addListenerByName("PortStatsReceived", _handle_portstats)
  # Recurring 7-second timer driving the stats requests above.
  recoco.Timer(7, _request_stats, recurring=True)
| 36.72973 | 81 | 0.709345 | 0 | 0 | 0 | 0 | 484 | 0.118715 | 0 | 0 | 1,998 | 0.490066 |
3e4a39484ed02c469223ab4065ec6d989a83a302 | 7,623 | py | Python | tests/app_example.py | omarryhan/flask-stateless-auth | c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a | [
"MIT"
] | 3 | 2018-09-13T19:55:47.000Z | 2018-09-15T18:31:22.000Z | tests/app_example.py | omarryhan/flask-stateless-auth | c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a | [
"MIT"
] | null | null | null | tests/app_example.py | omarryhan/flask-stateless-auth | c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a | [
"MIT"
] | null | null | null | import os
import datetime
import secrets
import json
from flask import Flask, abort, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from werkzeug.security import safe_str_cmp
from flask_stateless_auth import (
StatelessAuthError,
StatelessAuthManager,
current_stateless_user,
UserMixin,
TokenMixin,
token_required,
)
# Module-level singletons: the SQLAlchemy handle, the stateless-auth
# manager and the Flask app (all bound together under __main__ below).
db = SQLAlchemy()
stateless_auth_manager = StatelessAuthManager()
app = Flask(__name__.split(".")[0])
class Config:
    """Flask / Flask-Stateless-Auth / SQLAlchemy settings for the example app."""

    # Stateless auth configs
    # DEFAULT_AUTH_TYPE = 'Bearer' # Default
    # TOKEN_HEADER = 'Authorization'# Default
    # ADD_CONTEXT_PROCESSOR = True # Default
    # Other configs
    TESTING = False
    TOKENS_BYTES_LENGTH = 32  # random bytes drawn per generated token
    ACCESS_TOKEN_DEFAULT_EXPIRY = 3600  # seconds
    REFRESH_TOKEN_DEFAULT_EXPIRY = 365  # days
    DB_NAME = "flask_stateless_auth_db"
    SQLALCHEMY_DATABASE_URI = "sqlite:///" + DB_NAME
    SQLALCHEMY_TRACK_MODIFICATIONS = False
class User(db.Model, UserMixin):
    """Minimal user model with a one-to-one ApiToken (via the 'user' backref)."""

    __tablename__ = "user"
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String, unique=True)
    # uselist=False makes user.api_token a single row, not a list.
    api_token = db.relationship("ApiToken", backref="user", uselist=False)
class ApiToken(db.Model, TokenMixin):
    """Access/refresh token pair owned by a single user.

    The access token is short-lived (seconds) and the refresh token is
    long-lived (days); both are regenerated together by refresh_tokens().
    """

    __tablename__ = "api_token"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    refresh_token = db.Column(db.String, nullable=False, unique=True, index=True)
    access_token = db.Column(db.String, nullable=False, unique=True, index=True)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
    # BUG FIX: pass the callable, not datetime.datetime.now() -- the call
    # was evaluated once at import time, so every row relying on the column
    # default would have been stamped with the process start time.
    created_on = db.Column(db.DateTime, nullable=False, default=datetime.datetime.now)
    refresh_token_expiry = db.Column(
        db.Integer, nullable=False, default=Config.REFRESH_TOKEN_DEFAULT_EXPIRY
    )
    access_token_expiry = db.Column(
        db.Integer, nullable=False, default=Config.ACCESS_TOKEN_DEFAULT_EXPIRY
    )

    def __init__(
        self,
        user_id,
        refresh_token_expiry=None,
        access_token_expiry=None,
        tokens_bytes_length=Config.TOKENS_BYTES_LENGTH,
    ):
        """Create a token pair for *user_id*, optionally overriding the
        expiry windows (ints only; anything else falls back to defaults)."""
        self.user_id = user_id
        if refresh_token_expiry and type(refresh_token_expiry) is int:
            self.refresh_token_expiry = refresh_token_expiry
        if access_token_expiry and type(access_token_expiry) is int:
            self.access_token_expiry = access_token_expiry
        # Generate the initial access/refresh token pair.
        self.refresh_tokens(tokens_bytes_length)

    def refresh_tokens(self, tokens_bytes_length=Config.TOKENS_BYTES_LENGTH):
        """Rotate both tokens and reset the creation timestamp."""
        self.access_token = secrets.base64.standard_b64encode(
            secrets.token_bytes(tokens_bytes_length)
        ).decode("utf-8")
        self.refresh_token = secrets.base64.standard_b64encode(
            secrets.token_bytes(tokens_bytes_length)
        ).decode("utf-8")
        self.created_on = datetime.datetime.now()

    @property
    def access_is_expired(self):
        """True once the access token has outlived access_token_expiry seconds."""
        expiry_time = self.created_on + datetime.timedelta(
            seconds=self.access_token_expiry
        )
        return datetime.datetime.now() > expiry_time

    @property
    def refresh_is_expired(self):
        """True once the refresh token has outlived refresh_token_expiry days."""
        expiry_time = self.created_on + datetime.timedelta(
            days=self.refresh_token_expiry
        )
        return datetime.datetime.now() > expiry_time

    def token_expired(self, token_type, auth_type):
        """Expiry hook called by Flask-Stateless-Auth for a given token type.

        Raises NameError for an unknown *token_type* (programmer error).
        """
        if token_type == "access":
            return self.access_is_expired
        if token_type == "refresh":
            return self.refresh_is_expired
        raise NameError("Invalid token name")

    @property
    def as_dict(self):
        """JSON-serializable view of the token pair returned to API clients."""
        return {
            "access_token": self.access_token,
            "expiry": self.access_token_expiry,
            "refresh_token": self.refresh_token,
        }
@stateless_auth_manager.user_loader
def user_by_token(token):
    """Resolve an ApiToken model to its owning User.

    Every stored token must reference a valid user, so a missing user is
    treated as a server-side integrity error (500), not an auth failure.
    """
    try:
        user = User.query.filter_by(id=token.user_id).one()
    except NoResultFound:
        raise StatelessAuthError(
            msg="Server error", code=500, type_="Server"
        )  # Tokens should always have a user, hence the 500 not a 401
    except Exception as e:
        raise StatelessAuthError(msg="Server error", code=500, type_="Server")
        # log.critical(e)
    else:
        return user
@stateless_auth_manager.token_loader
def token_model_by(token, auth_type, token_type="access"):
    """Load the ApiToken row matching *token* of the given *token_type*.

    Raises StatelessAuthError(401) when no row matches and
    StatelessAuthError(500) on unexpected database errors.
    """
    # BUG FIX: an unknown token_type previously fell through the if/elif,
    # leaving token_model unbound and crashing with NameError on return.
    if token_type not in ("access", "refresh"):
        raise ValueError("Unknown token_type: {!r}".format(token_type))
    try:
        if token_type == "access":
            token_model = ApiToken.query.filter_by(access_token=token).one()
        else:
            token_model = ApiToken.query.filter_by(refresh_token=token).one()
    except NoResultFound:
        raise StatelessAuthError(
            msg="{} token doesn't belong to a user".format(token_type),
            code=401,
            type_="Token",
        )
    except Exception as e:
        # log.critical(e)
        raise StatelessAuthError(msg="Server error", code=500, type_="Server")
    else:
        return token_model
@app.route("/")
def index():
    """Unauthenticated root endpoint; simple liveness check."""
    return "hello", 200
@app.route("/user", methods=["GET", "POST", "PUT", "DELETE"])
def user_endpoint():
    """Create (POST) or delete (DELETE) a user identified by JSON 'username'.

    NOTE(review): GET and PUT are accepted but fall through both branches,
    returning 201 "Success!" without doing anything, and json.loads()
    raises on an empty request body -- confirm this is intended for the
    test app.
    """
    data = json.loads(request.data)
    if request.method == "POST":
        user = User(username=data["username"])
        db.session.add(user)
    elif request.method == "DELETE":
        # NOTE(review): .first() returns None for an unknown username,
        # making db.session.delete(None) raise.
        user = User.query.filter_by(username=data["username"]).first()
        db.session.delete(user)
    db.session.commit()
    data = {"msg": "Success!"}
    return jsonify(data), 201
@app.route("/create_token", methods=["POST"])
def create_token():
    """Issue (or rotate) the token pair for the user named in the JSON body."""
    data = json.loads(request.data)
    # NOTE(review): .first() yields None for an unknown username, so the
    # attribute access below would raise -- confirm callers always exist.
    user = User.query.filter_by(username=data["username"]).first()
    if user.api_token:
        # Existing pair: rotate in place instead of creating a second row.
        token = user.api_token
        token.refresh_tokens()
    else:
        token = ApiToken(user_id=user.id)
    db.session.add(token)
    db.session.commit()
    return jsonify(token.as_dict), 201
@app.route("/delete_token", methods=["DELETE"])
def delete_token():
    """Delete the token pair belonging to the user named in the JSON body.

    .one() raises NoResultFound for an unknown username.
    """
    data = json.loads(request.data)
    token = User.query.filter_by(username=data["username"]).one().api_token
    db.session.delete(token)
    db.session.commit()
    return jsonify({"msg": "Success!"}), 201
@app.route("/refresh_token", methods=["PUT"])
@token_required(token_type="refresh")
def refresh_token():
    """Rotate the caller's token pair; requires a valid *refresh* token."""
    current_stateless_user.api_token.refresh_tokens()
    db.session.add(current_stateless_user.api_token)
    db.session.commit()
    return jsonify(current_stateless_user.api_token.as_dict), 201
@app.route("/secret", methods=["GET"])
@token_required(token_type="access")  # access by default
def secret():
    """Access-token-protected endpoint returning a fixed payload."""
    data = {"secret": "Stateless auth is awesome :O"}
    return jsonify(data), 200
@app.route("/whoami", methods=["GET"])
@token_required
def whoami():
    """Return the authenticated user's username (bare-decorator form)."""
    data = {"my_username": current_stateless_user.username}
    return jsonify(data), 200
@app.route("/no_current_stateless_user")
def no_current_stateless_user():
    """Report the current stateless user's name, or "None" when the proxy
    is unset (endpoint is not token-protected)."""
    if current_stateless_user:
        username = current_stateless_user.username
    else:
        username = "None"
    return jsonify({"current_stateless_username": username}), 200
@app.errorhandler(StatelessAuthError)
def handle_stateless_auth_error(error):
    """Render any StatelessAuthError as a JSON body with its HTTP code."""
    return jsonify({"error": error.full_msg}), error.code
if __name__ == "__main__":
    # Script entry point: load config, bind SQLAlchemy, create the tables,
    # attach the stateless-auth manager, then serve.
    app.config.from_object(Config())
    db.init_app(app)
    with app.app_context():
        db.create_all()
    stateless_auth_manager.init_app(app)
    app.run()
3e4e3e3f65d730e416b620ade003178d96c61532 | 920 | py | Python | stereo/stereo.py | whaleygeek/microbit_python | 1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105 | [
"MIT"
] | 8 | 2016-11-15T23:04:25.000Z | 2021-05-17T17:42:47.000Z | stereo/stereo.py | whaleygeek/microbit_python | 1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105 | [
"MIT"
] | null | null | null | stereo/stereo.py | whaleygeek/microbit_python | 1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105 | [
"MIT"
] | null | null | null | from microbit import *
import music
# Two-channel (stereo) tone controller for the BBC micro:bit: an analog
# resistor-ladder input on pin2 selects which speaker pins (pin0 = left,
# pin1 = right) play a fixed tone; the outer display columns act as
# channel indicators.
A = False  # NOTE(review): unused below -- leftover from an earlier revision?
B = False  # NOTE(review): unused below
PITCH = 440  # tone frequency in Hz
# Analog thresholds on pin2 read_analog(); each button pulls the reading
# below a distinct level.
# PIN2 read_analog()
ACTION_VALUE = 50
VOLUMEUP_VALUE = 150
VOLUMEDOWN_VALUE = 350
#nothing: 944
# Previous and current desired on/off state of the left/right channels.
prev_l = False
prev_r = False
l = False
r = False
while True:
    # Classify the analog reading into one of four channel combinations.
    v = pin2.read_analog()
    if v < ACTION_VALUE:
        l,r = True, True
    elif v < VOLUMEUP_VALUE:
        l,r = False, True
    elif v < VOLUMEDOWN_VALUE:
        l,r = True, False
    else:
        l,r = False, False
    # Edge-triggered updates: start/stop tones only when a channel's
    # state changes, not on every loop iteration.
    if l != prev_l:
        prev_l = l
        if l:
            music.pitch(PITCH, pin=pin0)
            display.set_pixel(0,2,9)
        else:
            display.set_pixel(0,2,0)
            music.stop(pin0)
    if r != prev_r:
        prev_r = r
        if r:
            display.set_pixel(4,2,9)
            music.pitch(PITCH, pin=pin1)
        else:
            display.set_pixel(4,2,0)
            music.stop(pin1)
3e50073943f2d59f2a64f9e25a36110605822852 | 1,062 | py | Python | comments/migrations/0004_auto_20170531_1011.py | salazarpardo/redinnovacion | 3f7c13af0af1887112a0492aea7782871fba0129 | [
"CC-BY-3.0"
] | null | null | null | comments/migrations/0004_auto_20170531_1011.py | salazarpardo/redinnovacion | 3f7c13af0af1887112a0492aea7782871fba0129 | [
"CC-BY-3.0"
] | null | null | null | comments/migrations/0004_auto_20170531_1011.py | salazarpardo/redinnovacion | 3f7c13af0af1887112a0492aea7782871fba0129 | [
"CC-BY-3.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Create CommentLike: one "like" per (comment, user) pair, with
    creation/edition timestamps and a uniqueness constraint on the pair."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('comments', '0003_comment_public'),
    ]

    operations = [
        migrations.CreateModel(
            name='CommentLike',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created_at', models.DateTimeField(help_text='creation date', auto_now_add=True)),
                ('updated_at', models.DateTimeField(help_text='edition date', auto_now=True, null=True)),
                ('comment', models.ForeignKey(to='comments.Comment')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='commentlike',
            unique_together=set([('comment', 'user')]),
        ),
    ]
3e50929d0bf53c378eb006069dd354ec7a7241ac | 1,096 | py | Python | hospital/migrations/0011_auto_20210502_1057.py | Shreyashm16/Hospital-Appointment-and-Information-System | 929b7eb22cc6a0399e6fff3c7012d1c65d7c47cb | [
"MIT"
] | 7 | 2021-07-15T08:59:58.000Z | 2021-12-29T20:21:36.000Z | hospital/migrations/0011_auto_20210502_1057.py | siddharth25pandey/Hospital-Information-Appointment-System | 1df5edd1f0dc2f0f385e7195db221027b4f64efb | [
"MIT"
] | null | null | null | hospital/migrations/0011_auto_20210502_1057.py | siddharth25pandey/Hospital-Information-Appointment-System | 1df5edd1f0dc2f0f385e7195db221027b4f64efb | [
"MIT"
] | 4 | 2021-05-11T08:36:02.000Z | 2021-08-08T11:45:11.000Z | # Generated by Django 3.1.5 on 2021-05-02 05:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Drop Appointment.appointmentDate and introduce DoctorProfessional,
    holding a doctor's working hours, break window and fees."""

    dependencies = [
        ('hospital', '0010_appointment_finished'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='appointment',
            name='appointmentDate',
        ),
        migrations.CreateModel(
            name='DoctorProfessional',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('starttime', models.TimeField()),
                ('endtime', models.TimeField()),
                ('breakstarttime', models.TimeField(null=True)),
                ('breakendtime', models.TimeField(null=True)),
                ('appfees', models.FloatField()),
                ('admfees', models.FloatField()),
                ('doctor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='DoctorProfessional', to='hospital.doctor')),
            ],
        ),
    ]
3e509827f57ba47184b958f8189726f8a1765c22 | 87 | py | Python | division.py | ReverseScale/PyDemo | 9cc6f3cbb8482f6e403bf65419537b0163798e61 | [
"MIT"
] | null | null | null | division.py | ReverseScale/PyDemo | 9cc6f3cbb8482f6e403bf65419537b0163798e61 | [
"MIT"
] | null | null | null | division.py | ReverseScale/PyDemo | 9cc6f3cbb8482f6e403bf65419537b0163798e61 | [
"MIT"
] | null | null | null | try:
print(5/0)
except ZeroDivisionError:
print('你是啥子吗!')
else:
print('算了') | 14.5 | 25 | 0.62069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.257426 |
3e522957a432795bf32198db1cc68b1e2615e3f9 | 1,924 | py | Python | Script/calculate_RMSD.py | dhruvsangamwar/Protein-structure-prediction | 99364bfd62f8293ddbe8e2c9a86ca7850b270d44 | [
"MIT"
] | 1 | 2022-01-30T08:20:08.000Z | 2022-01-30T08:20:08.000Z | Script/calculate_RMSD.py | dhruvsangamwar/ECS_129_Protein-structure-prediction | 99364bfd62f8293ddbe8e2c9a86ca7850b270d44 | [
"MIT"
] | null | null | null | Script/calculate_RMSD.py | dhruvsangamwar/ECS_129_Protein-structure-prediction | 99364bfd62f8293ddbe8e2c9a86ca7850b270d44 | [
"MIT"
] | null | null | null | import pdbCleanup as pc
import fxndefinitions as f
import numpy as np
from numpy.linalg import eig
pc.takeInput1()
DataFrame1 = []
pc.CsvToDataframe(DataFrame1)
pc.takeInput2()
DataFrame2 = []
pc.CsvToDataframe(DataFrame2)
xtil = [0, 0, 0]
ytil = [0, 0, 0]
x = np.array(DataFrame1)
y = np.array(DataFrame2)
# This finds the number of CA atoms in both of the proteins
N1 = np.size(xtil, 0)
N2 = np.size(ytil, 0)
# finding the average of the x coords in protein 1 and 2 (arr1 & 2)
# these two functions calculate the barycenter
# Here we will be finding Xtil && Ytil = X && Y - G
Gx = f.findG(x, N1)
Gy = f.findG(y, N2)
xtil = np.subtract(x, Gx)
ytil = np.subtract(x, Gy)
# we now have the ~x_k Coords and the ~y_k Coords respectively
# this function will calculate all the 9 R values
R11 = R12 = R13 = R21 = R22 = R23 = R31 = R32 = R33 = 0
for i in range(0, N1):
R11 += xtil[i][0] * ytil[i][0]
R12 += xtil[i][0] * ytil[i][1]
R13 += xtil[i][0] * ytil[i][2]
R21 += xtil[i][1] * ytil[i][0]
R22 += xtil[i][1] * ytil[i][1]
R23 += xtil[i][1] * ytil[i][2]
R31 += xtil[i][2] * ytil[i][0]
R32 += xtil[i][2] * ytil[i][1]
R33 += xtil[i][2] * ytil[i][2]
# matrix given by equation 10 from the paper
Matrix = np.array([[R11+R22+R33, R23-R32, R31-R13, R12-R21],
[R23-R32, R11-R22-R33, R12+R21, R13+R31],
[R31-R13, R12+R21, -R11+R22-R33, R23+R32],
[R12-R21, R13+R31, R23+R32, -R11-R22+R33]])
# Here we calculate the maxEigenvalue for the final calucaltion
w, v = eig(Matrix)
maxEig = np.amax(w)
# Now we will find the best fit RMSD using the steps below
temp = [0, 0, 0]
for i in range(0, N1):
temp += np.add((np.square(xtil[i])), np.square(ytil[i]))
n = temp[0] + temp[1] + temp[2]
var = np.subtract(n, 2*maxEig)
temp2 = np.true_divide(var, np.size(xtil, 0))
RMSD = np.sqrt(abs(temp2))
RMSD = round(RMSD, 2)
print(RMSD)
| 24.666667 | 67 | 0.613825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 499 | 0.259356 |
3e52b2bc37f4905d4ab47d9e200507510863fee4 | 626 | py | Python | Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py | omkarsutar1255/Python-Data | 169d0c54b23d9dd5a7f1aea41ab385121c3b3c63 | [
"CC-BY-3.0"
] | null | null | null | Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py | omkarsutar1255/Python-Data | 169d0c54b23d9dd5a7f1aea41ab385121c3b3c63 | [
"CC-BY-3.0"
] | null | null | null | Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py | omkarsutar1255/Python-Data | 169d0c54b23d9dd5a7f1aea41ab385121c3b3c63 | [
"CC-BY-3.0"
] | null | null | null | # l1 = [1095, 1094, 1095]
# del l1[:]
# l1.extend([1005, 1094, 1095])
# print(l1)
l1 = [8676, 4444, 3333, 2222, 1111]
for i, n in enumerate(l1):
print(i, n)
if int(n / 1000) == 1:
l1[i] = n + 8000
elif int(n / 1000) == 2:
l1[i] = n + 6000
elif int(n / 1000) == 3:
l1[i] = n + 4000
elif int(n / 1000) == 4:
l1[i] = n + 2000
elif int(n / 1000) == 6:
l1[i] = n - 2000
elif int(n / 1000) == 7:
l1[i] = n + 4000
elif int(n / 1000) == 8:
l1[i] = n - 6000
elif int(n / 1000) == 9:
l1[i] = n - 8000
else:
pass
print(l1)
| 21.586207 | 35 | 0.4377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.124601 |