hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3975e522eae96a6443ccb6146ef3bb31b2d6df06 | 1,320 | py | Python | examples/bruker_processed_1d/bruker_processed_1d.py | genematx/nmrglue | 8a24cf6cbd18451e552fc0673b84c42d1dcb69a2 | [
"BSD-3-Clause"
] | 150 | 2015-01-16T12:24:13.000Z | 2022-03-03T18:01:18.000Z | examples/bruker_processed_1d/bruker_processed_1d.py | genematx/nmrglue | 8a24cf6cbd18451e552fc0673b84c42d1dcb69a2 | [
"BSD-3-Clause"
] | 129 | 2015-01-13T04:58:56.000Z | 2022-03-02T13:39:16.000Z | examples/bruker_processed_1d/bruker_processed_1d.py | genematx/nmrglue | 8a24cf6cbd18451e552fc0673b84c42d1dcb69a2 | [
"BSD-3-Clause"
] | 88 | 2015-02-16T20:04:12.000Z | 2022-03-10T06:50:30.000Z | #! /usr/bin/env python
"""
Compare bruker read_pdata to read.
"""
import nmrglue as ng
import matplotlib.pyplot as plt

# read in the data
data_dir = "data/bruker_exp/1/pdata/1"

# From pre-procced data.
dic, data = ng.bruker.read_pdata(data_dir, scale_data=True)
udic = ng.bruker.guess_udic(dic, data)
uc = ng.fileiobase.uc_from_udic(udic)
ppm_scale = uc.ppm_scale()

# From FID
dic1, data1 = ng.bruker.read(data_dir)
# remove the digital filter, this data is from an analog spectrometer.
# data = ng.bruker.remove_digital_filter(dic, data)
# process the spectrum
data1 = ng.proc_base.ls(data1, 1)             # left shift
data1 = ng.proc_base.gm(data1, g2=1/2.8e3)    # To match proc data...
data1 = ng.proc_base.zf_size(data1, 1024*32)  # zero fill
data1 = ng.proc_base.fft_positive(data1)      # FT
data1 = ng.proc_base.ps(data1, p0=93)         # phase is 180 off Bruker
data1 = ng.proc_base.di(data1)                # discard imaginaries
udic1 = ng.bruker.guess_udic(dic1, data1)
uc1 = ng.fileiobase.uc_from_udic(udic1)
ppm_scale1 = uc1.ppm_scale()

# plot the spectrum
# NOTE: pyplot.hold() was deprecated in matplotlib 2.0 and removed in 3.0;
# successive plot() calls draw onto the same axes by default, so the old
# plt.hold(True)/plt.hold(False) pair has been dropped.
fig = plt.figure()
plt.plot(ppm_scale, data)
plt.plot(ppm_scale1, data1)
plt.xlim([50, -50])  # reversed limits: ppm axis runs right-to-left
plt.xlabel('Carbon Chemical shift (ppm from neat TMS)')
plt.title('bruker.read_pdata vs bruker.read, note ppm axis')
plt.show()
| 28.085106 | 71 | 0.712121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 481 | 0.364394 |
397645cb5f3148b59ab74fb77253d9299c79d101 | 4,404 | py | Python | tests/unit/test_posts_get_logic.py | claranet-ch/aws-sam-application-template-python | b835ef9295e4820110fd53f50619e4fea7493155 | [
"CC-BY-4.0"
] | null | null | null | tests/unit/test_posts_get_logic.py | claranet-ch/aws-sam-application-template-python | b835ef9295e4820110fd53f50619e4fea7493155 | [
"CC-BY-4.0"
] | null | null | null | tests/unit/test_posts_get_logic.py | claranet-ch/aws-sam-application-template-python | b835ef9295e4820110fd53f50619e4fea7493155 | [
"CC-BY-4.0"
] | null | null | null | import io
import os
import unittest
import boto3
from botocore.response import StreamingBody
from botocore.stub import Stubber
from functions.posts_get.posts_get_logic import posts_get_logic
class GetSomethingLogicTest(unittest.TestCase):
    """Unit tests for ``posts_get_logic`` with stubbed AWS clients.

    All DynamoDB and S3 access is faked with ``botocore.stub.Stubber``, so
    the test runs fully offline; every expected API call is queued up-front
    in the exact order the logic is expected to issue it.
    """

    def setUp(self):
        # https://docs.python.org/3/library/unittest.html#unittest.TestCase.setUp
        # boto3 clients need a region configured even though every call is stubbed.
        os.environ['AWS_DEFAULT_REGION'] = 'eu-west-1'

    def tearDown(self):
        # https://docs.python.org/3/library/unittest.html#unittest.TestCase.tearDown
        pass

    def _create_s3_object_body(self, content: str) -> StreamingBody:
        """Wrap *content* in a StreamingBody, as S3 ``get_object`` returns it."""
        # (renamed from the misspelled ``__creeate_s3_object_body``)
        return StreamingBody(
            io.BytesIO(content.encode()),
            len(content)
        )

    def test_get_something(self):
        """posts_get_logic should return one entry per post in the bucket."""
        DYNAMODB_TABLE = 'test-posts-meta'
        S3_BUCKET = 'test-posts'
        POST_1_KEY = 'post_2021-11-15T10:00:00Z.html'
        POST_2_KEY = 'post_2021-11-16T10:00:00Z.html'

        # --- DynamoDB stub: one get_item response per post key ---
        dynamodb_client = boto3.client('dynamodb')
        dynamodb_stubber = Stubber(dynamodb_client)
        dynamodb_stubber.add_response(
            'get_item',
            {
                'Item': {
                    'author': {'S': 'Elia Contini'},
                    'id': {'S': POST_1_KEY}
                }
            },
            {
                'Key': {'id': {'S': POST_1_KEY}},
                'TableName': DYNAMODB_TABLE
            }
        )
        dynamodb_stubber.add_response(
            'get_item',
            {
                'Item': {
                    'author': {'S': 'Piero Bozzolo'},
                    'id': {'S': POST_2_KEY}
                }
            },
            {
                'Key': {'id': {'S': POST_2_KEY}},
                'TableName': DYNAMODB_TABLE
            }
        )
        dynamodb_stubber.activate()

        # --- S3 stub: one list_objects_v2, then one get_object per key ---
        s3_client = boto3.client('s3')
        s3_stubber = Stubber(s3_client)
        s3_stubber.add_response(
            'list_objects_v2',
            {'Contents': [{'Key': POST_1_KEY}, {'Key': POST_2_KEY}]},
            {'Bucket': S3_BUCKET}
        )
        s3_stubber.add_response(
            'get_object',
            {'Body': self._create_s3_object_body('<h1>Post 1</h1><p>Content 1.</p>')},
            {'Bucket': S3_BUCKET, 'Key': POST_1_KEY}
        )
        s3_stubber.add_response(
            'get_object',
            {'Body': self._create_s3_object_body('<h1>Post 2</h1><p>Content 2.</p>')},
            {'Bucket': S3_BUCKET, 'Key': POST_2_KEY}
        )
        s3_stubber.activate()

        result = posts_get_logic(
            dynamodb_client, DYNAMODB_TABLE, s3_client, S3_BUCKET)

        self.assertEqual(len(result), 2)
        # Every queued stub response must have been consumed by the logic.
        dynamodb_stubber.assert_no_pending_responses()
        s3_stubber.assert_no_pending_responses()
| 35.516129 | 84 | 0.449818 | 4,208 | 0.955495 | 0 | 0 | 0 | 0 | 0 | 0 | 1,613 | 0.366258 |
3978056ea17d8290a8897ffe9ef1bc60af963d5f | 21,050 | py | Python | firepy/model/geometry.py | KBeno/firefly-lca | a081b05f5d66951792bd00d2bb6ae1f8e43235e0 | [
"MIT"
] | 3 | 2020-06-16T13:39:31.000Z | 2022-01-10T09:34:52.000Z | firepy/model/geometry.py | KBeno/boblica | a081b05f5d66951792bd00d2bb6ae1f8e43235e0 | [
"MIT"
] | null | null | null | firepy/model/geometry.py | KBeno/boblica | a081b05f5d66951792bd00d2bb6ae1f8e43235e0 | [
"MIT"
] | null | null | null | from typing import Union, List
import copy
import math
import numpy as np
"""
Principles:
- geometry objects are defined by the minimum required information
- Points are made of coordinates (floats), everything else is based on Points except for Vectors
"""
class Point:
    """A location in 3D space; coordinates are always stored as floats."""

    def __init__(self, x: float, y: float, z: float = 0):
        self.x, self.y, self.z = float(x), float(y), float(z)

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        """Render as "x, y, z (Point)" with an optional indent prefix."""
        return f"{indentation}{self.x}, {self.y}, {self.z} (Point)"

    def coordinates(self):
        """Return the (x, y, z) tuple."""
        return (self.x, self.y, self.z)

    def __sub__(self, other):
        # Point - Point -> Vector between them; Point - Vector -> shifted Point
        if isinstance(other, Point):
            return Vector(self.x - other.x, self.y - other.y, self.z - other.z)
        if isinstance(other, Vector):
            return Point(self.x - other.x, self.y - other.y, self.z - other.z)

    def __add__(self, other):
        # Only Point + Vector is meaningful (translation)
        if isinstance(other, Vector):
            return Point(self.x + other.x, self.y + other.y, self.z + other.z)

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y and self.z == other.z
class Vector:
    """A displacement in 3D space; coordinates are always stored as floats."""

    def __init__(self, x, y, z: float = 0):
        self.x, self.y, self.z = float(x), float(y), float(z)

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        """Render as "x, y, z (Vector)" with an optional indent prefix."""
        return f"{indentation}{self.x}, {self.y}, {self.z} (Vector)"

    def coordinates(self):
        """Return the (x, y, z) tuple."""
        return (self.x, self.y, self.z)

    def length(self) -> float:
        """Euclidean norm of the vector."""
        return math.sqrt(self.x * self.x + self.y * self.y + self.z * self.z)

    def unitize(self):
        """Return a same-direction vector of length 1."""
        norm = self.length()
        return Vector(self.x / norm, self.y / norm, self.z / norm)

    def cross_product(self, vector2):
        """Return self x vector2 (right-hand rule)."""
        return Vector(
            self.y * vector2.z - self.z * vector2.y,
            self.z * vector2.x - self.x * vector2.z,
            self.x * vector2.y - self.y * vector2.x,
        )

    def scalar_product(self, vector2):
        """Dot product of the two vectors."""
        return sum(a * b for a, b in zip(self.coordinates(), vector2.coordinates()))

    def __mul__(self, other):
        # Vector * Vector -> dot product; Vector * number -> scaled Vector
        if isinstance(other, Vector):
            return sum(a * b for a, b in zip(self.coordinates(), other.coordinates()))
        if isinstance(other, (float, int)):
            return Vector(self.x * other, self.y * other, self.z * other)

    def angle(self, vector2):
        """Unsigned angle to *vector2* in degrees, always within [0, 180]."""
        cos_angle = self.scalar_product(vector2) / self.length() / vector2.length()
        return math.degrees(math.acos(cos_angle))

    def __add__(self, other):
        return Vector(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        return Vector(self.x - other.x, self.y - other.y, self.z - other.z)

    def __truediv__(self, other: float):
        # Delegate to scalar multiplication by the reciprocal
        return self * other ** -1

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y and self.z == other.z
class Plane:
    """An infinite plane defined by a normal vector and one point on it."""

    def __init__(self, normal: Vector, point: Point):
        self.normal = normal
        self.point = point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line rendering, consistent with the other classes
        return '{ind}Plane:\n'.format(ind=indentation) +\
               '{ind}|--Normal: {s}\n'.format(s=self.normal.pretty_print(), ind=indentation) +\
               '{ind}`--Point: {e}\n'.format(e=self.point.pretty_print(), ind=indentation)

    def intersect(self, other: Union['Ray', 'Plane']):
        """Intersect this plane with a Ray or another Plane.

        Returns a Point (plane x ray), a Ray (plane x plane), or None when
        the two objects are parallel.
        """
        if isinstance(other, Ray):
            # solve the linear equation system aX = b
            # (1 equation from the plane + 2 from the ray -> unique point)
            plane_eq, plane_ord = self.get_equation(standardize=True)
            ray_eq, ray_ord = other.get_equation(standardize=True)
            a = np.append(plane_eq, ray_eq, axis=0)
            b = np.append(plane_ord, ray_ord, axis=0)
            try:
                solution = np.linalg.solve(a, b)
            except np.linalg.LinAlgError:
                # parallel (singular system)
                return None
            return Point(
                x=solution[0, 0],
                y=solution[1, 0],
                z=solution[2, 0]
            )
        if isinstance(other, Plane):
            # direction of intersection ray
            vector = self.normal.cross_product(other.normal)
            if vector == Vector(0, 0, 0):
                # parallel
                return None
            else:
                # get largest absolute coordinate value
                xyz = [abs(vector.x), abs(vector.y), abs(vector.z)]
                set_0_coord = xyz.index(max(xyz))
                # set this coordinate to 0 to solve the equation of the two planes
                eq1, ord1 = self.get_equation(standardize=True)
                eq2, ord2 = other.get_equation(standardize=True)
                a = np.append(eq1, eq2, axis=0)
                b = np.append(ord1, ord2, axis=0)
                # delete the corresponding column from the matrix
                i = [True, True, True]
                i[set_0_coord] = False
                a = a[:, i]
                # we should be able to solve this, because parallel case was checked already
                solution = np.linalg.solve(a, b)
                # re-insert the zeroed coordinate into the solved point
                if set_0_coord == 0:
                    point = Point(0, solution[0, 0], solution[1, 0])
                elif set_0_coord == 1:
                    point = Point(solution[0, 0], 0, solution[1, 0])
                else:
                    point = Point(solution[0, 0], solution[1, 0], 0)
                return Ray(
                    vector=vector,
                    point=point
                )

    def get_equation(self, standardize=False):
        """Coefficients of the plane equation a*x + b*y + c*z = d.

        With ``standardize=True`` returns numpy row matrices
        ``([[a, b, c]], [[d]])`` ready to be stacked into a linear system;
        otherwise a plain ``{'a', 'b', 'c', 'd'}`` dict.
        """
        # http://tutorial.math.lamar.edu/Classes/CalcIII/EqnsOfPlanes.aspx
        a = self.normal.x
        b = self.normal.y
        c = self.normal.z
        d = a * self.point.x + b * self.point.y + c * self.point.z
        if standardize:
            # return the coefficients of the equation in this form aX + bY + cZ = d
            return (
                np.array([
                    [a, b, c]
                ]),
                np.array([
                    [d]
                ])
            )
        return {
            'a': a, 'b': b, 'c': c, 'd': d
        }

    def print_equation(self):
        """Human-readable form of the plane equation."""
        return '{a}x + {b}y + {c}z = {d}'.format(**self.get_equation())
class Ray:
    """An infinite line defined by a direction vector and one point on it."""

    def __init__(self, vector: Vector, point: Point):
        self.vector = vector
        self.point = point

    def get_equation(self, standardize=False):
        """Equation(s) describing the line.

        With ``standardize=True`` the line is expressed as the intersection
        of two planes (numpy matrices ``a``, ``b`` with ``a @ X = b``); the
        branching below chooses plane pairs that stay valid when direction
        components are zero. Otherwise returns the symmetric-form
        parameters ``{x0, y0, z0, a, b, c}`` as a dict.
        """
        # http://tutorial.math.lamar.edu/Classes/CalcIII/EqnsOfLines.aspx
        x0 = self.point.x
        y0 = self.point.y
        z0 = self.point.z
        a = self.vector.x
        b = self.vector.y
        c = self.vector.z
        if standardize:
            # return the coefficients of the equations in this form aX + bY + cZ + d = 0
            if a == 0:
                # 1X + 0Y + 0Z = x0
                a1, b1, c1, d1 = 1, 0, 0, x0
                if b == 0:
                    # 0X + 1Y + 0Z = y0
                    a2, b2, c2, d2 = 0, 1, 0, y0
                elif c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # 0X + cY - bZ = y0*c - z0*b
                    a2, b2, c2, d2 = 0, c, -b, y0 * c - z0 * b
            elif b == 0:
                # 0X + 1Y + 0Z = y0
                a1, b1, c1, d1 = 0, 1, 0, y0
                if c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # cX + 0Y - aZ = x0*c - z0*a
                    a2, b2, c2, d2 = c, 0, -a, x0 * c - z0 * a
            else:
                # bX - aY + 0Z = x0*b - y0*a
                a1, b1, c1, d1 = b, -a, 0, x0 * b - y0 * a
                if c == 0:
                    # 0X + 0Y + 1Z = z0
                    a2, b2, c2, d2 = 0, 0, 1, z0
                else:
                    # cX + 0Y - aZ = x0*c - z0*a
                    a2, b2, c2, d2 = c, 0, -a, x0 * c - z0 * a
            return (
                np.array([
                    [a1, b1, c1],
                    [a2, b2, c2]
                ]),
                np.array([
                    [d1],
                    [d2]
                ])
            )
        else:
            return {
                'x0': x0, 'y0': y0, 'z0': z0, 'a': a, 'b': b, 'c': c,
            }

    def print_equation(self):
        """Human-readable symmetric equations of the line (two lines of text).

        Degenerate direction components (zeros) are rendered as fixed
        coordinates, mirroring the branching in get_equation().
        """
        coeffs = self.get_equation()
        if coeffs['a'] == 0:
            eq1 = 'x = {x0}'.format(**coeffs)
            if coeffs['b'] == 0:
                eq2 = 'y = {y0}, '.format(**coeffs)
            elif coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(y - {y0}) / {b} = (z - {z0}) / {c}'.format(**coeffs)
        elif coeffs['b'] == 0:
            eq1 = 'y = {y0}'.format(**coeffs)
            if coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(x - {x0}) / {a} = (z - {z0}) / {c}'.format(**coeffs)
        else:
            eq1 = '(x - {x0}) / {a} = (y - {y0}) / {b}'.format(**coeffs)
            if coeffs['c'] == 0:
                eq2 = 'z = {z0}, '.format(**coeffs)
            else:
                eq2 = '(x - {x0}) / {a} = (z - {z0}) / {c}'.format(**coeffs)
        return eq1 + '\n' + eq2

    def intersect(self, other: Plane) -> Point:
        """Delegate to Plane.intersect; returns the intersection Point or None."""
        return other.intersect(self)
class Line:
    """A straight segment between two Points."""

    def __init__(self, start: Point, end: Point):
        self.start = start
        self.end = end

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        """Tree-style rendering matching the other geometry classes."""
        return (f'{indentation}Line:\n'
                f'{indentation}|--Start: {self.start.pretty_print()}\n'
                f'{indentation}`--End: {self.end.pretty_print()}\n')

    def length(self):
        """Length of the segment."""
        return self.to_vector().length()

    def to_points(self):
        """Both endpoints as a list."""
        return [self.start, self.end]

    def to_vector(self, reverse=False):
        """Direction start->end, or end->start when *reverse* is True."""
        tail, head = (self.end, self.start) if reverse else (self.start, self.end)
        return Vector(head.x - tail.x, head.y - tail.y, head.z - tail.z)

    def midpoint(self) -> Point:
        """The point halfway between the endpoints."""
        return Point(
            (self.start.x + self.end.x) / 2,
            (self.start.y + self.end.y) / 2,
            (self.start.z + self.end.z) / 2,
        )

    def __eq__(self, other):
        # Segments are equal regardless of direction
        same_direction = self.start == other.start and self.end == other.end
        opposite = self.start == other.end and self.end == other.start
        return same_direction or opposite

    def to_ray(self) -> Ray:
        """The infinite Ray through this segment, anchored at start."""
        return Ray(vector=self.to_vector(), point=self.start)

    def flip(self) -> 'Line':
        """A new Line with start and end swapped."""
        return Line(start=self.end, end=self.start)
class Rectangle:
    """A rectangle defined by one side and a point on the opposite side.

    The rectangle spans from ``side`` towards ``external_point``; the
    external point does not need to be a corner itself.
    """

    def __init__(self, side: Line, external_point: Point):
        self.side = side
        self.external_point = external_point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line rendering; nested Line gets a deeper indent
        return '{ind}Rectangle:\n'.format(ind=indentation) +\
               '{ind}|--Side:\n'.format(ind=indentation) +\
               self.side.pretty_print(indentation=indentation + '| ') +\
               '{ind}`--External Point: {p}\n'.format(p=self.external_point.pretty_print(), ind=indentation)

    def height(self):
        # Distance of external_point from the base side: parallelogram area
        # |e x s| divided by the base length |s|
        side_vector = self.side.to_vector()
        ext_vector = self.external_point - self.side.start
        return ext_vector.cross_product(side_vector).length() / side_vector.length()

    def height_vector(self):
        # Component of (external_point - side.start) perpendicular to the
        # side: subtract the projection of e onto s from e
        s = self.side.to_vector()
        e = self.external_point - self.side.start
        proj = s * ((e * s) / (s * s))
        return e - proj

    def normal_vector(self):
        # Unit normal; orientation follows the right-hand rule side x height
        return self.side.to_vector().cross_product(self.height_vector()).unitize()

    def area(self):
        """Area of the rectangle (base length times height)."""
        return self.side.length() * self.height()

    def to_points(self) -> List[Point]:
        """
        :return: a list of all vertices as Point instances
        """
        # Corner order: side.start, side.end, then the two opposite corners
        # (side endpoints translated by the height vector, reversed so the
        # four points form a closed loop)
        return self.side.to_points() + [point + self.height_vector() for point in self.side.to_points()[::-1]]

    def to_lines(self) -> List[Line]:
        """
        :return: a list of all edges as Line instances
        """
        points = self.to_points()
        return [Line(s, e) for s, e in zip(points, points[1:] + points[:1])]

    def center(self) -> Point:
        """Center of the rectangle (side midpoint shifted by half the height)."""
        return self.side.midpoint() + (self.height_vector() / 2)
class Box:
    """A cuboid defined by a base Rectangle and a point outside its plane."""

    def __init__(self, base: Rectangle, external_point: Point):
        self.base = base
        self.external_point = external_point

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style multi-line rendering; nested Rectangle gets a deeper indent
        return '{ind}Box:\n'.format(ind=indentation) +\
               '{ind}|--Base:\n'.format(ind=indentation) +\
               self.base.pretty_print(indentation=indentation + '| ') +\
               '{ind}`--External Point: {p}\n'.format(p=self.external_point.pretty_print(), ind=indentation)

    def height(self):
        # Signed distance of external_point from the base plane: projection
        # of the offset vector onto the base's unit normal (dot product)
        external_vector = self.external_point - self.base.side.start
        return external_vector * self.base.normal_vector()

    def height_vector(self) -> Vector:
        """Vector from the base plane to the top plane."""
        return self.base.normal_vector() * self.height()

    def to_rects(self) -> List[Rectangle]:
        """
        :return: a list of all faces of the box as Rectangle instances [bottom, sides..., top]
        """
        # uses the module-level move() helper (defined later in this module)
        return [self.base] + [Rectangle(s, move(s.start, self.height_vector())) for s in self.base.to_lines()] +\
               [move(self.base, self.height_vector())]
class Face:
    """
    General type of face with any number of points.
    The face is treated as the projection of its points to the plane defined
    by the first 2 points and the last point in the list of vertices.
    """
    def __init__(self, points: List[Point]):
        self.vertices = points

    def __str__(self):
        return self.pretty_print()

    def pretty_print(self, indentation=''):
        # Tree-style listing of all vertices; the last one closes the tree
        return '{ind}Face:\n'.format(ind=indentation) +\
               ''.join([
                   '{ind}|--{p}\n'.format(p=po.pretty_print(), ind=indentation)
                   for po in self.vertices[:-1]
               ]) + \
               '{ind}`--{p}\n'.format(p=self.vertices[-1].pretty_print(), ind=indentation)

    def normal_vector(self) -> Vector:
        """
        Normal vector of the projection plane of the face
        If we see the vertices in counter-clockwise order, the normal
        is pointing towards us
        Note: we assume VertexEntryDirection == "CounterClockWise" in the idf
        Note: if vertices are in random order we don't know what will happen :-)
        :return: Vector
        """
        # TODO normal should be flipped if the three points represent a concave edge
        # look for two lines in the face that are not parallel
        # NOTE(review): for a fully degenerate (all-collinear) face the
        # indices i+1 / i+2 run past the end of the list and raise IndexError
        for i in range(len(self.vertices)):
            vector1 = self.vertices[i+1] - self.vertices[0]
            vector2 = self.vertices[i+2] - self.vertices[0]
            normal = vector1.cross_product(vector2)
            if normal != Vector(0, 0, 0):
                return normal.unitize()

    def area(self, signed=False) -> float:
        """
        returns the area of the specified surface
        method described here: http://geomalgorithms.com/a01-_area.html
        The sign (when signed=True) is relative to the normal_vector() orientation.
        :return: area of the face
        """
        # close the loop of vertices without modifying the object itself
        point_vectors = [Vector(v.x, v.y, v.z) for v in self.vertices]
        # add the first point
        point_vectors += point_vectors[:1]
        normal_vector = self.normal_vector()
        area = 0
        # sum of cross products of consecutive position vectors projected
        # onto the face normal yields twice the signed area
        for point_count in range(0, len(point_vectors) - 1):
            area += normal_vector.scalar_product(
                point_vectors[point_count].cross_product(point_vectors[point_count + 1]))
        area /= 2
        if signed:
            return area
        else:
            return abs(area)

    def perimeter(self) -> float:
        """Sum of the lengths of all edges."""
        return sum([side.length() for side in self.to_lines()])

    def to_lines(self) -> List[Line]:
        # consecutive vertex pairs, wrapping around to close the polygon
        return [Line(s, e) for s, e in zip(self.vertices, self.vertices[1:] + self.vertices[:1])]

    def __eq__(self, other):
        # Faces are equal when they hold the same vertex loop, regardless of
        # which vertex the loop starts at and of traversal direction
        if self.vertices[0] in other.vertices:
            start_index = other.vertices.index(self.vertices[0])
            if self.vertices == other.vertices[start_index:] + other.vertices[:start_index]:
                return True
            elif self.vertices == other.vertices[start_index::-1] + other.vertices[:start_index:-1]:
                return True
            else:
                return False
        else:
            return False

    def centroid(self) -> Point:
        """Area-weighted centroid of the (possibly non-convex) polygon."""
        # https://math.stackexchange.com/questions/90463/how-can-i-calculate-the-centroid-of-polygon
        # triangulation with signed areas and centroids
        start_corner = self.vertices[0]
        triangle_centroids = []
        areas = []
        for k in range(len(self.vertices) - 2):
            # get vectors from first corner point pointing to next two corner points
            a_k = self.vertices[k + 1] - start_corner
            a_l = self.vertices[k + 2] - start_corner
            # get centroid of the triangle between the two vectors
            triangle_centroids.append(start_corner + (a_k + a_l) / 3)
            # get signed area of the triangle
            areas.append(self.normal_vector() * a_k.cross_product(a_l) / 2)
        # total area
        area = sum(areas)
        # return weighted average of centroids (centroid of face)
        return Point(
            x=sum([c.x * w for c, w in zip(triangle_centroids, areas)]) / area,
            y=sum([c.y * w for c, w in zip(triangle_centroids, areas)]) / area,
            z=sum([c.z * w for c, w in zip(triangle_centroids, areas)]) / area,
        )

    def to_plane(self) -> Plane:
        """The infinite plane this face lies in."""
        return Plane(
            normal=self.normal_vector(),
            point=self.vertices[0]
        )
def move(obj: Union[Point, Line, Rectangle, Box, Face], vector: Vector, inplace=False):
    """Translate a geometry object by *vector*.

    A moved Point is always a new object. For composite objects, every
    geometry-typed attribute (or list of them, e.g. Face.vertices) is moved
    recursively; with ``inplace=True`` the object itself is modified,
    otherwise a deep copy is returned.
    """
    if isinstance(obj, Point):
        return obj + vector
    else:
        if inplace:
            new_obj = obj
        else:
            new_obj = copy.deepcopy(obj)
        for param, val in new_obj.__dict__.items():
            if isinstance(val, (Point, Line, Rectangle, Box, Face)):
                # love recursion
                new_obj.__dict__[param] = move(val, vector)
            elif isinstance(val, list):
                # e.g. Face.vertices is a list of Points
                new_obj.__dict__[param] = [move(p, vector) for p in val]
        return new_obj
def rotate_xy(obj: Union[Point, Line, Rectangle, Box, Face], angle: float,
              center: Point = Point(0, 0, 0), inplace=False):
    """
    Rotate objects in the xy plane (around z axis)
    :param obj: object to rotate
    :param angle: angle to rotate with (degrees, counter-clockwise)
    :param center: center to rotate around
    :param inplace: set True to modify the object instance itself
    :return: rotated object
    """
    # NOTE(review): the default ``center`` is a single shared Point created at
    # definition time; safe only as long as callers never mutate it.
    if isinstance(obj, Point):
        # move point to origin
        obj_origin = move(obj, Point(0, 0, 0) - center)
        # apply rotation around origin (standard 2D rotation matrix, z unchanged)
        new_point = Point(
            x=obj_origin.x * math.cos(math.radians(angle)) - obj_origin.y * math.sin(math.radians(angle)),
            y=obj_origin.x * math.sin(math.radians(angle)) + obj_origin.y * math.cos(math.radians(angle)),
            z=obj_origin.z
        )
        # move back
        return move(new_point, center - Point(0, 0, 0))
    else:
        if inplace:
            new_obj = obj
        else:
            new_obj = copy.deepcopy(obj)
        for param, val in new_obj.__dict__.items():
            if isinstance(val, (Point, Line, Rectangle, Box, Face)):
                # love recursion
                new_obj.__dict__[param] = rotate_xy(val, angle, center)
            elif isinstance(val, list):
                new_obj.__dict__[param] = [rotate_xy(p, angle, center) for p in val]
        return new_obj
| 34.850993 | 113 | 0.526366 | 18,744 | 0.890409 | 0 | 0 | 0 | 0 | 0 | 0 | 3,782 | 0.179659 |
3978db58ab61262a3273d3565d293223c2d9c041 | 556 | py | Python | danmu/log.py | awesome-archive/danmu | 2f4e943d859cecd31b289e21984e35a34515b71f | [
"WTFPL"
] | null | null | null | danmu/log.py | awesome-archive/danmu | 2f4e943d859cecd31b289e21984e35a34515b71f | [
"WTFPL"
] | null | null | null | danmu/log.py | awesome-archive/danmu | 2f4e943d859cecd31b289e21984e35a34515b71f | [
"WTFPL"
] | null | null | null | import os, logging
if not os.path.exists('config'): os.mkdir('config')
log = logging.getLogger('danmu')
log.setLevel(logging.DEBUG)
fileHandler = logging.FileHandler(os.path.join('config', 'run.log'), encoding = 'utf8')
fileHandler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)-17s <%(message)s> %(levelname)s %(filename)s[%(lineno)d]',
datefmt='%Y%m%d %H:%M:%S')
fileHandler.setFormatter(formatter)
log.addHandler(fileHandler)
if __name__ == '__main__':
log.debug('This is debug')
log.info('This is info')
| 34.75 | 101 | 0.690647 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.307554 |
3978e2b002dc50ec5e34788e51f2d661aefcb01f | 2,016 | py | Python | vector_env_comparison.py | neuroevolution-ai/NaturalNets-PerformanceTests | de7d99424cc9ab29fdc3691c12d20d0a35afe0fe | [
"MIT"
] | null | null | null | vector_env_comparison.py | neuroevolution-ai/NaturalNets-PerformanceTests | de7d99424cc9ab29fdc3691c12d20d0a35afe0fe | [
"MIT"
] | 1 | 2021-02-13T18:55:40.000Z | 2021-02-13T18:55:40.000Z | vector_env_comparison.py | neuroevolution-ai/NaturalNets-PerformanceTests | de7d99424cc9ab29fdc3691c12d20d0a35afe0fe | [
"MIT"
] | null | null | null | import multiprocessing
import time
import gym
import gym3
import numpy as np
from gym.vector import make as make_vec_env
from procgen import ProcgenGym3Env
population_size = 112
number_env_steps = 1000
def run_episode_full(u):
    # One episode of procgen "heist" in a single (non-vectorized) gym env
    # using random actions; returns the accumulated reward. ``u`` is unused —
    # it only exists so multiprocessing.Pool.map can call this per individual.
    env = gym.make('procgen:procgen-heist-v0')
    obs = env.reset()
    reward = 0
    for _ in range(number_env_steps):
        action = env.action_space.sample()
        obs, rew, done, info = env.step(action)
        reward += rew
    return reward
def run_episode_vec_env(u):
    # Same workload as run_episode_full, but the whole population runs inside
    # one gym vector environment (one sub-env per individual); returns an
    # array of accumulated rewards, one entry per sub-env. ``u`` is unused.
    env = make_vec_env(id="procgen:procgen-heist-v0", num_envs=population_size, asynchronous=True)
    obs = env.reset()
    rewards = np.zeros(population_size)
    for _ in range(number_env_steps):
        # the vector env's action_space samples a batched action here
        action = env.action_space.sample()
        obs, rew, done, info = env.step(action)
        rewards += rew
    return rewards
def run_episode_gym3_vec_env(u):
    # gym3-native vectorized variant: act() takes a batch of sampled actions
    # and observe() returns (reward, observation, first) for all sub-envs at
    # once; returns an array of accumulated rewards. ``u`` is unused.
    env = ProcgenGym3Env(num=population_size, env_name="heist")
    rewards = np.zeros(population_size)
    for _ in range(number_env_steps):
        env.act(gym3.types_np.sample(env.ac_space, bshape=(env.num,)))
        rew, obs, first = env.observe()
        rewards += rew
    return rewards
def main():
    """Benchmark the three evaluation strategies and print wall-clock times."""
    inputs = np.zeros(population_size)
    # Multiprocessing
    pool = multiprocessing.Pool()
    t_start = time.time()
    result_mp = pool.map(run_episode_full, inputs)
    print("Multi-Processing map took: {:6.3f}s".format(time.time()-t_start))
    # Vectorized environment
    t_start = time.time()
    result_vec = run_episode_vec_env([])
    print("Vectorized environment took: {:6.3f}s".format(time.time()-t_start))
    # Gym3 Vectorized environment
    t_start = time.time()
    result_gym3_vec = run_episode_gym3_vec_env([])
    print("Gym3 vec environment took: {:6.3f}s".format(time.time()-t_start))
    # Sanity check: each strategy must have evaluated the whole population
    assert (len(result_mp) == len(result_vec)
            and len(result_mp) == len(result_gym3_vec)
            and len(result_mp) == population_size)


if __name__ == "__main__":
    main()
| 22.651685 | 98 | 0.671627 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.125 |
397b7ca45c3f9235af0d2fa52c9c29634429cebe | 1,641 | py | Python | raiden_api/model/requests.py | kelsos/test-enviroment-scripts | ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d | [
"MIT"
] | 1 | 2019-03-28T00:24:48.000Z | 2019-03-28T00:24:48.000Z | raiden_api/model/requests.py | kelsos/test-enviroment-scripts | ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d | [
"MIT"
] | 4 | 2019-03-26T15:27:20.000Z | 2019-04-29T10:46:08.000Z | raiden_api/model/requests.py | kelsos/test-enviroment-scripts | ab8d9f1e9a1deed048dcc93ec9d014bf6b58252d | [
"MIT"
] | 2 | 2019-03-26T14:27:24.000Z | 2019-03-29T10:28:40.000Z | import time
import typing
class PaymentRequest:
    """Payload for a payment; the identifier defaults to the current unix time."""

    def __init__(self, amount: int, identifier: int = None):
        self.amount = amount
        # Fall back to a second-resolution timestamp when no id was supplied
        self.identifier = int(time.time()) if identifier is None else identifier

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize to the dict shape expected by the REST API."""
        return {
            'amount': self.amount,
            'identifier': self.identifier,
        }
class OpenChannelRequest:
    """Payload for opening a payment channel with a partner node."""

    def __init__(
            self,
            partner_address: str,
            token_address: str,
            total_deposit: int,
            settle_timeout: int = 500,
    ):
        self.partner_address = partner_address
        self.token_address = token_address
        self.total_deposit = total_deposit
        self.settle_timeout = settle_timeout

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize to the dict shape expected by the REST API."""
        # Dict keys mirror the attribute names one-to-one
        fields = ('partner_address', 'token_address', 'total_deposit', 'settle_timeout')
        return {name: getattr(self, name) for name in fields}
class ManageChannelRequest:
    """Payload for updating a channel: deposit into it and/or close it.

    Both fields are optional; only truthy ones appear in the serialized dict.
    """

    def __init__(self, total_deposit: int = None, state: str = None):
        # The API only supports transitioning a channel to 'closed'. Raise
        # ValueError instead of using `assert`, which is stripped under -O.
        if state is not None and state != 'closed':
            raise ValueError("state must be 'closed' or None, got %r" % (state,))
        self.total_deposit = total_deposit
        self.state = state

    def to_dict(self) -> typing.Dict[str, typing.Any]:
        """Serialize to the dict shape expected by the REST API."""
        result: typing.Dict[str, typing.Any] = {}
        if self.total_deposit:
            result['total_deposit'] = self.total_deposit
        if self.state:
            result['state'] = self.state
        return result
| 26.467742 | 69 | 0.597806 | 1,606 | 0.978672 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.06886 |
397c69961dfa90f232f4ac9c29a73bc3e9510c76 | 823 | py | Python | Dynamic/KnapNoRep.py | mladuke/Algorithms | eab5d89c5f496b2849f0646dbfa3a4db93a0b391 | [
"MIT"
] | null | null | null | Dynamic/KnapNoRep.py | mladuke/Algorithms | eab5d89c5f496b2849f0646dbfa3a4db93a0b391 | [
"MIT"
] | null | null | null | Dynamic/KnapNoRep.py | mladuke/Algorithms | eab5d89c5f496b2849f0646dbfa3a4db93a0b391 | [
"MIT"
] | null | null | null | def zeroOneKnapsack(v, w, W):
c = []
n = len(v)
c = [[0 for x in range(W+1)] for x in range(n)]
for i in range(0,n):
for j in range(0,W+1):
if (w[i] > j):
c[i][j] = c[i-1][j]
else:
c[i][j] = max(c[i-1][j],v[i] +c[i-1][j-w[i]])
return [c[n-1][W], getUsedItems(w,c)]
def getUsedItems(w,c):
i = len(c)-1
currentW = len(c[0])-1
marked = []
for i in range(i+1):
marked.append(0)
while (i >= 0 and currentW >=0):
if (i==0 and c[i][currentW] >0 )or c[i][currentW] != c[i-1][currentW]:
marked[i] =1
currentW = currentW-w[i]
i = i-1
return marked
# adapted from https://sites.google.com/site/mikescoderama/Home/0-1-knapsack-problem-in-p
# Demo run: capacity 10 — the optimum takes items 2 and 3 (16 + 30 = 46)
W = 10
v = [9, 14, 16, 30]
w = [2, 3, 4, 6]
print(zeroOneKnapsack(v, w, W))
| 24.939394 | 90 | 0.509113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.109356 |
397c6d5c141c7b6d17cf9a8f120d47ea7101ea9f | 587 | py | Python | tasks/migrations/0002_auto_20201008_2236.py | milenakowalska/todolist | 5b5208b952e88334453935652424f8168ecf9113 | [
"MIT"
] | null | null | null | tasks/migrations/0002_auto_20201008_2236.py | milenakowalska/todolist | 5b5208b952e88334453935652424f8168ecf9113 | [
"MIT"
] | null | null | null | tasks/migrations/0002_auto_20201008_2236.py | milenakowalska/todolist | 5b5208b952e88334453935652424f8168ecf9113 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.2 on 2020-10-08 22:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (Django 3.0): adds User.background (a CSS
    # gradient string, presumably used as the page background — confirm in
    # templates) and re-declares Task.done with an explicit default of False.

    dependencies = [
        ('tasks', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='background',
            field=models.CharField(default='linear-gradient(to top, #48c6ef 0%, #6f86d6 100%)', max_length=300),
        ),
        migrations.AlterField(
            model_name='task',
            name='done',
            field=models.BooleanField(default=False),
        ),
    ]
| 24.458333 | 112 | 0.575809 | 494 | 0.841567 | 0 | 0 | 0 | 0 | 0 | 0 | 149 | 0.253833 |
397e9f0c2652f385de08911a9951e3eb07c5c86a | 874 | py | Python | tools/one-offs/convert-genres.py | DrDos0016/z2 | b63e77129fefcb4f990ee1cb9952f4f708ee3a2b | [
"MIT"
] | 3 | 2017-05-01T19:53:57.000Z | 2018-08-27T20:14:43.000Z | tools/one-offs/convert-genres.py | DrDos0016/z2 | b63e77129fefcb4f990ee1cb9952f4f708ee3a2b | [
"MIT"
] | null | null | null | tools/one-offs/convert-genres.py | DrDos0016/z2 | b63e77129fefcb4f990ee1cb9952f4f708ee3a2b | [
"MIT"
] | 1 | 2018-08-27T20:14:46.000Z | 2018-08-27T20:14:46.000Z | import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.auth.models import User # noqa: E402
from museum_site.models import * # noqa: E402
def main():
    """Convert each File's legacy slash-separated `genre` string into proper
    Genre object associations on the `genres` many-to-many field."""
    print("This script will convert the SSV field file.genre to proper Genre object associations")
    input("Press Enter to begin... ")
    qs = File.objects.all().order_by("id")
    for f in qs:
        # Legacy field: slash-separated genre titles, e.g. "Action/Puzzle".
        old_genres = f.genre.split("/")
        count = len(old_genres)
        for g in old_genres:
            # NOTE(review): raises Genre.DoesNotExist if a title has no Genre
            # row — presumably all genres were pre-created; verify before running.
            g = Genre.objects.get(title=g)
            f.genres.add(g)
        # Sanity check: the M2M should now hold exactly one Genre per token.
        if len(f.genres.all()) != count:
            print("UH OH", f.title)
        print(f.title, len(f.genres.all()), count)
    return True


# Run immediately when executed as a script.
if __name__ == '__main__':
    main()
| 23.621622 | 98 | 0.639588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 202 | 0.231121 |
397ee9d80cbe93ca71977088ed64acae351304fd | 553 | py | Python | python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py | flyingwjw/Documentation | 567608f388ca369b864c2d75a94647801b5dfa1e | [
"Unlicense"
] | 26 | 2016-08-25T01:33:36.000Z | 2022-03-20T11:33:31.000Z | python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py | flyingwjw/Documentation | 567608f388ca369b864c2d75a94647801b5dfa1e | [
"Unlicense"
] | null | null | null | python/learn/PythonDataVisualizationCookbookSE_Code/Chapter 04/ch04_rec03_plot_with_table.py | flyingwjw/Documentation | 567608f388ca369b864c2d75a94647801b5dfa1e | [
"Unlicense"
] | 31 | 2016-08-16T15:32:46.000Z | 2021-01-26T19:16:48.000Z | import matplotlib.pylab as plt
import numpy as np
# Plot nine random samples with a small static table docked in the axes.
plt.figure()
ax = plt.gca()

samples = np.random.randn(9)

cell_values = [[11, 12, 13],
               [21, 22, 23],
               [28, 29, 30]]
column_labels = ['col1', 'col2', 'col3']
row_labels = ['row1', 'row2', 'row3']
row_colors = ['red', 'gold', 'green']

summary_table = plt.table(cellText=cell_values,
                          colWidths=[0.1] * 3,
                          rowLabels=row_labels,
                          colLabels=column_labels,
                          rowColours=row_colors,
                          loc='upper right')

plt.text(12, 3.4, 'Table Title', size=8)
plt.plot(samples)
plt.show()
3980310409feb9f0ac71dbf46448b126022d5366 | 1,258 | py | Python | support.py | ipascual1/spootnik_bot | ad7658f49705b1ce57bcc5ed84006ef658f63fa3 | [
"Unlicense"
] | null | null | null | support.py | ipascual1/spootnik_bot | ad7658f49705b1ce57bcc5ed84006ef658f63fa3 | [
"Unlicense"
] | null | null | null | support.py | ipascual1/spootnik_bot | ad7658f49705b1ce57bcc5ed84006ef658f63fa3 | [
"Unlicense"
] | null | null | null | import re
import os
def extract(regularE: str, init: str, stop: str, string: str):
    """
    Return the first match of *regularE* in *string* with the *init* and
    *stop* marker substrings stripped out.

    regularE: regular expression used to capture the raw substring
    init: leading marker text removed from the match
    stop: trailing marker text removed from the match
    string: text the regular expression is applied to
    """
    matched = re.findall(regularE, string)[0]
    without_init = matched.replace(init, "")
    return without_init.replace(stop, "")
def get_term_clock_pid():
    """
    Return the PID of the term_clock subprocess as an int, or -1 if it
    cannot be determined.

    sputnikDriver prints the PIDs of its subprocesses in its systemctl
    status output; the PID is parsed from the "term_clock <pid> PID" line.
    """
    # sputnikDriver prints in their own console all the PIDs of its subprocesses
    ret = os.popen("systemctl status sputnikDriver.service").read()
    if ret == "":
        return -1
    try:
        return int(extract(r"term_clock .+ PID", "term_clock ", " PID", ret))
    except (IndexError, ValueError):
        # The service produced output but no parsable term_clock line
        # (previously this crashed with IndexError); treat as not running,
        # matching the documented -1 contract.
        return -1
def check_alive():
    """
    Return True if a java process (i.e. the Minecraft server) is running
    inside sputnikDriver, False otherwise.
    """
    status_output = os.popen("systemctl status sputnikDriver.service").read()
    return "java" in status_output
| 29.255814 | 83 | 0.612878 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 851 | 0.676471 |
39806196aae9564f8e399df05393bb7226dec4f7 | 1,054 | py | Python | steam.py | iganeshk/alfred-totp | f9c17fe83025c99cbfaf5413d20212aa63d7e0d5 | [
"MIT"
] | 7 | 2020-04-12T21:16:41.000Z | 2022-01-09T08:55:22.000Z | steam.py | iganeshk/alfred-totp | f9c17fe83025c99cbfaf5413d20212aa63d7e0d5 | [
"MIT"
] | null | null | null | steam.py | iganeshk/alfred-totp | f9c17fe83025c99cbfaf5413d20212aa63d7e0d5 | [
"MIT"
] | 1 | 2022-03-26T16:04:53.000Z | 2022-03-26T16:04:53.000Z | #!/usr/env/python3
# coding=utf-8
#
# Generate Steamguard OTP with the shared secret passed as an argument
# Ganesh Velu
import hmac
import base64
import hashlib
import codecs
import time
import sys
# Steam's base-26 alphabet (digits/letters with ambiguous glyphs removed).
STEAM_DECODE_CHARS = ['2', '3', '4', '5', '6', '7', '8', '9',
                      'B', 'C', 'D', 'F', 'G', 'H', 'J', 'K',
                      'M', 'N', 'P', 'Q', 'R', 'T', 'V', 'W',
                      'X', 'Y']


def get_authentication_code(secret, for_time=None):
    """
    Compute the 5-character Steam Guard code for a shared secret.

    secret: base64-encoded shared secret from the Steam authenticator.
    for_time: optional Unix timestamp to compute the code for; defaults to
        the current time (backward compatible with the original signature).
    """
    if for_time is None:
        for_time = time.time()
    # TOTP-style counter: 30-second steps, big-endian 64-bit message.
    msg = bytes.fromhex('%016x' % int(for_time // 30))
    key = base64.b64decode(secret)
    digest = hmac.new(key, msg, hashlib.sha1).digest()
    # Dynamic truncation (RFC 4226): 4 bytes at the offset given by the last nibble.
    start = digest[19] & 0xF
    code_int = int.from_bytes(digest[start:start + 4], 'big') & 0x7FFFFFFF
    auth_code = []
    for _ in range(5):
        auth_code.append(STEAM_DECODE_CHARS[code_int % len(STEAM_DECODE_CHARS)])
        # BUG FIX: was float division (/=), which drifts from the integer
        # algorithm Steam defines; use integer division.
        code_int //= len(STEAM_DECODE_CHARS)
    return ''.join(auth_code)
# CLI entry point: argv[1] is the base64 shared secret; the code is printed
# without a trailing newline so it can be piped directly.
if __name__ == '__main__':
    print(get_authentication_code(sys.argv[1]), end='')
39812282916a91f854eceaec095dab9dd29955a6 | 1,783 | py | Python | igvc_ws/src/igvc_nav/src/path_planner/node.py | SoonerRobotics/igvc_software_2022 | 906e6a4fca22d2b0c06ef1b8a4a3a9df7f1d17dd | [
"MIT"
] | 4 | 2020-07-07T14:56:56.000Z | 2021-08-13T23:31:07.000Z | igvc_ws/src/igvc_nav/src/path_planner/node.py | pradumn203/igvc-winners-2021 | 658233609054eafac59603a77b2a092dc002e145 | [
"MIT"
] | 13 | 2019-11-12T02:57:54.000Z | 2020-03-17T17:04:22.000Z | igvc_ws/src/igvc_nav/src/path_planner/node.py | pradumn203/igvc-winners-2021 | 658233609054eafac59603a77b2a092dc002e145 | [
"MIT"
] | 3 | 2021-06-29T05:21:18.000Z | 2021-08-23T05:03:27.000Z | """
"""
class Node:
    """A grid-search node (D*-Lite style): a cell with G/rhs estimates, a
    parent pointer, a two-part priority key, and a traversal cost."""

    # Sentinel for "unreachable"; G/rhs are clamped to this value.
    INFINITY = 1000000000

    def __init__(self, row, col, cost=0):
        self.row = row
        self.col = col
        # Default node values: unexplored nodes start at "infinite" cost.
        self.G = self.INFINITY
        self.rhs = self.INFINITY
        self.par = None
        self.key = (None, None)
        # Cost (for obstacles and such)
        self.cost = cost

    def set_g(self, G):
        # Clamp so arithmetic on G can never exceed the sentinel.
        self.G = min(G, self.INFINITY)

    def set_rhs(self, rhs):
        self.rhs = min(rhs, self.INFINITY)

    def set_par(self, par):
        self.par = par

    def set_key(self, key):
        self.key = key

    def set_cost(self, cost):
        self.cost = cost

    @staticmethod
    def _cmp(a, b):
        # Python-3 replacement for the removed builtin cmp().
        return (a > b) - (a < b)

    def __cmp__(self, other):
        """Sort keys with lowest priority to the top of the list."""
        # BUG FIX: previously called the Python-2-only builtin cmp(), which
        # raises NameError on Python 3.
        comp_val = self._cmp(self.key[0], other.key[0])
        if comp_val != 0:
            return comp_val
        # If there was a tie, use the second key as a tiebreaker.
        return self._cmp(self.key[1], other.key[1])

    def __lt__(self, other):
        if self.key[0] < other.key[0]:
            return True
        if self.key[0] == other.key[0]:
            return self.key[1] < other.key[1]
        return False

    def __gt__(self, other):
        if self.key[0] > other.key[0]:
            return True
        if self.key[0] == other.key[0]:
            return self.key[1] > other.key[1]
        return False

    def __eq__(self, other):
        # Identity is the grid position only; key/G/rhs do not participate.
        if other is None:
            return False
        return (self.row == other.row) and (self.col == other.col)

    def __hash__(self):
        # Consistent with __eq__: hash on the grid position.
        return hash((self.row, self.col))
| 22.858974 | 68 | 0.528884 | 1,772 | 0.993831 | 0 | 0 | 0 | 0 | 0 | 0 | 198 | 0.111049 |
3982bd3c6134c4bd9c5526d9392f74c9c724e7ab | 556 | py | Python | makahiki/apps/widgets/energy_power_meter/views.py | justinslee/Wai-Not-Makahiki | 4b7dd685012ec64758affe0ecee3103596d16aa7 | [
"MIT"
] | 1 | 2015-07-22T11:31:20.000Z | 2015-07-22T11:31:20.000Z | makahiki/apps/widgets/energy_power_meter/views.py | justinslee/Wai-Not-Makahiki | 4b7dd685012ec64758affe0ecee3103596d16aa7 | [
"MIT"
] | null | null | null | makahiki/apps/widgets/energy_power_meter/views.py | justinslee/Wai-Not-Makahiki | 4b7dd685012ec64758affe0ecee3103596d16aa7 | [
"MIT"
] | null | null | null | """Handle rendering of the Energy Power Meter widget."""
from apps.widgets.resource_goal import resource_goal
def supply(request, page_name):
    """Build the widget context: the team's realtime meter interval (None when
    the user has no team) plus the fixed meter dimensions."""
    _ = page_name  # unused; required by the widget interface
    team = request.user.get_profile().team
    interval = None
    if team:
        interval = resource_goal.team_goal_settings(team, "energy").realtime_meter_interval
    return {
        "interval": interval,
        "width": 300,
        "height": 100,
    }
| 26.47619 | 91 | 0.645683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 156 | 0.280576 |
3982edd57b175c1d224315f35831e37d04e0c726 | 1,408 | py | Python | tools/generatekeypair.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 2,027 | 2019-11-12T18:05:48.000Z | 2022-03-31T22:25:04.000Z | tools/generatekeypair.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 496 | 2019-11-12T18:13:37.000Z | 2022-03-31T10:43:45.000Z | tools/generatekeypair.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 249 | 2019-11-12T18:02:27.000Z | 2022-03-22T12:19:19.000Z | import argparse
import json
from authlib.jose import JsonWebKey
from cryptography.hazmat.primitives import serialization
def generate_key_pair(filename, kid=None):
    """
    Generate a 2048-bit RSA JWK and write three files:
    <filename>.jwk (public JWK), <filename>.kid (key id) and
    <filename>.pem (private key, traditional OpenSSL PEM).

    'kid' will default to the jwk thumbprint if not set explicitly.
    Reference: https://tools.ietf.org/html/rfc7638
    """
    options = {"kid": kid} if kid else {}
    jwk = JsonWebKey.generate_key("RSA", 2048, is_private=True, options=options)

    print(("Writing public key to %s.jwk" % filename))
    with open("%s.jwk" % filename, mode="w") as f:
        f.truncate(0)
        f.write(jwk.as_json())

    print(("Writing key ID to %s.kid" % filename))
    with open("%s.kid" % filename, mode="w") as f:
        f.truncate(0)
        f.write(jwk.as_dict()["kid"])

    print(("Writing private key to %s.pem" % filename))
    pem_bytes = jwk.get_private_key().private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )
    with open("%s.pem" % filename, mode="wb") as f:
        f.truncate(0)
        f.write(pem_bytes)
# CLI: runs at import/execution time; generates the key files using the
# positional filename prefix.
parser = argparse.ArgumentParser(description="Generates a key pair into files")
parser.add_argument("filename", help="The filename prefix for the generated key files")
args = parser.parse_args()
generate_key_pair(args.filename)
| 30.608696 | 87 | 0.648438 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 359 | 0.254972 |
3983bdef6c20e9a6ac20cbeb01a996a5e1766f34 | 4,855 | py | Python | hkpy/hkpyo/reasoners/simple_reasoner.py | renan-souza/hkpy | 1fdcd3da3520e876f95295bf6d15e40581b2bb49 | [
"MIT"
] | 7 | 2019-12-23T17:59:36.000Z | 2022-02-17T19:35:32.000Z | hkpy/hkpyo/reasoners/simple_reasoner.py | renan-souza/hkpy | 1fdcd3da3520e876f95295bf6d15e40581b2bb49 | [
"MIT"
] | 9 | 2019-12-30T13:34:41.000Z | 2021-07-16T22:46:06.000Z | hkpy/hkpyo/reasoners/simple_reasoner.py | renan-souza/hkpy | 1fdcd3da3520e876f95295bf6d15e40581b2bb49 | [
"MIT"
] | 2 | 2020-03-14T21:34:02.000Z | 2021-06-12T00:10:43.000Z | ###
# Copyright (c) 2019-present, IBM Research
# Licensed under The MIT License [see LICENSE for details]
###
from collections import defaultdict
from hkpy.hkpyo.model import HKOContext, HKOContextManager, HKOConcept, HKOSubConceptAxiom, HKOConjunctionExpression, \
HKODisjunctionExpression, HKOConceptAssertion, HKOIndividual, HKOPropertyAssertion, HKOLiteral, Union, HKOAxiom, \
HKOAssertion, HKOProperty
class HKAssertedContextReasoner:
    """Reasoner over the explicitly *asserted* facts of one HKOContext.

    Builds in-memory indexes over the context's elements so membership and
    property lookups are dictionary accesses; no inference beyond what is
    asserted is performed (see the "incomplete implementation" warnings).
    """

    def __init__(self, context: HKOContext):
        self.mgr = HKOContextManager.get_global_context_manager()
        self.context = context
        self.reset_caches()

    def reset_caches(self):
        """Rebuild all lookup caches from the context's current elements."""
        self.cache_axioms = []
        self.cache_assertions = []
        # individual -> {concept: True} and the reverse index
        self.cache_individual_concept = defaultdict(lambda: {})
        self.cache_concept_individual = defaultdict(lambda: {})
        # arg1 -> property -> {arg2: True} and the reverse index
        self.cache_individual_prop_value = defaultdict(lambda: defaultdict(lambda: {}))
        self.cache_value_prop_individual = defaultdict(lambda: defaultdict(lambda: {}))
        for e in self.context.elements:
            if isinstance(e, HKOConceptAssertion):
                self.cache_individual_concept[e.individual][e.concept] = True
                self.cache_concept_individual[e.concept][e.individual] = True
            elif isinstance(e, HKOPropertyAssertion):
                self.cache_individual_prop_value[e.arg1][e.property][e.arg2] = True
                self.cache_value_prop_individual[e.arg2][e.property][e.arg1] = True
            # Separate check: an element is also classified as axiom/assertion.
            if isinstance(e, HKOAxiom):
                self.cache_axioms.append(e)
            elif isinstance(e, HKOAssertion):
                self.cache_assertions.append(e)

    def get_direct_sub_concepts_of(self, super_concept: HKOConcept) -> [HKOConcept]:
        """Return concepts directly asserted as sub-concepts of *super_concept*."""
        print("Warning: incomplete implementation of get_direct_sub_concepts_of")
        sub_concepts = set()
        for e in self.cache_axioms:
            if isinstance(e, HKOSubConceptAxiom):
                if e.sup == super_concept:
                    sub_concepts.add(e.sub)
                # TODO: should look recursively into conjunctive expressions
                # elif isinstance(e.sub, HKOConjunctionExpression):
                #     # sub = (and c1 c2 super c3 ... cn)
                #     for exp in e.sub.concepts:
                #         if exp == super_concept:
                #             sub_concepts.add(e.sub)
        return list(sub_concepts)

    def get_direct_instances_of(self, concept: HKOConcept) -> [HKOIndividual]:
        """Return individuals directly asserted as instances of *concept*."""
        # NOTE(review): warning text below mentions get_direct_sub_concepts_of;
        # looks like a copy/paste from that method.
        print("Warning: incomplete implementation of get_direct_sub_concepts_of")
        return list(self.cache_concept_individual[concept].keys())

    def is_direct_instance_of(self, individual: HKOIndividual, concept: HKOConcept) -> bool:
        """True iff *individual* is directly asserted to be a *concept*."""
        return self.cache_concept_individual[concept].get(individual, False)

    def is_instance_of(self, individual: HKOIndividual = None, concept: HKOConcept = None) -> bool:
        """Alias for is_direct_instance_of (no indirect inference performed)."""
        return self.is_direct_instance_of(individual=individual, concept=concept)

    def get_concept_assertion_pattern(self, concept: HKOConcept = None, individual: HKOIndividual = None) -> object:
        """Return concept assertions matching the given (optional) concept/individual."""
        matched_assertions = set()
        # NOTE(review): scans context.elements directly, unlike
        # get_property_assertion_pattern which scans cache_assertions.
        for e in self.context.elements:
            if isinstance(e, HKOConceptAssertion):
                if concept is not None and e.concept != concept: continue
                if individual is not None and e.individual != individual: continue
                # match!
                matched_assertions.add(e)
        return list(matched_assertions)

    def get_related_values(self, property: HKOProperty, arg1: HKOIndividual) -> [Union[HKOIndividual, HKOLiteral]]:
        """Return all arg2 values asserted as (arg1, property, arg2)."""
        return list(self.cache_individual_prop_value.get(arg1, {}).get(property, {}).keys())

    def get_entities_relating_to(self, property: HKOProperty, arg2: HKOIndividual) -> [
            Union[HKOIndividual, HKOLiteral]]:
        """Return all arg1 entities asserted as (arg1, property, arg2)."""
        return list(self.cache_value_prop_individual.get(arg2, {}).get(property, {}).keys())

    def get_related_value(self, property, arg1) -> Union[HKOIndividual, HKOLiteral]:
        """Return the single related value, None when absent; raise on multiple."""
        lst = self.get_related_values(property, arg1)
        if len(lst) == 1:
            return lst[0]
        elif len(lst) == 0:
            return None
        else:
            raise Exception('Property returned more related values than expected.')

    def get_property_assertion_pattern(self, property=None, arg1=None, arg2=None) -> [HKOPropertyAssertion]:
        """Return property assertions matching any combination of property/arg1/arg2."""
        matched_assertions = set()
        for e in self.cache_assertions:
            if isinstance(e, HKOPropertyAssertion):
                if property is not None and e.property != property: continue
                if arg1 is not None and e.arg1 != arg1: continue
                if arg2 is not None and e.arg2 != arg2: continue
                # match!
                matched_assertions.add(e)
        return list(matched_assertions)
| 45.801887 | 119 | 0.65829 | 4,435 | 0.913491 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.115963 |
39846d963efc3c25f62f763940ae6d00481112ea | 237 | py | Python | coffeebar/admin.py | viktor-yakubiv/django-coffee | 0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef | [
"MIT"
] | null | null | null | coffeebar/admin.py | viktor-yakubiv/django-coffee | 0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef | [
"MIT"
] | null | null | null | coffeebar/admin.py | viktor-yakubiv/django-coffee | 0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Account, Product, Drink, Topping, Order
# Register every coffeebar model with the default admin site.
for model in (Account, Product, Drink, Topping, Order):
    admin.site.register(model)
| 21.545455 | 59 | 0.805907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
398508cf7b96c7a53317b86338d3ac80d4ac69c4 | 106 | py | Python | influxdb_client/client/__init__.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | [
"MIT"
] | null | null | null | influxdb_client/client/__init__.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | [
"MIT"
] | null | null | null | influxdb_client/client/__init__.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from influxdb_client.client.influxdb_client import InfluxDBClient
| 26.5 | 65 | 0.896226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
398533491570a42901637e1afb785d157af6a86a | 809 | py | Python | accounts/forms.py | mohsenamoon1160417237/Social_app | 79fa0871f7b83648894941f9010f1d99f1b27ab3 | [
"MIT"
] | null | null | null | accounts/forms.py | mohsenamoon1160417237/Social_app | 79fa0871f7b83648894941f9010f1d99f1b27ab3 | [
"MIT"
] | null | null | null | accounts/forms.py | mohsenamoon1160417237/Social_app | 79fa0871f7b83648894941f9010f1d99f1b27ab3 | [
"MIT"
] | null | null | null | from django.contrib.auth.models import User
from django import forms
from .models import UserProfile
class UserRegistrationForm(forms.ModelForm):
    """Sign-up form: username/email from the User model plus a double
    password entry that must match."""

    password = forms.CharField(max_length=20, widget=forms.PasswordInput, label='Password')
    password2 = forms.CharField(max_length=20, widget=forms.PasswordInput, label="Repeat password")

    class Meta:
        model = User
        fields = ['username', 'email']

    def clean_password2(self):
        """Validate that both password entries are identical."""
        data = self.cleaned_data
        if data['password'] != data['password2']:
            raise forms.ValidationError("Passwords must match")
        return data['password2']
class UserEditProfileForm(forms.ModelForm):
    # Edit the extended profile: avatar image and age.
    class Meta:
        model = UserProfile
        fields = ['image', 'age']
class UserEditForm(forms.ModelForm):
    # Edit the built-in User name fields.
    class Meta:
        model = User
        fields = ['first_name', 'last_name']
3985a0d08f66c16279006e5cf92a0a215003522a | 8,031 | py | Python | prediction-experiments/python-nb/ov-predict/src/api/model_loader.py | ouyangzhiping/Info-extract | d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a | [
"Apache-2.0"
] | 15 | 2019-02-25T09:53:37.000Z | 2022-03-22T05:13:24.000Z | prediction-experiments/python-nb/ov-predict/src/api/model_loader.py | ouyangzhiping/Info-extract | d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a | [
"Apache-2.0"
] | 8 | 2019-06-12T10:14:58.000Z | 2021-08-15T08:04:10.000Z | prediction-experiments/python-nb/ov-predict/src/api/model_loader.py | ouyangzhiping/Info-extract | d8a7ca47201dad4d28b9b96861b0b1b3fc27c63a | [
"Apache-2.0"
] | 1 | 2022-03-15T16:45:35.000Z | 2022-03-15T16:45:35.000Z | import sys
import numpy as np
import os
import requests
import json
import logging
from json import JSONEncoder
from keras.models import model_from_json
sys.path.append('..')
from preprocessing.InputHelper import InputHelper
from model.lstm import rmse
from model.lstm import buildModel
from keras.preprocessing.sequence import pad_sequences
sys.path.append('..')
'''
This is a stand-alone test for the python API service. It doesn't use Flask.
'''

OPTIMIZER = 'rmsprop'  # optimizer name (not referenced elsewhere in this module)
NUM_CLASSES = 0  # passed to buildModel; presumably 0 => regression head — TODO confirm
MAXLEN = 50  # maximum token sequence length after padding
SAVED_MODEL_FILE = '../../saved_models/model.h5'  # default trained-weights path
PUBMED_DIM = 200  # dimensionality of the PubMed context embedding slice
VAL_DIMENSIONS = 5  # trailing embedding dimensions that carry numeric values
# TF-Serving configuration: when both host and port are set via environment,
# predictions are delegated to the remote service instead of a local model.
TF_SERVING_HOSTNAME = os.environ.get("TF_SERVING_HOSTNAME", "")
TF_SERVING_PORT = os.environ.get("TF_SERVING_PORT", "")
USES_TF_SERVING = TF_SERVING_HOSTNAME != "" and TF_SERVING_PORT != ""
class FuzzyMatchInfo:
    """Record of one fuzzy vocabulary match: the closest vocabulary token,
    the original numeric value, and the value it was replaced with."""

    def __init__(self, closestToken, origValue, replacedValue):
        self.closestToken, self.origValue, self.replacedValue = (
            closestToken, origValue, replacedValue)
class NumpyArrayEncoder(JSONEncoder):
    """JSONEncoder that serializes numpy arrays as plain Python lists."""

    def default(self, obj):
        if not isinstance(obj, np.ndarray):
            # Fall back to the base class (raises TypeError for unknown types).
            return JSONEncoder.default(self, obj)
        return obj.tolist()
def get_model_json(saved_model):
    """Read and return the JSON model-architecture string stored in *saved_model*."""
    print("Loading model from file {}".format(saved_model))
    # Context manager closes the handle even if read() raises (the previous
    # open/read/close sequence leaked the handle on error).
    with open(saved_model, 'r') as json_file:
        return json_file.read()
def predict_outcome(inpH, model, test_instance_str):
    """Tokenize one raw input string, pad it to MAXLEN, and return the
    model's prediction for that single instance."""
    sequences = inpH.tokenizer.texts_to_sequences([test_instance_str])
    padded = pad_sequences(sequences, padding='post', maxlen=MAXLEN)
    return model.predict(padded, steps=1)[0]
def predict_regression_outcome(model, model_name, test_input_batch):
    """Return the scalar regression output (first column) for each instance."""
    predictions = predict_outcome_local_or_api(model, model_name, test_input_batch)
    return predictions[:, 0]
def predict_confidence(model, model_name, test_input_batch):
    """Return the per-instance confidence: the maximum predicted value per row."""
    predictions = predict_outcome_local_or_api(model, model_name, test_input_batch)
    return np.max(predictions, axis=1)
def predict_outcome_local_or_api(model, model_name, test_input_batch):
    """Dispatch prediction to the TF-Serving REST API when configured,
    otherwise to the in-process model."""
    if not USES_TF_SERVING:
        # in this case, "model" is the actual keras model
        return predict_outcome_with_dynamic_vocabchange(model, test_input_batch)
    return call_tf_serving_predict(model_name, test_input_batch)
def predict_outcome_with_dynamic_vocabchange(model, test_input_batch):
    """Run batched prediction on an already-encoded input batch and return it."""
    predictions_input = test_input_batch
    print("x_test = {}".format(predictions_input))
    predictions = model.predict_on_batch(predictions_input)
    print('y_preds = {}'.format(predictions))
    return predictions
def call_tf_serving_predict(model_name, test_input_batch):
    """POST the batch to the TF-Serving REST endpoint and return its
    predictions as a numpy array."""
    logging.debug("x_test = {}".format(test_input_batch))
    url = get_tf_serving_predict_endpoint(model_name)
    # batched instances, serialized with numpy-aware encoding
    post_body = json.dumps({"instances": test_input_batch}, cls=NumpyArrayEncoder)
    response = requests.post(url, post_body)
    logging.info(f"Response from {url}")
    logging.info(response.text)
    payload = response.json()
    return np.array(payload["predictions"])
def get_tf_serving_predict_endpoint(model_name):
    """Build the TF-Serving REST predict URL for *model_name*."""
    return (f"http://{TF_SERVING_HOSTNAME}:{TF_SERVING_PORT}"
            f"/v1/models/{model_name}:predict")
def init_embedding(embfile):
    """Create an InputHelper and load vocabulary ids and word2vec vectors
    from *embfile* into it."""
    helper = InputHelper()
    print("converting words to ids...")
    helper.convertWordsToIds(embfile)
    print("vocab size = {}".format(helper.vocab_size))
    helper.loadW2V(embfile)
    return helper
# Replace a node of the form C:<x>:0.1 with C:<x>:0.2 (the closest value with
# the same attrib-id in our vocabulary).
def getClosestNode(inpH, node):
    """
    Find the vocabulary token with the same attribute id as *node* and the
    numerically closest value.

    *node* has the form "<Type>:<attrib-id>:<value>".  Returns a
    FuzzyMatchInfo pairing the closest vocabulary token with the original
    and replacement values, or None when the value is not numeric or no
    vocabulary token shares the attribute id.
    """
    keytokens = node.split(':')
    keynode = keytokens[1]
    keyvalue = keytokens[2]
    if not is_number(keyvalue):
        return None
    keyvalue = float(keyvalue)

    # BUG FIX: was capped at 10000, which silently rejected matches whose
    # values differed by more than that.
    mindiff = float('inf')
    closestFound = None
    tobeReplacedWith = 0
    # Match the AttribType:Id part of every vocabulary token.
    for token in inpH.pre_emb:
        parts = token.split(':')
        if parts[1] != keynode:
            continue
        if not is_number(parts[2]):
            continue
        x = float(parts[2])
        diff = abs(keyvalue - x)
        if diff < mindiff:
            mindiff = diff
            closestFound = token
            tobeReplacedWith = x

    if closestFound is None:
        # BUG FIX: previously returned FuzzyMatchInfo(None, ...), which made
        # the caller crash on inpH.pre_emb[None]; report "no match" instead.
        return None
    return FuzzyMatchInfo(closestFound, keyvalue, tobeReplacedWith)
def is_number(s):
    """Return True if *s* parses as a float, False otherwise."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def build_input_sequence(inpH, x_text, nodevec_dim):
    """Encode one AVP-sequence string into a padded id sequence, fuzzily
    remapping unseen attribute values onto the closest vocabulary tokens.

    The remapping temporarily mutates inpH.pre_emb; the changed value
    dimensions are restored before returning.
    """
    changeLogDict, modified_x_text = replaceAVPSeqWithNN(inpH, x_text, nodevec_dim)

    # Convert each sentence (node name sequence) to a sequence of integer ids
    x = inpH.tokenizer.texts_to_sequences([modified_x_text])
    x = pad_sequences(x, padding='post', maxlen=MAXLEN)

    # after prediction revert back the values that we changed from the vocab-vector map
    # NOTE(review): only the single dimension at index -VAL_DIMENSIONS is
    # restored, matching the one dimension replaceAVPSeqWithNN overwrites.
    for changeInfo in changeLogDict.values():
        inpH.pre_emb[changeInfo.closestToken][-VAL_DIMENSIONS] = changeInfo.origValue
    return x
def replaceAVPSeqWithNN(inpH, avpseq, nodevec_dim):
    """Replace each space-separated attribute-value token in *avpseq* with its
    closest vocabulary token, overwriting that token's value dimension in
    inpH.pre_emb in place.

    Returns (changedTokens, modified_sequence): changedTokens maps each
    vocabulary token to the FuzzyMatchInfo needed to undo the mutation.
    NOTE(review): tokens with no vocabulary match are silently dropped from
    the output sequence.
    """
    tokens = avpseq.split(' ')
    modified_avpseq = []
    changedTokens = {}  # to keep track of the changes for reverting back
    for token in tokens:
        fuzzyMatchInfo = getClosestNode(inpH, token)
        if fuzzyMatchInfo == None:
            continue  # check if continue works as expected in Python
        changedTokens[fuzzyMatchInfo.closestToken] = fuzzyMatchInfo
        instvec = []
        attrvec = inpH.pre_emb[fuzzyMatchInfo.closestToken]
        # change the dimension corresponding to the value in our vocabulary dict
        # replace the nodevec part of instvec with attrvec
        for i in range(nodevec_dim):
            instvec.append(float(attrvec[i]))
        # context part comes from the current instance
        for i in range(nodevec_dim, nodevec_dim + PUBMED_DIM + VAL_DIMENSIONS):
            instvec.append(float(inpH.pre_emb[fuzzyMatchInfo.closestToken][i]))
        instvec_array = np.asarray(instvec)
        instvec_array[-VAL_DIMENSIONS] = fuzzyMatchInfo.replacedValue  # new followup value
        inpH.pre_emb[fuzzyMatchInfo.closestToken] = instvec_array  # modified instvec
        modified_avpseq.append(fuzzyMatchInfo.closestToken)
    return changedTokens, ' '.join(modified_avpseq)
def init_model(inpH, saved_model_wts_file=SAVED_MODEL_FILE, num_classes=NUM_CLASSES):
    """Rebuild the network architecture from the embedding matrix and load
    the trained weights into it."""
    print("DEBUG: During API call - emb matrix o/p dimension: {}".format(inpH.embedding_matrix.shape[1]))
    print("DEBUG: During API call - emb matrix shape: {}".format(inpH.embedding_matrix.shape))
    model = buildModel(num_classes, inpH.vocab_size, inpH.embedding_matrix.shape[1], MAXLEN,
                       inpH.embedding_matrix)
    # load weights into new model
    model.load_weights(saved_model_wts_file)
    model.summary()
    return model
def init_model_and_embedding(embfile, modelfile=SAVED_MODEL_FILE):
    """Convenience wrapper: load the embeddings, then the trained model that
    uses them; returns (input_helper, model)."""
    helper = init_embedding(embfile)
    return helper, init_model(helper, modelfile)
def main(argv):
    """Smoke test: load embeddings and model, then predict two sample rows."""
    NODEVEC_DIM = 100
    EMBFILE = "../../../../../core/prediction/graphs/nodevecs/embfile4api.merged.vec"
    # sample lines from a test data file
    TESTDATA_ROW = "C:5579689:18 I:3675717:1"
    TESTDATA_ROW2 = "C:5579689:18 I:3675717:1 C:5579088:35 I:3673272:1"
    inpH, trained_model = init_model_and_embedding(EMBFILE)
    # BUG FIX: predict_outcome_with_dynamic_vocabchange(model, batch) takes
    # two arguments; the raw rows must first be encoded with
    # build_input_sequence (the old 4-argument call raised TypeError).
    for row in (TESTDATA_ROW, TESTDATA_ROW2):
        x = build_input_sequence(inpH, row, NODEVEC_DIM)
        predicted_val = predict_outcome_with_dynamic_vocabchange(trained_model, x)
        print(predicted_val)
# Script entry point: pass through command-line args (minus the program name).
if __name__ == "__main__":
    main(sys.argv[1:])
| 32.383065 | 115 | 0.707757 | 387 | 0.048188 | 0 | 0 | 0 | 0 | 0 | 0 | 1,643 | 0.204582 |
3986c0e0bd792870f8eee7d99d0e2fa5761fa22e | 1,429 | py | Python | blueprints/accounts/manage/config.py | GetmeUK/h51 | 17d4003336857514765a42a0853995fbe3da6525 | [
"MIT"
] | null | null | null | blueprints/accounts/manage/config.py | GetmeUK/h51 | 17d4003336857514765a42a0853995fbe3da6525 | [
"MIT"
] | 4 | 2021-06-08T22:58:13.000Z | 2022-03-12T00:53:18.000Z | blueprints/accounts/manage/config.py | GetmeUK/h51 | 17d4003336857514765a42a0853995fbe3da6525 | [
"MIT"
] | null | null | null | from manhattan.manage import config
from manhattan.nav import Nav, NavItem
from blueprints.accounts.manage import blueprint
from blueprints.accounts.models import Account
__all__ = ['AccountConfig']
class AccountConfig(config.ManageConfig):
frame_cls = Account
blueprint = blueprint
@classmethod
def tabs(cls, view_type, document=None):
tabs = Nav.local_menu()
if view_type in ['api_log', 'change_log', 'activity', 'view']:
tabs.add(
NavItem(
'Details',
endpoint=AccountConfig.get_endpoint('view'),
view_args={'account': document._id}
)
)
tabs.add(
NavItem(
'Activity',
endpoint=AccountConfig.get_endpoint('activity'),
view_args={'account': document._id}
)
)
tabs.add(
NavItem(
'API log',
endpoint=AccountConfig.get_endpoint('api_log'),
view_args={'account': document._id}
)
)
tabs.add(
NavItem(
'Change log',
endpoint=AccountConfig.get_endpoint('change_log'),
view_args={'account': document._id}
)
)
return tabs
| 26.462963 | 70 | 0.491952 | 1,225 | 0.857243 | 0 | 0 | 1,127 | 0.788663 | 0 | 0 | 165 | 0.115465 |
3986fe60405cf4775e3e7c28b77f8afe1fba2cf3 | 599 | py | Python | tests/test_fails.py | Alviner/wsrpc-aiohttp | 12387f68b74587e52ae4b10f28892dbbb2afc32f | [
"MIT"
] | null | null | null | tests/test_fails.py | Alviner/wsrpc-aiohttp | 12387f68b74587e52ae4b10f28892dbbb2afc32f | [
"MIT"
] | null | null | null | tests/test_fails.py | Alviner/wsrpc-aiohttp | 12387f68b74587e52ae4b10f28892dbbb2afc32f | [
"MIT"
] | null | null | null | from aiohttp import ClientConnectionError
from wsrpc_aiohttp.testing import BaseTestCase, async_timeout
class TestDisconnect(BaseTestCase):
@async_timeout
async def test_call_error(self):
class DataStore:
def get_data(self, _):
return 1000
self.WebSocketHandler.add_route('get_data', DataStore().get_data)
client = await self.get_ws_client()
# Imitation of server connection has been closed
client.socket._closed = True
with self.assertRaises(ClientConnectionError):
await client.call('get_data')
| 28.52381 | 73 | 0.689482 | 492 | 0.821369 | 0 | 0 | 452 | 0.754591 | 433 | 0.722871 | 68 | 0.113523 |
398a3a700f8b78eced80ede2546a27f9c162d1aa | 2,325 | py | Python | devops/python/issuebot/applog.py | simahao/lily | c22ec37cb02374e94b41822eccc5e6d6aa7d0d25 | [
"MIT"
] | 4 | 2020-11-16T06:24:19.000Z | 2021-05-19T02:10:01.000Z | devops/python/issuebot/applog.py | simahao/lily | c22ec37cb02374e94b41822eccc5e6d6aa7d0d25 | [
"MIT"
] | 5 | 2021-05-05T14:17:27.000Z | 2021-09-30T08:47:23.000Z | devops/python/issuebot/applog.py | simahao/lily | c22ec37cb02374e94b41822eccc5e6d6aa7d0d25 | [
"MIT"
] | 3 | 2021-02-22T01:38:49.000Z | 2021-06-03T08:52:37.000Z | import logging
import logging.config
import os
LOG_DIR = os.path.dirname(os.path.abspath(__file__))  # log files live beside this module

# dictConfig schema (logging.config): two formatters, a console handler plus
# per-service file handlers, and named loggers for the two services.
log_config = {
    'version': 1,
    'formatters': {
        # Detailed format for files: includes logger name and source location.
        'verbose': {
            'class': 'logging.Formatter',
            'format': '%(asctime)s [%(name)s] %(levelname)-8s %(pathname)s:%(lineno)d - %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S',
            'style': '%'
        },
        # Compact format for the console.
        'simple': {
            'class': 'logging.Formatter',
            'format': '%(asctime)s %(levelname)-8s - %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S',
            'style': '%'
        }
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'DEBUG',
            'formatter': 'simple'
        },
        # Per-service log files, stored next to this module.
        'octopus': {
            'class': 'logging.FileHandler',
            'level': 'INFO',
            'filename': os.path.join(LOG_DIR, 'octopus.log'),
            'mode': 'a',
            'formatter': 'verbose',
            'encoding': 'utf-8'
        },
        'surveillance': {
            'class': 'logging.FileHandler',
            'level': 'INFO',
            'filename': os.path.join(LOG_DIR, 'surveillance.log'),
            'mode': 'a',
            'formatter': 'verbose',
            'encoding': 'utf-8'
        },
        # NOTE(review): 'file' and 'rotate_file' use the cwd-relative path
        # 'app.log', unlike the handlers above which are rooted at LOG_DIR —
        # confirm whether that is intentional.
        'file': {
            'class': 'logging.FileHandler',
            'level': 'INFO',
            'filename': 'app.log',
            'mode': 'a',
            'formatter': 'verbose',
            'encoding': 'utf-8'
        },
        'rotate_file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'level': 'INFO',
            'filename': 'app.log',
            'mode': 'a',
            'formatter': 'verbose',
            'maxBytes': 10485760,
            'backupCount': 3,
            'encoding': 'utf-8'
        }
    },
    'loggers': {
        # Service loggers: each writes to its own file and (by propagation)
        # also reaches the root handlers.
        'Octopus': {
            'handlers': ['octopus']
        },
        'Surveillance': {
            'handlers': ['surveillance']
        }
    },
    'root': {
        'level': 'INFO',
        'handlers': ['console']
    }
}

# propagate defaults to true, so each message is passed up to its parent
# loggers until root: e.g. Octopus flushes to its file and also propagates to
# root, which writes to the console.
logging.config.dictConfig(log_config)
| 29.43038 | 101 | 0.455054 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,196 | 0.514409 |
398adc2cec18c8f88eebd57e5b5cd30a4eaccd31 | 5,280 | py | Python | basket/BasketGlobals.py | Hartman-/Basket | 7b9c174b031c9ffac2de886f5e149adcd5f7c83f | [
"BSD-3-Clause"
] | 2 | 2017-02-07T11:28:58.000Z | 2017-12-01T05:41:36.000Z | basket/BasketGlobals.py | Hartman-/Basket | 7b9c174b031c9ffac2de886f5e149adcd5f7c83f | [
"BSD-3-Clause"
] | 25 | 2016-08-18T01:16:59.000Z | 2017-02-11T03:57:20.000Z | basket/BasketGlobals.py | Hartman-/Basket | 7b9c174b031c9ffac2de886f5e149adcd5f7c83f | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
import os
import platform
from glob import glob
import utils.appconfig as appconfig
# GLOBAL CONSTANTS

# --- File Structure Constants ---
# Top-level project folders and the sub-folders created under each.
BASE_DIRS = {
    'delivery': [
        'CritiqueArchive'
    ],
    'docs': [],
    'frames': [],
    'library': [
        'models',
        'templates',
        'sound',
        'texture'
    ],
    'publish': [],
    'source': [
        'plates',
        'reference'
    ],
    'working': [
        'scenes',
        'assets'
    ]}

# Production folder names.
PROD_DIRS = [
    'scenes',
    'publish'
]

# Ordered pipeline stage folder names, read from the project config
# (indexed by stage number in stageDir/publishDir/serverStageDir).
STAGE_DIRS = appconfig.get_config_value('law', 'stages')

# Sub-folders of each shot's frames directory.
FRAME_DIRS = [
    'cg',
    'comp',
    'edit',
    'elements',
    'plates'
]
# GLOBAL FUNCTIONS
def curOS():
    """Return the current operating system name (e.g. 'Windows', 'Linux')."""
    return platform.system()
def rootDir():
    """Return the local working root (~/Desktop/LAW_local/), normalising away
    the spurious \\Documents segment Maya injects into the home path."""
    base = os.path.expanduser('~') + '\\Desktop\\LAW_local\\'
    if 'Documents' not in base:
        return base
    # Maya resolves the user directory with \Documents appended; strip it
    # and force backslash separators.
    return base.replace('/', '\\').replace('\\Documents', '')
def serverDir():
    """Server-side project root, read from the project config ('project.projdir').

    Maya sometimes expands '~' to the user's Documents folder; when that
    happens, normalise the separators and strip the '\\Documents' component
    so the path matches the real share layout.
    """
    # str() replaces the redundant "'%s' %" formatting of the original.
    curDir = str(appconfig.get_config_value('project', 'projdir'))
    if 'Documents' in curDir:
        curDir = curDir.replace('/', '\\').replace('\\Documents', '')
    return curDir
def getNukeScripts():
    """Return the .nk script paths inside the current shot's NUKE directory."""
    return glob(os.path.join(nukeDir(), '*.nk'))
def nukeDir():
    """Absolute path to the current shot's NUKE ('07. Comp') working folder.

    Relies on the SEQ and SHOT environment variables (see setSeq/setShot).
    NOTE: this module uses Python 2 raise/print syntax throughout.
    """
    curDir = os.path.join(rootDir(), 'working', 'scenes', os.getenv('SEQ'), os.getenv('SHOT'), '07. Comp')
    if not os.path.isdir(curDir):
        raise ValueError, '%s NUKE Directory does not exist' % curDir
    return curDir
def serverStageDir(stage):
    """Server-side working folder for the given stage index (into STAGE_DIRS)."""
    curDir = os.path.join(serverDir(), 'working', 'scenes', os.getenv('SEQ'), os.getenv('SHOT'), STAGE_DIRS[stage])
    if not os.path.isdir(curDir):
        raise ValueError, 'Stage Directory does not exist'
    return curDir
def localFramesDir():
    """Local plates folder for the current sequence/shot."""
    curDir = os.path.join(rootDir(), 'frames', os.getenv('SEQ'), os.getenv('SHOT'), 'plates')
    if not os.path.isdir(curDir):
        raise ValueError, 'Frames Directory does not exist'
    return curDir
def stageDir(stage):
    """Server-side working folder for a stage index (into STAGE_DIRS)."""
    baseDir = os.path.join(serverDir(), 'working', 'scenes', os.getenv('SEQ'), os.getenv('SHOT'))
    # Thanks for starting at Zero lists!
    curDir = os.path.join(baseDir, STAGE_DIRS[stage])
    if not os.path.isdir(curDir):
        raise ValueError, 'File Directory does not exist: ' + curDir
    return curDir
def publishDir(stage):
    """Server-side publish folder for a stage index (into STAGE_DIRS)."""
    baseDir = os.path.join(serverDir(), 'publish', os.getenv('SEQ'), os.getenv('SHOT'))
    # Thanks for starting at Zero lists!
    curDir = os.path.join(baseDir, STAGE_DIRS[stage])
    if not os.path.isdir(curDir):
        raise ValueError, 'File Directory does not exist: ' + curDir
    return curDir
def seqDir():
    """Server-side plates folder for the current sequence/shot."""
    curDir = os.path.join(serverDir(), 'Frames', os.getenv('SEQ'), os.getenv('SHOT'), 'plates')
    if not os.path.isdir(curDir):
        raise ValueError, 'Frames Directory does not exist'
    return curDir
def libraryDir(sub):
    """Server-side library subfolder (e.g. models/templates/sound/texture)."""
    curDir = os.path.join(serverDir(), 'library', str(sub))
    if not os.path.isdir(curDir):
        raise ValueError, 'Library Directory does not exist'
    return curDir
def framesDir():
    """Server-side top-level Frames folder (path is printed for debugging)."""
    curDir = os.path.join(serverDir(), 'Frames')
    print curDir
    if not os.path.isdir(curDir):
        raise ValueError, 'Frames Directory does not exist'
    return curDir
def setShow(show):
    """Store the active show name in the SHOW environment variable."""
    os.environ['SHOW'] = str(show)
def setSeq(seq):
    """Store the active sequence in the SEQ environment variable."""
    os.environ['SEQ'] = str(seq)
def setShot(shot):
    """Store the active shot in the SHOT environment variable."""
    os.environ['SHOT'] = str(shot)
def setStage(stage):
    """Store the active pipeline stage in the LAWSTAGE environment variable."""
    os.environ['LAWSTAGE'] = str(stage)
def stageNum():
    """Current pipeline stage as an integer (reads LAWSTAGE)."""
    return int(os.getenv('LAWSTAGE'))
def applicationPath(ext):
    """Resolve the DCC executable for a file extension or a pipeline stage.

    :param ext: a file extension string (e.g. '.ma', '.nk', '.hip') or an
                integer stage index (0-7).
    :return: executable path configured in the 'app' section of the config.
    :raises KeyError: when the extension/stage is not mapped.
    """
    if isinstance(ext, int):
        # Stage-index lookup; stages 0-2, 4 and 5 are Maya-based work.
        stages = {
            0: appconfig.get_config_value('app', 'mayaexe'),
            1: appconfig.get_config_value('app', 'mayaexe'),
            2: appconfig.get_config_value('app', 'mayaexe'),
            3: appconfig.get_config_value('app', 'houdiniexe'),
            4: appconfig.get_config_value('app', 'mayaexe'),
            5: appconfig.get_config_value('app', 'mayaexe'),
            6: appconfig.get_config_value('app', 'nukeexe'),
            7: appconfig.get_config_value('app', 'premiereexe')
        }
        return stages[ext]
    # File-extension lookup.
    paths = {
        '.ma': appconfig.get_config_value('app', 'mayaexe'),
        '.mb': appconfig.get_config_value('app', 'mayaexe'),
        '.nk': appconfig.get_config_value('app', 'nukeexe'),
        '.hip': appconfig.get_config_value('app', 'houdiniexe'),
        '.hipnc': appconfig.get_config_value('app', 'houdiniexe'),
        '.hiplc': appconfig.get_config_value('app', 'houdiniexe')
    }
    return paths[ext]
# Manual smoke test: print the configured server root when run directly.
if __name__ == '__main__':
    print serverDir()
| 27.076923 | 115 | 0.595455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,608 | 0.304545 |
398d56540cd3fb4efa42ef33aee42fa70cf89afe | 3,024 | py | Python | datasets/thuc_news/thuc_news.py | jhxu-org/datasets | e78e81ff2aec2928506a42c3312799acd6c5e807 | [
"Apache-2.0"
] | null | null | null | datasets/thuc_news/thuc_news.py | jhxu-org/datasets | e78e81ff2aec2928506a42c3312799acd6c5e807 | [
"Apache-2.0"
] | null | null | null | datasets/thuc_news/thuc_news.py | jhxu-org/datasets | e78e81ff2aec2928506a42c3312799acd6c5e807 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""THUNews"""
import csv
import ctypes
import os
import datasets
# Raise the CSV field-size limit to the platform maximum (unsigned -1 halved
# to stay within a signed C long); THUCNews articles can be very long.
csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))
_CITATION = """\
@misc{xujianhua,
title={page xxx},
author={Xiang Zhang and Junbo Zhao and Yann LeCun},
year={2015},
eprint={1509.01626},
archivePrefix={arXiv},
primaryClass={cs.LG}
}
"""
_DESCRIPTION = """\
THUCTC(THU Chinese Text Classification)是由清华大学自然语言处理实验室推出的中文文本分类工具包,能够自动高效地实现用户自定义的文本分类语料的训练、\
评测、分类功能。文本分类通常包括特征选取、特征降维、分类模型学习三个步骤。如何选取合适的文本特征并进行降维,是中文文本分类的挑战性问题。、
我组根据多年在中文文本分类的研究经验,在THUCTC中选取二字串bigram作为特征单元,特征降维方法为Chi-square,权重计算方法为tfidf,、
分类模型使用的是LibSVM或LibLinear。THUCTC对于开放领域的长文本具有良好的普适性,不依赖于任何中文分词工具的性能,具有准确率高、测试速度快的优点。
"""
# Location of the packaged dataset archive (local mirror/placeholder).
_DATA_URL = "http://127.0.0.1/thuc_news.zip"
# The 14 THUCNews category labels; list order defines the ClassLabel ids.
_CLS = ['体育', '娱乐', '家居', '彩票', '房产', '教育', '时尚', '时政', '星座', '游戏', '社会', '科技', '股票', '财经']
class THUC_News(datasets.GeneratorBasedBuilder):
    """THUCNews Chinese news-topic classification dataset.

    (The original docstring said "Sogou News dataset", left over from the
    builder this file was copied from.)
    """

    def _info(self):
        """Declare the dataset schema: raw article text plus one of 14 labels."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "content": datasets.Value("string"),
                    "label": datasets.features.ClassLabel(
                        names=_CLS
                    ),
                }
            ),
            # No default supervised_keys (as we have to pass both premise
            # and hypothesis as input).
            supervised_keys=None,
            homepage="",  # didn't find a real homepage
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the archive and expose the train and test splits."""
        dl_dir = dl_manager.download_and_extract(_DATA_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepath": os.path.join(dl_dir, "thuc_news", "test.txt")}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": os.path.join(dl_dir, "thuc_news", "train.txt")}
            ),
        ]

    def _generate_examples(self, filepath):
        """This function returns the examples in the raw (text) form.

        Each line is `label<TAB>content`; the line number is the example id.
        """
        with open(filepath, encoding="utf-8") as txt_file:
            # Stream line by line instead of loading the whole file in memory.
            for id_, row in enumerate(txt_file):
                row = row.split('\t')
                yield id_, {"content": row[1], "label": _CLS.index(row[0])}
| 33.977528 | 114 | 0.638889 | 1,527 | 0.430626 | 370 | 0.104343 | 0 | 0 | 0 | 0 | 2,128 | 0.600113 |
3990560a6bff336fd21ff88b51780152f5105716 | 1,215 | py | Python | mundo3/ex115/lib/arquivo/__init__.py | dilsonm/CeV | 8043be36b2da187065691d23ed5cb40fd65f806f | [
"MIT"
] | null | null | null | mundo3/ex115/lib/arquivo/__init__.py | dilsonm/CeV | 8043be36b2da187065691d23ed5cb40fd65f806f | [
"MIT"
] | null | null | null | mundo3/ex115/lib/arquivo/__init__.py | dilsonm/CeV | 8043be36b2da187065691d23ed5cb40fd65f806f | [
"MIT"
] | null | null | null | from lib.interface import cabecalho
def arquivoExiste(arq):
    """Return True when *arq* can be opened for reading, False when missing."""
    try:
        handle = open(arq, 'rt')
    except FileNotFoundError:
        return False
    handle.close()
    return True
def criarArquivo(arq):
    """Create an empty text file at *arq* and report success or failure.

    The original used a bare `except:`, which also swallowed
    KeyboardInterrupt/SystemExit; file-creation failures are OSError.
    """
    try:
        a = open(arq, 'wt+')
        a.close()
    except OSError:
        print('Houve um erro na criação do arquivo.')
    else:
        print(f'Arquivo {arq} criado com sucesso.')
def lerarquivo(arq):
    """Print every registered person from *arq* (one 'name;age' per line).

    Bug fix: in the original, when open() failed the `finally` clause still
    ran `a.close()` on a name that was never bound, raising NameError.
    The file is now only closed after a successful open.
    """
    try:
        a = open(arq, 'rt')
    except OSError:
        print('Erro ao abrir o arquivo.')
        return
    try:
        cabecalho('PESSOAS CADASTRADAS')
        for linha in a:
            dado = linha.split(';')
            dado1 = dado[1].replace('\n', '')
            print(f'{dado[0]:<30} {dado1:>3}')
    finally:
        a.close()
def cadastrar(arq, nome='desconhecido', idade=0):
    """Append a 'name;age' record to *arq*, reporting the outcome.

    Bug fix: the original only closed the file on the success path, leaking
    the handle when the write failed; closing now happens in `finally`.
    Bare `except:` clauses were narrowed to OSError.
    """
    try:
        a = open(arq, 'at')
    except OSError:
        print('Houve um ERRO na abertura do arquivo.')
        return
    try:
        a.write(f'{nome};{idade}\n')
    except OSError:
        print('Não foi possivel gravar no arquivo.')
    else:
        print(f'Novo cadastro de {nome} adicionado.')
    finally:
        a.close()
399279cf633bc710b68c85b8b7d375ff1f8fa454 | 2,626 | py | Python | path-sum-four-ways/solution.py | ALB37/project-euler-problems | c3fb4213e150805bfe45b15847bc6449eb907c7a | [
"MIT"
] | null | null | null | path-sum-four-ways/solution.py | ALB37/project-euler-problems | c3fb4213e150805bfe45b15847bc6449eb907c7a | [
"MIT"
] | null | null | null | path-sum-four-ways/solution.py | ALB37/project-euler-problems | c3fb4213e150805bfe45b15847bc6449eb907c7a | [
"MIT"
] | null | null | null | from graph import Graph
# Project Euler 83: minimal path sum moving in all four directions.
matrix = []
# Parse the comma-separated integer grid, one row per line.
with open('p083_matrix.txt') as matrix_file:
    for line in matrix_file:
        matrix.append([int(n) for n in line.split(',')])

numGraph = Graph()
# add each node first
for i in range(len(matrix)):
    for j in range(len(matrix[i])):
        numGraph.addNode((i, j))

# Then map edges.  The original enumerated nine corner/edge/interior cases by
# hand; a single bounds-checked neighbour loop adds exactly the same edges in
# the same order (down, right, up, left, filtered to in-bounds neighbours).
rows = len(matrix)
for i in range(rows):
    for j in range(len(matrix[i])):
        for di, dj in ((1, 0), (0, 1), (-1, 0), (0, -1)):
            ni, nj = i + di, j + dj
            if 0 <= ni < rows and 0 <= nj < len(matrix[ni]):
                # Edge weight is the cost of entering the neighbouring cell.
                numGraph.addEdge((i, j), (ni, nj), matrix[ni][nj])

endCoordinates = (len(matrix) - 1, len(matrix[0]) - 1)
shortestPathMap = numGraph.aStarSearch((0, 0), endCoordinates)
shortestPath = numGraph.outputPath(shortestPathMap, (0, 0), endCoordinates)
# The path sum includes the start cell's value, matching the original output.
print(sum(matrix[c[0]][c[1]] for c in shortestPath))
| 38.617647 | 75 | 0.485149 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 57 | 0.021706 |
39965ea3888f463b999a6106ce07def8d9adf4ac | 4,010 | py | Python | carts/views.py | yun-mh/uniwalk | f5307f6970b24736d13b56b4792c580398c35b3a | [
"Apache-2.0"
] | null | null | null | carts/views.py | yun-mh/uniwalk | f5307f6970b24736d13b56b4792c580398c35b3a | [
"Apache-2.0"
] | 9 | 2020-01-10T14:10:02.000Z | 2022-03-12T00:08:19.000Z | carts/views.py | yun-mh/uniwalk | f5307f6970b24736d13b56b4792c580398c35b3a | [
"Apache-2.0"
] | null | null | null | from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render, redirect, get_object_or_404
from designs import models as design_models
from feet import models as foot_models
from products import models as product_models
from .models import Cart, CartItem
# Return the session key for the current request, creating a session if needed.
def _session_key(request):
    """Return the current session's key, creating the session when absent.

    Bug fix: Django's ``SessionBase.create()`` returns ``None`` — it stores
    the new key on the session object — so the original returned ``None``
    for first-time visitors.  Re-read ``session_key`` after creating.
    """
    session_key = request.session.session_key
    if not session_key:
        request.session.create()
        session_key = request.session.session_key
    return session_key
def add_cart(request, pk, design_pk):
    """View that adds a product (with a specific design) to the cart."""
    product = product_models.Product.objects.get(pk=pk)
    # Check whether this session already has a cart.
    try:
        cart = Cart.objects.get(session_key=_session_key(request))
    # No cart yet: create one (tied to the user when authenticated).
    except Cart.DoesNotExist:
        if request.user.is_authenticated:
            cart = Cart.objects.create(
                session_key=_session_key(request), user_id=request.user.pk
            )
            cart.save()
        else:
            cart = Cart.objects.create(session_key=_session_key(request))
            cart.save()
    # Check whether the cart already holds this product with the same design.
    try:
        cart_item = CartItem.objects.get(product=product, cart=cart, design=design_pk)
        # If the stored sizes differ from the session's size values, the user
        # went back (browser back button) and corrected the size, so only the
        # sizes are updated.
        if (
            cart_item.length_left != request.session["length_left"]
            or cart_item.length_right != request.session["length_right"]
            or cart_item.width_left != request.session["width_left"]
            or cart_item.width_right != request.session["width_right"]
        ):
            cart_item.length_left = request.session["length_left"]
            cart_item.length_right = request.session["length_right"]
            cart_item.width_left = request.session["width_left"]
            cart_item.width_right = request.session["width_right"]
        # Sizes match too, so this is exactly the same item: bump the quantity.
        else:
            cart_item.quantity += 1
        cart_item.save()
    # No matching item: create a new cart item from the session's sizes.
    except CartItem.DoesNotExist:
        cart_item = CartItem.objects.create(
            product=product,
            design=design_models.Design.objects.get(pk=design_pk),
            length_left=request.session["length_left"],
            length_right=request.session["length_right"],
            width_left=request.session["width_left"],
            width_right=request.session["width_right"],
            quantity=1,
            cart=cart,
        )
        cart_item.save()
    return redirect("carts:cart")
def cart_display(request, amount=0, counter=0, cart_items=None):
    """View that renders the cart's contents.

    ``amount``/``counter`` double as accumulators (total price / item count)
    and default to 0; ``cart_items`` stays None when no cart exists yet.
    """
    # A cart already exists for this session key.
    try:
        cart = Cart.objects.get(session_key=_session_key(request))
        cart_items = CartItem.objects.filter(cart=cart)
        for cart_item in cart_items:
            amount += cart_item.product.price * cart_item.quantity
            counter += cart_item.quantity
    # No cart for this session yet: render the empty defaults.
    except ObjectDoesNotExist:
        pass
    return render(
        request,
        "carts/cart.html",
        {"cart_items": cart_items, "amount": amount, "counter": counter},
    )
def remove_item(request, pk, design_pk):
    """Decrement the quantity of a cart item; remove it when it reaches zero."""
    # Look up the cart, the product and the matching line item.
    cart = Cart.objects.get(session_key=_session_key(request))
    product = get_object_or_404(product_models.Product, pk=pk)
    cart_item = CartItem.objects.get(product=product, cart=cart, design=design_pk)
    if cart_item.quantity <= 1:
        # Last unit: drop the line item entirely.
        cart_item.delete()
    else:
        cart_item.quantity -= 1
        cart_item.save()
    return redirect("carts:cart")
def delete_cartitem(request, pk, design_pk):
    """Remove a product/design line item from the current session's cart."""
    cart = Cart.objects.get(session_key=_session_key(request))
    product = get_object_or_404(product_models.Product, pk=pk)
    # Fetch and delete the matching line item in one go.
    CartItem.objects.get(product=product, cart=cart, design=design_pk).delete()
    return redirect("carts:cart")
| 33.983051 | 86 | 0.672319 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,513 | 0.3139 |
3996a072b5270c64e9a774f3c2758ba1336ec30d | 13,515 | py | Python | deploy.py | j-benson/Deploy | 9fb2bd1c383949521967a672ac76fcdcaced503f | [
"MIT"
] | null | null | null | deploy.py | j-benson/Deploy | 9fb2bd1c383949521967a672ac76fcdcaced503f | [
"MIT"
] | null | null | null | deploy.py | j-benson/Deploy | 9fb2bd1c383949521967a672ac76fcdcaced503f | [
"MIT"
] | null | null | null | """
Script to deploy a website to the server by ftp.
- Compares local directory with remote directory
- Updates modified files
- Adds new files
- Optionally, removes deleted files from remote
Requires: python 3.3+
Due to use of ftplib.mlsd()
The MIT License (MIT)
Copyright (c) 2015 James Benson
"""
"""
TODO: FTP response codes to look out for:
- 502 unknown command
- 550 empty directory
- 451 can't remove directory
Good ones:
- 226 transfer complete
"""
# File extensions treated as text for STOR_AUTO mode (uploaded in ASCII).
asciiExt = ['coffee', 'css', 'erb', 'haml', 'handlebars', 'hb', 'htm', 'html',
'js', 'less', 'markdown', 'md', 'ms', 'mustache', 'php', 'rb', 'sass', 'scss',
'slim', 'txt', 'xhtml', 'xml'];
# Remote paths that must never be deleted during a sync.
deleteIgnoreFiles = ["/.ftpquota"];
deleteIgnoreDirs = ["/cgi-bin"];
# Path separator used on the remote (FTP) side.
remoteSep = "/";
# Debug log file written by dprint() when `debug` is on.
dLogName = "debug.txt";
# Transfer-mode selectors for stor().
STOR_AUTO = 0;
STOR_BINARY = 1;
STOR_ASCII = 2;
# Upload policies: overwrite everything vs. only files newer locally.
UPLOAD_OVERWRITE = 0;
UPLOAD_MODIFIED = 1;
######################### SETUP ##########################
remoteHost = "127.0.0.1";
remoteUser = "Benson";
remotePassword = "benson";
localPath = "D:\\test\\ftp";
remotePath = "/";
### OPTIONS ###
verbose = True;
remoteTLS = False; # SSL/TLS doesn't work invalid certificate error
remoteDelete = True;
remoteIgnoreHidden = False; # TODO: Implement hidden.
storMode = STOR_BINARY; # only binary currently works
uploadMode = UPLOAD_MODIFIED;
debug = True;
##########################################################
import os;
from datetime import datetime, timedelta;
from ftplib import FTP, FTP_TLS, error_reply, error_temp, error_perm, error_proto, all_errors;
if remoteTLS:
    import ssl;
# Module-level handles, initialised lazily by connect() and dprint().
ftp = None;
dLog = None;
# === FTP Functions ===
def connect():
    """Open the module-level FTP connection (plain or TLS per `remoteTLS`)."""
    global ftp;
    if remoteTLS:
        # Explicit FTPS: default SSL context, then protect the data channel.
        context = ssl.create_default_context();
        ftp = FTP_TLS(remoteHost, remoteUser, remotePassword, acct="", keyfile=None, certfile=None, context=context, timeout=20);
        ftp.prot_p();
    else:
        ftp = FTP(remoteHost, remoteUser, remotePassword, 20);
    print(ftp.getwelcome());
def stor(dirpath, file):
    """Store the file obj to the dirpath of server.

    Uploads in ASCII or binary depending on `storMode` (STOR_AUTO picks
    ASCII for extensions listed in `asciiExt`), then mirrors the local
    modified time onto the server.  Upload failures are reported, not raised.

    Bug fix: the original passed `open(...)` directly to storlines/storbinary
    and never closed the handle; `with` now guarantees the file is closed.
    """
    ext = (os.path.splitext(file.name())[1]).lstrip('.');
    storpath = remoteJoin(dirpath, file.name());
    try:
        if (storMode == STOR_ASCII) or (storMode == STOR_AUTO and ext in asciiExt):
            # Store in ASCII mode
            if verbose: print("[asc] ", end="");
            with open(file.path) as fp:
                ftp.storlines("STOR %s" % storpath, fp);
        else:
            # Store in binary mode
            if verbose: print("[bin] ", end="");
            with open(file.path, "rb") as fp:
                ftp.storbinary("STOR %s" % storpath, fp);
        setModified(dirpath, file);
        if verbose: print("Uploaded: %s -> %s" % (file.path, storpath));
    except OSError as oserror:
        print("Failed Upload: %s\n %s" % (file.path, oserror));
def setModified(dirpath, file):
    """Attempts to set the modified time with MFMT."""
    # MFMT takes a UTC YYYYMMDDHHMMSS timestamp followed by the remote path.
    ftp.voidcmd("MFMT %s %s" % (file.getModified(), remoteJoin(dirpath, file.name())));
def rm(dirpath, file):
    """Delete the file at the path from the server."""
    p = remoteJoin(dirpath, file.name());
    _rm(p);
    if verbose: print("Deleted: %s" % p);
def _rm(filepath):
    # Low-level delete of a single remote file.
    ftp.delete(filepath);
def mkDir(dirpath, name):
    # Create the remote directory dirpath/name and report it when verbose.
    dirname = remoteJoin(dirpath, name);
    ftp.mkd(dirname);
    if verbose: print("Created: %s" % dirname);
def rmDir(dirpath, name, recursive = False):
    # Remove the remote directory dirpath/name; with recursive=True its
    # contents are deleted first (plain RMD only works on empty directories).
    dirname = remoteJoin(dirpath, name);
    if recursive:
        _rmDirR(dirname);
        _rmDir(dirname);
    else:
        _rmDir(dirname);
    if verbose: print("Deleted: %s" % remoteJoin(dirname, "*"));
def _rmDir(dirpath):
    """Delete directory with name from the current working directory.
    Only deletes empty directories."""
    ftp.rmd(dirpath); # TODO: What if fails to delete?
def _rmDirR(dirpath):
    """Remove the directory at dirpath and its contents (recursive).

    Deletes every file, then recurses into (and removes) every subdirectory.
    Any failure is reported as ftplib.error_temp, with the original error
    chained as the cause.
    """
    try:
        dirs, files = listRemote(dirpath);
        for f in files:
            _rm(f.path);
        for d in dirs:
            _rmDirR(d.path);
            _rmDir(d.path);
    except Exception as exc:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt) and chained so the FTP error is not lost.
        raise error_temp("451 Can't remove directory") from exc
# === End FTP Functions ===
# === Traversal Functions ===
def traverse(localPath, remotePath = remoteSep):
    """Recursively sync localPath onto remotePath: upload new/modified files,
    create new directories, then (optionally) delete remote-only entries."""
    dprint("TRAVERSING: local %s | remote %s"%(localPath, remotePath));
    localDirs, localFiles = listLocal(localPath);
    remoteDirs, remoteFiles = listRemote(remotePath);
    newF, modifiedF, unmodifiedF, deletedF = compareFiles(localFiles, remoteFiles, remoteDelete);
    newD, existingD, deletedD = compareDirs(localDirs, remoteDirs, remoteDelete);
    for f in newF + modifiedF:
        stor(remotePath, f);
    for d in newD:
        mkDir(remotePath, d);
    # Recurse into every local directory (including ones just created remotely).
    for d in newD + existingD:
        dname = d.name();
        traverse(os.path.join(localPath, dname), remoteJoin(remotePath, dname));
    if remoteDelete:
        # Remove remote entries that no longer exist locally.
        for d in deletedD:
            rmDir(remotePath, d, True);
        for f in deletedF:
            rm(remotePath, f);
def listLocal(path):
    """Return (dirs, files) for a local directory.

    Directories become Directory objects; files become File objects carrying
    their filesystem modified time (microseconds dropped for FTP parity).
    """
    dirs = []
    files = []
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full):
            dirs.append(Directory(full))
        if os.path.isfile(full):
            item = File(full)
            item.setModifiedTimestamp(os.stat(full).st_mtime)
            files.append(item)
    return (dirs, files)
def listRemote(path = ""):
    """Return (dirs, files) for a remote directory via MLSD.

    Files carry the server-reported UTC 'modify' fact as their timestamp.
    NOTE(review): servers without MLSD support will reject this command.
    """
    dirs = [];
    files = [];
    response = ftp.mlsd(path);
    for name, fact in response:
        if fact["type"] == "dir":
            dirs.append(Directory(remoteJoin(path, name)));
        if fact["type"] == "file":
            f = File(remoteJoin(path, name));
            f.setModifiedUTCStr(fact["modify"]);
            files.append(f);
    return (dirs, files);
# === End Traversal Functions ===
def remoteJoin(pathA, pathB):
    """Join two remote path fragments with exactly one `remoteSep` between them."""
    a_ends = pathA.endswith(remoteSep)
    b_starts = pathB.startswith(remoteSep)
    if a_ends and b_starts:
        pathA = pathA.rstrip(remoteSep)
    elif not a_ends and not b_starts:
        pathA += remoteSep
    return pathA + pathB
# === Structures ===
class File(object):
    """A local or remote file identified by its path.

    Equality compares by base name (files are only ever compared within a
    single directory); ordering compares the `modified` timestamp.
    """

    def __init__(self, path):
        self.path = str(path)
        # FTP MFMT/MLSD timestamp layout: YYYYMMDDHHMMSS (UTC).
        self.datetimeFormat = "%Y%m%d%H%M%S"

    def __str__(self):
        return self.name()

    def __eq__(self, other):
        """Name is the identity: compare against a File or any str-able value."""
        target = other.name() if isinstance(other, File) else str(other)
        return self.name() == target

    def __lt__(self, other):
        """True when this file is older than *other* (by modified time)."""
        return self.modified < other.modified

    def __gt__(self, other):
        """True when this file is newer than *other* (by modified time)."""
        return self.modified > other.modified

    def __le__(self, other):
        """True when this file is older than or the same age as *other*."""
        return self.modified <= other.modified

    def __ge__(self, other):
        """True when this file is newer than or the same age as *other*."""
        return self.modified >= other.modified

    def name(self):
        """Base name of the file without its directory."""
        return os.path.basename(self.path)

    def setModifiedUTCStr(self, modified):
        """Set the modified time from a UTC YYYYMMDDHHMMSS string (MLSD 'modify')."""
        self.modified = datetime.strptime(modified, self.datetimeFormat)

    def setModifiedTimestamp(self, modified):
        """Set the modified time from a POSIX timestamp, dropping the
        microseconds (FTP timestamps have none, so they would break
        comparisons against remote files)."""
        stamp = datetime.utcfromtimestamp(modified)
        self.modified = stamp - timedelta(microseconds=stamp.microsecond)

    def getModified(self):
        """Return the modified time formatted as YYYYMMDDHHMMSS."""
        return datetime.strftime(self.modified, self.datetimeFormat)
class Directory(object):
    """A directory identified by its path; equality compares by base name."""

    def __init__(self, path):
        self.path = path

    def __str__(self):
        return self.name()

    def __eq__(self, other):
        target = other.name() if isinstance(other, Directory) else str(other)
        return self.name() == target

    def name(self):
        """Base name of the directory (guards against a Directory stored as path)."""
        if isinstance(self.path, Directory):
            raise Exception("Expected str found Directory")
        return os.path.basename(self.path)
# === End Structures ===
def compareFiles(localList, remoteList, checkDeleted = True):
    """Compares localList with remoteList gets the tuple containing File objects:
    (new, modified, unmodified, deleted)
    new: Files that are in localList but not in remoteList.
    modified: Files that are newer in localList than remoteList.
    unmodified: Files that are the same in both lists.
    deleted: Files that are in the remoteList but not in localList.
    *newer is defined by the file's date modified attribute.
    New, Modified and Unmodified will contain local files objects that need to
    be uploaded to the remote location.
    Deleted will contain remote file objects that need to be deleted from
    the remote location."""
    new = [];
    modified = [];
    unmodified = [];
    deleted = [];
    dprint("COMPARE FILES");
    # Pass 1: classify each local file against the remote listing by name.
    for lfile in localList:
        dprint("LOCAL: %s - %s" % (lfile.path, lfile.modified));
        existsInRemote = False;
        for rfile in remoteList:
            if lfile == rfile:
                dprint("REMOTE: %s - %s" % (rfile.path, rfile.modified));
                existsInRemote = True;
                # UPLOAD_OVERWRITE forces a re-upload regardless of timestamps.
                if uploadMode == UPLOAD_OVERWRITE or lfile > rfile:
                    dprint("Upload Mode: %s | Modified: lfile > rfile" % uploadMode);
                    modified.append(lfile);
                else:
                    dprint("Not Modified: lfile <= rfile");
                    unmodified.append(lfile);
                break;
        if not existsInRemote:
            dprint("New local file");
            new.append(lfile);
        dprint("--------------------------------------");
    # Check for deleted files
    # Pass 2: remote files with no local counterpart are deletions, unless
    # they appear in the deleteIgnoreFiles safety list.
    if checkDeleted:
        dprint("CHECK FOR DELETED FILES");
        for rfile in remoteList:
            existsInLocal = False;
            for lfile in localList:
                if rfile == lfile:
                    existsInLocal = True;
                    break;
            if not existsInLocal and not rfile.path in deleteIgnoreFiles:
                dprint("DELETED: %s" % rfile.path);
                deleted.append(rfile);
            dprint("--------------------------------------");
    return (new, modified, unmodified, deleted);
def compareDirs(localList, remoteList, checkDeleted = True):
    """Compares localList with remoteList gets the tuple containing string
    names of the directories: (new, existing, deleted)
    new: Directories that are in localList but not in remoteList.
    existing: Directories that are in both lists.
    deleted: Directories that are in the remoteList but not in localList.
    localList - list of strings of the directory names in the local location.
    remoteList - list of strings of the directory name in the remote location."""
    new = [];
    existing = [];
    deleted = [];
    dprint("COMPARE DIRECTORIES");
    # Pass 1: classify each local directory against the remote listing by name.
    for ldir in localList:
        dprint("LOCAL DIR: %s"%ldir.path);
        existsInRemote = False;
        for rdir in remoteList:
            if ldir == rdir:
                dprint("REMOTE DIR: %s"%rdir.path);
                dprint("Exists On Local and Remote");
                existsInRemote = True;
                existing.append(ldir)
                break;
        if not existsInRemote:
            dprint("New Local Directory");
            new.append(ldir);
    # Check for deleted directories
    # Pass 2: remote directories with no local counterpart are deletions,
    # unless they appear in the deleteIgnoreDirs safety list.
    if checkDeleted:
        dprint("CHECK FOR DELETED DIRECTORIES");
        for rdir in remoteList:
            existsInLocal = False;
            for ldir in localList:
                if rdir == ldir:
                    existsInLocal = True;
                    break;
            if not existsInLocal and not rdir.path in deleteIgnoreDirs:
                dprint("DELETED: %s" % rdir.path);
                deleted.append(rdir);
    dprint("--------------------------------------");
    return (new, existing, deleted);
def dprint(line, end="\n"):
    """Append *line* to the debug log when `debug` is on (log opened lazily)."""
    global dLog
    if not debug:
        return
    if dLog is None:
        # First write of this run: start from a fresh log file.
        if os.path.exists(dLogName):
            os.remove(dLogName)
        dLog = open(dLogName, "w")
    dLog.write(line + end)
def main():
    """Run the sync: connect, traverse, and always clean up the connection/log."""
    if not os.path.isdir(localPath):
        print("Path Not Found: %s" % localPath);
        return -1;
    try:
        connect();
        traverse(localPath, remotePath);
    # Report each ftplib error class individually.
    except error_reply as r:
        print(r);
    except error_temp as t:
        print(t);
    except error_perm as p:
        print(p);
    except error_proto as pr:
        print(pr);
    except all_errors as a:
        # REVIEW: all_errors is a tuple of (Error, OSError, EOFError)
        # printing like this won't work I doubt, but I'm doing it anyway.
        print(a);
    finally:
        # Politely QUIT when possible, then force the socket closed.
        if not ftp == None:
            try:
                ftp.quit();
            except: pass;
            ftp.close();
        # Flush and close the debug log if dprint() ever opened it.
        if not dLog == None and not dLog.closed:
            dLog.flush();
            dLog.close();
if __name__ == "__main__":
    main();
| 35.565789 | 129 | 0.592379 | 2,350 | 0.173881 | 0 | 0 | 0 | 0 | 0 | 0 | 4,295 | 0.317795 |
39972511fba92d415fe55b1c71b33e08a7f6d99e | 6,079 | py | Python | pythorn/data_structures/queue.py | Gourav-KP/pythorn | f7130721c02292af0e23bd8bcf31d41990c0d48b | [
"MIT"
] | 5 | 2020-11-23T14:10:28.000Z | 2021-05-07T16:25:38.000Z | pythorn/data_structures/queue.py | Gourav-KP/pythorn | f7130721c02292af0e23bd8bcf31d41990c0d48b | [
"MIT"
] | null | null | null | pythorn/data_structures/queue.py | Gourav-KP/pythorn | f7130721c02292af0e23bd8bcf31d41990c0d48b | [
"MIT"
] | 3 | 2020-11-25T11:00:14.000Z | 2021-10-01T12:16:30.000Z | """
Author : Robin Singh
Programs List:
1.Queue
2.Circular Queue
3.Double Ended Queue
"""
import inspect
class Queue(object):
    """Fixed-capacity FIFO queue backed by a Python list.

    Mirrors the original sentinel-based API: enqueue() returns -1 when the
    queue is already at capacity, dequeue() returns -1 when it is empty.
    """

    def __init__(self, length=5):
        """
        :param length: maximum number of items the queue may hold (default 5)
        """
        self.items = []
        self.size = 0
        self.front = 0
        self.rear = 0
        self.limit = length

    def isEmpty(self):
        """Return True when no items are stored."""
        return self.items == []

    def enqueue(self, data):
        """Append an item; return -1 (storing nothing) when the queue is full."""
        if self.size >= self.limit:
            return -1
        self.items.append(data)
        if self.front == None:
            self.front = self.rear = 0
        else:
            self.rear = self.size
        self.size = self.size + 1

    def dequeue(self):
        """Remove and return the oldest item, or -1 when the queue is empty."""
        if self.isEmpty():
            return -1
        self.size = self.size - 1
        if self.size == 0:
            self.front = self.rear = 0
        else:
            self.rear = self.size - 1
        return self.items.pop(0)

    def Size(self):
        """Number of items currently stored."""
        return self.size

    def display(self):
        """Print the backing list, or return -1 when the queue is empty."""
        if self.items == []:
            return -1
        print(self.items)

    @staticmethod
    def get_code():
        """
        :return: source code
        """
        return inspect.getsource(Queue)

    @staticmethod
    def time_complexity():
        """
        :return: time complexity
        """
        return " Time Complexity of enqueue: O(1) "\
               " Time Complexity of dequeue: O(n)"\
               " Optimizations : We can implement both enqueu and dequeue operations in O(1) time. To achive this, we can either use linked list implementaion of queue or circular implementation of queue"
class CircularQueue(object):
    """List-backed FIFO queue with a fixed capacity.

    :param length: pass queue length while making object otherwise default value will be 5
    """

    def __init__(self, length=5):
        """
        :param length: maximum number of stored items (default 5)
        """
        self.items = []
        self.rear = 0
        self.front = 0
        self.length = length

    def isEmpty(self):
        """Return True when no items are stored."""
        return self.items == []

    def isQueuefull(self):
        """Return True when the item count has reached capacity."""
        return len(self.items) == self.length

    def enqueue(self, data):
        """Append an item; print a notice (storing nothing) when full."""
        if self.isQueuefull():
            # Full: report and leave the queue unchanged.
            return print("queue is full")
        if self.isEmpty():
            # First element: reset both markers.
            self.front = self.rear = 0
        else:
            self.rear += 1
        self.items.append(data)

    def dequeue(self):
        """Remove and return the oldest item, or -1 when the queue is empty."""
        if self.isEmpty():
            return -1
        self.front += 1
        return self.items.pop(0)

    def display(self):
        """Print the backing list, or return True when the queue is empty."""
        if self.items == []:
            return True
        print(self.items)

    @staticmethod
    def get_code():
        """
        :return: source code
        """
        return inspect.getsource(CircularQueue)

    @staticmethod
    def time_complexity():
        """
        :return: time complexity
        """
        return " Time Complexity of enqueue: O(1)"\
               " Time Complexity of dequeue: O(1)"
class Deque(object):
    """Double-ended queue with a fixed capacity, backed by a list.

    :param length: pass queue length while making object otherwise default value will be 5
    """

    def __init__(self, length=5):
        """
        :param length: maximum number of stored items (default 5)
        """
        self.items = []
        self.length = length

    def isFull(self):
        """Return True when the deque holds `length` items."""
        return len(self.items) == self.length

    def isEmpty(self):
        """Return True when no items are stored."""
        return self.items == []

    def enqueue_start(self, element):
        """Insert an item at the front; print a notice when full."""
        if self.isFull():
            return print("queue is full")
        self.items.insert(0, element)

    def enqueue_end(self, ele):
        """Append an item at the back; print a notice when full."""
        if self.isFull():
            return print("queue is full")
        self.items.append(ele)

    def dequeue_start(self):
        """Remove and return the front item; print a notice when empty."""
        if self.isEmpty():
            return print("empty queue..!")
        return self.items.pop(0)

    def dequeue_end(self):
        """Remove and return the back item; print a notice when empty."""
        if self.isEmpty():
            return print("empty queue")
        return self.items.pop()

    def display(self):
        """Print the backing list, or return True when the deque is empty."""
        if self.items == []:
            return True
        print(self.items)

    @staticmethod
    def get_code():
        """
        :return: source code
        """
        return inspect.getsource(Deque)

    @staticmethod
    def time_complexity():
        """
        :return: time complexity
        """
        return " Time Complexity of all the above operations is constant : O(1)"
| 22.853383 | 204 | 0.497779 | 5,963 | 0.980918 | 0 | 0 | 1,173 | 0.192959 | 0 | 0 | 2,288 | 0.376378 |
3997e398937ee03af443d926f755e2d9046ee9c6 | 1,740 | py | Python | wataru/commands/models/project.py | risuoku/wataru | 63be36d15454abd0636f67eaf1e80728b8c5a9bd | [
"MIT"
] | null | null | null | wataru/commands/models/project.py | risuoku/wataru | 63be36d15454abd0636f67eaf1e80728b8c5a9bd | [
"MIT"
] | null | null | null | wataru/commands/models/project.py | risuoku/wataru | 63be36d15454abd0636f67eaf1e80728b8c5a9bd | [
"MIT"
] | null | null | null | from wataru.commands.models.base import CommandBase
from wataru.logging import getLogger
import wataru.rules.models as rmodels
import os
import sys
logger = getLogger(__name__)
class Create(CommandBase):
def apply_arguments(self, parser):
parser.add_argument('--name', action='store', dest='projectname'),
parser.add_argument('--root-dir', action='store', dest='rootdir'),
parser.add_argument('--enable-virtualenv', action='store_true', default=False, dest='virtualenv_enabled'),
parser.add_argument('--theme-dir', action='store', dest='themedir'),
def pre_execute(self, namespace):
pass
def execute(self, namespace):
# get theme
from wataru.rules import themes
tm = themes.get_default() if namespace.themedir is None else themes.get(namespace.themedir)
# update theme
if namespace.projectname is not None:
tm.update_project('name', namespace.projectname)
if namespace.rootdir is not None:
tm.update_project('rootdir', namespace.rootdir)
if namespace.virtualenv_enabled:
tm.update_project('virtualenv', True)
# setup template loader
from wataru.rules import templates
templates.setenv(tm.abs_tpldir)
# get project rule graph
from wataru.rules import graph
rg = graph.get_by_theme(tm)
project = rg.project
# add extra nodes
mddir = rmodels.get_metadatadirectory(project)
project.add_node(mddir)
# process project
project.converge()
# process meta
mt = tm.config['meta']
jobj = rmodels.SetupJupyter(mddir, project.abspath, mt.get('jupyter'))
jobj.converge()
| 32.222222 | 114 | 0.65977 | 1,558 | 0.895402 | 0 | 0 | 0 | 0 | 0 | 0 | 301 | 0.172989 |
3998894acc2c2f5b50a8cd1451c55bffb80880f7 | 2,914 | py | Python | UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py | 6henrykim/UnityExamples | 3d4d782e6e67fee1ede902998c2df1b5b90b074a | [
"Apache-2.0"
] | 9 | 2020-04-02T10:33:37.000Z | 2021-12-03T17:14:40.000Z | UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py | ultrahaptics/UnityExamples | 3d4d782e6e67fee1ede902998c2df1b5b90b074a | [
"Apache-2.0"
] | 2 | 2019-11-06T10:37:18.000Z | 2021-09-20T14:31:13.000Z | UnityExamples/Assets/StreamingAssets/Python/BlockLibraries/UnityExamples/FingerTrace.py | ultrahaptics/UnityExamples | 3d4d782e6e67fee1ede902998c2df1b5b90b074a | [
"Apache-2.0"
] | 1 | 2022-02-25T16:38:52.000Z | 2022-02-25T16:38:52.000Z | # A Sensation which creates a Polyline of 35 points of the finger joints, along which a Circle Path is animated.
from pysensationcore import *
import sensation_helpers as sh
import HandOperations
# We will use the joint positions of the fingers to animate a Circle along a PolylinePath
fingers = ["thumb", "indexFinger", "middleFinger", "ringFinger", "pinkyFinger"]
bones = ["metacarpal", "proximal", "intermediate", "distal", "intermediate","proximal","metacarpal"]
jointKeyFrames = []
# Create a Polyline Path for each Animation Step
animPath = createInstance("PolylinePath", "PolylinePathInstance")
# Create inputs for each of the Bone joints
for finger in fingers:
for bone in bones:
jointInputName = "%s_%s_position" % (finger, bone)
jointKeyFrames+=[jointInputName]
# The number of Key frames
numPoints = len(jointKeyFrames)
points = sh.createList(numPoints)
# Connect the points list for our Polylinepath to the animation path
connect(points["output"], animPath.points)
translateAlongPath = createInstance("TranslateAlongPath", "translateAlongPath")
connect(Constant((1,0,0)), translateAlongPath.direction)
connect(animPath.out, translateAlongPath.animationPath)
# The Object Path (a circle) Will trace along the animation Path
# On top of its translation along the path, we apply a rotation transform,
# to match the orientation of the Palm
circlePath = createInstance("CirclePath", "objectPath")
orientToPalmInstance = createInstance("OrientPathToPalm", "orientToPalm")
# Object Path -> OrientPathToPalm -> TranslateAlongPath
connect(circlePath.out, orientToPalmInstance.path)
connect(orientToPalmInstance.out, translateAlongPath.objectPath)
topLevelInputs = {}
for n in range(0,numPoints):
topLevelInputs[(jointKeyFrames[n], points["inputs"][n])] = (0,0,0)
topLevelInputs[("t", translateAlongPath.t)] = (0, 0, 0)
topLevelInputs[("duration", translateAlongPath.duration)] = (2.5,0,0)
topLevelInputs[("dotSize", circlePath.radius)] = (0.01, 0, 0)
topLevelInputs[("palm_direction", orientToPalmInstance.palm_direction)] = (0, 0, 0)
topLevelInputs[("palm_normal", orientToPalmInstance.palm_normal)] = (0, 0, 0)
fingerScan = sh.createSensationFromPath("Finger Trace",
topLevelInputs,
output = translateAlongPath.out,
drawFrequency = 120,
renderMode=sh.RenderMode.Loop,
definedInVirtualSpace = True
)
# Hide the non-vital inputs...
visibleInputs = ("duration", "dotSize")
for topLevelInput in topLevelInputs.keys():
inputName = topLevelInput[0]
if inputName not in visibleInputs:
setMetaData(getattr(fingerScan, inputName), "Input-Visibility", False)
setMetaData(fingerScan.duration, "Type", "Scalar")
setMetaData(fingerScan.dotSize, "Type", "Scalar") | 42.231884 | 112 | 0.710707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,080 | 0.370625 |
3998e8576c81d8620613973a3fcb28ca0f349137 | 2,053 | py | Python | scripts/extarct_from_videos.py | corenel/yt8m-feature-extractor | 3f658749fd365478f1f26daa78b3e7b8d4844047 | [
"MIT"
] | 18 | 2017-09-12T07:02:28.000Z | 2021-06-07T13:38:51.000Z | scripts/extarct_from_videos.py | corenel/yt8m-feature-extractor | 3f658749fd365478f1f26daa78b3e7b8d4844047 | [
"MIT"
] | 1 | 2017-10-19T13:51:41.000Z | 2017-12-30T08:49:08.000Z | scripts/extarct_from_videos.py | corenel/yt8m-feature-extractor | 3f658749fd365478f1f26daa78b3e7b8d4844047 | [
"MIT"
] | 3 | 2017-09-07T07:07:22.000Z | 2018-09-18T15:49:29.000Z | """Extract inception_v3_feats from videos for Youtube-8M feature extractor."""
import os
import torch
import init_path
import misc.config as cfg
from misc.utils import (concat_feat_var, get_dataloader, make_cuda,
make_variable)
from models import inception_v3
if __name__ == '__main__':
# init models and data loader
model = make_cuda(inception_v3(pretrained=True,
transform_input=True,
extract_feat=True))
model.eval()
# get vid list
video_list = os.listdir(cfg.video_root)
video_list = [v for v in video_list
if os.path.splitext(v)[1] in cfg.video_ext]
# extract features by inception_v3
for idx, video_file in enumerate(video_list):
vid = os.path.splitext(video_file)[0]
filepath = os.path.join(cfg.video_root, video_file)
if os.path.exists(cfg.inception_v3_feats_path.format(vid)):
print("skip {}".format(vid))
else:
print("processing {}".format(vid))
# data loader for frames in single video
data_loader = get_dataloader(dataset="VideoFrame",
path=filepath,
num_frames=cfg.num_frames,
batch_size=cfg.batch_size)
# extract features by inception_v3
feats = None
for step, frames in enumerate(data_loader):
print("--> extract features [{}/{}]".format(step + 1,
len(data_loader)))
feat = model(make_variable(frames))
feats = concat_feat_var(feats, feat.data.cpu())
print("--> save feats to {}"
.format(cfg.inception_v3_feats_path.format(vid)))
torch.save(feats, cfg.inception_v3_feats_path.format(vid))
# print("--> delete original video file: {}".format(filepath))
# os.remove(filepath)
| 40.254902 | 78 | 0.560156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 410 | 0.199708 |
399fd36bf8e08b05046794370fe69a0ebbb1e2b1 | 4,208 | py | Python | wc_rules/simulator/simulator.py | KarrLab/wc_rules | 5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518 | [
"MIT"
] | 5 | 2018-12-24T16:20:27.000Z | 2022-02-12T23:07:42.000Z | wc_rules/simulator/simulator.py | KarrLab/wc_rules | 5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518 | [
"MIT"
] | 7 | 2019-01-14T23:08:52.000Z | 2021-06-03T02:38:43.000Z | wc_rules/simulator/simulator.py | KarrLab/wc_rules | 5c6d8ec7f3152f2d234107d6fec3e2bc8d9ff518 | [
"MIT"
] | 3 | 2018-12-15T00:51:56.000Z | 2020-04-29T14:12:34.000Z | from collections import deque
from ..utils.collections import DictLike
from ..matcher.core import ReteNet
from ..matcher.actions import make_node_token, make_edge_token, make_attr_token
from .sampler import NextReactionMethod
class SimulationState:
def __init__(self,nodes=[],**kwargs):
self.cache = DictLike(nodes)
# for both stacks, use LIFO semantics using appendleft and popleft
self.rollback = kwargs.get('rollback',False)
self.action_stack = deque()
self.rollback_stack = deque()
self.matcher = kwargs.get('matcher',ReteNet.default_initialization())
self.start_time = kwargs.get('start_time',0.0)
self.end_time = kwargs.get('end_time',0.0)
self.sampler = NextReactionMethod(time=self.start_time)
# These are elementary methods, used as
# the final step in adding/removing a node
def resolve(self,idx):
return self.cache.get(idx)
def update(self,node):
self.cache.add(node)
return self
def remove(self,node):
self.cache.remove(node)
return self
def get_contents(self,ignore_id=True,ignore_None=True,use_id_for_related=True,sort_for_printing=True):
d = {x.id:x.get_attrdict(ignore_id=ignore_id,ignore_None=ignore_None,use_id_for_related=use_id_for_related) for k,x in self.cache.items()}
if sort_for_printing:
# sort list attributes
for idx,adict in d.items():
for k,v in adict.items():
if isinstance(v,list):
adict[k] = list(sorted(v))
adict = dict(sorted(adict.items()))
d = dict(sorted(d.items()))
return d
def push_to_stack(self,action):
if isinstance(action,list):
# assume list has to be executed left to right
self.action_stack = deque(action) + self.action_stack
else:
self.action_stack.appendleft(action)
return self
def simulate(self):
while self.action_stack:
action = self.action_stack.popleft()
if hasattr(action,'expand'):
self.push_to_stack(action.expand())
elif action.__class__.__name__ == 'RemoveNode':
if self.rollback:
self.rollback_stack.appendleft(action)
matcher_tokens = self.compile_to_matcher_tokens(action)
action.execute(self)
outtokens = self.matcher.process(matcher_tokens)
else:
if self.rollback:
self.rollback_stack.appendleft(action)
action.execute(self)
matcher_tokens = self.compile_to_matcher_tokens(action)
outtokens = self.matcher.process(matcher_tokens)
self.update_sampler(outtokens)
return self
def rollback(self):
while self.rollback_stack:
action = self.rollback_stack.popleft()
action.execute(self)
return self
def compile_to_matcher_tokens(self,action):
action_name = action.__class__.__name__
#d = {'AddNode':'add','RemoveNode':'remove','AddEdge':'add','RemoveEdge':'remove'}
# NOTE: WE"RE ATTACHING ACTUAL NODES HERE, NOT IDS, FIX action.idx,idx1,idx2 later
if action_name in ['AddNode','RemoveNode']:
return [make_node_token(action._class, self.resolve(action.idx), action_name)]
if action_name in ['SetAttr']:
_class = self.resolve(action.idx).__class__
return [make_attr_token(_class, self.resolve(action.idx), action.attr, action.value, action_name)]
if action_name in ['AddEdge','RemoveEdge']:
i1,a1,i2,a2 = [getattr(action,x) for x in ['source_idx','source_attr','target_idx','target_attr']]
c1,c2 = [self.resolve(x).__class__ for x in [i1,i2]]
return [
make_edge_token(c1,self.resolve(i1),a1,self.resolve(i2),a2,action_name),
make_edge_token(c2,self.resolve(i2),a2,self.resolve(i1),a1,action_name)
]
return []
def update_sampler(self,tokens):
for token in tokens:
self.sampler.update_propensity(reaction=token['source'],propensity=token['propensity'])
return self
def sample_next_event(self):
rule,time = self.sampler.next_event()
if time == float('inf'):
print('Null event!')
return self
sample = self.matcher.function_sample_rule(rule)
rule_node = self.matcher.get_node(core=rule,type='rule')
for act in rule_node.data.actions:
if act.deps.declared_variable is not None:
sample[act.deps.declared_variable] = act.exec(sample,rule_node.data.helpers)
else:
self.push_to_stack(act.exec(sample,rule_node.data.helpers))
self.sampler.update_time(time)
self.simulate()
return self
| 34.491803 | 140 | 0.736217 | 3,972 | 0.943916 | 0 | 0 | 0 | 0 | 0 | 0 | 586 | 0.139259 |
39a05a3ae20bd7b9b573cc3402d91e45b4b3aa9a | 594 | py | Python | samples/module_snapcheck.py | luislezcair/jsnapy | 86381aa389cf19394a6165fe34bcfd95ee8a7f67 | [
"Apache-2.0",
"BSD-3-Clause"
] | 101 | 2016-07-04T13:18:48.000Z | 2022-02-11T19:18:15.000Z | samples/module_snapcheck.py | luislezcair/jsnapy | 86381aa389cf19394a6165fe34bcfd95ee8a7f67 | [
"Apache-2.0",
"BSD-3-Clause"
] | 187 | 2016-07-06T14:58:03.000Z | 2022-03-15T09:19:11.000Z | samples/module_snapcheck.py | luislezcair/jsnapy | 86381aa389cf19394a6165fe34bcfd95ee8a7f67 | [
"Apache-2.0",
"BSD-3-Clause"
] | 70 | 2016-07-12T15:20:58.000Z | 2022-03-25T05:14:40.000Z | ### performing function similar to --snapcheck option in command line ######
from jnpr.jsnapy import SnapAdmin
from pprint import pprint
from jnpr.junos import Device
js = SnapAdmin()
config_file = "/etc/jsnapy/testfiles/config_single_snapcheck.yml"
snapvalue = js.snapcheck(config_file, "snap")
for snapcheck in snapvalue:
print "\n -----------snapcheck----------"
print "Tested on", snapcheck.device
print "Final result: ", snapcheck.result
print "Total passed: ", snapcheck.no_passed
print "Total failed:", snapcheck.no_failed
pprint(dict(snapcheck.test_details))
| 33 | 76 | 0.720539 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 226 | 0.380471 |
39a0dad5efbaf0ea7f66987d69ed3575a2e7b7d0 | 1,068 | py | Python | python/easy/1342_Number_of_Steps_to_Reduce_a_Number_to_Zero.py | JackWang0107/leetcode | c02932190b639ef87a8d0fcd07d9cd6ec7344a67 | [
"MIT"
] | 1 | 2021-05-22T03:27:33.000Z | 2021-05-22T03:27:33.000Z | python/easy/1342_Number_of_Steps_to_Reduce_a_Number_to_Zero.py | JackWang0107/leetcode | c02932190b639ef87a8d0fcd07d9cd6ec7344a67 | [
"MIT"
] | null | null | null | python/easy/1342_Number_of_Steps_to_Reduce_a_Number_to_Zero.py | JackWang0107/leetcode | c02932190b639ef87a8d0fcd07d9cd6ec7344a67 | [
"MIT"
] | null | null | null | from typing import *
class Solution:
# 32 ms, faster than 53.97% of Python3 online submissions for Number of Steps to Reduce a Number to Zero.
# 14.2 MB, less than 35.20% of Python3 online submissions for Number of Steps to Reduce a Number to Zero.
def numberOfSteps(self, num: int) -> int:
ans = 0
while num != 0:
if num & 1:
num -= 1
else:
num = int(num/2 )
ans += 1
return ans
# 20 ms, faster than 98.79% of Python3 online submissions for Number of Steps to Reduce a Number to Zero.
# 14.3 MB, less than 35.20% of Python3 online submissions for Number of Steps to Reduce a Number to Zer
def numberOfSteps(self, num: int) -> int:
def rec(num):
if num == 0 or num == 1:
return num
elif num % 2:
return 2 + rec(num // 2)
else:
return 1 + rec(num // 2)
return rec(num)
if __name__ == "__main__":
so = Solution()
print(so.numberOfSteps(123)) | 34.451613 | 110 | 0.553371 | 964 | 0.902622 | 0 | 0 | 0 | 0 | 0 | 0 | 429 | 0.401685 |
39a16a05ac36a9db042c0bce00dc04a5a657ef37 | 1,370 | py | Python | __private__/temp_dev/testshapefile.py | karimbahgat/PyA | 4d62a0850ba1dca93f7362ef23e18a13938fce4f | [
"MIT"
] | 16 | 2016-02-26T15:24:28.000Z | 2021-06-16T21:00:22.000Z | __private__/temp_dev/testshapefile.py | karimbahgat/PyA | 4d62a0850ba1dca93f7362ef23e18a13938fce4f | [
"MIT"
] | 5 | 2016-02-27T20:13:26.000Z | 2018-09-12T23:08:36.000Z | __private__/temp_dev/testshapefile.py | karimbahgat/PyA | 4d62a0850ba1dca93f7362ef23e18a13938fce4f | [
"MIT"
] | 7 | 2015-07-08T12:51:57.000Z | 2019-12-05T19:07:27.000Z |
import Tkinter as tk
from PIL import Image, ImageTk
import aggdraw
window = tk.Tk()
label = tk.Label(window)
label.pack()
# schedule changing images
import itertools, random, time
def agg2tkimg(aggimage):
t = time.clock()
img = aggimage
colorlength = len(img.mode)
width,height = img.size
imgbytes = img.tostring()
# via PIL/PILLOW for fast window updates
tempimg = Image.fromstring("RGBA", (width,height), data=imgbytes)
tkimg = ImageTk.PhotoImage(image=tempimg)
return tkimg
def random_n(minval, maxval, n=1):
ns = (random.randrange(minval,maxval) for _ in xrange(n))
return tuple(ns)
def draw_polygon(img, coords):
pen = aggdraw.Pen(random_n(0,222,n=3), width=int(img.size[0]*0.001))
brush = aggdraw.Brush(random_n(0,222,n=3))
# draw
img.polygon(coords, pen, brush)
def update(img):
# update
img.flush()
tkimg = agg2tkimg(img)
label["image"] = label.img = tkimg
# Begin #
img = aggdraw.Draw("RGBA", (1000,600), random_n(0,222,n=3) )
import geovis
sf = geovis.shapefile_fork.Reader("D:/Test Data/cshapes/cshapes.shp")
for shape in sf.iterShapes():
if shape.__geo_interface__["type"] == "Polygon":
flatcoords = [xory+350 for xy in shape.__geo_interface__["coordinates"][0] for xory in xy]
draw_polygon(img, flatcoords)
update(img)
window.mainloop()
| 22.096774 | 98 | 0.674453 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 170 | 0.124088 |
39a902062ca7512880d1818276ec6c8f4ed11b57 | 693 | py | Python | aoc10.py | roscroft/aoc-2020 | 3f37f6b29ec66bac5610bccd6de5ebb000bde312 | [
"MIT"
] | 1 | 2020-12-07T22:16:17.000Z | 2020-12-07T22:16:17.000Z | aoc10.py | roscroft/aoc-2020 | 3f37f6b29ec66bac5610bccd6de5ebb000bde312 | [
"MIT"
] | null | null | null | aoc10.py | roscroft/aoc-2020 | 3f37f6b29ec66bac5610bccd6de5ebb000bde312 | [
"MIT"
] | null | null | null | from utils import utils
def part_1(data):
count_1 = sum([1 if data[i] - data[i-1] == 1 else 0 for i in range(len(data))])
count_3 = sum([1 if data[i] - data[i-1] == 3 else 0 for i in range(len(data))])
return count_1*count_3
def part_2(data):
dynm = [1] + [0]*(len(data)-1)
for i in range(1, len(data)):
dynm[i] = sum([dynm[i-j] if data[i] - data[i-j] <= 3 else 0 for j in range(1, 4)])
return dynm[-1]
if __name__ == "__main__":
day = 10
data = utils.get_ints_from_file(f"data/aoc{day}_data.txt")
data = sorted(data)
data = [0] + data + [data[-1]+3]
print(f"Part 1 solution: {part_1(data)}")
print(f"Part 2 solution: {part_2(data)}") | 34.65 | 90 | 0.588745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 103 | 0.148629 |
39a92e95003cf25b12c9d62aa465b8c0ddd75afb | 5,510 | py | Python | HyperGui.py | MIC-Surgery-Heidelberg/HyperGUI_1.0 | 0ee8e0da85049076bb22a542d15d6c3adf6ea106 | [
"MIT"
] | null | null | null | HyperGui.py | MIC-Surgery-Heidelberg/HyperGUI_1.0 | 0ee8e0da85049076bb22a542d15d6c3adf6ea106 | [
"MIT"
] | null | null | null | HyperGui.py | MIC-Surgery-Heidelberg/HyperGUI_1.0 | 0ee8e0da85049076bb22a542d15d6c3adf6ea106 | [
"MIT"
] | null | null | null | """
@author: Alexander Studier-Fischer, Jan Odenthal, Berkin Oezdemir, Isabella Camplisson, University of Heidelberg
"""
from HyperGuiModules import *
import logging
import os
#logging.basicConfig(level=logging.DEBUG)
xSize=None
ySize=None
def main():
(window, introduction, input_output, image_diagram, hist_calculation, spec_calculation, bp, circles, rectangles, crop, bs, measure, mda, spec_invest) = init()
listener = ModuleListener()
# introduction
Introduction(introduction)
# histogram calculation
HistCalculation(hist_calculation, listener)
# histogram calculation
SpecCalculation(spec_calculation, listener)
# analysis and form
analysis_and_form_frame = frame(input_output, BACKGROUND, 1, 0, 4, 2)
analysis_and_form_module = AnalysisAndForm(analysis_and_form_frame, listener)
listener.attach_module(ANALYSIS_AND_FORM, analysis_and_form_module)
# source and output
source_and_output_frame = frame(input_output, BACKGROUND, 0, 0, 1, 2)
source_and_output_module = SourceAndOutput(source_and_output_frame, listener)
listener.attach_module(SOURCE_AND_OUTPUT, source_and_output_module)
# save
save_frame = frame(input_output, BACKGROUND, 5, 0, 1, 1)
save_module = Save(save_frame, listener)
listener.attach_module(SAVE, save_module)
# save csvs
csv_frame = frame(input_output, BACKGROUND, 0, 2, 6, 1)
csv_module = CSVSaver(csv_frame, listener)
listener.attach_module(CSV, csv_module)
# info
info_frame = frame(input_output, BACKGROUND, 5, 1, 1, 1)
info_module = Info(info_frame, listener)
listener.attach_module(INFO, info_module)
# parameter specification
#parameter_frame = frame(input_output, BACKGROUND, 0, 3, 2, 1)
#parameter_module = Parameter(parameter_frame, listener)
#listener.attach_module(PARAMETER, parameter_module)
# original colour
og_color_frame = frame(image_diagram, BACKGROUND, 0, 0, 7, 6)
og_color_module = OGColour(og_color_frame, listener)
listener.attach_module(ORIGINAL_COLOUR, og_color_module)
# original colour data
og_color_data_frame = frame(image_diagram, BACKGROUND, 2, 12, 3, 2)
og_color_data_module = OGColourData(og_color_data_frame, listener)
listener.attach_module(ORIGINAL_COLOUR_DATA, og_color_data_module)
# recreated colour
recreated_color_frame = frame(image_diagram, BACKGROUND, 7, 0, 7, 3)
recreated_color_module = RecColour(recreated_color_frame, listener)
listener.attach_module(RECREATED_COLOUR, recreated_color_module)
# recreated colour data
rec_color_data_frame = frame(image_diagram, BACKGROUND, 5, 12, 4, 2)
rec_color_data_module = RecreatedColourData(rec_color_data_frame, listener)
listener.attach_module(RECREATED_COLOUR_DATA, rec_color_data_module)
# new colour
new_color_frame = frame(image_diagram, BACKGROUND, 7, 3, 7, 3)
new_color_module = NewColour(new_color_frame, listener)
listener.attach_module(NEW_COLOUR, new_color_module)
# new colour data
new_color_data_frame = frame(image_diagram, BACKGROUND, 9, 12, 3, 2)
new_color_data_module = NewColourData(new_color_data_frame, listener)
listener.attach_module(NEW_COLOUR_DATA, new_color_data_module)
# diagram
diagram_frame = frame(image_diagram, BACKGROUND, 0, 12, 2, 2)
diagram_module = Diagram(diagram_frame, listener)
listener.attach_module(DIAGRAM, diagram_module)
# histogram
histogram_frame = frame(image_diagram, BACKGROUND, 0, 6, 8, 6)
histogram_module = Histogram(histogram_frame, listener)
listener.attach_module(HISTOGRAM, histogram_module)
# absorption
absorption_spec_frame = frame(image_diagram, BACKGROUND, 8, 6, 6, 6)
absorption_module = AbsorptionSpec(absorption_spec_frame, listener)
listener.attach_module(ABSORPTION_SPEC, absorption_module)
# Batch Processing
BP_frame = frame(bp, BACKGROUND, 0, 0, 16, 16)
BP_module = BP(BP_frame, listener)
listener.attach_module(BP, BP_module)
rectangles_frame = frame(rectangles, BACKGROUND, 0, 0, 16, 16)
rectangles_module = Rectangle(rectangles_frame, listener)
listener.attach_module(rectangles, rectangles_module)
circles_frame = frame(circles, BACKGROUND, 0, 0, 16, 16)
circles_module = Circle(circles_frame, listener)
listener.attach_module(circles, circles_module)
BS_frame = frame(bs, BACKGROUND, 0, 0, 16, 16)
BS_module = BS(BS_frame, listener)
listener.attach_module(BS, BS_module)
measure_frame = frame(measure, BACKGROUND, 0, 0, 16, 16)
measure_module = Measure(measure_frame, listener)
listener.attach_module(MEASURE, measure_module)
crops_frame = frame(crop, BACKGROUND, 0, 0, 16, 16)
crops_module = crops(crops_frame, listener)
listener.attach_module(crop, crops_module)
mda_frame = frame(mda, BACKGROUND, 0, 0, 16, 16)
mda_module = MDA(mda_frame, listener)
listener.attach_module(mda, mda_module)
spec_invest_frame = frame(spec_invest, BACKGROUND, 0, 0, 16, 16)
spec_invest_module = SpecInvest(spec_invest_frame, listener)
listener.attach_module(spec_invest, spec_invest_module)
# colourbar
colour_frame = frame(image_diagram, BACKGROUND, 12, 12, 2, 2)
colour_module = Colour(colour_frame, listener)
if xSize is not None and ySize is not None:
window.geometry(str(xSize) + "x" + str(ySize))
window.mainloop()
if __name__ == '__main__':
main()
| 36.979866 | 162 | 0.741561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 660 | 0.119782 |
39a9bf645816b1c506dcc188750ce0f86697bf35 | 241 | py | Python | 8. The Prisoner.py | Zfauser/Code-Combat-Introductory-To-Computer-Science-Python-Answers | 231d17ad2224fc616c022b515bc14e78ec5822f9 | [
"MIT"
] | 1 | 2021-02-25T16:43:08.000Z | 2021-02-25T16:43:08.000Z | 8. The Prisoner.py | Zfauser/Code-Combat-Introductory-To-Computer-Science-Python-Answers | 231d17ad2224fc616c022b515bc14e78ec5822f9 | [
"MIT"
] | null | null | null | 8. The Prisoner.py | Zfauser/Code-Combat-Introductory-To-Computer-Science-Python-Answers | 231d17ad2224fc616c022b515bc14e78ec5822f9 | [
"MIT"
] | null | null | null | # Free the prisoner, defeat the guard and grab the gem.
hero.moveRight()
# Free Patrick from behind the "Weak Door".
hero.attack("Weak Door")
hero.moveRight(2)
# Defeat the guard, named "Two".
# Get the gem.
hero.moveRight()
hero.moveDown(3) | 26.777778 | 55 | 0.73029 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 155 | 0.643154 |
39ab4f35e7e866e763852b3e23d066d864569549 | 1,120 | py | Python | conti_wc.py | saturn99/cleaks | c826c973d9695c3bfc31bf580b470267792807e7 | [
"MIT"
] | 6 | 2022-03-01T10:33:52.000Z | 2022-03-05T22:26:27.000Z | conti_wc.py | saturn99/cleaks | c826c973d9695c3bfc31bf580b470267792807e7 | [
"MIT"
] | 1 | 2022-03-01T13:40:29.000Z | 2022-03-01T13:40:29.000Z | conti_wc.py | saturn99/cleaks | c826c973d9695c3bfc31bf580b470267792807e7 | [
"MIT"
] | 2 | 2022-03-01T10:40:57.000Z | 2022-03-01T13:21:23.000Z | # -*- coding: utf-8 -*-
# import libraries
import os
from PIL import Image
import nltk
import numpy as np
import matplotlib.pyplot as plt
import random
from scipy.ndimage import gaussian_gradient_magnitude
from wordcloud import WordCloud, ImageColorGenerator, STOPWORDS
# import mask image. Search for stencil image for better results
mask = np.array(Image.open("darthvader01.png"))
# define function for grayscale coloring
def grey_color_func(word, font_size, position, orientation, random_state=None,
**kwargs):
return "hsl(0, 0%%, %d%%)" % random.randint(60, 100)
# Load and text and decode
text = open(('conti_just_body.txt'), "rb").read().decode('UTF-8', errors='replace')
# Load stopwords for EN language from nlkt
stopwords = nltk.corpus.stopwords.words('english')
# Create Worldcloud
wc = WordCloud(max_words=100000, width=1596, height=584, stopwords=stopwords, mask=mask).generate(text)
# Recolor our Wordcloud
plt.imshow(wc.recolor(color_func=grey_color_func, random_state=3),
interpolation="bilinear")
# Save worldcloud file
wc.to_file("CONTI_Darth.png")
| 25.454545 | 103 | 0.738393 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 394 | 0.351786 |
39ab88cab3f3527e44f2aa4992feac019e41f3f0 | 2,120 | py | Python | PA2_Optical_Flow.py | tianzixie/CAP5415PA2 | 6a7f4b1f178f10b37d588e698eddd013ce193544 | [
"MIT"
] | null | null | null | PA2_Optical_Flow.py | tianzixie/CAP5415PA2 | 6a7f4b1f178f10b37d588e698eddd013ce193544 | [
"MIT"
] | null | null | null | PA2_Optical_Flow.py | tianzixie/CAP5415PA2 | 6a7f4b1f178f10b37d588e698eddd013ce193544 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Oct 26 08:19:16 2017
@author: 0
"""
from scipy.misc import imresize
from scipy.signal import convolve,convolve2d
import scipy
from PIL import Image
import cv2
import numpy as np
img = cv2.imread("C://Users/0/Downloads/basketball1.png",0)
img2 = cv2.imread("C://Users/0/Downloads/basketball2.png",0)
#cv2.imshow('img',img)
#cv2.imshow('img2',img2)
k=(3,3)
print img
img = cv2.GaussianBlur(img, k, 1.5)
img2 = cv2.GaussianBlur(img2, k, 1.5)
cv2.imshow('img3',img)
#cv2.waitKey(10000)
cv2.destroyAllWindows()
imga=np.matrix(img)
imga2=np.matrix(img2)
#print imga
#img=Image.fromarray(imga)
#img.show()
height,width = imga.shape
#for x in range img(x,0):
print imga.shape
print height ,width
# print x
#for y in height:
# for x in width:
# print '0'
#for y in range(height):
print imga
#imga[0,1]=imga[0,1]+1
#print imga
def fx(y,x):
fx=(int(imga[y,x+1])-int(imga[y,x]))/1
return fx
def fy(y,x):
fy=(int(imga[y+1,x])-int(imga[y,x]))/1
return fy
print fx(1,0),fy(0,4)
imga=imresize(imga,(240,320))
imga2=imresize(imga2,(240,320))
print imga,imga.shape,imga2,imga2.shape
u=np.zeros([240,320])
v=np.zeros([240,320])
w2=30
w=15
#for i in range(w2):
# for y in range(w2):
#
#
# print matrix
#matrix=np.zeros([w2,w2])
#
#for x in range(w,240-w):
#
# for y in range(w,320-w):
# c=0
## matrix[w,w]=x
# print x,y
#print matrix
#def conv2(x, y, mode='same'):
# return np.rot90(convolve2d(np.rot90(x, 2), np.rot90(y, 2), mode=mode), 2)
#print convolve2d(imga2,matrix,'valid')
'''
ft = scipy.signal.convolve2d(imga, 0.25 * np.ones((2,2))) + \
scipy.signal.convolve2d(imga2, -0.25 * np.ones((2,2)))
#print ft
fx,fy=np.gradient(cv2.GaussianBlur(img, k, 1.5))
fx = fx[0:478, 0:638]
fy = fy[0:478, 0:638]
ft = ft[0:478, 0:638]
#print fx,fy,ft
'''
'''
for i in range(w+1,480-w):
for j in range(w+1,640-w):
Ix = fx[i-w:i+w, j-w:j+w]
Iy = fy[i-w:i+w, j-w:j+w]
It = ft[i-w:i+w, j-w:j+w]
A = [Ix,Iy]
print fx,fy,ft
'''
#C=A.T*-It
#print C
#print curFx,curFy,curFt,U[0],U[1]
| 20.784314 | 78 | 0.618868 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,314 | 0.619811 |
39ab9e369da24d4871a1bbc5c6f073cf0d4fed1f | 743 | py | Python | Test_data/database.py | mayowak/SQLite_test | a1185650dffe360d033e0691567ec2b2e075cae5 | [
"MIT"
] | null | null | null | Test_data/database.py | mayowak/SQLite_test | a1185650dffe360d033e0691567ec2b2e075cae5 | [
"MIT"
] | null | null | null | Test_data/database.py | mayowak/SQLite_test | a1185650dffe360d033e0691567ec2b2e075cae5 | [
"MIT"
] | null | null | null | #!usr/bin/env python3
#import dependecies
import sqlite3
import csv
#connect to test_data
conn = sqlite3.connect('test_data.db')
#create a cursor
c = conn.cursor()
c.execute("DROP TABLE test_data")
#create a test_data table
c.execute("""CREATE TABLE test_data(age integer,
sex text,
bmi real,
children integer,
smoker text,
region text)""")
#get test_data file
get_file = open('test_data.csv')
#read test_data file
read_file = csv.reader(get_file)
c.executemany("INSERT INTO test_data VALUES (?, ?, ?, ?, ?, ?,?)", read_file)
conn.commit()
conn.close() | 22.515152 | 78 | 0.549125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 529 | 0.711978 |
39abb2ca3dacb04c99f9108d126a09ef92f5c7d4 | 1,824 | py | Python | swift_cloud_py/validate_safety_restrictions/validate.py | stijnfleuren/swift_cloud_api | 30f3b6c1fd80e5cfa5ce11e1daa08a09ab1e4e9b | [
"MIT"
] | 3 | 2021-05-25T18:29:38.000Z | 2021-08-03T17:04:29.000Z | swift_cloud_py/validate_safety_restrictions/validate.py | stijnfleuren/swift_cloud_api | 30f3b6c1fd80e5cfa5ce11e1daa08a09ab1e4e9b | [
"MIT"
] | null | null | null | swift_cloud_py/validate_safety_restrictions/validate.py | stijnfleuren/swift_cloud_api | 30f3b6c1fd80e5cfa5ce11e1daa08a09ab1e4e9b | [
"MIT"
] | null | null | null | from swift_cloud_py.entities.control_output.fixed_time_schedule import FixedTimeSchedule
from swift_cloud_py.entities.intersection.intersection import Intersection
from swift_cloud_py.validate_safety_restrictions.validate_bounds import validate_bounds
from swift_cloud_py.validate_safety_restrictions.validate_completeness import validate_completeness
from swift_cloud_py.validate_safety_restrictions.validate_conflicts import validate_conflicts
from swift_cloud_py.validate_safety_restrictions.validate_fixed_orders import validate_fixed_orders
from swift_cloud_py.validate_safety_restrictions.validate_other_sg_relations import validate_other_sg_relations
def validate_safety_restrictions(intersection: Intersection, fixed_time_schedule: FixedTimeSchedule,
tolerance: float = 10**(-2)) -> None:
"""
Check if the fixed-time schedule satisfies the safety restrictions such as bounds on greenyellow times
and bounds on red times.
:param intersection: intersection object (this object also contains safety restrictions that a
fixed-time schedule should satisfy)
:param fixed_time_schedule: the schedule that we would like to validate
:param tolerance: tolerance in seconds for violating safety restrictions
This method raises a SafetyViolation-exception if the safety restrictions are not satisfied.
"""
validate_bounds(intersection=intersection, fts=fixed_time_schedule, tolerance=tolerance)
validate_conflicts(intersection=intersection, fts=fixed_time_schedule, tolerance=tolerance)
validate_other_sg_relations(intersection=intersection, fts=fixed_time_schedule, tolerance=tolerance)
validate_completeness(intersection=intersection, fts=fixed_time_schedule)
validate_fixed_orders(intersection=intersection, fts=fixed_time_schedule)
| 67.555556 | 111 | 0.838268 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 537 | 0.294408 |
39ac7cdc9dcc48e4f5e6e8db36ab648730a99cc2 | 20,366 | py | Python | source/python/brick_characterizer/CellRiseFall_Char.py | electronicvisions/brick | 9ad14f9d2912e70191f4711f359e3912c8cef837 | [
"BSD-3-Clause"
] | 1 | 2016-08-02T15:23:16.000Z | 2016-08-02T15:23:16.000Z | source/python/brick_characterizer/CellRiseFall_Char.py | ahartel/brick | 9ad14f9d2912e70191f4711f359e3912c8cef837 | [
"BSD-3-Clause"
] | null | null | null | source/python/brick_characterizer/CellRiseFall_Char.py | ahartel/brick | 9ad14f9d2912e70191f4711f359e3912c8cef837 | [
"BSD-3-Clause"
] | 1 | 2016-05-27T21:22:14.000Z | 2016-05-27T21:22:14.000Z | from timingsignal import TimingSignal
from brick_characterizer.CharBase import CharBase
class CellRiseFall_Char(CharBase):
    """Characterization run measuring clock-to-output delays and output
    transition (slew) times of a cell for one combination of input clock
    transition time and output load capacitance.

    The run writes a netlist with pulsed stimulus sources (via helpers
    inherited from CharBase such as append_out/write_spice_file/run),
    simulates it, parses the printed waveforms through an external script
    and extracts per timing signal the rise/fall delay relative to the
    active clock edge and the rise/fall transition time.
    """

    def __init__(self,toplevel,output_filename,temperature,use_spectre=False):
        # Top-level cell/netlist under characterization.
        self.toplevel = toplevel
        self.output_filename = output_filename
        # Output load; add_capacitance() appends 'e-12', so this is in pF.
        self.load_capacitance = 0.01
        self.clock_rise_time = 0.1 #ns
        self.signal_rise_time = 0.1 #ns
        # Unique set of stimulus nets (filled by add_timing_signals).
        self.stimulus_signals = []
        # Results per probe: {name: [rise_delay, fall_delay]} and
        # {name: [rise_transition, fall_transition]} (seconds).
        self.delays = {}
        self.transitions = {}

        super(CellRiseFall_Char,self).__init__(temperature,use_spectre)

        # The following assignments have to be after the super constructor
        # because they read attributes set there (e.g. self.clock_period).
        self.initial_delay = self.clock_period/2.0
        self.simulation_length = 9.0 #ns

    def get_delays(self):
        """Return {signal: [rise_delay, fall_delay]} measured by check_timing()."""
        return self.delays

    def get_transitions(self):
        """Return {signal: [rise_transition, fall_transition]} measured by check_timing()."""
        return self.transitions

    def get_first_table_param(self):
        """First table axis: the derated input clock transition time, rounded to 5 decimals."""
        return round(self.get_clock_rise_time(),5)

    def get_second_table_param(self):
        """Second table axis: the output load capacitance."""
        return self.get_load_capacitance()

    def get_clock_rise_time(self):
        """Return the clock transition time scaled by the slew derate factor."""
        return self.clock_rise_time*self.slew_derate_factor

    def set_clock_rise_time(self,value):
        # Stored un-derated so get_clock_rise_time() round-trips the value.
        self.clock_rise_time = value/self.slew_derate_factor

    def get_load_capacitance(self):
        """Return the output load capacitance (pF)."""
        return self.load_capacitance

    def set_load_capacitance(self,value):
        self.load_capacitance = value

    def whats_my_name(self):
        """Unique identifier for this run, parameterized by slew and load."""
        return 'CellRiseFall_Char_inTr'+str(self.get_clock_rise_time())+'_cap'+str(self.load_capacitance)

    def log_my_name(self):
        """Tab-separated identifier (state, input slew, load) for log output."""
        return self.state+'\tin'+str(self.get_clock_rise_time())+'\tcap'+str(self.load_capacitance)

    def next_step(self):
        """Advance the characterization state machine.

        This class has only one real step: from 'init' it writes the spice
        file, runs the simulation and checks timing, ending in state 'done'.
        Returns 0 on success, 1 if simulation or timing extraction failed.
        """
        # this class has only one step
        if self.state == 'init':
            self.state = 'delay'

            self.write_spice_file()
            if not self.run() == 0:
                return 1
            if not self.check_timing() == 0:
                return 1

            self.state = 'done'
            return 0

        return 0

    def get_current_filename(self):
        """Output filename with slew/load/state encoded before the extension."""
        import os
        name,ext = os.path.splitext(self.output_filename)
        return name+'_inTr'+str(self.get_clock_rise_time())+'_cap' \
                +str(self.load_capacitance)+'_'+self.state+ext

    def add_clock_signals(self,clocks):
        """Store the clock signals ({name: direction}) for this run.

        Raises if a clock was already declared as a static signal.
        """
        # Add clock signals
        self.clocks = clocks

        # Check if one of the clocks is already given as a static signal
        if self.added_static_signals:
            for name in clocks.iterkeys():
                if self.static_signals.has_key(name):
                    raise Exception('Clock signal '+name+' has already been'
                                    + ' defined as a static signal.')

    def add_timing_signals(self,tim_sig):
        """This function adds the timing signals for this characterization run.
        The parameter tim_sig has the following data structure:
        {
            'd_out[1:0]' : ['clk', 'd_out_ff[=index=]', 'positive_unate'],
            'd_in_ff[1:0]' : ['clk', 'd_in[=index=]', 'positive_unate'],
        }
        There are two signals involved: The measured signal (in this case
        d_out[1:0] and d_in_ff[1:0]) and the stimulus_signal (in this case
        d_out_ff[1:0] and d_in[1:0])."""

        # Add the actual timing signals
        for signal, related in self.itersignals(tim_sig,
                                                eval_index_expression=True):
            # A timing signal must not also be declared static.
            if self.added_static_signals:
                if self.static_signals.has_key(signal):
                    raise Exception('Timing signal '+signal+' has ' \
                                    + 'already been defined as a ' \
                                    + 'static signal.')

            t = TimingSignal(signal,related)
            self.timing_signals[signal] = t
            # The following list stores a unique list of the stimulus
            # signals for later pulse source generation in the net list
            self.stimulus_signals.append(t.stimulus())
            self.delays[signal] = []
            self.transitions[signal] = []

        self.stimulus_signals = set(self.stimulus_signals)
        self.added_timing_signals = True

    def generate_timing_signals(self):
        """Emit all sources/probes: clock waveforms, stimulus pulses, and a
        probe plus load capacitance per measured signal. The initial
        condition of each probe depends on its unate-ness."""
        for name,direction in self.clocks.iteritems():
            self.generate_clock_edge(name,direction)
            self.add_probe(name)

        for signal in self.stimulus_signals:
            self.generate_two_edges(signal,self.signal_rise_time,self.initial_delay,self.initial_delay)
            self.add_probe(signal)
            self.set_initial_condition(signal,self.low_value)

        for signal_name,signal_obj in self.timing_signals.iteritems():
            self.add_probe(signal_name)
            self.add_capacitance(signal_name,self.load_capacitance)
            if signal_obj.unateness() == 'positive_unate':
                self.set_initial_condition(signal_name,self.low_value)
            elif signal_obj.unateness() == 'negative_unate':
                self.set_initial_condition(signal_name,self.high_value)
            else:
                raise Exception('Probe signal '+signal_name+' has unknown unate-ness. Please specify \'positive_unate\' or \'negative_unate\'')

    def generate_clock_edge(self,name,direction):
        """Emit a piecewise-linear voltage source for clock `name`.

        direction 'R' produces a waveform whose active edges at the timing
        offsets are rising; any other value emits the inverted waveform.
        Times are in ns ('e-9'/'e-09' suffixes in the pwl source).
        """
        self.append_out('V'+name+' '+name+' 0 pwl(')
        if direction == 'R':
            self.append_out('+ 0.0000000e+00 0.0000000e+00')
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5 + self.clock_rise_time)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.high_value))
        else:
            self.append_out('+ 0.0000000e+00 '+str(self.high_value)+'000000e+00')
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset-self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*0.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5)+'e-9 '+str(self.low_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*1.5 + self.clock_rise_time)+'e-09 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 - self.clock_rise_time*0.5)+'e-9 '+str(self.high_value))
            self.append_out('+ '+str(self.timing_offset+self.clock_period*2.0 + self.clock_rise_time*0.5)+'e-09 '+str(self.low_value))

    def generate_two_edges(self,signal,transition_time,rising_delay,falling_delay):
        """Emit a pwl source for `signal`: one rising edge `rising_delay`
        before the timing offset and one falling edge one clock period later
        (minus `falling_delay`). Times in ns."""
        self.append_out('V'+signal+' '+signal+' 0 pwl(')
        start_time = self.timing_offset - rising_delay
        start_time_2 = self.timing_offset+self.clock_period - falling_delay
        first_value = self.low_value
        second_value = self.high_value

        self.append_out('+ 0.0000000e+00 '+str(first_value)+'e+00')
        self.append_out('+ '+str(start_time)+'e-9 '+str(first_value)+'e+0')
        self.append_out('+ '+str(start_time+transition_time)+'e-09 '+str(second_value)+'e+00')
        self.append_out('+ '+str(start_time_2)+'e-9 '+str(second_value)+'e+00')
        self.append_out('+ '+str(start_time_2+transition_time)+'e-09 '+str(first_value)+'e+00)')

    def add_capacitance(self,signal_name,capacitance):
        """Attach a load capacitor (value in pF) from `signal_name` to ground."""
        self.append_out('C'+signal_name+' '+signal_name \
                +' 0 '+str(capacitance)+'e-12')

    def add_pseudo_static_signals(self,signals):
        """Pseudo-Static signals in the case of an Output timing
        characterization are the input timing signals. The function
        *do_characterization* passes the input timing signals to this function.
        It assigns zero to all of them during simulation."""
        if not self.added_timing_signals:
            raise Exception('Cannot add pseudo-static signals before' \
                            + ' timing_signals have been added. Please call' \
                            + ' this function afterwards.')

        # Only tie down signals that are neither stimulus nets nor clocks.
        not_known = lambda name: not name in self.stimulus_signals and not self.clocks.has_key(name)
        for signal,related in self.itersignals(signals,
                                               eval_index_expression=True):
            if not_known(signal):
                self.static_signals[signal] = 0

        self.added_static_signals = True

    def check_timing(self):
        """Extract delays and transition times from the parsed waveforms.

        Edges come from parse_print_file() as flat lists in triples
        (lower, middle, upper threshold-crossing times); index 3*k+1 is the
        switching point of the k-th edge. Delays are measured from the
        relevant clock edge; an edge more than timing_offset away (or
        before the clock edge) is reported as self.infinity.
        Returns 0 on success, 1 on any failure.
        """
        # parse result file
        # after this step, all edges are identified
        if not self.parse_print_file() == 0:
            return 1

        # find clock edge
        clock_edges = {}
        try:
            for clock_name, clock_dir in self.clocks.iteritems():
                if not clock_edges.has_key(clock_name):
                    clock_edges[clock_name] = []
                self.logger_debug(str(self.get_rising_edges(clock_name)))
                if (clock_dir == 'R'):
                    # Switching points of the 2nd and 3rd rising edge triples.
                    clock_edges[clock_name].append(self.get_rising_edges(clock_name)[1*3+1])
                    clock_edges[clock_name].append(self.get_rising_edges(clock_name)[2*3+1])
                    self.logger_debug( "Rising edge of "+clock_name+" at "+" ".join([str(x) for x in clock_edges[clock_name]]))
                else:
                    # Switching points of the 2nd and 3rd falling edge triples.
                    clock_edges[clock_name].append(self.get_falling_edges(clock_name)[1*3+1])
                    clock_edges[clock_name].append(self.get_falling_edges(clock_name)[2*3+1])
                    self.logger_debug( "Falling edge of "+clock_name+" at "+" ".join([str(x) for x in clock_edges[clock_name]]))
        except:
            self.logger_debug("Died")
            return 1

        for timing_signal in self.timing_signals.itervalues():
            # some alias pointers
            stimulus = timing_signal.stimulus()
            probe = timing_signal.name()
            probe_lc = probe
            # The ngspice print-file path stores signal names lower-cased.
            if not self.use_spectre:
                probe_lc = probe.lower()
            # initial timing values
            delta_t = [0,0]
            tran = [0,0]

            self.logger_debug( "Rising edges of "+probe+" at "+" ".join([str(x) for x in self.get_rising_edges(probe_lc)]))
            self.logger_debug( "Falling edges of "+probe+" at "+" ".join([str(x) for x in self.get_falling_edges(probe_lc)]))

            if timing_signal.unateness() == 'positive_unate':
                # Positive unate: output rises after clock edge 0, falls after edge 1.
                r_edges_probe = self.get_rising_edges(probe_lc)
                if r_edges_probe:
                    while len(r_edges_probe) > 0:
                        lower = r_edges_probe.pop(0)
                        middle = r_edges_probe.pop(0)
                        upper = r_edges_probe.pop(0)
                        # get switching point
                        delta_t[0] = middle - clock_edges[timing_signal.clock()][0]
                        # get rising transition
                        tran[0] = upper - lower

                        if delta_t[0] < 0 or delta_t[0] > self.timing_offset*1.e-9:
                            self.logger_debug("Rising edge at "+str(middle)+" for signal " \
                                              +probe+" too far away from clock edge")
                            delta_t[0] = self.infinity
                        else:
                            self.logger_debug("Rising Delay: "+str(delta_t[0]))
                            break
                else:
                    self.logger_error("Rising edge for signal "+probe+" not found but expected.")
                    return 1

                f_edges_probe = self.get_falling_edges(probe_lc)
                if f_edges_probe:
                    while len(f_edges_probe) > 0:
                        lower = f_edges_probe.pop(0)
                        middle = f_edges_probe.pop(0)
                        upper = f_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[1] = middle - clock_edges[timing_signal.clock()][1]
                        # get threshold time for falling transition upper
                        tran[1] = upper-lower

                        if delta_t[1] < 0 or delta_t[1] > self.timing_offset*1.e-9:
                            self.logger_debug("Falling edge at "+str(middle)+" for signal " \
                                              +probe+" too far away from clock edge")
                            delta_t[1] = self.infinity
                        else:
                            self.logger_debug( "Falling Delay: "+str(delta_t[1]))
                            break
                else:
                    self.logger_error("Falling edge for signal "+probe+" not found but expected.")
                    return 1

            elif timing_signal.unateness() == 'negative_unate':
                # Negative unate: output falls after clock edge 0, rises after edge 1.
                f_edges_probe = self.get_falling_edges(probe_lc)
                if f_edges_probe:
                    while len(f_edges_probe) > 0:
                        lower = f_edges_probe.pop(0)
                        middle = f_edges_probe.pop(0)
                        upper = f_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[1] = middle - clock_edges[timing_signal.clock()][0]
                        # get threshold time for rising transition upper
                        tran[1] = upper - lower

                        if delta_t[1] < 0 or delta_t[1] > self.timing_offset*1.e-9:
                            self.logger_debug("Falling edge at "+str(middle)+" for signal " \
                                              +probe+" too far away from clock edge")
                            delta_t[1] = self.infinity
                        else:
                            self.logger_debug( "Falling Delay: "+str(delta_t[1]))
                            break
                else:
                    self.logger_error("Falling edge for signal "+probe_lc+" not found but expected.")
                    return 1

                r_edges_probe = self.get_rising_edges(probe_lc)
                if r_edges_probe:
                    while len(r_edges_probe) > 0:
                        lower = r_edges_probe.pop(0)
                        middle = r_edges_probe.pop(0)
                        upper = r_edges_probe.pop(0)
                        # get threshold time for switching point
                        delta_t[0] = middle - clock_edges[timing_signal.clock()][1]
                        # get threshold time for rising transition upper
                        tran[0] = upper - lower

                        if delta_t[0] < 0 or delta_t[0] > self.timing_offset*1.e-9:
                            self.logger_debug("Rising edge at "+str(middle)+" for signal " \
                                              +probe+" too far away from clock edge")
                            delta_t[0] = self.infinity
                        else:
                            self.logger_debug( "Rising Delay: "+str(delta_t[0]))
                            break
                else:
                    self.logger_error("Rising edge for signal "+probe_lc+" not found but expected.")
                    return 1

            self.delays[probe] = delta_t
            self.transitions[probe] = tran

            self.logger_debug('Delays for signal \''+probe+'\' are rising: '+str(self.delays[probe][0])+' and falling: '+str(self.delays[probe][1]))
            self.logger_debug('Transition times for signal \''+probe+'\' are rising: '+str(self.transitions[probe][0])+' and falling: '+str(self.transitions[probe][1]))

        return 0

    def parse_print_file(self):
        """Run the external print-file parser and load its pickled results.

        Invokes parse_print_file[_spectre].py under $BRICK_DIR with the
        rise/fall and slew threshold voltages, then unpickles the
        '<printfile>_rising' / '<printfile>_falling' edge lists it wrote
        into self.rising_edges / self.falling_edges.
        Returns 0 on success, 1 if the parser exited non-zero.
        """
        import subprocess,os

        call = ''
        if self.use_spectre:
            call = ['python', os.environ['BRICK_DIR']+'/source/python/brick_characterizer/parse_print_file_spectre.py', self.get_printfile_name(), str(self.high_value*self.rise_threshold), str(self.high_value*self.fall_threshold), str(self.high_value*self.slew_lower_rise), str(self.high_value*self.slew_upper_rise), str(self.high_value*self.slew_lower_fall), str(self.high_value*self.slew_upper_fall)]
        else:
            call = ['python', os.environ['BRICK_DIR']+'/source/python/brick_characterizer/parse_print_file.py', self.get_printfile_name(), str(self.high_value*self.rise_threshold), str(self.high_value*self.fall_threshold), str(self.high_value*self.slew_lower_rise), str(self.high_value*self.slew_upper_rise), str(self.high_value*self.slew_lower_fall), str(self.high_value*self.slew_upper_fall)]

        self.logger_debug(" ".join(call))

        returncode = subprocess.call(call)

        if not returncode == 0:
            self.logger_error("Error in Parse print file")
            return 1

        import pickle
        with open(self.get_printfile_name()+'_rising') as input:
            self.rising_edges = pickle.load(input)
        with open(self.get_printfile_name()+'_falling') as input:
            self.falling_edges = pickle.load(input)

        return 0
| 50.78803 | 402 | 0.584847 | 20,275 | 0.995532 | 0 | 0 | 0 | 0 | 0 | 0 | 4,075 | 0.200088 |
39ad13fb0f9312898dcd01e19fe49f2a734c1783 | 58 | py | Python | pyjpboatrace/utils/__init__.py | miyamamoto/pyjpboatrace | fbc4a794d1f03e2ed7dfcafcb20c43098c1434a6 | [
"MIT"
] | null | null | null | pyjpboatrace/utils/__init__.py | miyamamoto/pyjpboatrace | fbc4a794d1f03e2ed7dfcafcb20c43098c1434a6 | [
"MIT"
] | null | null | null | pyjpboatrace/utils/__init__.py | miyamamoto/pyjpboatrace | fbc4a794d1f03e2ed7dfcafcb20c43098c1434a6 | [
"MIT"
] | null | null | null | from .str2num import str2num
__all__ = [
'str2num'
]
| 9.666667 | 28 | 0.655172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.155172 |
39ae3c36550302817294c61764f3350d2f47cf3d | 2,168 | py | Python | snippets/integers.py | rhishi/python-snippets | 60020d3a187d7687b38b6b58f74ceb03a37983b9 | [
"Apache-2.0"
] | null | null | null | snippets/integers.py | rhishi/python-snippets | 60020d3a187d7687b38b6b58f74ceb03a37983b9 | [
"Apache-2.0"
] | null | null | null | snippets/integers.py | rhishi/python-snippets | 60020d3a187d7687b38b6b58f74ceb03a37983b9 | [
"Apache-2.0"
] | null | null | null | import sys
# First: to understand the uses of "format" below, read these:
# Format String Syntax https://docs.python.org/2/library/string.html#formatstrings
# Format Specification Mini-Language https://docs.python.org/2/library/string.html#formatspec
# In Python 2, there are two integer types: int, long.
# int is the underlying platform's signed integer type,
# either 32 or 64 bit, depending on the platform.
print "2^31 - 1 = {0:20} = {0:17x} ".format((1 << 31) - 1)
print "2^63 - 1 = {0:20} = {0:17x} ".format((1 << 63) - 1)
# sys.maxint gives the maximum value of int. It is 2^31-1 or 2^63-1.
maxint = sys.maxint
print " max int = {0:20} = {0:17x} {1}".format(maxint, type(maxint))
# There is no sys.minint, but it's simply -sys.maxint-1 as said in Python documentation
# http://docs.python.org/2/library/stdtypes.html#numeric-types-int-float-long-complex
minint = -maxint - 1
print " min int = {0:20} = {0:17x} {1}".format(minint, type(minint))
print
# long is an integer type with unlimited range. Python automatically
# switches over from int to long whenever there is overflow.
# That's why, there is no sys.maxlong.
# Python 3 even gets rid of sys.maxint, because it has just single
# integer type: int. It actually behaves like 2's long i.e. has unlimited range.
# 3 has sys.maxsize, which loosely relates to 2's sys.maxint.
# http://docs.python.org/3.3/whatsnew/3.0.html#integers
# http://docs.python.org/3/library/stdtypes.html#numeric-types-int-float-complex
# Let's test the automatic switchover from int to long
# On 64-bit platform, the switchover point is between 2^63-1 and 2^63.
for r in [ range(1, 22), range(28, 37), range(53, 69), range(88, 100), range(123, 131) ]:
for i in r:
# make 2^i - 1, without spilling beyond i bits.
n = (((1 << (i-1)) - 1) << 1) + 1
# i is formatted as left-aligned ('<'), width 3.
# n is formatted as hex ('x') with 0x prefix ('#'), width 35.
print "2**{0:<3} - 1 = {1:#35x} {2}".format(i, n, type(n))
print " + 1 = {1:#35x} {2}".format(i, n+1, type(n+1))
print "..."
print
print -1
print -1 & 0xFF
print -1 & 0xFFF
| 38.714286 | 95 | 0.652675 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,634 | 0.75369 |
39aefe4ed5c77eadc14e52071c40e7bf0197d590 | 332 | py | Python | covid mail/main.py | rahul263-stack/PROJECT-Dump | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | [
"MIT"
] | 1 | 2020-04-06T04:41:56.000Z | 2020-04-06T04:41:56.000Z | covid mail/main.py | rahul263-stack/quarantine | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | [
"MIT"
] | null | null | null | covid mail/main.py | rahul263-stack/quarantine | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | [
"MIT"
] | null | null | null | import os
from sendDetailedEmail.email import MailAttachment
def sendMail(clientEmail):
try:
sender = MailAttachment(clientEmail=clientEmail)
sender.send()
except Exception as e:
raise e
if __name__=="__main__":
clientEmail = input("input a valid client email ID: ")
sendMail(clientEmail)
| 22.133333 | 58 | 0.698795 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.129518 |
39af2956611d454e6abd79bee5b3ec4243b86cd1 | 2,933 | py | Python | pyodide_importer/api.py | ryanking13/pyodide-importer | fb9f83e54eb307fcdb2590588f0b75db1c87ca97 | [
"MIT"
] | 1 | 2021-11-16T11:55:54.000Z | 2021-11-16T11:55:54.000Z | pyodide_importer/api.py | ryanking13/pyodide-importer | fb9f83e54eb307fcdb2590588f0b75db1c87ca97 | [
"MIT"
] | null | null | null | pyodide_importer/api.py | ryanking13/pyodide-importer | fb9f83e54eb307fcdb2590588f0b75db1c87ca97 | [
"MIT"
] | null | null | null | from contextlib import contextmanager
import pathlib
import sys
from typing import Union, List
from .import_hook import PyFinder, PyHTTPFinder
# Singleton instance of PyFinder
pyfinder: PyFinder = None
def _update_syspath(path: str):
"""
Append `path` to sys.path so that files in path can be imported
"""
path = pathlib.Path(path).resolve().as_posix()
if path not in sys.path:
sys.path.append(path)
def register_hook(
base_url: Union[str, List[str]],
download_path: str = "",
modules: List[str] = None,
update_syspath: bool = True,
):
"""
Register import hook to sys.meta_path.
Args:
base_url (str or List[str]): URL(s) where the directory containing Python packages is served through HTTP/S
download_path (str): the path in virtual file system where Python packages will be downloaded, default is current working directory
modules (List[str]): a list, with the names of the root modules/packages that can be imported from the given URL
update_syspath (bool): whether to add ``download_path`` to `sys.path`
**Notes on** ``module`` **parameter**:
If this parameter is not specified, import statement will try to search a module everytime
when the module is not found in local filesystem. This means every FAILED import statement will result in multiple 404 HTTP errors.
So when you have fixed modules, using modules parameter to whitelist downloadable modules in recommended.
"""
global pyfinder
if pyfinder is not None and pyfinder._registered():
raise RuntimeError(
"import hook is already registered, if you want to register a new hook, unregister the existing hook with unregister_hook() first"
)
pyfinder = PyHTTPFinder(base_url, download_path, modules)
pyfinder.register()
if update_syspath:
_update_syspath(download_path)
return pyfinder
def unregister_hook():
    """
    Remove the import hook from sys.meta_path.

    Afterwards no new external modules can be downloaded and imported,
    while modules imported earlier stay available. Calling this when no
    hook is registered is a no-op.
    """
    global pyfinder
    if pyfinder is None:
        return
    pyfinder.unregister()
    pyfinder = None
def add_module(module: Union[str, List[str]]):
    """
    Register additional module name(s) as importable from the URL.

    Args:
        module (str or List[str]): modules/packages importable from the URL

    Raises RuntimeError if no import hook is currently registered.
    """
    hook = pyfinder
    if hook is None or not hook._registered():
        raise RuntimeError("import hook is not registered")
    hook.add_module(module)
def available_modules():
    """
    Return the list of modules importable from the URL.

    Raises RuntimeError if no import hook is currently registered.
    """
    hook = pyfinder
    if hook is None or not hook._registered():
        raise RuntimeError("import hook is not registered")
    return hook.available_modules()
| 31.880435 | 142 | 0.699284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,652 | 0.563246 |
39af8dcb80c383fcd4bfdd52b3cd4d36dce1df8f | 1,982 | py | Python | rastervision/new_version/batch_submit.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | 1 | 2019-11-07T10:02:23.000Z | 2019-11-07T10:02:23.000Z | rastervision/new_version/batch_submit.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | null | null | null | rastervision/new_version/batch_submit.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import uuid
import click
from rastervision.rv_config import RVConfig
def _batch_submit(cmd,
debug=False,
profile=False,
attempts=5,
parent_job_ids=None,
num_array_jobs=None,
use_gpu=False):
rv_config = RVConfig.get_instance()
batch_config = rv_config.get_subconfig('AWS_BATCH')
job_queue = batch_config('cpu_job_queue')
job_def = batch_config('cpu_job_definition')
if use_gpu:
job_queue = batch_config('job_queue')
job_def = batch_config('job_definition')
import boto3
client = boto3.client('batch')
job_name = 'ffda-{}'.format(uuid.uuid4())
cmd_list = cmd.split(' ')
if debug:
cmd_list = [
'python', '-m', 'ptvsd', '--host', '0.0.0.0', '--port', '6006',
'--wait', '-m'
] + cmd_list
if profile:
cmd_list = ['kernprof', '-v', '-l'] + cmd_list
kwargs = {
'jobName': job_name,
'jobQueue': job_queue,
'jobDefinition': job_def,
'containerOverrides': {
'command': cmd_list
},
'retryStrategy': {
'attempts': attempts
},
}
if parent_job_ids:
kwargs['dependsOn'] = [{'jobId': id} for id in parent_job_ids]
if num_array_jobs:
kwargs['arrayProperties'] = {'size': num_array_jobs}
job_id = client.submit_job(**kwargs)['jobId']
msg = 'submitted job with jobName={} and jobId={}'.format(job_name, job_id)
print(cmd_list)
print(msg)
return job_id
@click.command()
@click.argument('cmd')
@click.option('--debug', is_flag=True)
@click.option('--profile', is_flag=True)
@click.option('--attempts', default=5)
@click.option('--gpu', is_flag=True)
def batch_submit(cmd, debug, profile, attempts, gpu):
    """CLI wrapper around _batch_submit; returns the submitted job id."""
    return _batch_submit(cmd, debug, profile, attempts, use_gpu=gpu)

# Entry point when executed as a script.
if __name__ == '__main__':
    batch_submit()
| 26.783784 | 79 | 0.589808 | 0 | 0 | 0 | 0 | 318 | 0.160444 | 0 | 0 | 428 | 0.215943 |
39b0985dcd907af2111c10e4b763175f9a26f8fe | 311 | py | Python | app/api/item.py | peterentroprise/entro-tad | b074d4810bcc7fb71b467da8dfaa19be66a41fa2 | [
"MIT"
] | null | null | null | app/api/item.py | peterentroprise/entro-tad | b074d4810bcc7fb71b467da8dfaa19be66a41fa2 | [
"MIT"
] | null | null | null | app/api/item.py | peterentroprise/entro-tad | b074d4810bcc7fb71b467da8dfaa19be66a41fa2 | [
"MIT"
] | null | null | null |
from fastapi import APIRouter
from models.item_model import Payload
from service import item_service
# Module-level router, mounted by the application that includes it.
router = APIRouter()

@router.get("/")
async def read_root():
    """Root endpoint; returns a static greeting."""
    return {"Hello": "Universe"}
@router.post("/indexitem")
async def index_item(payload: Payload):
    """Index the posted payload by delegating to item_service.index_item."""
    return item_service.index_item(payload)
39b1dd9a2298bcc4fe7df8fe5dd5e695bcdaca18 | 6,867 | py | Python | scripts/docker_configurator/docker_configurator.py | PlenusPyramis/dockerfiles | 0c1b19faa33e944c66f3762fe49d7f954aa60b12 | [
"MIT"
] | 1 | 2020-01-10T16:26:32.000Z | 2020-01-10T16:26:32.000Z | scripts/docker_configurator/docker_configurator.py | PlenusPyramis/dockerfiles | 0c1b19faa33e944c66f3762fe49d7f954aa60b12 | [
"MIT"
] | null | null | null | scripts/docker_configurator/docker_configurator.py | PlenusPyramis/dockerfiles | 0c1b19faa33e944c66f3762fe49d7f954aa60b12 | [
"MIT"
] | 2 | 2020-02-22T23:25:24.000Z | 2020-11-04T05:09:48.000Z | """
Docker Configurator
http://www.github.com/EnigmaCurry/docker-configurator
This tool creates self-configuring docker containers given a single
YAML file.
Run this script before your main docker CMD. It will write fresh
config files on every startup of the container, based off of Mako
templates embedded in the docker image, as well as values specified in
a YAML file provided in a mounted volume.
The idea of this is that container configuration is kind of hard
because everyone does it differently. This creates a standard way of
doing it for containers that I write. A single file to configure
everything.
See the included example project: `docker_configurator_example`
---------------------------------------------------------------------------
Copyright (c) 2019 PlenusPyramis
Copyright (c) 2015 Ryan McGuire
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import yaml
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions as mako_exceptions
import logging
import argparse
import os
import shutil
import collections
logging.basicConfig(level=logging.INFO)
logger=logging.getLogger("docker_configurator")
__version__ = "v0.9.0"
def deep_merge(*dicts):
"""
Non-destructive deep-merge of multiple dictionary-like objects
>>> a = { 'first' : { 'all_rows' : { 'pass' : 'dog', 'number' : '1', 'recipe':['one','two'] } } }
>>> b = { 'first' : { 'all_rows' : { 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
>>> c = deep_merge(a, b)
>>> a == { 'first' : { 'all_rows' : { 'pass' : 'dog', 'number' : '1', 'recipe':['one','two'] } } }
True
>>> b == { 'first' : { 'all_rows' : { 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
True
>>> c == { 'first' : { 'all_rows' : { 'pass' : 'dog', 'fail' : 'cat', 'number' : '5', 'recipe':['three'] } } }
True
>>> c == deep_merge(a, b, c)
True
"""
# Wrap the merge function so that it is no longer destructive of its destination:
def merge(source, destination):
# Thanks @_v1nc3nt_ https://stackoverflow.com/a/20666342/56560
if isinstance(destination, collections.abc.Mapping):
for key, value in source.items():
if isinstance(value, dict):
node = destination.setdefault(key, {})
merge(value, node)
else:
destination[key] = value
final = {}
for d in dicts:
merge(d, final)
return final
def load_merged_config(config_path="/config"):
default_config_path = os.path.join(config_path,"default.yaml")
user_config_path = os.path.join(config_path, "config.yaml")
with open(default_config_path) as f:
default_config = yaml.safe_load(f)
if default_config is None:
raise AssertionError('Default config is empty: {}'.format(default_config_path))
logger.info("Default configuration loaded from {}".format(default_config_path))
if os.path.exists(user_config_path):
with open(user_config_path) as f:
user_config = yaml.safe_load(f)
logger.info("User configuration loaded from {}".format(user_config_path))
else:
user_config = {}
logger.warning("User configuration was not found. Using default config only.")
return deep_merge(default_config, user_config)
def render_to_files(template, output, **params):
    """Render a Mako template and write the result to one or more files.

    Args:
        template: Mako template object (provides .uri and .render()).
        output: a single path string, or an iterable of path strings.
        **params: keyword arguments passed to template.render().

    Returns the rendered text. On a rendering failure, prints Mako's text
    error traceback and re-raises the original exception.
    """
    def write(path, data):
        # Overwriting is expected on container restart, but warn anyway.
        if os.path.exists(path):
            logger.warning("Overwriting existing file: {}".format(path))
        with open(path, 'w') as f:
            f.write(data)
    try:
        # Use the module-level logger (not the root logger via logging.info)
        # for consistency with the rest of this module.
        logger.info("Rendering template: {} to file(s): {}".format(template.uri, output))
        data = template.render(**params)
        if type(output) == str:
            write(output, data)
        else:
            for out in output:
                write(out, data)
        return data
    except:
        print(mako_exceptions.text_error_template().render())
        raise
class DockerConfigurator(object):
    """Creates application config files from Mako templates and a merged
    YAML configuration.

    The merged config (default.yaml overridden by config.yaml) must contain
    a 'template_map' key mapping template names to output paths, e.g.:

    # Example yaml for config.yaml or default.yaml:
    template_map:
     - my_config.mako: /etc/my_config
     - my_script.sh.mako: /usr/local/bin/cool_script
    """

    def __init__(self, config_path="/config"):
        self.config = load_merged_config(config_path)
        template_dir = os.path.join(config_path, "templates")
        self.template_lookup = TemplateLookup(directories=[template_dir])

    def write_configs(self, template_map=None):
        """Render each template in template_map to its target location.

        template_map maps template file names to output paths; when None it
        is taken from the loaded config (missing key raises KeyError).
        Missing target directories are created first.
        """
        if template_map is None:
            try:
                template_map = self.config['template_map']
            except KeyError:
                logger.error("Missing template_map from config.yaml")
                raise
        for tmpl_name, target_path in template_map.items():
            tmpl = self.template_lookup.get_template(tmpl_name)
            target_dir = os.path.dirname(target_path)
            if not os.path.exists(target_dir):
                logger.info("Creating directory: {}".format(target_dir))
                os.makedirs(target_dir)
            render_to_files(tmpl, target_path, **self.config)
def main():
    """Command-line entry point: parse arguments and write all config files."""
    arg_parser = argparse.ArgumentParser(
        description='Docker Configurator',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    arg_parser.add_argument(
        "-c", "--config-path",
        help="Path to config and templates directory",
        default="/config",
    )
    options = arg_parser.parse_args()
    configurator = DockerConfigurator(options.config_path)
    configurator.write_configs()


if __name__ == "__main__":
    main()
| 39.24 | 114 | 0.663026 | 1,450 | 0.211155 | 0 | 0 | 0 | 0 | 0 | 0 | 3,697 | 0.538372 |
39b39323fb50875fc0c540df3d833adc6f094d24 | 2,583 | py | Python | definition.example.py | JoshData/represent-boundaries | 0a77bad99758bc77140c6c6def4f8d5e68810367 | [
"MIT"
] | 2 | 2016-07-05T06:10:21.000Z | 2016-10-20T17:55:13.000Z | definition.example.py | JoshData/represent-boundaries | 0a77bad99758bc77140c6c6def4f8d5e68810367 | [
"MIT"
] | null | null | null | definition.example.py | JoshData/represent-boundaries | 0a77bad99758bc77140c6c6def4f8d5e68810367 | [
"MIT"
] | 2 | 2016-07-05T06:10:25.000Z | 2020-03-04T02:22:24.000Z | from datetime import date
import boundaries
boundaries.register('federal-electoral-districts', # The slug of the boundary set
    # The name of the boundary set for display.
    name='Federal electoral districts',
    # Generic singular name for a boundary from this set. Optional if the
    # boundary set's name ends in "s".
    singular='Federal electoral district', # If this were omitted, the same value would be generated
    # Geographic extents which the boundary set encompasses
    domain='Canada',
    # Path to the shapefile directory. Relative to the current file, so if this file
    # is in the same directory as the shapefile -- usually the case -- you can omit
    # this parameter.
    file='',
    # Last time the source was updated or checked for new data
    last_updated=date(1970, 1, 1),
    # A function that's passed the feature and should return a name string.
    # The boundaries model provides some simple function factories for this.
    name_func=boundaries.clean_attr('FEDENAME'),
    # Function to extract a feature's "external_id" property
    id_func=boundaries.attr('FEDUID'),
    # Function to provide the slug (URL component) of the boundary.
    # If not provided, uses the name to generate the slug; this is usually
    # what you want.
    #slug_func=boundaries.attr('FEDUID'),
    # Function that returns true/false to determine whether a given feature should be included.
    # By default, all features are included.
    #is_valid_func=lambda f: True,
    # Authority that is responsible for the accuracy of this data
    authority='H.R.M. Queen Elizabeth II',
    # A URL to the source of this data
    source_url='http://www12.statcan.gc.ca/census-recensement/2011/geo/bound-limit/bound-limit-eng.cfm',
    # A URL to the license for this data
    licence_url='http://www12.statcan.gc.ca/census-recensement/2011/geo/bound-limit/license-eng.cfm?lang=_e&year=11&type=fed000a&format=a',
    # A URL to the data file, e.g. a ZIP archive
    data_url='http://www12.statcan.gc.ca/census-recensement/2011/geo/bound-limit/files-fichiers/gfed000a11a_e.zip',
    # Notes identifying any peculiarities about the data, such as columns that
    # were deleted or files which were merged
    notes='',
    # Encoding of the text fields in the shapefile, e.g. 'utf-8'. Default: 'ascii'
    encoding='iso-8859-1',
    # Used only by the represent-maps app -- if you're not using that, ignore label_point_func.
    # A function from a feature object to a Point where to display a label for feature on a map.
    #label_point_func = lambda feature: None,
)
| 52.714286 | 139 | 0.722416 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,122 | 0.821525 |
39b549fc5da98ce81d958623dcf67a57d0a50eec | 2,962 | py | Python | tyo_mq_client/publisher.py | e-tang/tyo-mq-client-python | 82ea47bf8cf8a924b515149456eaecb5557a0f3e | [
"MIT"
] | null | null | null | tyo_mq_client/publisher.py | e-tang/tyo-mq-client-python | 82ea47bf8cf8a924b515149456eaecb5557a0f3e | [
"MIT"
] | 1 | 2018-06-19T23:42:27.000Z | 2018-06-20T07:06:25.000Z | tyo_mq_client/publisher.py | e-tang/tyo-mq-client-python | 82ea47bf8cf8a924b515149456eaecb5557a0f3e | [
"MIT"
] | null | null | null | #
#
from .subscriber import Subscriber
from .logger import Logger
from .constants import Constants
from .events import Events
#
import json
class Publisher(Subscriber):
    """A message producer on the tyo-mq bus.

    Extends :class:`Subscriber` with the ability to PRODUCE events, track the
    subscribers attached to it, and react to subscribe / unsubscribe /
    disconnect notifications addressed to this producer.
    """

    def __init__(self, name, eventDefault=None, host=None, port=None, protocol=None):
        """Create a producer.

        Args:
            name: producer name advertised in outgoing messages.
            eventDefault: event used by :meth:`produce` when none is given;
                falls back to ``Constants.EVENT_DEFAULT``.
            host, port, protocol: forwarded to the Subscriber transport.
        """
        super(Publisher, self).__init__(name, host, port, protocol)
        self.type = 'PRODUCER'
        self.eventDefault = eventDefault if eventDefault is not None else Constants.EVENT_DEFAULT
        # Optional user callback invoked after each new subscription.
        self.on_subscription_listener = None
        # Known subscribers, keyed by the id reported in subscription data.
        self.subscribers = {}
        # Defer listener registration until the connection is established.
        futureFunc = lambda : self.set_on_subscription_listener()
        self.add_on_connect_listener(futureFunc)
        #
        Logger.debug("creating producer: " + self.name)

    def broadcast (self, data, event=None):
        """Produce ``data`` with the broadcast delivery method."""
        self.produce(data, event, Constants.METHOD_BROADCAST)

    def produce (self, data, event=None, method=None) :
        """Send ``data`` on ``event`` (or the default event) to the queue.

        Raises:
            Exception: if ``data`` is None, or no event can be determined.
        """
        if (data is None):
            raise Exception("data can't be null")
        if (event is None):
            if (self.eventDefault is None):
                raise Exception("please specifiy event")
            else:
                event = self.eventDefault
        message = {"event":event, "message":data, "from":self.name, "method":method}
        self.send_message('PRODUCE', message)

    # --- subscribe notifications -------------------------------------
    def __on_subscription (self, data) :
        # Record the new subscriber, then forward to the optional listener.
        Logger.log("Received subscription information: " + json.dumps(data))
        self.subscribers[data["id"]] = data
        if (self.on_subscription_listener is not None):
            self.on_subscription_listener(data)

    def set_on_subscription_listener (self) :
        """Listen for 'on subscribe' events addressed to this producer."""
        event = Events.to_onsubscribe_event(self.get_id())
        self.on(event, self.__on_subscription)

    # --- lost-connection notifications -------------------------------
    def __on_lost_subscriber (self, callback, data) :
        Logger.log("Lost subscriber's connection")
        if (callback is not None):
            callback(data)

    def set_on_subscriber_lost_listener (self, callback) :
        """Invoke ``callback(data)`` when a subscriber disconnects."""
        event = Events.to_ondisconnect_event(self.get_id())
        # The inner lambda binds ``callback`` via a default argument so the
        # current value is captured (not a late-bound closure variable).
        futureFunc = lambda data : (lambda data, cb=callback : self.__on_lost_subscriber(cb, data))(data)
        self.on(event, futureFunc)

    def on_subscriber_lost (self, callback) :
        # Convenience alias for set_on_subscriber_lost_listener.
        self.set_on_subscriber_lost_listener(callback)

    # --- unsubscribe notifications ------------------------------------
    def __on_unsubscribed (self, callback, data) :
        if callback is not None:
            callback(data)

    def set_on_unsubscribed_listener (self, event, callback) :
        """Invoke ``callback(data)`` when a subscriber unsubscribes from ``event``."""
        event = Events.to_onunsubscribe_event(event, self.get_id())
        # Same default-argument binding trick as above.
        futureFunc = lambda data : (lambda data, cb=callback: self.__on_unsubscribed(cb, data))(data)
        self.on(event, futureFunc)

    def on_unsubscribed (self, event, callback) :
        # Convenience alias for set_on_unsubscribed_listener.
        self.set_on_unsubscribed_listener(event, callback)
39b57868be76cc021f5f1127464558d697a138df | 3,560 | py | Python | app/authenticate.py | directedbyshawn/Secure-Login | 15f2a6168986b11ffbde318333415671fb62578f | [
"MIT"
] | null | null | null | app/authenticate.py | directedbyshawn/Secure-Login | 15f2a6168986b11ffbde318333415671fb62578f | [
"MIT"
] | null | null | null | app/authenticate.py | directedbyshawn/Secure-Login | 15f2a6168986b11ffbde318333415671fb62578f | [
"MIT"
] | null | null | null | '''
Authentication methods for cs166 final project.
'''
import random, hashlib
from .db import retrieve_accounts
lower_case = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
upper_case = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
nums = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0']
special = ['!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '?', '[', ']', '{', '}', ':', ';', '"', '/', '.', ',', '<', '>']
def authenticate(username, password):
    """Authenticate a user upon login.

    Looks the username up in the account store, re-hashes the supplied
    password with the stored salt, and compares digests.

    Args:
        username: login name entered by the user.
        password: clear-text password entered by the user.

    Returns:
        True when the username exists and the password matches, else False.
    """
    import hmac  # stdlib; gives a timing-safe digest comparison

    users = retrieve_accounts()

    # Find the stored record for this username (rows are (username, pw)).
    stored_username = ''
    stored_password = ''
    for user in users:
        if user[0] == username:
            stored_username = user[0]
            stored_password = user[1]

    # Unknown user (or empty stored hash): reject.
    if stored_username == '' or stored_password == '':
        return False

    # Stored format: 40-char hex salt followed by the SHA-1 hex digest.
    salt_length = 40
    salt = stored_password[:salt_length]
    stored_hash = stored_password[salt_length:]

    # NOTE(review): SHA-1 is a fast, broken hash; a real deployment should
    # use a slow KDF (PBKDF2/bcrypt/scrypt) instead.
    hashable = (salt + password).encode('utf-8')
    this_hash = hashlib.sha1(hashable).hexdigest()

    # Constant-time comparison avoids leaking digest prefixes via timing
    # (plain == short-circuits at the first differing character).
    return hmac.compare_digest(this_hash, stored_hash)
def verify_new_account(username, password):
    """
    Method used to determine if new account credentials are valid

    Parameters:
        username (str) : username entered by user
        password (str) : password entered by user

    Returns:
        status (bool) : status of if the new credentials are good or not
    """
    # Usernames already present in the account store.
    taken_usernames = [account[0] for account in retrieve_accounts()]

    # Character-class requirements for the password.
    has_lower = any(ch in lower_case for ch in password)
    has_upper = any(ch in upper_case for ch in password)
    has_digit = any(ch in nums for ch in password)
    has_special = any(ch in special for ch in password)

    # SQL injection prevention: no special characters in the username.
    if any(ch in special for ch in username):
        return False

    return (
        len(password) >= 8
        and len(password) <= 25
        and username not in taken_usernames
        and has_lower
        and has_upper
        and has_digit
        and has_special
    )
def random_password():
    """
    Function to return randomly generated password

    The password is 12-16 characters long; each character is drawn by first
    picking one of the four module-level character classes, then a character
    from it (same scheme as before, but with a CSPRNG).

    Returns:
        password (str) : randomly generated password
    """
    # ``random`` is a predictable PRNG and must not be used for secrets;
    # ``secrets`` uses the OS CSPRNG.
    import secrets

    chars = [lower_case, upper_case, nums, special]
    password_length = secrets.choice(range(12, 17))
    return ''.join(
        secrets.choice(secrets.choice(chars)) for _ in range(password_length)
    )
39b6bd6353821651a0a01cf687e78a807a34d494 | 337 | py | Python | tests/base_test_case.py | caoziyao/orm | 24121b8b10910c121a5dff19c6fd9f25ec7f425c | [
"MIT"
] | 1 | 2016-10-30T14:41:39.000Z | 2016-10-30T14:41:39.000Z | tests/base_test_case.py | caoziyao/orm | 24121b8b10910c121a5dff19c6fd9f25ec7f425c | [
"MIT"
] | null | null | null | tests/base_test_case.py | caoziyao/orm | 24121b8b10910c121a5dff19c6fd9f25ec7f425c | [
"MIT"
] | null | null | null | # coding: utf-8
"""
@author: csy
@license: (C) Copyright 2017-2018
@contact: wyzycao@gmail.com
@time: 2018/11/22
@desc:
"""
import unittest
from orm.data_base import Database
class BaseTestCase(unittest.TestCase):
    """Base fixture: opens an ORM Database connection before each test."""

    def setUp(self):
        # Connection string for the local test database (MySQL, utf8).
        url = 'mysql://root:zy123456@localhost/wiki?charset=utf8'
        self.db = Database(url)
39b8f43a4fc39e9ee986451845affe8860e4df82 | 381 | py | Python | setup.py | kervi/kervi-hal-win | adb0d93f63b3ed36fd6527c69dc301a63a30138f | [
"MIT"
] | null | null | null | setup.py | kervi/kervi-hal-win | adb0d93f63b3ed36fd6527c69dc301a63a30138f | [
"MIT"
] | null | null | null | setup.py | kervi/kervi-hal-win | adb0d93f63b3ed36fd6527c69dc301a63a30138f | [
"MIT"
] | null | null | null | import distutils
# ``import distutils`` alone does NOT make ``distutils.dir_util`` available;
# the original's bare ``except: pass`` silently hid the resulting
# AttributeError, so dist/ was never actually cleaned.
import distutils.dir_util
from setuptools import setup

try:
    # Prefer the real package version when building from a source tree.
    from kervi.platforms.windows.version import VERSION
except ImportError:
    # Fallback for builds where the package is not importable yet.
    VERSION = "0.0"

try:
    # Start from a clean dist/ directory; ignore a missing directory.
    distutils.dir_util.remove_tree("dist")
except Exception:
    pass

setup(
    name='kervi-hal-win',
    version=VERSION,
    packages=[
        "kervi/platforms/windows",
    ],
    install_requires=[
        'psutil',
        'inputs'
    ],
)
39b9562e1c7649e5f232cd655226d45528bdfb68 | 877 | py | Python | examples/minimize_koopman_error.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | examples/minimize_koopman_error.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | examples/minimize_koopman_error.py | kijanac/Materia | b49af518c8eff7d3a8c6caff39783e3daf80a7a0 | [
"MIT"
] | null | null | null | import argparse
import materia as mtr
import dask.distributed
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--qcenv", type=str)
parser.add_argument("--scratch", type=str)
parser.add_argument("--dask_scratch", type=str)
parser.add_argument("--num_evals", type=int)
args = parser.parse_args()
m = mtr.Molecule("benzene")
qchem = mtr.QChem(qcenv=args.qcenv, scratch_dir=args.scratch)
io = mtr.IO("gs.in", "gs.out", "minimize_koopman_error")
min_ke = qchem.minimize_koopman_error(io, name="min_ke")
min_ke.requires(molecule=m, num_evals=args.num_evals)
wf = mtr.Workflow(min_ke)
cluster = dask.distributed.LocalCluster()
with dask.config.set(temporary_directory=args.dask_scratch):
with dask.distributed.Client(cluster) as client:
print(wf.compute()["min_ke"])
| 31.321429 | 65 | 0.698974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0.140251 |
39ba8a8ab31258dd5face8cc99e1f8cec294b091 | 300 | py | Python | simple/__init__.py | jbrid867/SIMPLE | 56e88c8271c22f7c41bd5d6b148b01e11a9e3713 | [
"Apache-2.0"
] | 1 | 2019-01-19T06:44:29.000Z | 2019-01-19T06:44:29.000Z | simple/__init__.py | jbrid867/SIMPLE | 56e88c8271c22f7c41bd5d6b148b01e11a9e3713 | [
"Apache-2.0"
] | 179 | 2018-10-02T21:07:19.000Z | 2020-09-08T17:38:44.000Z | simple/__init__.py | johnbridstrup/simple | 56e88c8271c22f7c41bd5d6b148b01e11a9e3713 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Top-level package for simple."""
__author__ = """John Bridstrup"""
__email__ = 'john.bridstrup@gmail.com'
__version__ = '0.1.8'
# import Data
# import data_analysis
# import kernels
# import KMC
# import running
# import simple
# import simulations
# import statevector
| 17.647059 | 38 | 0.703333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 245 | 0.816667 |
39baf90e3f5d1892dbfa7337958aae37f41a76bf | 13,482 | py | Python | emarket/views.py | MerlinEmris/eBazar | f159314183a8a95afd97d36b0d3d8cf22015a512 | [
"MIT"
] | null | null | null | emarket/views.py | MerlinEmris/eBazar | f159314183a8a95afd97d36b0d3d8cf22015a512 | [
"MIT"
] | null | null | null | emarket/views.py | MerlinEmris/eBazar | f159314183a8a95afd97d36b0d3d8cf22015a512 | [
"MIT"
] | null | null | null | # from traceback import TracebackException
from django.contrib.auth.forms import UserCreationForm
# from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.postgres.search import SearchVector
from django.core import serializers
from django.http import JsonResponse
from django.views import View
# import os
# from django.contrib.sites.shortcuts import get_current_site
# from django.utils.encoding import force_bytes
# from django.utils.encoding import force_text
# from django.utils.http import urlsafe_base64_encode
# from django.utils.http import urlsafe_base64_decode
# from django.template.loader import render_to_string
from django.http import HttpResponse
import django_filters.rest_framework
from django.shortcuts import render, redirect
from .forms import ProfilePhotoForm, PhotoForm, SignUpForm, ProfileForm, ItemForm, SearchForm
from .models import User, Profile, Item, Category, Item_Image, Favorite_item
from ebazar import settings
from .serializers import ( CategorySerializer,
ItemSerializer,
UserSerializer,
Item_ImageSerializer,)
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets, status
# import django_filters.rest_framework
from rest_framework.generics import (
DestroyAPIView,
ListAPIView,
UpdateAPIView,
RetrieveAPIView,
CreateAPIView
)
from rest_framework.views import APIView
import shutil
import os
import datetime
import json
# print console logs
log_prefix = '['+datetime.datetime.now().strftime("%d-%m-%y %H:%M:%S")+']'
log_end = '********'
log_date = datetime.datetime.now().strftime("%d-%m-%y_%H:%M")
def index(request):
    """Entry point for '/': always redirects to the home page.

    NOTE: the original tested ``request.user`` but both branches redirected
    to 'home' (despite a comment about redirecting to create-user), so the
    conditional was dead code and has been collapsed.
    """
    return redirect('home')
# create user with min information
# create user with min information
def create_user(request):
    """Register a new user with minimal information.

    POST: validate the sign-up form; on success create the account, log the
    new user in, and redirect home. On validation failure (or on GET) render
    the sign-up form.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            print(log_prefix + 'user ' + username + 'is created' + log_end)
            # Re-authenticate with the raw credentials so the returned user
            # object carries the auth backend needed by login().
            raw_password = form.cleaned_data.get('password1')
            user = authenticate(username=username, password=raw_password)
            login(request, user)
            print(log_prefix + 'user ' + username + 'is logged in' + log_end)
            return redirect('home')
        # Invalid form: reuse the bound instance so its validation errors
        # reach the template (the original re-instantiated it redundantly).
        return render(request, 'registration/create_user.html', {'form': form})
    form = SignUpForm()
    return render(request, 'registration/create_user.html', {'form': form})
@login_required
def edit_profile(request):
    """Create or edit the logged-in user's profile.

    GET renders the profile form (``exist`` tells the template whether a
    profile already existed); POST saves it and redirects home.
    """
    # exist: 1 when the user already has a profile, 0 otherwise.
    exist = 0
    try:
        profile = request.user.profile
        exist = 1
    except Profile.DoesNotExist:
        # First visit: build an unsaved profile bound to this user.
        profile = Profile(user=request.user)
    if request.method == 'POST':
        form = ProfileForm(request.POST, request.FILES, instance=profile)
        if form.is_valid():
            form.save()
            print(log_prefix + ' user ' + request.user.username + ' profile is changed ' + log_end)
            return redirect('home')
        else:
            # NOTE(review): this render omits 'exist' from the context,
            # unlike the GET branch below -- confirm the template copes.
            return render(request, 'emarket/profile.html', {'form': form})
    else:
        form = ProfileForm(instance=profile)
        return render(request, 'emarket/profile.html', {'form': form,'exist':exist})
def profile_change_photo(request, prof_id):
    """Replace the profile photo for the profile of user ``prof_id``.

    POST: delete the old image file and save the uploaded one; GET: render
    the upload form.

    NOTE(review): there is no @login_required / ownership check, so any
    visitor can change any profile photo -- confirm and lock down.
    """
    if request.method == 'POST':
        profile = Profile.objects.filter(user_id=prof_id)[0]
        form = ProfilePhotoForm(request.POST, request.FILES, instance=profile)
        # Remove the previous image file (False = defer the model save).
        profile.img.delete(False)
        if form.is_valid():
            form.save()
            # Log on success; the original placed this print after every
            # return, making it unreachable.
            print(log_prefix + 'user ' + prof_id + 'profile img is changed' + log_end)
            return redirect('profile')
        # Invalid upload: re-render with the bound form's errors (the
        # original fell off the end here and returned None).
        return render(request, 'emarket/profile_add_image.html', {'form': form})
    form = ProfilePhotoForm()
    return render(request, 'emarket/profile_add_image.html', {'form': form,})
def user(request, user_id):
    """List a user's items, paginated nine per page."""
    items = Item.objects.filter(user_id=user_id)
    pics = Item_Image.objects.all()
    if items:
        paginator = Paginator(items, 9)
        requested_page = request.GET.get('page')
        try:
            items = paginator.page(requested_page)
        except PageNotAnInteger:
            # Non-numeric page: fall back to the first page.
            items = paginator.page(1)
        except EmptyPage:
            # Out-of-range page: clamp to the last page.
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/user.html', {'items': items, 'pics': pics, })
@login_required
def create_item(request):
    """Create a new item owned by the current user, then go add its images."""
    if request.method != 'POST':
        return render(request, 'emarket/item_create.html', {'form': ItemForm()})
    item = Item(user=request.user)
    form = ItemForm(request.POST, instance=item)
    if not form.is_valid():
        return render(request, 'emarket/item_create.html', {'form': form})
    form.save()
    print(log_prefix + 'item:' + form.cleaned_data['name'] + ' is created at ' + log_date + log_end)
    return redirect('add_item_img', item.id)
@login_required
def edit_item(request, it_id):
    """Edit an existing item.

    GET renders the edit form pre-filled from the item; POST validates and
    saves. Unknown item ids redirect home.
    """
    try:
        item = Item.objects.filter(id=it_id)[0]
    except (Item.DoesNotExist, IndexError):
        # ``filter()[0]`` raises IndexError on an empty queryset; the
        # original caught only DoesNotExist and also forgot to *return* the
        # redirect, which then crashed below with an unbound ``item``.
        return redirect('home')
    if request.method == 'POST':
        form = ItemForm(request.POST, instance=item)
        if form.is_valid():
            form.save()
            print(log_prefix + ' item ' + it_id + ' is changed ' + log_end)
            return redirect('show_item', it_id)
        # Keep the bound form so its validation errors reach the template
        # (the original replaced it with a fresh, errorless form).
        return render(request, 'emarket/item_edit.html', {'form': form})
    form = ItemForm(instance=item)
    return render(request, 'emarket/item_edit.html', {'form': form})
def show_item(request, item_id):
    """Display a single item's detail page."""
    # The favourite lookup is currently disabled, so ``exist`` is fixed at 1.
    exist = 1
    item = Item.objects.filter(id=item_id)[0]
    item_images = Item_Image.objects.filter()
    context = {
        'item': item,
        'pics': item_images,
        'exist': exist,
    }
    return render(request, 'emarket/item_detail.html', context)
@login_required
def favorite_items(request, user_id):
    """List the favourite items saved by the given user."""
    # Use a single user instance. The original passed the whole QuerySet
    # (``filter(user=queryset)``) as an exact-lookup value, which Django
    # rejects unless it is sliced to one result.
    user = User.objects.filter(id=user_id).first()
    fav_items = Favorite_item.objects.filter(user=user)
    item_images = Item_Image.objects.filter()
    return render(request, 'emarket/favorite_items.html', {'fav_items': fav_items,
                                                           'pics': item_images})
# @login_required
# def add_to_fav(request):
# return redirect('home')
def show_category(request, cat_id):
    """Show every item belonging to one category, nine per page."""
    cat = Category.objects.get(id=cat_id)
    items = Item.objects.filter(category=cat)
    pics = Item_Image.objects.all()
    if items:
        paginator = Paginator(items, 9)
        requested_page = request.GET.get('page')
        try:
            items = paginator.page(requested_page)
        except PageNotAnInteger:
            # Non-numeric page: fall back to the first page.
            items = paginator.page(1)
        except EmptyPage:
            # Out-of-range page: clamp to the last page.
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/show_category.html', {'cat':cat, 'items':items, 'pics':pics})
def home(request):
    """Landing page: all categories plus the nine most expensive items."""
    cats = Category.objects.all()
    items = Item.objects.order_by('-price')[0:9]
    item_images = Item_Image.objects.filter()
    context = {'cats': cats, 'items': items, 'pics': item_images, }
    return render(request, 'emarket/home.html', context)
def search(request, search_word=None):
    """Search items by name, optionally narrowed by location and/or user.

    GET shows all items; POST filters according to the submitted search
    form. Results are paginated 18 per page. The user-facing messages are
    Turkmen -- presumably 'Ähli goşlar:' = "all items", 'Netijeler:' =
    "results", 'Hiç zat ýok' = "nothing found" (translations to confirm).
    """
    message = 'Ähli goşlar:'
    pics = Item_Image.objects.all()
    items = Item.objects.all()
    # Unbound form class is enough for the initial GET render.
    form = SearchForm
    if request.method == 'POST':
        form = SearchForm(request.POST)
        search_word = request.POST.get('search')
        location = request.POST.get('location')
        user = request.POST.get('user')
        # Apply whichever optional narrowing fields were submitted.
        if location and user:
            items = Item.objects.filter(name__icontains=search_word).filter(user=user).filter(location=location)
        elif user:
            items = Item.objects.filter(name__icontains=search_word).filter(user=user)
        elif location:
            items = Item.objects.filter(name__icontains=search_word).filter(location=location)
        else:
            items = Item.objects.filter(name__icontains=search_word)
        if items:
            message = 'Netijeler:'
        else:
            message = 'Hiç zat ýok'
            items = None
    # Paginate whatever result set we ended up with (18 per page).
    if items:
        paginator = Paginator(items, 18)
        page = request.GET.get('page')
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            items = paginator.page(1)
        except EmptyPage:
            items = paginator.page(paginator.num_pages)
    return render(request, 'emarket/expo.html', {'items': items, 'pics': pics, 'ms': message, 's_word': search_word, 'form':form})
@login_required
def add_item_img(request, it_id):
    """Attach an uploaded image to item ``it_id``."""
    photos = Item_Image.objects.filter()
    if request.method != 'POST':
        return render(request, 'emarket/item_add_image.html', {'form': PhotoForm(), 'photos': photos})
    pending_image = Item_Image(item_id=it_id)
    form = PhotoForm(request.POST, request.FILES, instance=pending_image)
    if not form.is_valid():
        return render(request, 'emarket/item_add_image.html', {'form': form, 'photos': photos})
    form.save()
    print(log_prefix + 'item_' + it_id + ' added image' + str(form.cleaned_data['img']) + log_end)
    return redirect('show_item', it_id)
@login_required
def delete_item(request, it_id):
    """Delete an item and its on-disk image directory, then redirect home.

    NOTE(review): any logged-in user can delete any item -- there is no
    ownership check against ``request.user``; confirm and add one.
    """
    item = Item.objects.filter(id=it_id)
    if item:
        item.delete()
        items_path = os.path.join(settings.MEDIA_ROOT, 'items')
        item_id = 'item_' + str(it_id)
        item_path = os.path.join(items_path, item_id)
        # Items without uploaded images have no directory; guard the rmtree
        # so a missing folder does not turn the delete into a 500.
        if os.path.isdir(item_path):
            shutil.rmtree(item_path)
        print(log_prefix + item_id + ' is deleted' + log_end)
        return redirect('home')
    else:
        return redirect('home')
class UserCreate(APIView):
    """REST endpoint that registers a user and logs them straight in."""

    def post(self, request, format='json'):
        """Create a user from the posted payload.

        Returns 201 with the serialized user on success, 400 with details
        otherwise. (The original returned ``None`` on every failure path,
        which DRF surfaces as a server error, and it printed the raw
        password to stdout.)
        """
        serializer = UserSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        user = serializer.save()
        if not user:
            return Response({'detail': 'user create error'},
                            status=status.HTTP_400_BAD_REQUEST)
        username = serializer.data.get('username')
        # Never log credentials; fetch the raw password only to establish
        # the session via the auth backend.
        raw_password = serializer.data.get('password')
        user_log = authenticate(username=username, password=raw_password)
        login(request, user_log)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
# api for item
class ItemViewSet(ListAPIView):
    """List items; supports django-filter, name search, and any ordering."""
    filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
    search_fields = ('name',)
    ordering_fields = '__all__'

class Item_ImageViewSet(ListAPIView):
    """List every item image (filterable)."""
    filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
    queryset = Item_Image.objects.all()
    serializer_class = Item_ImageSerializer

class Item_ImageDetailViewSet(ListAPIView):
    """List the images belonging to one item (``item`` URL kwarg)."""
    queryset = Item_Image.objects.all()
    serializer_class = Item_ImageSerializer
    def get_queryset(self):
        # Narrow to the item id captured from the URL.
        item = self.kwargs['item']
        return Item_Image.objects.filter(item=item)

class ItemCreateViewSet(CreateAPIView):
    """Create an item."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer

class ItemDetailViewSet(RetrieveAPIView):
    """Retrieve a single item by id."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer

class ItemUpdateViewSet(UpdateAPIView):
    """Update a single item."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer

class ItemDeleteViewSet(DestroyAPIView):
    """Delete a single item."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer

# api for category
class CategoryViewSet(viewsets.ModelViewSet):
    """Full CRUD for categories."""
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
| 34.480818 | 130 | 0.641893 | 2,010 | 0.149043 | 0 | 0 | 3,461 | 0.256637 | 0 | 0 | 2,859 | 0.211998 |
39bdb6e5ac777c1dbb29e8d29b5d3a629b8f1d14 | 3,683 | py | Python | cogs/misc.py | DoggieLicc/doggie-bot | 31400a32916e08cd5b7909cce17db66ea927d2e3 | [
"MIT"
] | 3 | 2021-08-30T16:51:04.000Z | 2021-09-13T17:04:29.000Z | cogs/misc.py | DoggieLicc/doggie-bot | 31400a32916e08cd5b7909cce17db66ea927d2e3 | [
"MIT"
] | 1 | 2021-08-30T15:29:37.000Z | 2021-09-09T23:59:47.000Z | cogs/misc.py | DoggieLicc/doggie-bot | 31400a32916e08cd5b7909cce17db66ea927d2e3 | [
"MIT"
] | null | null | null | import discord
import utils
import inspect
from discord.ext import commands
from io import StringIO
class Misc(commands.Cog):
    """Commands that show info about the bot"""

    def __init__(self, bot: utils.CustomBot):
        self.bot: utils.CustomBot = bot

    @commands.command(aliases=['i', 'ping'])
    async def info(self, ctx: utils.CustomContext):
        """Shows information for the bot!"""
        # OAuth invite link with the bot's required permission bitmask.
        invite_url = discord.utils.oauth_url(ctx.me.id, permissions=discord.Permissions(1375866285270))
        embed = utils.create_embed(
            ctx.author,
            title='Info for Doggie Bot!',
            description='This bot is a multi-purpose bot!'
        )
        embed.add_field(
            name="Invite this bot!",
            value=f"[Invite]({invite_url})",
            inline=False
        )
        embed.add_field(
            name="Join support server!",
            value="[Support Server](https://discord.gg/Uk6fg39cWn)",
            inline=False
        )
        embed.add_field(
            name='Bot Creator:',
            value='[Doggie 2#8512](https://github.com/DoggieLicc/)',
            inline=True
        )
        embed.add_field(
            name='Source Code:',
            value='[Github Repo](https://github.com/DoggieLicc/doggie-bot)'
        )
        embed.add_field(
            name='Bot Online Since:',
            value=utils.user_friendly_dt(self.bot.start_time),
            inline=False
        )
        embed.add_field(
            name='Ping:',
            # Websocket latency, converted from seconds to milliseconds.
            value='{} ms'.format(round(1000 * self.bot.latency)),
            inline=False
        )
        await ctx.send(embed=embed)

    # Rate-limited: at most 3 suggestions per user per day (86_400 s).
    @commands.cooldown(3, 86_400, commands.BucketType.user)
    @commands.command(aliases=['report', 'bug'])
    async def suggest(self, ctx: utils.CustomContext, *, suggestion):
        """Send a suggestion or bug report to the bot owner!"""
        owner: discord.User = await self.bot.get_owner()
        owner_embed = utils.create_embed(
            ctx.author,
            title='New suggestion!:',
            description=suggestion
        )
        await owner.send(embed=owner_embed)
        user_embed = utils.create_embed(
            ctx.author,
            title=f'👍 Suggestion has been sent to {owner}! 💖'
        )
        await ctx.send(embed=user_embed)

    @commands.command(aliases=['code'])
    async def source(self, ctx, *, command: str = None):
        """Look at the code of this bot!"""
        # No argument: just link the repository.
        if command is None:
            embed = utils.create_embed(
                ctx.author,
                title='Source Code:',
                description='[Github for **Doggie Bot**](https://github.com/DoggieLicc/doggie-bot)'
            )
            return await ctx.send(embed=embed)
        if command == 'help':
            # The help command is a class, not a plain callback.
            src = type(self.bot.help_command)
        else:
            # Dots allow addressing subcommands, e.g. "group.sub".
            obj = self.bot.get_command(command.replace('.', ' ').lower())
            if obj is None:
                embed = utils.create_embed(
                    ctx.author,
                    title='Command not found!',
                    description='This command wasn\'t found in this bot.',
                    color=discord.Color.red()
                )
                return await ctx.send(embed=embed)
            src = obj.callback.__code__
        # Ship the source as a file attachment rather than a huge message.
        lines, _ = inspect.getsourcelines(src)
        src_code = ''.join(lines)
        buffer = StringIO(src_code)
        file = discord.File(fp=buffer, filename=f'{command.replace(" ", "_").lower()}.py')
        await ctx.send(f'Here you go, {ctx.author.mention}. (You should view this on a PC)', file=file)
def setup(bot):
    # discord.py extension hook: called by bot.load_extension to add the cog.
    bot.add_cog(Misc(bot))
| 29 | 103 | 0.555525 | 3,540 | 0.95961 | 0 | 0 | 3,362 | 0.911358 | 3,168 | 0.858769 | 870 | 0.235836 |
39bfed4c3b2ea966740de31f26fe83daafbdbab5 | 171 | py | Python | setup.py | andribas404/splay_benchmark | 1ba2fe4d715b25db806c0b241c6adadd8d442a77 | [
"MIT"
] | null | null | null | setup.py | andribas404/splay_benchmark | 1ba2fe4d715b25db806c0b241c6adadd8d442a77 | [
"MIT"
] | null | null | null | setup.py | andribas404/splay_benchmark | 1ba2fe4d715b25db806c0b241c6adadd8d442a77 | [
"MIT"
] | null | null | null | """
Setup.
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize

# Compile the splay_tree Cython extension (see module docstring for usage).
setup(ext_modules=cythonize('splay_tree.pyx'))
| 13.153846 | 46 | 0.760234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.391813 |
39c13236092aa20981aa814b36bf7e898a69daef | 343 | py | Python | app.py | victorathanasio/KPI-test | cbc24ebc9b6e9304c7ff0428458c827d09bd99aa | [
"MIT"
] | null | null | null | app.py | victorathanasio/KPI-test | cbc24ebc9b6e9304c7ff0428458c827d09bd99aa | [
"MIT"
] | null | null | null | app.py | victorathanasio/KPI-test | cbc24ebc9b6e9304c7ff0428458c827d09bd99aa | [
"MIT"
] | null | null | null | from WebApp.mainapp import app
import dash_html_components as html
import flask
from REST_API.rest_api import API
from WebApp.Layout import Layout
# Attach the Dash layout and mount the REST API blueprint on the underlying
# Flask server.
app.layout = Layout()
app.server.register_blueprint(API)
# Module-level WSGI handle (e.g. for ``gunicorn app:server``).
server = app.server
if __name__ == '__main__':
    # app.run_server(debug=False, host='0.0.0.0', port=90)
    app.run_server(debug=True)
| 24.5 | 58 | 0.766764 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.186589 |
39c16bfed4316959a8bb44396e89b0248bfc5ee5 | 719 | py | Python | URI/multiplicador.py | LuccasTraumer/pythonRepositorio | 52d4455cea0615c8eba7ab4c6224ce3350bbcf47 | [
"MIT"
] | null | null | null | URI/multiplicador.py | LuccasTraumer/pythonRepositorio | 52d4455cea0615c8eba7ab4c6224ce3350bbcf47 | [
"MIT"
] | null | null | null | URI/multiplicador.py | LuccasTraumer/pythonRepositorio | 52d4455cea0615c8eba7ab4c6224ce3350bbcf47 | [
"MIT"
] | null | null | null | '''
Leia 2 valores inteiros (A e B). Após, o programa deve mostrar uma mensagem "Sao Multiplos" ou
"Nao sao Multiplos", indicando se os valores lidos são múltiplos entre si.
'''
# Read "A B" from one input line (URI Online Judge format).
data = str(input())
values = data.split(' ')
first_value = int(values[0])
second_value = int(values[1])

# Two integers are multiples of each other iff the larger is divisible by
# the smaller. The original additionally compared ``small * (big / small)``
# with the big value using *float* division, whose rounding can disagree
# with the exact modulo test for very large inputs; pure integer modulo is
# always exact.
if second_value > first_value:
    multiple = second_value % first_value == 0
else:
    multiple = first_value % second_value == 0

if multiple:
    print('Sao Multiplos')
else:
    print('Nao sao Multiplos')
| 27.653846 | 94 | 0.673157 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 251 | 0.347645 |
39c247e8b1fdf8e3efae1a8994e7cba05bbc1477 | 2,767 | py | Python | app/listeners.py | seratch/slack_learning_app_ja | 9552489b1d5d3adc61a7c73645a1ae09abc9d933 | [
"MIT"
] | 11 | 2020-10-28T08:04:16.000Z | 2022-03-18T09:12:29.000Z | app/listeners.py | seratch/slack_learning_app_ja | 9552489b1d5d3adc61a7c73645a1ae09abc9d933 | [
"MIT"
] | 1 | 2020-10-29T23:10:52.000Z | 2020-10-29T23:37:00.000Z | app/listeners.py | seratch/slack_learning_app_ja | 9552489b1d5d3adc61a7c73645a1ae09abc9d933 | [
"MIT"
] | null | null | null | import re
from slack_bolt import App
from app.onboarding import (
message_multi_users_select,
message_multi_users_select_lazy,
)
from app.tutorials import (
tutorial_page_transition,
tutorial_page_transition_lazy,
app_home_opened,
app_home_opened_lazy,
page1_home_tab_button_click,
page1_home_tab_button_click_lazy,
page1_home_tab_users_select_lazy,
page1_home_tab_users_select,
page2_modal,
page2_modal_lazy,
page2_modal_submission,
page4_create_channel,
page4_create_channel_lazy,
page4_create_channel_submission,
page4_create_channel_submission_lazy,
page4_create_channel_setup,
page4_create_channel_setup_lazy,
global_shortcut_handler,
global_shortcut_view_submission,
global_shortcut_view_submission_lazy,
message_shortcut_handler,
message_shortcut_handler_lazy,
external_data_source_handler,
)
def register_listeners(app: App):
    """Register every action/event/view/shortcut listener on the Bolt app.

    Registration order is preserved deliberately: Bolt matches listeners in
    the order they were registered.
    """
    # generic link buttons only need to be acknowledged
    app.action("link_button")(lambda ack: ack())
    # ----------------------------------------------
    # message
    app.action("message_multi_users_select")(
        ack=message_multi_users_select, lazy=[message_multi_users_select_lazy]
    )
    # ----------------------------------------------
    # home tab
    app.event("app_home_opened")(ack=app_home_opened, lazy=[app_home_opened_lazy])
    # regex handlers cover the numbered per-page action ids
    app.action(re.compile("tutorial_page_transition_\d+"))(
        ack=tutorial_page_transition, lazy=[tutorial_page_transition_lazy]
    )
    app.action(re.compile("page1_home_tab_button_\d"))(
        ack=page1_home_tab_button_click, lazy=[page1_home_tab_button_click_lazy]
    )
    app.action("page1_home_tab_users_select")(
        ack=page1_home_tab_users_select, lazy=[page1_home_tab_users_select_lazy]
    )
    # page 2: modal open + submission
    app.action("page2_modal")(ack=page2_modal, lazy=[page2_modal_lazy])
    app.view("page2_modal_submission")(page2_modal_submission)
    # page 4: channel-creation flow (modal, submission, then setup on event)
    app.action("page4_create_channel")(
        ack=page4_create_channel, lazy=[page4_create_channel_lazy]
    )
    app.view("page4_create_channel_submission")(
        ack=page4_create_channel_submission, lazy=[page4_create_channel_submission_lazy]
    )
    app.event("channel_created")(
        ack=page4_create_channel_setup, lazy=[page4_create_channel_setup_lazy]
    )
    # shortcuts and the external data source example
    app.shortcut("global-shortcut-example")(global_shortcut_handler)
    app.view("global-shortcut-example_submission")(
        ack=global_shortcut_view_submission, lazy=[global_shortcut_view_submission_lazy]
    )
    app.shortcut("message-shortcut-example")(
        ack=message_shortcut_handler, lazy=[message_shortcut_handler_lazy]
    )
    app.options("external-data-source-example")(external_data_source_handler)
    app.action("external-data-source-example")(lambda ack: ack())
| 30.744444 | 88 | 0.734731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 514 | 0.185761 |
39c310b2a22377850644e8e3e7bb4274bb90e2dd | 1,213 | py | Python | project2/redactor.py | m-harikiran/cs5293sp21-project2 | 48547543001813aee17731399f617f82043e4a8f | [
"MIT"
] | null | null | null | project2/redactor.py | m-harikiran/cs5293sp21-project2 | 48547543001813aee17731399f617f82043e4a8f | [
"MIT"
] | null | null | null | project2/redactor.py | m-harikiran/cs5293sp21-project2 | 48547543001813aee17731399f617f82043e4a8f | [
"MIT"
] | null | null | null | import nltk
import re
from nltk.corpus import wordnet
# This method reads the file and redacts names in it and writes redacted data to file with extension python3.redacted
def redactNames(path):
    """Redact PERSON names in the text file at `path`.

    The text is tokenized, POS-tagged and NE-chunked with NLTK; every token
    inside a PERSON chunk is replaced in the original text by a run of
    block characters of the same length. The redacted text is written next
    to the input with the '.txt' extension replaced by '.redacted'.

    Returns the path of the redacted file.
    """
    # read the file to be redacted (closed promptly via the context manager)
    with open(path) as source:
        data = source.read()
    tokenized_data = nltk.word_tokenize(data)  # split data into words
    # generate the part of speech of each word
    pos_tokenized_data = nltk.pos_tag(tokenized_data)
    # chunk the tagged words using the named entity chunker
    chk_tagged_tokens = nltk.chunk.ne_chunk(pos_tokenized_data)
    for chk in chk_tagged_tokens.subtrees():
        if chk.label().upper() == 'PERSON':  # words tagged PERSON
            for name in chk:
                word = name[0]
                # re.escape guards against names containing regex metacharacters
                data = re.sub('\\b{}\\b'.format(re.escape(word)),
                              '\u2588' * len(word), data)  # replace with block chars
    redacted_path = path.replace('.txt', '.redacted')
    with open(redacted_path, 'w') as redacted_doc:
        redacted_doc.write(data)
    return redacted_path
| 32.783784 | 117 | 0.660346 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 537 | 0.442704 |
39c3360de5ed5436c13f0b5c11ff3ff8f4c1e5e8 | 935 | py | Python | python3/max_area_of_island.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | 1 | 2020-10-08T09:17:40.000Z | 2020-10-08T09:17:40.000Z | python3/max_area_of_island.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | python3/max_area_of_island.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | # Space : O(n)
# Time : O(m*n)
class Solution:
    def crawl(self, grid, x, y):
        """Flood-fill the island containing (x, y), zeroing every visited
        cell, and return the island's area (0 if the cell is water)."""
        rows, cols = len(grid), len(grid[0])
        if grid[y][x] != 1:
            return 0
        # iterative DFS with an explicit stack (avoids deep recursion)
        stack = [(x, y)]
        grid[y][x] = 0
        area = 1
        while stack:
            cx, cy = stack.pop()
            for dx, dy in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                nx, ny = cx + dx, cy + dy
                if 0 <= ny < rows and 0 <= nx < cols and grid[ny][nx] == 1:
                    grid[ny][nx] = 0
                    area += 1
                    stack.append((nx, ny))
        return area

    def maxAreaOfIsland(self, grid: List[List[int]]) -> int:
        """Return the area of the largest 4-connected island of 1s.

        Note: the grid is consumed (visited cells are zeroed)."""
        best = 0
        for y, row in enumerate(grid):
            for x, cell in enumerate(row):
                if cell == 1:
                    best = max(best, self.crawl(grid, x, y))
        return best
| 25.972222 | 61 | 0.37754 | 894 | 0.95615 | 0 | 0 | 0 | 0 | 0 | 0 | 38 | 0.040642 |
39c42e302788d37384d6aba69dfd98df2d11d258 | 1,000 | py | Python | datasets/linear/parking/lr.py | diego1q2w/lregret | 823c7f609559d1012ed52f619b1aa1297d5f2517 | [
"Apache-2.0"
] | null | null | null | datasets/linear/parking/lr.py | diego1q2w/lregret | 823c7f609559d1012ed52f619b1aa1297d5f2517 | [
"Apache-2.0"
] | null | null | null | datasets/linear/parking/lr.py | diego1q2w/lregret | 823c7f609559d1012ed52f619b1aa1297d5f2517 | [
"Apache-2.0"
] | null | null | null | import os
from datetime import datetime
import time
import pandas as pd
from datasets.linear import LinearProblem
from regresion.linear.feature import PolFeatures
from regresion.linear.linear import LinearRegression
class ParkingProblem(LinearProblem):
    """Linear-regression problem built from the bundled parking dataset."""

    def dataset_title(self) -> str:
        """Human-readable name of this dataset."""
        return "Parking"

    def __init__(self, regression: LinearRegression, pol_features=PolFeatures(1)) -> None:
        csv_path = os.path.join(os.path.dirname(__file__), 'dataset.csv')
        frame = pd.read_csv(csv_path)

        def minutes_since_reference(raw_date: str) -> float:
            # NOTE(review): 1475539200.0 looks like the dataset's start epoch
            # (seconds); timestamps become minutes since then — confirm.
            parsed = datetime.strptime(raw_date, '%Y-%m-%d %H:%M:%S')
            seconds = time.mktime(parsed.timetuple()) - 1475539200.0
            return seconds / 60.0

        frame['LastUpdated'] = frame['LastUpdated'].apply(minutes_since_reference)
        super().__init__(frame[['Capacity', 'LastUpdated']], frame['Occupancy'], regression, pol_features)
# lr = LinearRegression()
# p = ParkingProblem(lr)
# p.fit_solving()
| 29.411765 | 100 | 0.678 | 708 | 0.708 | 0 | 0 | 0 | 0 | 0 | 0 | 167 | 0.167 |
39c714143377ff9b1982f6d7182df1f2ee8d4c39 | 244 | py | Python | output/models/nist_data/atomic/name/schema_instance/nistschema_sv_iv_atomic_name_max_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/nist_data/atomic/name/schema_instance/nistschema_sv_iv_atomic_name_max_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/nist_data/atomic/name/schema_instance/nistschema_sv_iv_atomic_name_max_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.nist_data.atomic.name.schema_instance.nistschema_sv_iv_atomic_name_max_length_1_xsd.nistschema_sv_iv_atomic_name_max_length_1 import NistschemaSvIvAtomicNameMaxLength1
# Public API of this generated package: re-export the single schema class.
__all__ = [
    "NistschemaSvIvAtomicNameMaxLength1",
]
| 40.666667 | 186 | 0.889344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 36 | 0.147541 |
39c80db6883ae8bab680917b15a4a104eed100d2 | 4,888 | py | Python | vl/h5/mg_genome/norm_h5.py | hurwitzlab/viral-learning | 8d7aebc0d58fa32a429f4a47593452ee2722ba82 | [
"MIT"
] | 1 | 2018-02-23T16:49:30.000Z | 2018-02-23T16:49:30.000Z | vl/h5/mg_genome/norm_h5.py | hurwitzlab/viral-learning | 8d7aebc0d58fa32a429f4a47593452ee2722ba82 | [
"MIT"
] | null | null | null | vl/h5/mg_genome/norm_h5.py | hurwitzlab/viral-learning | 8d7aebc0d58fa32a429f4a47593452ee2722ba82 | [
"MIT"
] | null | null | null | """
1. Normalizing the entire dataset with mean and variance, shuffle, compression=9 runs for more than 8 hours on
ocelote and results in a file of more than 150GB.
2. Try normalizing with only variance and without shuffle.
"""
import os.path
import sys
import time
import h5py
import numpy as np
def calculate_mean_variance(dsets):
    """
    Given a list of datasets calculate the mean and variance for all rows in all datasets.

    Uses Welford's online algorithm, reading each dataset in chunks of at
    most 10,000 rows so arbitrarily large datasets fit in memory.

    Arguments:
        dsets: sequence of datasets with matching column counts

    Returns:
        (mean, variance): tuple of (1, n_columns) mean and sample-variance
        (ddof=1) arrays
    """
    print('calculating mean and variance for "{}"'.format([dset.name for dset in dsets]))
    t0 = time.time()
    mean = np.zeros((1, dsets[0].shape[1]))
    M2 = np.zeros((1, dsets[0].shape[1]))
    count = 0
    for dset in dsets:
        # reusable chunk buffer: at most 10,000 rows at a time
        dsubset = np.zeros((min(10000, dset.shape[0]), dset.shape[1]))
        print('  working on "{}"'.format(dset.name))
        for n in range(0, dset.shape[0], dsubset.shape[0]):
            m = min(n + dsubset.shape[0], dset.shape[0])
            rows = m - n
            # BUG FIX: the final chunk can be shorter than the buffer; read
            # into its first `rows` rows (dest_sel) and only iterate those,
            # instead of also accumulating stale rows left over from the
            # previous chunk, which skewed the statistics.
            dset.read_direct(dsubset, source_sel=np.s_[n:m, :],
                             dest_sel=np.s_[0:rows, :])
            t00 = time.time()
            for i in range(rows):
                count = count + 1
                delta = dsubset[i, :] - mean
                mean += delta / count
                delta2 = dsubset[i, :] - mean
                M2 += delta * delta2
            print('    processed slice [{}:{}] {:5.2f}s'.format(n, m, time.time()-t00))
    print('  finished mean and variance in {:5.2f}s'.format(time.time()-t0))
    # sample variance: divide by (count - 1)
    return (mean, M2/(count - 1))
def normalize_datasets(input_h5_fp, norm_h5_fp):
    """Normalize every dataset of `input_h5_fp` into a new file `norm_h5_fp`.

    The mean and variance are computed from the two training kmer datasets
    only, stored in the output at /mean and /variance, and then every
    dataset found in the input is written out as (x - mean) / variance.
    """
    dset_paths = []
    # visititems callback: collect the path of every dataset (has a dtype)
    def find_data(name, obj):
        if hasattr(obj, 'dtype'):
            print('found dataset "{}"'.format(name))
            dset_paths.append(obj.name)
        else:
            pass
    with h5py.File(input_h5_fp, 'r', libver='latest', swmr=True) as input_h5_file:
        input_h5_file.visititems(find_data)
        # statistics come from the training subsets only
        mean, variance = calculate_mean_variance((
            input_h5_file['/clean-bact/training1/extract/kmers'],
            input_h5_file['/clean-vir/training1/extract/kmers']))
        # NOTE(review): zero-variance columns are counted below but still
        # divided by later, producing inf/NaN warnings — confirm intended.
        zero_mean_column_count = len(mean[mean == 0.0])
        print('{} column(s) have zero mean'.format(zero_mean_column_count))
        zero_var_column_count = len(variance[variance == 0.0])
        print('{} column(s) have zero variance'.format(zero_var_column_count))
        with h5py.File(norm_h5_fp, 'w') as norm_h5_file:
            print('writing normalized data to "{}"'.format(norm_h5_fp))
            # persist the statistics alongside the normalized data
            mean_dset = norm_h5_file.require_dataset(
                name='/mean',
                shape=mean.shape,
                dtype=mean.dtype,
                chunks=mean.shape,
                compression='gzip')
            mean_dset[:, :] = mean
            variance_dset = norm_h5_file.require_dataset(
                name='/variance',
                shape=variance.shape,
                dtype=variance.dtype,
                chunks=variance.shape,
                compression='gzip')
            variance_dset[:, :] = variance
            for dset_path in dset_paths:
                dset = input_h5_file[dset_path]
                print('  normalizing "{}"'.format(dset.name))
                # mirror the input dataset's name/shape/dtype in the output
                normalized_dset = norm_h5_file.require_dataset(
                    name=dset.name,
                    shape=dset.shape,
                    dtype=dset.dtype,
                    chunks=mean.shape,
                    compression='gzip',
                    compression_opts=6)
                t0 = time.time()
                n = 10000
                # normalize in 10,000-row slices to bound memory use
                # (slicing past the end is safe for h5py/numpy indexing)
                for i in range(0, dset.shape[0], n):
                    j = i + n
                    t00 = time.time()
                    # per the module docstring, divides by variance (not std)
                    normalized_dset[i:j, :] = (dset[i:j, :] - mean) / variance
                    ##normalized_dset[i:j, :] = dset[i:j, :] / variance
                    print('    normalized slice {}:{} in {:5.2f}s'.format(i, j, time.time()-t00))
                print('normalized "{}" in {:5.2f}s'.format(dset.name, time.time()-t0))
def main():
    """Normalize the HDF5 file given on the command line into `norm_<name>`."""
    source_path = sys.argv[1]  # e.g. '../data/training_testing.h5'
    print(source_path)
    # quick sanity peek at the expected group before doing any heavy work
    with h5py.File(source_path, 'r') as source_file:
        print(list(source_file['/clean-bact/training1/extract'].items()))
    directory, filename = os.path.split(source_path)
    target_path = os.path.join(directory, 'norm_' + filename)
    normalize_datasets(source_path, target_path)
if __name__ == '__main__':
main() | 35.678832 | 110 | 0.557897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,301 | 0.266162 |
39c9516fadde5be713c7c8c8f3a12e5d1178fce7 | 780 | py | Python | app/controller/api/fields/comment.py | Arianxx/LoniceraBlog | 1f13d336f42c7041b16293dc8f1af62cc98ce2f4 | [
"MIT"
] | 8 | 2018-09-08T04:41:01.000Z | 2018-09-08T13:15:59.000Z | app/controller/api/fields/comment.py | Arianxx/LoniceraBlog | 1f13d336f42c7041b16293dc8f1af62cc98ce2f4 | [
"MIT"
] | null | null | null | app/controller/api/fields/comment.py | Arianxx/LoniceraBlog | 1f13d336f42c7041b16293dc8f1af62cc98ce2f4 | [
"MIT"
] | 6 | 2018-09-08T08:51:50.000Z | 2018-09-11T00:29:20.000Z | from flask_restful import fields
from .custom import Num, EdgeUrl, PaginateUrl
# flask-restful marshalling schema for a single comment resource.
# NOTE(review): the key "arthor" is a typo for "author", but it is part of
# the serialized API response — renaming it would break API clients.
getCommentField = {
    "id": fields.Integer,
    "time": fields.DateTime(attribute="timestamp"),
    "author_name": fields.String(attribute="username"),
    "article_id": fields.Integer(attribute="postid"),
    "body": fields.String,
    "urls": {
        "arthor": fields.Url("api.user", absolute=True),
        "post": fields.Url("api.post", absolute=True),
    },
}
# Marshalling schema for a paginated list of a post's comments.
getPostCommentsField = {
    "prev": EdgeUrl("api.post_comments", 0),
    "next": EdgeUrl("api.post_comments", 1),
    "all_comments": fields.Integer(attribute="total"),
    "all_pages": fields.Integer(attribute="pages"),
    "urls": fields.List(
        PaginateUrl("api.comment", "commentid", "id"), attribute="items"
    ),
}
| 31.2 | 72 | 0.65 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 240 | 0.307692 |
39cc957ec5fbf6dc9322a11520c340004afd7af2 | 1,132 | py | Python | faq/templatetags/faq_tags.py | HerbyDE/jagdreisencheck-webapp | 9af5deda2423b787da88a0c893f3c474d8e4f73f | [
"BSD-3-Clause"
] | null | null | null | faq/templatetags/faq_tags.py | HerbyDE/jagdreisencheck-webapp | 9af5deda2423b787da88a0c893f3c474d8e4f73f | [
"BSD-3-Clause"
] | null | null | null | faq/templatetags/faq_tags.py | HerbyDE/jagdreisencheck-webapp | 9af5deda2423b787da88a0c893f3c474d8e4f73f | [
"BSD-3-Clause"
] | null | null | null | from django import template
from faq.forms import FaqInstanceForm, FaqAnswerForm
from faq.models import FaqInstance, FaqAnswer
register = template.Library()
@register.inclusion_tag('faq/jagdreisencheck/create-question-form.html', takes_context=True)
def create_question_form(context, model, identifier):
    """Render the "ask a question" form for the object (`model`, `identifier`)."""
    # item-assignment (not Context.update, which would push a new layer)
    for key, value in (('form', FaqInstanceForm()),
                       ('model', model),
                       ('identifier', identifier)):
        context[key] = value
    return context
@register.inclusion_tag('faq/jagdreisencheck/answer-question-form.html', takes_context=True)
def answer_question_form(context, identifier, parent):
    """Render the answer form for question `parent` on object `identifier`."""
    # item-assignment (not Context.update, which would push a new layer)
    for key, value in (('form', FaqAnswerForm()),
                       ('identifier', identifier),
                       ('parent', parent)):
        context[key] = value
    return context
@register.inclusion_tag('faq/jagdreisencheck/render-questions.html', takes_context=True)
def render_questions(context, model, identifier):
    """Render every question attached to (`model`, `identifier`), newest first."""
    context['questions'] = (FaqInstance.objects
                            .filter(model=model, identifier=identifier)
                            .order_by("-date_created"))
    # form classes (not instances) the template instantiates as needed
    context['qe_form'] = FaqInstanceForm
    context['aw_form'] = FaqAnswerForm
    return context
39cd092c9896194e7d5884416a86b0b247f8dee4 | 486 | py | Python | markflow/detectors/__init__.py | jmholla/markflow | 1accc4a23f9c06d9ab77d6c180c586da3d9ec69b | [
"Apache-2.0"
] | 14 | 2020-08-14T03:09:53.000Z | 2022-03-22T22:46:50.000Z | markflow/detectors/__init__.py | jmholla/markflow | 1accc4a23f9c06d9ab77d6c180c586da3d9ec69b | [
"Apache-2.0"
] | 6 | 2020-08-19T18:13:24.000Z | 2021-02-11T03:56:34.000Z | markflow/detectors/__init__.py | jmholla/markflow | 1accc4a23f9c06d9ab77d6c180c586da3d9ec69b | [
"Apache-2.0"
] | 3 | 2020-08-13T16:40:13.000Z | 2022-01-18T12:31:37.000Z | # flake8: noqa
"""
MarkFlow MarkDown Section Detection Library
This library provide this functions MarkFlow uses to split a document into it's
individual text types.
"""
from .atx_heading import *
from .blank_line import *
from .block_quote import *
from .fenced_code_block import *
from .indented_code_block import *
from .link_reference_definition import *
from .list import *
from .paragraph import *
from .setext_heading import *
from .table import *
from .thematic_break import *
| 25.578947 | 79 | 0.788066 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 169 | 0.347737 |
39cd57d3e96930bf2512f61084f0ec5dbd909936 | 2,129 | py | Python | django_project/apps/qfauth/forms.py | gaohj/nzflask_bbs | 36a94c380b78241ed5d1e07edab9618c3e8d477b | [
"Apache-2.0"
] | null | null | null | django_project/apps/qfauth/forms.py | gaohj/nzflask_bbs | 36a94c380b78241ed5d1e07edab9618c3e8d477b | [
"Apache-2.0"
] | 27 | 2020-02-12T07:55:58.000Z | 2022-03-12T00:19:09.000Z | django_project/apps/qfauth/forms.py | gaohj/nzflask_bbs | 36a94c380b78241ed5d1e07edab9618c3e8d477b | [
"Apache-2.0"
] | 2 | 2020-02-18T01:54:55.000Z | 2020-02-21T11:36:28.000Z | from django import forms
from apps.forms import FormMixin
from django.core import validators
from .models import User
from django.core.cache import cache
class LoginForm(forms.Form,FormMixin):
    """Login form: phone number + password, with an optional remember flag."""
    # 11-digit mobile phone number used as the login identifier
    telephone = forms.CharField(max_length=11,min_length=11)
    # password of 6-30 characters; error messages are user-facing (Chinese)
    password = forms.CharField(max_length=30,min_length=6,error_messages={"max_length":"密码最多不能超过30个字符","min_length":"密码最少不能少于6个字符"})
    # optional "remember me" flag
    remember = forms.IntegerField(required=False)
class RegisterForm(forms.Form, FormMixin):
    """Registration form.

    Validates that the two passwords match, that both the image and SMS
    captchas agree with their cached values, and that the phone number is
    not already registered.
    """
    telephone = forms.CharField(max_length=11, min_length=11,validators=[validators.RegexValidator(r'1[3-9]\d{9}',message="请输入正确的手机号")])
    username = forms.CharField(max_length=30)
    password1 = forms.CharField(max_length=30,min_length=6,error_messages={"max_length":"密码最多不能超过30个字符","min_length":"密码最少不能少于6个字符"})
    password2 = forms.CharField(max_length=30,min_length=6,error_messages={"max_length":"密码最多不能超过30个字符","min_length":"密码最少不能少于6个字符"})
    img_captcha = forms.CharField(max_length=4,min_length=4)
    sms_captcha = forms.CharField(max_length=4,min_length=4)

    def clean(self):
        cleaned_data = super(RegisterForm, self).clean()
        # the two password entries must match
        password1 = cleaned_data.get('password1')
        password2 = cleaned_data.get('password2')
        if password1 != password2:
            raise forms.ValidationError('两次密码输入不一致')
        # validate the image captcha (user input vs cached value)
        img_captcha = cleaned_data.get('img_captcha')
        cache_img_captcha = cache.get(img_captcha.lower())
        if not cache_img_captcha or img_captcha.lower() != cache_img_captcha.lower():
            raise forms.ValidationError('图形验证码输入错误')
        # validate the SMS captcha cached under the phone number
        telephone = cleaned_data.get('telephone')
        sms_captcha = cleaned_data.get('sms_captcha')
        cache_sms_captcha = cache.get(telephone)
        if not cache_sms_captcha or sms_captcha.lower() != cache_sms_captcha.lower():
            raise forms.ValidationError('短信验证码输入错误')
        # the phone number must not already be registered
        exists = User.objects.filter(telephone=telephone).exists()
        if exists:
            # BUG FIX: the ValidationError was previously constructed but
            # never raised, silently allowing duplicate registrations.
            raise forms.ValidationError('该手机号已经被注册')
        return cleaned_data
39cf488b67a5b1e7312e55ca067c9bf0bfbe9c6e | 156 | py | Python | receives/pytest.py | felixsch/receives | 0d149e3a24c0377ac60d502736299c9f4348244a | [
"MIT"
] | null | null | null | receives/pytest.py | felixsch/receives | 0d149e3a24c0377ac60d502736299c9f4348244a | [
"MIT"
] | null | null | null | receives/pytest.py | felixsch/receives | 0d149e3a24c0377ac60d502736299c9f4348244a | [
"MIT"
] | null | null | null |
import pytest
from receives.receiver import Receiver
@pytest.fixture
def receive():
    """Yield a fresh Receiver for the test, finalizing it during teardown."""
    instance = Receiver()
    yield instance
    instance.finalize()
| 14.181818 | 38 | 0.730769 | 0 | 0 | 83 | 0.532051 | 99 | 0.634615 | 0 | 0 | 0 | 0 |
39cfddaaca78d75a0a19c8026c9b58cbdca9cec8 | 18,099 | py | Python | contracts/crawler.py | waldyrious/public-contracts | 3107ddc007f3574ce19aaa2223399484bc6b1382 | [
"BSD-3-Clause"
] | 25 | 2015-03-05T00:15:11.000Z | 2021-04-04T18:50:43.000Z | contracts/crawler.py | waldyrious/public-contracts | 3107ddc007f3574ce19aaa2223399484bc6b1382 | [
"BSD-3-Clause"
] | 36 | 2015-03-21T17:04:54.000Z | 2017-07-06T10:35:51.000Z | contracts/crawler.py | waldyrious/public-contracts | 3107ddc007f3574ce19aaa2223399484bc6b1382 | [
"BSD-3-Clause"
] | 7 | 2015-03-24T16:18:02.000Z | 2019-05-29T11:51:01.000Z | import json
import logging
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import DateField, CharField
import requests
import requests.exceptions
from . import models
from contracts.crawler_forms import EntityForm, ContractForm, \
TenderForm, clean_place, PriceField
logger = logging.getLogger(__name__)
class JSONLoadError(Exception):
    """Signals that the payload fetched from ``url`` was not usable JSON."""
    def __init__(self, url):
        # keep the offending URL around for callers that want to report it
        self.url = url
class JSONCrawler:
    """
    A crawler specific for retrieving JSON content.
    """
    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) ' \
                 'AppleWebKit/537.36 (KHTML, like Gecko)'
    def __init__(self):
        # one persistent session so cookies/headers carry across requests
        self.session = requests.Session()
        self.session.headers.update({'User-Agent': self.user_agent})
    def get_response(self, url, headers=None):
        """GET `url`, merging any extra `headers` into the session first."""
        if headers:
            self.session.headers.update(headers)
        response = self.session.get(url)
        return response
    def get_json(self, url, headers=None):
        """GET `url` and decode the body as JSON."""
        raw_body = self.get_response(url, headers).text
        return json.loads(raw_body)
class ContractsStaticDataCrawler(JSONCrawler):
    """Crawls BASE's static lookup tables (contract/procedure/act/model
    types, countries, districts and councils) and stores missing entries."""
    def _save_type_list(self, url, model):
        """Fetch `url` and create any missing `model` rows.

        Each BASE item carries an `id` (stored as `base_id`) and a
        `description` (stored as `name`). Items with id '0' mean "All" in
        BASE and are skipped. Existing rows (matched by `base_id`) are left
        untouched.
        """
        data = self.get_json(url)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All" that we don't use.
                continue
            try:
                # if it exists, continue
                model.objects.get(base_id=element['id'])
            except model.DoesNotExist:
                model(name=element['description'],
                      base_id=element['id']).save()

    def save_contracts_types(self):
        self._save_type_list(
            'http://www.base.gov.pt/base2/rest/lista/tipocontratos',
            models.ContractType)

    def save_procedures_types(self):
        # NOTE: previously matched existing rows by name; now matched by
        # base_id for consistency with the other type tables.
        self._save_type_list(
            'http://www.base.gov.pt/base2/rest/lista/tipoprocedimentos',
            models.ProcedureType)

    def save_act_types(self):
        self._save_type_list(
            'http://www.base.gov.pt/base2/rest/lista/tiposacto',
            models.ActType)

    def save_model_types(self):
        self._save_type_list(
            'http://www.base.gov.pt/base2/rest/lista/tiposmodelo',
            models.ModelType)

    def save_all_countries(self):
        """Stores any missing country; countries are matched by name only
        (the Country model has no base_id)."""
        url = 'http://www.base.gov.pt/base2/rest/lista/paises'
        data = self.get_json(url)
        for element in data['items']:
            try:
                # if it exists, we pass
                models.Country.objects.get(name=element['description'])
            except models.Country.DoesNotExist:
                models.Country(name=element['description']).save()

    def save_all_districts(self):
        """Stores any missing district of Portugal (BASE country id 187)."""
        base_url = 'http://www.base.gov.pt/base2/rest/lista/distritos?pais=%d'
        portugal = models.Country.objects.get(name="Portugal")
        data = self.get_json(base_url % 187)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All" that we don't use.
                continue
            try:
                # if it exists, we pass
                models.District.objects.get(base_id=element['id'])
            except models.District.DoesNotExist:
                models.District(name=element['description'],
                                base_id=element['id'],
                                country=portugal).save()

    def save_councils(self, district):
        """Stores any missing council of `district`."""
        base_url = 'http://www.base.gov.pt/base2/rest/lista/concelhos?distrito=%d'
        data = self.get_json(base_url % district.base_id)
        for element in data['items']:
            if element['id'] == '0':  # id = 0 is "All", that we don't use.
                continue
            try:
                # if it exists, we pass
                models.Council.objects.get(base_id=element['id'])
            except models.Council.DoesNotExist:
                models.Council(name=element['description'],
                               base_id=element['id'],
                               district=district).save()

    def retrieve_and_save_all(self):
        """Populates all static tables, respecting their dependency order."""
        self.save_contracts_types()
        self.save_procedures_types()
        self.save_model_types()
        self.save_act_types()
        # Countries first, districts second, councils third: each level
        # references the previous one.
        self.save_all_countries()
        self.save_all_districts()
        for district in models.District.objects.all():
            self.save_councils(district)
class DynamicCrawler(JSONCrawler):
    """Abstract crawler that synchronizes one BASE object type with the
    local database.

    Subclasses set the four class attributes below and implement
    `clean_data`, `_hasher` and `_values_list`.
    """
    # URL with a '%d' placeholder for a single object's base_id
    object_url = None
    # URL of the paginated listing (supports HTTP Range headers)
    object_list_url = None
    # human-readable name used in log messages, e.g. 'contract'
    object_name = None
    # Django model the objects are stored as
    object_model = None
    def get_json(self, url, headers=None):
        """
        Fetches `url` and returns the decoded JSON.

        Raises a `JSONLoadError` when BASE answers with an ``{'id': 0}``
        payload, the BASE way of saying that the object doesn't exist in
        its database.
        """
        data = super(DynamicCrawler, self).get_json(url, headers)
        # ensures that data is not None
        if not isinstance(data, list) and data['id'] == 0:
            raise JSONLoadError(url)
        return data
    @staticmethod
    def clean_data(data):
        # subclasses turn a raw BASE payload into validated model kwargs
        raise NotImplementedError
    def save_instance(self, cleaned_data):
        """
        Saves or updates the instance using cleaned_data.

        Returns ``(instance, created)``; `created` is True when a new row
        was inserted rather than an existing one updated.
        """
        try:
            instance = self.object_model.objects.get(
                base_id=cleaned_data['base_id'])
            for (key, value) in cleaned_data.items():
                setattr(instance, key, value)
            action = 'updated'
        except self.object_model.DoesNotExist:
            instance = self.object_model(**cleaned_data)
            action = 'created'
        instance.save()
        logger.info('%s "%d" %s' % (self.object_name, cleaned_data['base_id'],
                                    action))
        return instance, (action == 'created')
    @transaction.atomic
    def update_instance(self, base_id):
        """
        Retrieves data of object base_id from BASE,
        cleans, and saves it as an instance of a Django model.
        Returns the instance
        """
        data = self.get_json(self.object_url % base_id)
        cleaned_data = self.clean_data(data)
        return self.save_instance(cleaned_data)
    def get_instances_count(self):
        """
        Hits BASE to get the total number of instances in BASE db.
        """
        response = self.get_response(self.object_list_url,
                                     headers={'Range': 'items=0-1'})
        results_range = response.headers['content-range']
        # in "items 0-%d/%d", we want the second %d, the total.
        return int(results_range.split('/')[1])
    def _hasher(self, instance):
        """
        Hashes a entry of BASE response to a tuple. E.g. `(instance['id'], )`.
        Add more values to better identify if the instance changed.
        """
        raise NotImplementedError
    def _values_list(self):
        """
        Returns a list of tuples that are retrieved from the database to match
        the tuple returned by `_hasher`. E.g. `('base_id',)`.
        """
        raise NotImplementedError
    def get_base_ids(self, row1, row2):
        # one listing request covering rows [row1, row2], hashed for comparison
        items = self.get_json(self.object_list_url,
                              headers={'Range': 'items=%d-%d' % (row1, row2)})
        return [self._hasher(instance) for instance in items]
    def _update_batch(self, row1, row2):
        """
        Updates items from row1 to row2 of BASE db with our db.
        """
        # hashes of what BASE currently has in this row range
        c1s = self.get_base_ids(row1, row2)
        # matching hashes of what we store for the same base_id span
        # (assumes the BASE listing is ordered by ascending id -- TODO confirm)
        c2s = set(self.object_model.objects.filter(base_id__gte=c1s[0][0],
                                                   base_id__lte=c1s[-1][0])
                  .order_by('base_id').values_list(*self._values_list()))
        c1s = set(c1s)
        # just the ids
        c1_ids = set(item[0] for item in c1s)
        c2_ids = set(item[0] for item in c2s)
        aggregated_modifications = {'deleted': 0, 'added': 0, 'updated': 0}
        # present in BASE but missing or different locally: fetch and save
        for item in c1s - c2s:
            id1 = item[0]
            self.update_instance(id1)
            if id1 in c2_ids:
                aggregated_modifications['updated'] += 1
            else:
                aggregated_modifications['added'] += 1
        # stored locally but gone from BASE: delete
        for id2 in c2_ids - c1_ids:
            self.object_model.objects.get(base_id=id2).delete()
            # NOTE(review): message says 'contract' even for other types
            logger.info('contract "%d" deleted' % id2)
            aggregated_modifications['deleted'] += 1
        return aggregated_modifications
    def update(self, start=0, end=None, items_per_batch=1000):
        """
        The method retrieves count of all items in BASE (1 hit), and
        synchronizes items from `start` until `min(end, count)` in batches
        of `items_per_batch`.
        If `end=None` (default), it retrieves until the last item.
        if `start < 0`, the start is counted from the end.
        Use e.g. `start=-2000` for a quick retrieve of new items;
        Use `start=0` (default) to synchronize all items in database
        (it takes time!)
        """
        aggregated = {'deleted': 0, 'added': 0, 'updated': 0}
        count = self.get_instances_count()
        if end is None:
            end = count
        else:
            end = min(count, end)
        if end <= 0:
            return aggregated
        # if start < 0, start is as if it was from the maximum
        if start < 0:
            start += end
        if start > end:
            return aggregated
        # + 1 because it is [start, end]
        total_items = end - start
        # 103 // 100 = 1; we want 2 to also get the 3 in the next batch.
        batches = total_items // items_per_batch + 1
        logger.info('update of \'%s\' started: %d items in %d batches.' %
                    (self.object_name, total_items, batches))
        for i in range(batches):
            logger.info('Batch %d/%d started.' % (i + 1, batches))
            batch_aggr = self._update_batch(
                start + i*items_per_batch,
                min(end, start + (i+1)*items_per_batch))
            logger.info('Batch %d/%d finished: %s' % (i + 1, batches, batch_aggr))
            for key in aggregated:
                aggregated[key] += batch_aggr[key]
        logger.info('update of \'%s\' finished: %s' %
                    (self.object_name, aggregated))
        return aggregated
class EntitiesCrawler(DynamicCrawler):
    """
    Crawler used to retrieve entities.
    """
    object_url = 'http://www.base.gov.pt/base2/rest/entidades/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/entidades'
    object_name = 'entity'
    object_model = models.Entity
    @staticmethod
    def clean_data(data):
        """Map a raw BASE entity payload into validated `EntityForm` data."""
        form = EntityForm({'base_id': data['id'],
                           'name': data['description'],
                           'nif': data['nif'],
                           'country': data['location']})
        if form.is_valid():
            return form.cleaned_data
        logger.error('Validation of entity "%d" failed' %
                     data['id'])
        raise ValidationError(form.errors)
    def _hasher(self, instance):
        """Reduce a BASE payload to the (id, nif, name) comparison tuple."""
        text_field = CharField()
        return (instance['id'],
                text_field.clean(instance['nif']),
                text_field.clean(instance['description']))
    def _values_list(self):
        """Database fields matching the tuple produced by `_hasher`."""
        return ('base_id', 'nif', 'name')
class ContractsCrawler(DynamicCrawler):
    """
    Crawler used to retrieve contracts.
    """
    object_url = 'http://www.base.gov.pt/base2/rest/contratos/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/contratos'
    object_name = 'contract'
    object_model = models.Contract
    @staticmethod
    def clean_data(data):
        """Map a raw BASE contract payload into validated `ContractForm` data."""
        country, district, council = clean_place(data['executionPlace'])
        prepared = {'base_id': data['id'],
                    'procedure_type': data['contractingProcedureType'],
                    'contract_type': data['contractTypes'],
                    'contract_description': data['objectBriefDescription'],
                    'description': data['description'],
                    'signing_date': data['signingDate'],
                    'added_date': data['publicationDate'],
                    'cpvs': data['cpvs'],
                    'category': data['cpvs'],
                    'price': data['initialContractualPrice'],
                    'country': country,
                    'district': district,
                    'council': {'district': district, 'council': council},
                    'contractors': data['contracting'],
                    'contracted': data['contracted']}
        # Known-bad payloads; the correct contractors are confirmed from the
        # official contract in PDF.
        overrides = ((  (1892486, 1892453, 1892392), [{'id': 8468}]),
                     ((2377732, 2377789, 2377777), [{'id': 2154}]))
        for bad_ids, contractors in overrides:
            if prepared['base_id'] in bad_ids:
                prepared['contractors'] = contractors
                break
        form = ContractForm(prepared)
        if form.is_valid():
            return form.cleaned_data
        logger.error('Validation of contract "%d" failed' %
                     data['id'])
        raise ValidationError(form.errors)
    def save_instance(self, cleaned_data):
        """Persist the contract, then reset its many-to-many relations."""
        who_contracts = cleaned_data.pop('contractors')
        who_is_contracted = cleaned_data.pop('contracted')
        contract, created = super(ContractsCrawler, self).save_instance(cleaned_data)
        for relation, entities in ((contract.contracted, who_is_contracted),
                                   (contract.contractors, who_contracts)):
            relation.clear()
            relation.add(*list(entities))
        return contract, created
    def _hasher(self, instance):
        """Reduce a BASE payload to the (id, price, signing date) tuple."""
        signing_field = DateField(input_formats=["%d-%m-%Y"], required=False)
        return (instance['id'],
                PriceField().clean(instance['initialContractualPrice']),
                signing_field.clean(instance['signingDate']))
    def _values_list(self):
        """Database fields matching the tuple produced by `_hasher`."""
        return ('base_id', 'price', 'signing_date')
class TendersCrawler(DynamicCrawler):
    """Crawler used to retrieve tenders from the BASE REST API."""

    # ``%d`` in ``object_url`` is filled with a tender's BASE id.
    object_url = 'http://www.base.gov.pt/base2/rest/anuncios/%d'
    object_list_url = 'http://www.base.gov.pt/base2/rest/anuncios'
    object_name = 'tender'
    object_model = models.Tender

    @staticmethod
    def clean_data(data):
        """Map a raw BASE tender JSON document to validated form data.

        Raises:
            ValidationError: when the payload fails ``TenderForm`` validation.
        """
        payload = {
            'base_id': data['id'],
            'act_type': data['type'],
            'model_type': data['modelType'],
            'contract_type': data['contractType'],
            'description': data['contractDesignation'],
            'announcement_number': data['announcementNumber'],
            'dre_url': data['reference'],
            'publication_date': data['drPublicationDate'],
            'deadline_date': data['proposalDeadline'],
            'cpvs': data['cpvs'],
            'category': data['cpvs'],
            'price': data['basePrice'],
            'contractors': data['contractingEntities'],
        }
        # Normalize the publication date; the form knows the raw format.
        payload['publication_date'] = \
            TenderForm.prepare_publication_date(payload)

        form = TenderForm(payload)
        if not form.is_valid():
            logger.error('Validation of tender "%d" failed' % data['id'])
            raise ValidationError(form.errors)
        return form.cleaned_data

    def save_instance(self, cleaned_data):
        """Persist a tender and rebuild its contracting-entity relation."""
        entities = cleaned_data.pop('contractors')
        tender, created = super(TendersCrawler, self).save_instance(cleaned_data)
        tender.contractors.clear()
        tender.contractors.add(*list(entities))
        return tender, created

    def _hasher(self, instance):
        """Build the (id, price, publication date) change-detection tuple."""
        publication_date_field = DateField(input_formats=["%d-%m-%Y"])
        # e.g. tender 81558 has no price set
        raw_price = instance['basePrice']
        price = (PriceField(required=False).clean(raw_price)
                 if raw_price is not None else None)
        return (instance['id'], price,
                publication_date_field.clean(instance['drPublicationDate']))

    def _values_list(self):
        """Model field names matching the tuple produced by ``_hasher``."""
        return ('base_id', 'price', 'publication_date')
| 35.627953 | 82 | 0.560749 | 17,711 | 0.978562 | 0 | 0 | 3,896 | 0.215261 | 0 | 0 | 5,006 | 0.27659 |
39d2a63c210e03ad35c58e5b3b5e1afaa5b2db56 | 36,251 | py | Python | com/precisely/apis/model/validate_mailing_address_uscanapi_options.py | PreciselyData/PreciselyAPIsSDK-Python | 28ffff0c96d81d3a53a5599c987d54d7b632b508 | [
"Apache-2.0"
] | null | null | null | com/precisely/apis/model/validate_mailing_address_uscanapi_options.py | PreciselyData/PreciselyAPIsSDK-Python | 28ffff0c96d81d3a53a5599c987d54d7b632b508 | [
"Apache-2.0"
] | null | null | null | com/precisely/apis/model/validate_mailing_address_uscanapi_options.py | PreciselyData/PreciselyAPIsSDK-Python | 28ffff0c96d81d3a53a5599c987d54d7b632b508 | [
"Apache-2.0"
] | null | null | null | """
Precisely APIs
Enhance & enrich your data, applications, business processes, and workflows with rich location, information, and identify APIs. # noqa: E501
The version of the OpenAPI document: 11.9.3
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from com.precisely.apis.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from com.precisely.apis.exceptions import ApiAttributeError
class ValidateMailingAddressUSCANAPIOptions(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.

    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
        allowed_values (dict): enum constraints keyed by attribute path
            (none for this model).
        attribute_map (dict): python attribute name -> JSON key in the
            OpenAPI definition.
        validations (dict): length/range/regex constraints keyed by
            attribute path (none for this model).
        additional_properties_type (tuple): types accepted as values of
            properties not declared in ``openapi_types``.
    """

    # No enum-restricted attributes for this model.
    allowed_values = {
    }

    # No length/range/regex validations for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # NOTE(review): generator quirk - declared without ``self``;
        # presumably the project's ``cached_property`` handles binding.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Every option in this model is a plain string flag.
        return {
            'output_address_blocks': (str,),  # noqa: E501
            'perform_us_processing': (str,),  # noqa: E501
            'perform_dpv': (str,),  # noqa: E501
            'output_formatted_on_fail': (str,),  # noqa: E501
            'output_postal_code_separator': (str,),  # noqa: E501
            'output_country_format': (str,),  # noqa: E501
            'keep_multimatch': (str,),  # noqa: E501
            'output_casing': (str,),  # noqa: E501
            'maximum_results': (str,),  # noqa: E501
            'output_record_type': (str,),  # noqa: E501
            'output_field_level_return_codes': (str,),  # noqa: E501
            'dpv_determine_no_stat': (str,),  # noqa: E501
            'street_matching_strictness': (str,),  # noqa: E501
            'can_french_apartment_label': (str,),  # noqa: E501
            'output_abbreviated_alias': (str,),  # noqa: E501
            'dpv_successful_status_condition': (str,),  # noqa: E501
            'standard_address_pmb_line': (str,),  # noqa: E501
            'firm_matching_strictness': (str,),  # noqa: E501
            'can_rural_route_format': (str,),  # noqa: E501
            'can_prefer_house_num': (str,),  # noqa: E501
            'output_preferred_alias': (str,),  # noqa: E501
            'directional_matching_strictness': (str,),  # noqa: E501
            'extract_firm': (str,),  # noqa: E501
            'fail_on_cmra_match': (str,),  # noqa: E501
            'can_non_civic_format': (str,),  # noqa: E501
            'can_sslvr_flg': (str,),  # noqa: E501
            'output_street_name_alias': (str,),  # noqa: E501
            'perform_ews': (str,),  # noqa: E501
            'can_output_city_format': (str,),  # noqa: E501
            'dual_address_logic': (str,),  # noqa: E501
            'perform_suite_link': (str,),  # noqa: E501
            'can_standard_address_format': (str,),  # noqa: E501
            'output_preferred_city': (str,),  # noqa: E501
            'output_multinational_characters': (str,),  # noqa: E501
            'can_delivery_office_format': (str,),  # noqa: E501
            'perform_lacs_link': (str,),  # noqa: E501
            'can_dual_address_logic': (str,),  # noqa: E501
            'extract_urb': (str,),  # noqa: E501
            'standard_address_format': (str,),  # noqa: E501
            'can_french_format': (str,),  # noqa: E501
            'dpv_determine_vacancy': (str,),  # noqa: E501
            'can_english_apartment_label': (str,),  # noqa: E501
            'suppress_zplus_phantom_carrier_r777': (str,),  # noqa: E501
            'can_output_city_alias': (str,),  # noqa: E501
            'output_short_city_name': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model is not part of a discriminated (polymorphic) schema.
        return None

    # python attribute name -> serialized JSON key in the API document.
    attribute_map = {
        'output_address_blocks': 'OutputAddressBlocks',  # noqa: E501
        'perform_us_processing': 'PerformUSProcessing',  # noqa: E501
        'perform_dpv': 'PerformDPV',  # noqa: E501
        'output_formatted_on_fail': 'OutputFormattedOnFail',  # noqa: E501
        'output_postal_code_separator': 'OutputPostalCodeSeparator',  # noqa: E501
        'output_country_format': 'OutputCountryFormat',  # noqa: E501
        'keep_multimatch': 'KeepMultimatch',  # noqa: E501
        'output_casing': 'OutputCasing',  # noqa: E501
        'maximum_results': 'MaximumResults',  # noqa: E501
        'output_record_type': 'OutputRecordType',  # noqa: E501
        'output_field_level_return_codes': 'OutputFieldLevelReturnCodes',  # noqa: E501
        'dpv_determine_no_stat': 'DPVDetermineNoStat',  # noqa: E501
        'street_matching_strictness': 'StreetMatchingStrictness',  # noqa: E501
        'can_french_apartment_label': 'CanFrenchApartmentLabel',  # noqa: E501
        'output_abbreviated_alias': 'OutputAbbreviatedAlias',  # noqa: E501
        'dpv_successful_status_condition': 'DPVSuccessfulStatusCondition',  # noqa: E501
        'standard_address_pmb_line': 'StandardAddressPMBLine',  # noqa: E501
        'firm_matching_strictness': 'FirmMatchingStrictness',  # noqa: E501
        'can_rural_route_format': 'CanRuralRouteFormat',  # noqa: E501
        'can_prefer_house_num': 'CanPreferHouseNum',  # noqa: E501
        'output_preferred_alias': 'OutputPreferredAlias',  # noqa: E501
        'directional_matching_strictness': 'DirectionalMatchingStrictness',  # noqa: E501
        'extract_firm': 'ExtractFirm',  # noqa: E501
        'fail_on_cmra_match': 'FailOnCMRAMatch',  # noqa: E501
        'can_non_civic_format': 'CanNonCivicFormat',  # noqa: E501
        'can_sslvr_flg': 'CanSSLVRFlg',  # noqa: E501
        'output_street_name_alias': 'OutputStreetNameAlias',  # noqa: E501
        'perform_ews': 'PerformEWS',  # noqa: E501
        'can_output_city_format': 'CanOutputCityFormat',  # noqa: E501
        'dual_address_logic': 'DualAddressLogic',  # noqa: E501
        'perform_suite_link': 'PerformSuiteLink',  # noqa: E501
        'can_standard_address_format': 'CanStandardAddressFormat',  # noqa: E501
        'output_preferred_city': 'OutputPreferredCity',  # noqa: E501
        'output_multinational_characters': 'OutputMultinationalCharacters',  # noqa: E501
        'can_delivery_office_format': 'CanDeliveryOfficeFormat',  # noqa: E501
        'perform_lacs_link': 'PerformLACSLink',  # noqa: E501
        'can_dual_address_logic': 'CanDualAddressLogic',  # noqa: E501
        'extract_urb': 'ExtractUrb',  # noqa: E501
        'standard_address_format': 'StandardAddressFormat',  # noqa: E501
        'can_french_format': 'CanFrenchFormat',  # noqa: E501
        'dpv_determine_vacancy': 'DPVDetermineVacancy',  # noqa: E501
        'can_english_apartment_label': 'CanEnglishApartmentLabel',  # noqa: E501
        'suppress_zplus_phantom_carrier_r777': 'SuppressZplusPhantomCarrierR777',  # noqa: E501
        'can_output_city_alias': 'CanOutputCityAlias',  # noqa: E501
        'output_short_city_name': 'OutputShortCityName',  # noqa: E501
    }

    # No read-only attributes: any declared option may be set via __init__.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """ValidateMailingAddressUSCANAPIOptions - a model defined in OpenAPI.

        Deserialization constructor: builds an instance from API response
        data. Unlike ``__init__``, it does not reject values for
        read-only attributes.

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a ``TypeError`` on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys drilled down to reach this
                model in the received data (used in error reporting).
            _spec_property_naming (bool): True if variable names in the
                input use the serialized names from the OpenAPI document,
                False for pythonic snake-case names (default).
            _configuration (Configuration): deserialization configuration;
                when set, its ``discard_unknown_keys`` flag controls
                whether unknown keyword arguments are silently dropped.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed, so a discriminator is not followed
                twice while walking allOf/oneOf hierarchies.
            **options (str): any option field listed in ``attribute_map``
                (e.g. ``output_address_blocks``, ``perform_dpv``). All are
                optional strings; omitted options use server-side defaults
                described in the API documentation.

        Positional arguments are rejected with ``ApiTypeError``.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass any subclass __new__ logic; attributes are set below.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes every instance must carry;
    # consulted by the base model machinery.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """ValidateMailingAddressUSCANAPIOptions - a model defined in OpenAPI.

        User-facing constructor. Identical to ``_from_openapi_data``
        except that assigning a read-only attribute (none declared for
        this model) raises ``ApiAttributeError``.

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a ``TypeError`` on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys drilled down to reach this
                model in the received data (used in error reporting).
            _spec_property_naming (bool): True if variable names in the
                input use the serialized names from the OpenAPI document,
                False for pythonic snake-case names (default).
            _configuration (Configuration): deserialization configuration;
                when set, its ``discard_unknown_keys`` flag controls
                whether unknown keyword arguments are silently dropped.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed, so a discriminator is not followed
                twice while walking allOf/oneOf hierarchies.
            **options (str): any option field listed in ``attribute_map``
                (e.g. ``output_address_blocks``, ``perform_dpv``). All are
                optional strings; omitted options use server-side defaults
                described in the API documentation.

        Positional arguments are rejected with ``ApiTypeError``.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
39d42321cd5a87223e9348e07673ab77d3799ca1 | 105 | py | Python | serverless/aws/features/__init__.py | captain-fox/serverless-builder | d79d120578d692dd34dd2f0a3bb75cc8ec719c81 | [
"MIT"
] | 3 | 2022-03-16T14:25:03.000Z | 2022-03-24T15:04:55.000Z | serverless/aws/features/__init__.py | captain-fox/serverless-builder | d79d120578d692dd34dd2f0a3bb75cc8ec719c81 | [
"MIT"
] | 3 | 2022-01-24T20:11:15.000Z | 2022-01-26T19:33:20.000Z | serverless/aws/features/__init__.py | epsyhealth/serverless-builder | 6a1f943b5cabc4c4748234b1623a9ced6464043a | [
"MIT"
] | 1 | 2022-02-15T13:54:29.000Z | 2022-02-15T13:54:29.000Z | from .api_handler import DefaultFourHundredResponse
from .api_keys import ApiKeys
from .xray import XRay
| 26.25 | 51 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
39d50b087b533ec75540f6aeefa21a97dbda7cfa | 7,392 | py | Python | tests/unit/test_resources.py | butla/PyDAS | 39df5abbe9563b58da7caaa191b89852fb122ab7 | [
"MIT"
] | 13 | 2016-06-29T13:35:05.000Z | 2021-05-25T09:47:31.000Z | tests/unit/test_resources.py | butla/PyDAS | 39df5abbe9563b58da7caaa191b89852fb122ab7 | [
"MIT"
] | 1 | 2016-07-11T23:11:33.000Z | 2016-07-11T23:11:33.000Z | tests/unit/test_resources.py | butla/PyDAS | 39df5abbe9563b58da7caaa191b89852fb122ab7 | [
"MIT"
] | 3 | 2017-10-17T15:54:25.000Z | 2022-03-24T01:11:37.000Z | import copy
import json
import os
from unittest.mock import MagicMock, call
from bravado.client import SwaggerClient
import bravado.exception
from bravado_falcon import FalconHttpClient
import falcon
import pytest
import pytest_falcon.plugin
import responses
import yaml
from data_acquisition.acquisition_request import AcquisitionRequest, RequestNotFoundError
from data_acquisition.consts import (ACQUISITION_PATH, DOWNLOAD_CALLBACK_PATH,
METADATA_PARSER_CALLBACK_PATH, GET_REQUEST_PATH)
from data_acquisition.resources import (get_download_callback_url, get_metadata_callback_url,
AcquisitionResource)
import tests
from tests.consts import (TEST_DOWNLOAD_REQUEST, TEST_DOWNLOAD_CALLBACK, TEST_ACQUISITION_REQ,
TEST_ACQUISITION_REQ_JSON)
# Frozen wall-clock value that the `fake_time` fixture makes `time.time()` return;
# FAKE_TIMESTAMP is its integer form that the stored requests' timestamps use.
FAKE_TIME = 234.25
FAKE_TIMESTAMP = 234
@pytest.fixture(scope='function')
def client(falcon_api):
    """HTTP test client for the Falcon API whose `post` accepts a plain dict.

    The dict is serialized to JSON and sent with the proper Content-Type,
    so individual tests don't have to repeat that boilerplate.
    """
    client = pytest_falcon.plugin.Client(falcon_api)
    # `post=client.post` captures the original bound method as a default
    # argument, so the wrapper can delegate to it without recursing.
    client.post = (lambda path, data, post=client.post:
                   post(path, json.dumps(data), headers={'Content-Type': 'application/json'}))
    return client
@pytest.fixture(scope='session')
def swagger_spec():
    """Parse the Swagger spec (api_doc.yaml) that sits next to the tests package.

    Session-scoped: the file never changes during a test run, so it is
    loaded only once.
    """
    spec_file_path = os.path.join(tests.__path__[0], '../api_doc.yaml')
    with open(spec_file_path) as spec_file:
        # safe_load: the spec is plain data. Bare yaml.load() without an
        # explicit Loader is deprecated (PyYAML >= 5.1) and can construct
        # arbitrary Python objects.
        return yaml.safe_load(spec_file)
@pytest.fixture(scope='function')
def client_no_req_validation(falcon_api, swagger_spec):
    """Swagger client over the Falcon API with request validation disabled."""
    http_client = FalconHttpClient(falcon_api)
    config = {'validate_requests': False}
    return SwaggerClient.from_spec(swagger_spec, http_client=http_client, config=config)
@pytest.fixture(scope='function')
def client_swagger(falcon_api, swagger_spec):
    """Swagger client over the Falcon API with default (validating) settings."""
    http_client = FalconHttpClient(falcon_api)
    return SwaggerClient.from_spec(swagger_spec, http_client=http_client)
@pytest.fixture(scope='function')
def acquisition_requests_resource(das_config, mock_executor, mock_req_store, fake_time):
    """AcquisitionResource wired to mocks; `fake_time` is requested only for
    its side effect of freezing `time.time()`."""
    return AcquisitionResource(mock_req_store, mock_executor, das_config)
@pytest.fixture(scope='function')
def req_store_get(mock_req_store):
    """Make the store's `get` return a request, and expose that mock method.

    A deep copy is returned so tests that mutate the fetched request don't
    corrupt the shared TEST_ACQUISITION_REQ constant.
    """
    mock_req_store.get.return_value = copy.deepcopy(TEST_ACQUISITION_REQ)
    return mock_req_store.get
@pytest.fixture(scope='function')
def fake_time(monkeypatch):
    """Freeze `time.time()` at FAKE_TIME for the duration of a test."""
    monkeypatch.setattr('time.time', lambda: FAKE_TIME)
def test_get_download_callback_url():
    """The downloader callback URL is the DAS base URL plus the request id."""
    url = get_download_callback_url('https://some-test-das-url', 'some-test-id')
    assert url == 'https://some-test-das-url/v1/das/callback/downloader/some-test-id'
def test_get_metadata_callback_url():
    """The metadata callback URL is the DAS base URL plus the request id."""
    url = get_metadata_callback_url('https://some-test-das-url', 'some-test-id')
    assert url == 'https://some-test-das-url/v1/das/callback/metadata/some-test-id'
@responses.activate
def test_external_service_call_not_ok(acquisition_requests_resource):
    """_external_service_call reports failure when the service returns 404."""
    test_url = 'https://some-fake-url/'
    # `responses` intercepts the outgoing HTTP POST and answers with 404.
    responses.add(responses.POST, test_url, status=404)
    assert not acquisition_requests_resource._external_service_call(
        url=test_url, data={'a': 'b'}, token='bearer fake-token', request_id='some-fake-id')
def test_processing_acquisition_request_for_hdfs(acquisition_requests_resource, mock_req_store):
    """An hdfs:// source skips downloading: the request is marked as already
    downloaded, stored, and passed straight to metadata parsing."""
    # arrange: stub metadata enqueueing and build an hdfs-sourced request
    mock_enqueue_metadata_req = MagicMock()
    acquisition_requests_resource._enqueue_metadata_request = mock_enqueue_metadata_req
    hdfs_acquisition_req = copy.deepcopy(TEST_ACQUISITION_REQ)
    hdfs_acquisition_req.source = TEST_ACQUISITION_REQ.source.replace('http://', 'hdfs://')
    proper_saved_request = copy.deepcopy(hdfs_acquisition_req)
    proper_saved_request.set_downloaded()
    fake_token = 'bearer asdasdasdasd'
    # act
    acquisition_requests_resource._process_acquisition_request(hdfs_acquisition_req, fake_token)
    # assert: metadata enqueued and the DOWNLOADED request persisted
    mock_enqueue_metadata_req.assert_called_with(proper_saved_request, None, fake_token)
    mock_req_store.put.assert_called_with(proper_saved_request)
def test_acquisition_bad_request(client_no_req_validation):
    """Submitting a request without the mandatory 'category' field fails."""
    incomplete_request = dict(TEST_DOWNLOAD_REQUEST)
    del incomplete_request['category']
    with pytest.raises(bravado.exception.HTTPError):
        client_no_req_validation.rest.submitAcquisitionRequest(body=incomplete_request).result()
def test_downloader_callback_failed(client, fake_time, mock_req_store, req_store_get):
    """A downloader callback with state ERROR marks the stored request as
    ERROR and stamps it with the (frozen) current time."""
    failed_callback_req = dict(TEST_DOWNLOAD_CALLBACK)
    failed_callback_req['state'] = 'ERROR'
    response = client.post(
        path=DOWNLOAD_CALLBACK_PATH.format(req_id=TEST_ACQUISITION_REQ.id),
        data=failed_callback_req)
    assert response.status == falcon.HTTP_200
    # The request persisted back to the store must carry the ERROR state and
    # an ERROR timestamp equal to the frozen clock value.
    updated_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
    updated_request.state = 'ERROR'
    updated_request.timestamps['ERROR'] = FAKE_TIMESTAMP
    mock_req_store.put.assert_called_with(updated_request)
def test_metadata_callback_failed(client, fake_time, mock_req_store, req_store_get):
    """A FAILED metadata-parser callback moves the stored request to ERROR."""
    response = client.post(
        path=METADATA_PARSER_CALLBACK_PATH.format(req_id=TEST_ACQUISITION_REQ.id),
        data={'state': 'FAILED'})
    assert response.status == falcon.HTTP_200
    # The persisted request gets the ERROR state and the frozen timestamp.
    updated_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
    updated_request.state = 'ERROR'
    updated_request.timestamps['ERROR'] = FAKE_TIMESTAMP
    mock_req_store.put.assert_called_with(updated_request)
def test_get_request(das_api, client_swagger, req_store_get):
    """GET on a known request id returns the stored acquisition request."""
    # Bypass organization access checks for this test.
    das_api.request_management_res._org_checker = MagicMock()
    acquisition_request = client_swagger.rest.getRequest(req_id=TEST_ACQUISITION_REQ.id).result()
    assert AcquisitionRequest(**acquisition_request.__dict__) == TEST_ACQUISITION_REQ
def test_get_request_not_found(client, mock_req_store):
    """GET on an unknown request id yields 404."""
    mock_req_store.get.side_effect = RequestNotFoundError()
    resp = client.get(GET_REQUEST_PATH.format(req_id='some-fake-id'))
    assert resp.status == falcon.HTTP_404
def test_delete_request(das_api, client, mock_req_store, req_store_get):
    """DELETE on a known request id removes it from the store."""
    # Bypass organization access checks for this test.
    das_api.request_management_res._org_checker = MagicMock()
    response = client.delete(GET_REQUEST_PATH.format(req_id=TEST_ACQUISITION_REQ.id))
    assert response.status == falcon.HTTP_200
    mock_req_store.delete.assert_called_with(TEST_ACQUISITION_REQ)
def test_delete_request_not_found(client, mock_req_store):
    """DELETE on an unknown request id yields 404."""
    mock_req_store.get.side_effect = RequestNotFoundError()
    resp = client.delete(GET_REQUEST_PATH.format(req_id='fake-id'))
    assert resp.status == falcon.HTTP_404
@pytest.mark.parametrize('org_ids', [
    ['id-1'],
    ['id-1', 'id-2'],
    ['id-1', 'id-2', 'id-3'],
])
@pytest.mark.parametrize('acquisition_requests', [
    [TEST_ACQUISITION_REQ],
    [TEST_ACQUISITION_REQ, TEST_ACQUISITION_REQ]
])
def test_get_requests_for_org(org_ids, acquisition_requests,
                              das_api, client, mock_req_store):
    """Listing requests for multiple orgs queries the store once per org and
    concatenates the results."""
    # Bypass organization access checks for this test.
    das_api.acquisition_res._org_checker = MagicMock()
    mock_req_store.get_for_org.return_value = acquisition_requests
    response = client.get(path=ACQUISITION_PATH,
                          query_string='orgs=' + ','.join(org_ids))
    returned_requests = [AcquisitionRequest(**req_json) for req_json in response.json]
    assert response.status == falcon.HTTP_200
    assert returned_requests == acquisition_requests * len(org_ids)
    # `org_id` (was `id`, which shadowed the builtin): one store call per org.
    assert mock_req_store.get_for_org.call_args_list == [call(org_id) for org_id in org_ids]
| 38.103093 | 97 | 0.759199 | 0 | 0 | 0 | 0 | 2,697 | 0.364854 | 0 | 0 | 653 | 0.088339 |
39d5975250cb33441f80fb188d15a624f07f6415 | 4,216 | py | Python | GraphOfDocs.py | NC0DER/GraphOfDocs | 16603de9d8695ae8205117aa7123707d1dcbe0e0 | [
"Apache-2.0"
] | 12 | 2020-01-27T20:26:08.000Z | 2022-03-10T14:45:09.000Z | GraphOfDocs.py | NC0DER/GraphOfDocs | 16603de9d8695ae8205117aa7123707d1dcbe0e0 | [
"Apache-2.0"
] | 1 | 2021-11-17T11:45:55.000Z | 2021-11-17T11:45:55.000Z | GraphOfDocs.py | NC0DER/GraphOfDocs | 16603de9d8695ae8205117aa7123707d1dcbe0e0 | [
"Apache-2.0"
] | 2 | 2020-01-27T13:17:11.000Z | 2020-01-29T09:35:22.000Z | import sys
import platform
from neo4j import ServiceUnavailable
from GraphOfDocs.neo4j_wrapper import Neo4jDatabase
from GraphOfDocs.utils import generate_words, read_dataset, clear_screen
from GraphOfDocs.parse_args import parser
from GraphOfDocs.create import *
def graphofdocs(create, initialize, dirpath, window_size,
                extend_window, remove_stopwords, lemmatize, stem):
    """Build the graph-of-docs Neo4j database and/or run its initialization.

    Args:
        create: When true, wipe the database and rebuild it from `dirpath`.
        initialize: When true, run the graph algorithms afterwards.
        dirpath: Directory with the text dataset (only used when creating).
        window_size: Sliding-window size used when linking words.
        extend_window, remove_stopwords, lemmatize, stem: Preprocessing flags
            forwarded to `generate_words`.
    """
    skipped_files = []
    os_name = platform.system()
    # Connect to the local Neo4j instance; quit gracefully if unreachable.
    try:
        database = Neo4jDatabase('bolt://localhost:7687', 'neo4j', '123')
    except ServiceUnavailable:
        print('\t* Neo4j database is unavailable.')
        print('\t* Please check the database connection before running this app.')
        input('\t* Press any key to exit the app...')
        sys.exit(1)
    if create:
        # Start from a clean slate and keep Word nodes unique by key.
        database.execute('MATCH (n) DETACH DELETE n', 'w')
        database.execute('CREATE CONSTRAINT ON (word:Word) ASSERT word.key IS UNIQUE', 'w')
        # Each dataset record is a (filename, text) pair.
        dataset = read_dataset(dirpath)
        total_count = len(dataset)
        for count, (filename, text) in enumerate(dataset, start=1):
            print(f'Processing {count} out of {total_count} files...')
            words = generate_words(text, extend_window, remove_stopwords, lemmatize, stem)
            skipped_name = create_graph_of_words(words, database, filename, window_size)
            if skipped_name is not None:
                skipped_files.append(skipped_name)
            # Clear the screen so the progress counter redraws in place.
            clear_screen(os_name)
        # Report how many files made it in and log the skipped ones.
        skip_count = len(skipped_files)
        print(f'Created {total_count - skip_count}, skipped {skip_count} files.')
        print('Check skipped.log for info.')
        with open('skipped.log', 'w') as log:
            for name in skipped_files:
                log.write(name + '\n')
    if initialize:
        run_initial_algorithms(database)
        create_similarity_graph(database)
        create_clustering_tags(database)
    database.close()
    return
if __name__ == '__main__':
    # With no CLI arguments (argv holds only the script name), show the
    # usage help and exit.
    if len(sys.argv) == 1:
        parser.print_help()
        parser.exit()
    # Parse all arguments from terminal.
    args = parser.parse_args()
    # If create flag is set but no dirpath is specified, print error.
    if args.create and args.dirpath is None:
        parser.error('Please set the dirpath flag and specify a valid filepath!')
    # Else if create flag is specified along with a valid dirpath.
    elif args.create:
        print(args)
        # Run the graphofdocs function with create and initialize set to True.
        # The first argument (0th index) after the dirpath flag is the actual directory path.
        graphofdocs(True, True, args.dirpath[0], args.window_size[0],
            args.extend_window, args.insert_stopwords, args.lemmatize, args.stem)
    # Else if the reinitialize flag is specified, skip creation and only
    # re-run the initialization algorithms on the existing graph.
    elif args.reinitialize:
        print(args)
        # Run the graphofdocs function with create set to False and initialize set to True.
        # The directory path is None, since it is not needed when not creating.
        graphofdocs(False, True, None, args.window_size[0],
            args.extend_window, args.insert_stopwords, args.lemmatize, args.stem)
39d61db6e252ece16991b4c554bc384accb4d908 | 27,144 | py | Python | line/f_MessageService.py | winbotscript/LineService | 4c79029648e858e567378485e75276f865c1f73f | [
"Apache-2.0"
] | 1 | 2020-08-20T08:00:23.000Z | 2020-08-20T08:00:23.000Z | line/f_MessageService.py | winbotscript/LineService | 4c79029648e858e567378485e75276f865c1f73f | [
"Apache-2.0"
] | null | null | null | line/f_MessageService.py | winbotscript/LineService | 4c79029648e858e567378485e75276f865c1f73f | [
"Apache-2.0"
] | null | null | null | #
# Autogenerated by Frugal Compiler (3.4.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from threading import Lock
from frugal.middleware import Method
from frugal.exceptions import TApplicationExceptionType
from frugal.exceptions import TTransportExceptionType
from frugal.processor import FBaseProcessor
from frugal.processor import FProcessorFunction
from frugal.util.deprecate import deprecated
from frugal.util import make_hashable
from thrift.Thrift import TApplicationException
from thrift.Thrift import TMessageType
from thrift.transport.TTransport import TTransportException
from .ttypes import *
class Iface(object):
    """
    Generated service interface. Concrete handlers implement these methods;
    every method receives an FContext as its first argument in addition to
    the Thrift-defined parameters.
    """
    def fetchMessageOperations(self, ctx, localRevision, lastOpTimestamp, count):
        """
        Args:
            ctx: FContext
            localRevision: int (signed 64 bits)
            lastOpTimestamp: int (signed 64 bits)
            count: int (signed 32 bits)
        """
        pass
    def getLastReadMessageIds(self, ctx, chatId):
        """
        Args:
            ctx: FContext
            chatId: string
        """
        pass
    def multiGetLastReadMessageIds(self, ctx, chatIds):
        """
        Args:
            ctx: FContext
            chatIds: list of string
        """
        pass
class Client(Iface):
    """
    Generated synchronous RPC client. Each public method routes its call
    through the middleware chain built in __init__; the private
    _send_*/_recv_* helpers perform the wire-level request/response I/O.
    """
    def __init__(self, provider, middleware=None):
        """
        Create a new Client with an FServiceProvider containing a transport
        and protocol factory.
        Args:
            provider: FServiceProvider with TSynchronousTransport
            middleware: ServiceMiddleware or list of ServiceMiddleware
        """
        middleware = middleware or []
        if middleware and not isinstance(middleware, list):
            middleware = [middleware]
        self._transport = provider.get_transport()
        self._protocol_factory = provider.get_protocol_factory()
        self._oprot = self._protocol_factory.get_protocol(self._transport)
        self._iprot = self._protocol_factory.get_protocol(self._transport)
        # Serializes all writes to the shared output protocol.
        self._write_lock = Lock()
        middleware += provider.get_middleware()
        # Maps RPC name -> middleware-wrapped implementation.
        self._methods = {
            'fetchMessageOperations': Method(self._fetchMessageOperations, middleware),
            'getLastReadMessageIds': Method(self._getLastReadMessageIds, middleware),
            'multiGetLastReadMessageIds': Method(self._multiGetLastReadMessageIds, middleware),
        }
    def fetchMessageOperations(self, ctx, localRevision, lastOpTimestamp, count):
        """
        Args:
            ctx: FContext
            localRevision: int (signed 64 bits)
            lastOpTimestamp: int (signed 64 bits)
            count: int (signed 32 bits)
        """
        return self._methods['fetchMessageOperations']([ctx, localRevision, lastOpTimestamp, count])
    # Send the request, then block until the matching response is read.
    def _fetchMessageOperations(self, ctx, localRevision, lastOpTimestamp, count):
        self._send_fetchMessageOperations(ctx, localRevision, lastOpTimestamp, count)
        return self._recv_fetchMessageOperations(ctx)
    def _send_fetchMessageOperations(self, ctx, localRevision, lastOpTimestamp, count):
        oprot = self._oprot
        with self._write_lock:
            oprot.get_transport().set_timeout(ctx.timeout)
            oprot.write_request_headers(ctx)
            oprot.writeMessageBegin('fetchMessageOperations', TMessageType.CALL, 0)
            args = fetchMessageOperations_args()
            args.localRevision = localRevision
            args.lastOpTimestamp = lastOpTimestamp
            args.count = count
            args.write(oprot)
            oprot.writeMessageEnd()
            oprot.get_transport().flush()
    def _recv_fetchMessageOperations(self, ctx):
        self._iprot.read_response_headers(ctx)
        _, mtype, _ = self._iprot.readMessageBegin()
        # The server signalled an application-level failure instead of a reply.
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
                raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
            raise x
        result = fetchMessageOperations_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # A declared service exception travels inside the result struct.
        if result.e is not None:
            raise result.e
        if result.success is not None:
            return result.success
        x = TApplicationException(TApplicationExceptionType.MISSING_RESULT, "fetchMessageOperations failed: unknown result")
        raise x
    def getLastReadMessageIds(self, ctx, chatId):
        """
        Args:
            ctx: FContext
            chatId: string
        """
        return self._methods['getLastReadMessageIds']([ctx, chatId])
    # Same send/recv pattern as _fetchMessageOperations above.
    def _getLastReadMessageIds(self, ctx, chatId):
        self._send_getLastReadMessageIds(ctx, chatId)
        return self._recv_getLastReadMessageIds(ctx)
    def _send_getLastReadMessageIds(self, ctx, chatId):
        oprot = self._oprot
        with self._write_lock:
            oprot.get_transport().set_timeout(ctx.timeout)
            oprot.write_request_headers(ctx)
            oprot.writeMessageBegin('getLastReadMessageIds', TMessageType.CALL, 0)
            args = getLastReadMessageIds_args()
            args.chatId = chatId
            args.write(oprot)
            oprot.writeMessageEnd()
            oprot.get_transport().flush()
    def _recv_getLastReadMessageIds(self, ctx):
        self._iprot.read_response_headers(ctx)
        _, mtype, _ = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
                raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
            raise x
        result = getLastReadMessageIds_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.e is not None:
            raise result.e
        if result.success is not None:
            return result.success
        x = TApplicationException(TApplicationExceptionType.MISSING_RESULT, "getLastReadMessageIds failed: unknown result")
        raise x
    def multiGetLastReadMessageIds(self, ctx, chatIds):
        """
        Args:
            ctx: FContext
            chatIds: list of string
        """
        return self._methods['multiGetLastReadMessageIds']([ctx, chatIds])
    # Same send/recv pattern as _fetchMessageOperations above.
    def _multiGetLastReadMessageIds(self, ctx, chatIds):
        self._send_multiGetLastReadMessageIds(ctx, chatIds)
        return self._recv_multiGetLastReadMessageIds(ctx)
    def _send_multiGetLastReadMessageIds(self, ctx, chatIds):
        oprot = self._oprot
        with self._write_lock:
            oprot.get_transport().set_timeout(ctx.timeout)
            oprot.write_request_headers(ctx)
            oprot.writeMessageBegin('multiGetLastReadMessageIds', TMessageType.CALL, 0)
            args = multiGetLastReadMessageIds_args()
            args.chatIds = chatIds
            args.write(oprot)
            oprot.writeMessageEnd()
            oprot.get_transport().flush()
    def _recv_multiGetLastReadMessageIds(self, ctx):
        self._iprot.read_response_headers(ctx)
        _, mtype, _ = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
                raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
            raise x
        result = multiGetLastReadMessageIds_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.e is not None:
            raise result.e
        if result.success is not None:
            return result.success
        x = TApplicationException(TApplicationExceptionType.MISSING_RESULT, "multiGetLastReadMessageIds failed: unknown result")
        raise x
class Processor(FBaseProcessor):
    """
    Generated server-side processor: registers one FProcessorFunction per
    RPC under its Thrift method name, each wrapping the handler method in
    the supplied middleware.
    """
    def __init__(self, handler, middleware=None):
        """
        Create a new Processor.
        Args:
            handler: Iface
        """
        if middleware and not isinstance(middleware, list):
            middleware = [middleware]
        super(Processor, self).__init__()
        self.add_to_processor_map('fetchMessageOperations', _fetchMessageOperations(Method(handler.fetchMessageOperations, middleware), self.get_write_lock()))
        self.add_to_processor_map('getLastReadMessageIds', _getLastReadMessageIds(Method(handler.getLastReadMessageIds, middleware), self.get_write_lock()))
        self.add_to_processor_map('multiGetLastReadMessageIds', _multiGetLastReadMessageIds(Method(handler.multiGetLastReadMessageIds, middleware), self.get_write_lock()))
class _fetchMessageOperations(FProcessorFunction):
    """Server-side dispatcher for the fetchMessageOperations RPC.

    NOTE: this file is autogenerated; the `str(e)` fix below should also be
    mirrored in the generator template if the file is ever regenerated.
    """
    def __init__(self, handler, lock):
        super(_fetchMessageOperations, self).__init__(handler, lock)
    def process(self, ctx, iprot, oprot):
        """Read the call arguments, invoke the handler, write the reply."""
        args = fetchMessageOperations_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = fetchMessageOperations_result()
        try:
            result.success = self._handler([ctx, args.localRevision, args.lastOpTimestamp, args.count])
        except TalkException as e:
            # Declared service exception: serialized back inside the result.
            result.e = e
        except TApplicationException as ex:
            with self._lock:
                _write_application_exception(ctx, oprot, "fetchMessageOperations", exception=ex)
            return
        except Exception as e:
            with self._lock:
                # Was `message=e.message`: Python 3 exceptions have no
                # `.message` attribute, so that raised AttributeError here and
                # masked the original error. `str(e)` works on Python 2 and 3.
                _write_application_exception(ctx, oprot, "fetchMessageOperations", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
            raise
        with self._lock:
            try:
                oprot.write_response_headers(ctx)
                oprot.writeMessageBegin('fetchMessageOperations', TMessageType.REPLY, 0)
                result.write(oprot)
                oprot.writeMessageEnd()
                oprot.get_transport().flush()
            except TTransportException as e:
                # catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
                if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
                    raise _write_application_exception(ctx, oprot, "fetchMessageOperations", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.args[0])
                else:
                    raise e
class _getLastReadMessageIds(FProcessorFunction):
    """Server-side dispatcher for the getLastReadMessageIds RPC.

    NOTE: this file is autogenerated; the `str(e)` fix below should also be
    mirrored in the generator template if the file is ever regenerated.
    """
    def __init__(self, handler, lock):
        super(_getLastReadMessageIds, self).__init__(handler, lock)
    def process(self, ctx, iprot, oprot):
        """Read the call arguments, invoke the handler, write the reply."""
        args = getLastReadMessageIds_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getLastReadMessageIds_result()
        try:
            result.success = self._handler([ctx, args.chatId])
        except TalkException as e:
            # Declared service exception: serialized back inside the result.
            result.e = e
        except TApplicationException as ex:
            with self._lock:
                _write_application_exception(ctx, oprot, "getLastReadMessageIds", exception=ex)
            return
        except Exception as e:
            with self._lock:
                # Was `message=e.message`: Python 3 exceptions have no
                # `.message` attribute, so that raised AttributeError here and
                # masked the original error. `str(e)` works on Python 2 and 3.
                _write_application_exception(ctx, oprot, "getLastReadMessageIds", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
            raise
        with self._lock:
            try:
                oprot.write_response_headers(ctx)
                oprot.writeMessageBegin('getLastReadMessageIds', TMessageType.REPLY, 0)
                result.write(oprot)
                oprot.writeMessageEnd()
                oprot.get_transport().flush()
            except TTransportException as e:
                # catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
                if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
                    raise _write_application_exception(ctx, oprot, "getLastReadMessageIds", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.args[0])
                else:
                    raise e
class _multiGetLastReadMessageIds(FProcessorFunction):
    """Server-side dispatcher for the multiGetLastReadMessageIds RPC.

    NOTE: this file is autogenerated; the `str(e)` fix below should also be
    mirrored in the generator template if the file is ever regenerated.
    """
    def __init__(self, handler, lock):
        super(_multiGetLastReadMessageIds, self).__init__(handler, lock)
    def process(self, ctx, iprot, oprot):
        """Read the call arguments, invoke the handler, write the reply."""
        args = multiGetLastReadMessageIds_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = multiGetLastReadMessageIds_result()
        try:
            result.success = self._handler([ctx, args.chatIds])
        except TalkException as e:
            # Declared service exception: serialized back inside the result.
            result.e = e
        except TApplicationException as ex:
            with self._lock:
                _write_application_exception(ctx, oprot, "multiGetLastReadMessageIds", exception=ex)
            return
        except Exception as e:
            with self._lock:
                # Was `message=e.message`: Python 3 exceptions have no
                # `.message` attribute, so that raised AttributeError here and
                # masked the original error. `str(e)` works on Python 2 and 3.
                _write_application_exception(ctx, oprot, "multiGetLastReadMessageIds", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
            raise
        with self._lock:
            try:
                oprot.write_response_headers(ctx)
                oprot.writeMessageBegin('multiGetLastReadMessageIds', TMessageType.REPLY, 0)
                result.write(oprot)
                oprot.writeMessageEnd()
                oprot.get_transport().flush()
            except TTransportException as e:
                # catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
                if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
                    raise _write_application_exception(ctx, oprot, "multiGetLastReadMessageIds", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.args[0])
                else:
                    raise e
def _write_application_exception(ctx, oprot, method, ex_code=None, message=None, exception=None):
    """Serialize a TApplicationException reply for `method` and return it.

    Either pass a ready-made `exception`, or an `ex_code`/`message` pair
    from which one is constructed.
    """
    exc = exception if exception is not None else TApplicationException(type=ex_code, message=message)
    oprot.write_response_headers(ctx)
    oprot.writeMessageBegin(method, TMessageType.EXCEPTION, 0)
    exc.write(oprot)
    oprot.writeMessageEnd()
    oprot.get_transport().flush()
    return exc
class fetchMessageOperations_args(object):
    """
    Generated Thrift argument struct for fetchMessageOperations.
    Attributes:
     - localRevision
     - lastOpTimestamp
     - count
    """
    def __init__(self, localRevision=None, lastOpTimestamp=None, count=None):
        self.localRevision = localRevision
        self.lastOpTimestamp = lastOpTimestamp
        self.count = count
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.I64:
                    self.localRevision = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.lastOpTimestamp = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('fetchMessageOperations_args')
        # None-valued fields are omitted from the wire.
        if self.localRevision is not None:
            oprot.writeFieldBegin('localRevision', TType.I64, 2)
            oprot.writeI64(self.localRevision)
            oprot.writeFieldEnd()
        if self.lastOpTimestamp is not None:
            oprot.writeFieldBegin('lastOpTimestamp', TType.I64, 3)
            oprot.writeI64(self.lastOpTimestamp)
            oprot.writeFieldEnd()
        if self.count is not None:
            oprot.writeFieldBegin('count', TType.I32, 4)
            oprot.writeI32(self.count)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.localRevision))
        value = (value * 31) ^ hash(make_hashable(self.lastOpTimestamp))
        value = (value * 31) ^ hash(make_hashable(self.count))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class fetchMessageOperations_result(object):
    """
    Generated Thrift result struct for fetchMessageOperations:
    `success` carries the MessageOperations return value, `e` a declared
    TalkException.
    Attributes:
     - success
     - e
    """
    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = MessageOperations()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TalkException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('fetchMessageOperations_result')
        # None-valued fields are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class getLastReadMessageIds_args(object):
    """
    Generated Thrift argument struct for getLastReadMessageIds.
    Attributes:
     - chatId
    """
    def __init__(self, chatId=None):
        self.chatId = chatId
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.STRING:
                    self.chatId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('getLastReadMessageIds_args')
        # None-valued fields are omitted from the wire.
        if self.chatId is not None:
            oprot.writeFieldBegin('chatId', TType.STRING, 2)
            oprot.writeString(self.chatId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.chatId))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class getLastReadMessageIds_result(object):
    """
    Generated Thrift result struct for getLastReadMessageIds:
    `success` carries the LastReadMessageIds return value, `e` a declared
    TalkException.
    Attributes:
     - success
     - e
    """
    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = LastReadMessageIds()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TalkException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('getLastReadMessageIds_result')
        # None-valued fields are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class multiGetLastReadMessageIds_args(object):
    """
    Generated Thrift argument struct for multiGetLastReadMessageIds.
    Attributes:
     - chatIds
    """
    def __init__(self, chatIds=None):
        self.chatIds = chatIds
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.LIST:
                    self.chatIds = []
                    # elem602 is the serialized list length.
                    (_, elem602) = iprot.readListBegin()
                    for _ in range(elem602):
                        elem603 = iprot.readString()
                        self.chatIds.append(elem603)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('multiGetLastReadMessageIds_args')
        # None-valued fields are omitted from the wire.
        if self.chatIds is not None:
            oprot.writeFieldBegin('chatIds', TType.LIST, 2)
            oprot.writeListBegin(TType.STRING, len(self.chatIds))
            for elem604 in self.chatIds:
                oprot.writeString(elem604)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.chatIds))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class multiGetLastReadMessageIds_result(object):
    """
    Generated Thrift result struct for multiGetLastReadMessageIds:
    `success` carries a list of LastReadMessageIds, `e` a declared
    TalkException.
    Attributes:
     - success
     - e
    """
    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e
    def read(self, iprot):
        iprot.readStructBegin()
        # Consume fields until the STOP marker; unknown ids/types are skipped.
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    # elem605 is the serialized list length.
                    (_, elem605) = iprot.readListBegin()
                    for _ in range(elem605):
                        elem606 = LastReadMessageIds()
                        elem606.read(iprot)
                        self.success.append(elem606)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TalkException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('multiGetLastReadMessageIds_result')
        # None-valued fields are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for elem607 in self.success:
                elem607.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __hash__(self):
        # 17/31 multiply-xor combination over every field.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
| 35.57536 | 171 | 0.597222 | 26,053 | 0.959807 | 0 | 0 | 0 | 0 | 0 | 0 | 2,997 | 0.110411 |
39d6297dad17364278641be6d1ed6ea276348300 | 886 | py | Python | Medium/279. Perfect Squares/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | 3 | 2020-05-09T12:55:09.000Z | 2022-03-11T18:56:05.000Z | Medium/279. Perfect Squares/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | null | null | null | Medium/279. Perfect Squares/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | 1 | 2022-03-11T18:56:16.000Z | 2022-03-11T18:56:16.000Z | # 279. Perfect Squares
# Runtime: 60 ms, faster than 96.81% of Python3 online submissions for Perfect Squares.
# Memory Usage: 14.7 MB, less than 42.95% of Python3 online submissions for Perfect Squares.
class Solution:
    """LeetCode 279 (Perfect Squares), solved by greedy enumeration of the count."""

    def numSquares(self, n: int) -> int:
        """Return the least number of perfect squares that sum to n."""
        squares = {i * i for i in range(1, int(n**0.5) + 1)}

        def decomposable(remainder: int, count: int) -> bool:
            # True when `remainder` splits into exactly `count` perfect squares.
            if count == 1:
                return remainder in squares
            return any(decomposable(remainder - s, count - 1) for s in squares)

        # Try counts from smallest up; count == n (all ones) always succeeds,
        # so the loop terminates.
        count = 1
        while not decomposable(n, count):
            count += 1
        return count
39d67d232e49de41fe6fece39a3376037a1fe5cc | 1,650 | py | Python | simulator/card_defs.py | NewLordVile/alphasheep | 2a86cf0009b686edafee8c80aa961d7075a5bd46 | [
"MIT"
] | 8 | 2019-11-25T22:05:58.000Z | 2022-01-19T23:48:39.000Z | simulator/card_defs.py | NewLordVile/alphasheep | 2a86cf0009b686edafee8c80aa961d7075a5bd46 | [
"MIT"
] | 5 | 2019-12-23T12:43:40.000Z | 2020-03-19T19:16:46.000Z | simulator/card_defs.py | NewLordVile/alphasheep | 2a86cf0009b686edafee8c80aa961d7075a5bd46 | [
"MIT"
] | 4 | 2020-03-14T21:25:29.000Z | 2022-01-27T22:59:31.000Z | """
Definitions for Card, Suit, Pip, etc.
WARN: DO NOT CHANGE THE ENUMS IN THIS FILE!
Changing the values might affect the ordering of the state/action space of agents, and will break compatibility with previously
saved model checkpoints.
"""
from enum import IntEnum
class Suit(IntEnum):
    """Card suits.

    WARN (from module header): the numeric values participate in agents'
    state/action spaces — changing them breaks saved model checkpoints.
    """
    schellen = 0
    herz = 1
    gras = 2
    eichel = 3
    def __str__(self):
        # Human-readable name, e.g. "herz".
        return self.name
class Pip(IntEnum):
    """Card pip (rank) values.

    WARN (from module header): the numeric values participate in agents'
    state/action spaces — changing them breaks saved model checkpoints.
    """
    sieben = 1
    acht = 2
    neun = 3
    unter = 4
    ober = 5
    koenig = 6
    zehn = 7
    sau = 8
    def __str__(self):
        # Human-readable name, e.g. "ober".
        return self.name
class Card:
    """A playing card identified by suit and pip.

    Equality and hashing go through a single precomputed integer: enum
    attribute lookups are comparatively slow in Python, and cards are used
    heavily as dict keys, so the combined key is computed once in __init__.
    """

    def __init__(self, suit: Suit, pip: Pip):
        self.suit = suit
        self.pip = pip
        # Combine class identity, suit and pip into one collision-free key
        # (pip values stay below the factor 23, so the sum is unique).
        key = hash(Card) * 23
        key += self.suit.value * 23
        key += self.pip.value
        self._unique_hash = key

    def __str__(self):
        return "({} {})".format(self.suit.name, self.pip.name)

    def __eq__(self, other):
        if not isinstance(other, Card):
            return False
        return self._unique_hash == other._unique_hash

    def __hash__(self):
        return self._unique_hash
# Point value of each pip, used for scoring tricks.
pip_scores = {
    Pip.sieben: 0,
    Pip.acht: 0,
    Pip.neun: 0,
    Pip.unter: 2,
    Pip.ober: 3,
    Pip.koenig: 4,
    Pip.zehn: 10,
    Pip.sau: 11}
def new_deck():
    """Return all 32 cards of the deck in a fixed, suit-major order."""
    deck = []
    for suit in Suit:
        for pip in Pip:
            deck.append(Card(suit, pip))
    return deck
| 23.571429 | 129 | 0.633333 | 1,096 | 0.664242 | 0 | 0 | 0 | 0 | 0 | 0 | 602 | 0.364848 |
39d6fc42a60ee57ea74155e98d6216d785fa855c | 2,720 | py | Python | server/perform_action/common.py | darrenfoong/battleships | 2866207b3a55d24fc085beedbd735d489990e487 | [
"MIT"
] | 11 | 2020-01-15T14:25:48.000Z | 2021-11-25T04:21:18.000Z | server/perform_action/common.py | darrenfoong/battleships | 2866207b3a55d24fc085beedbd735d489990e487 | [
"MIT"
] | 8 | 2021-02-04T16:41:57.000Z | 2022-03-29T21:57:15.000Z | esp8266/common.py | pythings/PythingsOS | 276b41a32af7fa0d5395b2bb308e611f784f9711 | [
"Apache-2.0"
] | null | null | null |
# NOTE(review): presumably the replication factor for stored files — confirm.
MAX_COPIES = 2
# Socket buffer sizes in bytes.
RECV_SIZE = 1024
SEND_SIZE = 1024
# Central server endpoint.
SERVER_IP = "172.24.1.107"
SERVER_PORT = 10000
def recv_line(conn):
    """Receive one chunk (up to RECV_SIZE bytes) from the given socket.

    NOTE(review): despite the name, this does not read until a newline —
    it returns whatever a single recv() call delivers.
    NOTE(review): on Python 3, recv() returns bytes and the str + bytes
    concatenation below raises TypeError; the commented-out decode()
    variant suggests the intended fix — confirm target interpreter.
    """
    data = ""
    data += conn.recv(RECV_SIZE)
    # data += conn.recv(RECV_SIZE).decode("utf-8")
    return data
def make_request(entity_type, type, filename = None, auth = None, filesize = None, ip = None, ip_list = None , response_code = None, storage_space = None, used_space = None, port_no = None):
    """Build the wire representation (str of a dict) for one protocol message.

    Each message type carries a fixed, ordered subset of the fields below;
    the 'type' field always echoes the message type. Returns 0 for an
    unknown type (matching the original contract).
    """
    # All candidate field values, keyed by wire name.
    values = {
        "entity_type": entity_type,
        "type": type,
        "filename": filename,
        "auth": auth,
        "filesize": filesize,
        "ip": ip,
        "ip_list": ip_list,
        "response_code": response_code,
        "storage_space": storage_space,
        "used_space": used_space,
        "port": port_no,
    }
    # Field layout per message type. Order matters: the wire format is the
    # str() of a dict, and dict insertion order is preserved.
    layouts = {
        # download : client -> server
        "download": ("entity_type", "type", "filename", "ip", "auth"),
        # upload : client -> servers
        "upload": ("entity_type", "type", "filename", "filesize", "ip", "auth", "response_code"),
        # download_ack : server -> client
        "download_ack": ("entity_type", "type", "ip_list", "response_code", "filename", "filesize", "auth"),
        # upload_ack : server -> client
        "upload_ack": ("entity_type", "type", "ip", "response_code", "filename", "filesize", "auth"),
        # upload_complete_ack : storage_client -> client
        "upload_complete_ack": ("entity_type", "type", "filename", "response_code", "filesize", "auth", "ip"),
        # copy : server -> storage_client
        "copy": ("entity_type", "type", "filename", "filesize", "ip", "auth"),
        # add_storage : storage_client -> server
        "add_storage": ("entity_type", "type", "auth", "storage_space", "used_space", "port"),
        # storage_added_ack : server -> storage_client
        "storage_added_ack": ("entity_type", "type", "response_code", "auth"),
    }
    if type not in layouts:
        return 0
    request = {key: values[key] for key in layouts[type]}
    return str(request)
def read_request(req):
    """Parse a wire-format request (as produced by make_request) into a dict.

    Uses ast.literal_eval instead of eval(): requests arrive from network
    peers, and eval() would execute arbitrary expressions supplied by an
    untrusted sender. literal_eval accepts exactly the literal dicts that
    make_request emits.
    """
    import ast  # local import keeps the module's top-level import list unchanged
    return ast.literal_eval(req)
# Error Codes
# Numeric status codes carried in the 'response_code' field of ack messages.
CODE_SUCCESS = 300
CODE_FAILURE = 400
| 28.93617 | 190 | 0.683824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,010 | 0.371324 |
39da37adde81c90589b9c7e68358e7bc3b53628e | 1,361 | py | Python | repeat_samples.py | xiz675/OpenNMT-py | eaee466437d6a2f7c06a2401f9a8ef6c7757cabd | [
"MIT"
] | null | null | null | repeat_samples.py | xiz675/OpenNMT-py | eaee466437d6a2f7c06a2401f9a8ef6c7757cabd | [
"MIT"
] | null | null | null | repeat_samples.py | xiz675/OpenNMT-py | eaee466437d6a2f7c06a2401f9a8ef6c7757cabd | [
"MIT"
] | null | null | null | def repeat(srcs, convs, tags):
new_src = []
new_conv = []
new_tag = []
print("size before repeat: " + str(len(srcs)))
for i in zip(srcs, convs, tags):
tag_list = i[2].split(";")
for j in range(len(tag_list)):
new_src.append(i[0])
new_conv.append(i[1])
new_tag += tag_list
assert len(new_conv) == len(new_src) == len(new_tag)
print("size after repeat: " + str(len(new_src)))
return new_src, new_conv, new_tag
def write_to_file(file_path, entities):
    """Write each entity on its own line (UTF-8), overwriting the file."""
    with open(file_path, "w", encoding="utf-8") as out:
        for entity in entities:
            out.write(entity + "\n")
def read_file(file_path):
    """Return the file's lines (UTF-8) with trailing newlines stripped."""
    with open(file_path, "r", encoding="utf-8") as src:
        return [line.rstrip("\n") for line in src]
if __name__ == '__main__':
    # Expand the Twitter training split: one (post, conv) copy per tag.
    key = "train"
    base_path = "./data/Twitter/"
    src_path = base_path + key + "_post.txt"
    conv_path = base_path + key + "_conv.txt"
    tag_path = base_path + key + "_tag.txt"
    srcs = read_file(src_path)
    convs = read_file(conv_path)
    tags = read_file(tag_path)
    new_data = repeat(srcs, convs, tags)
    # NOTE(review): output names concatenate without a separator, producing
    # e.g. "trainnew_post.txt" — confirm whether "train_new_post.txt" was meant.
    write_to_file(base_path + key + "new_post.txt", new_data[0])
    write_to_file(base_path + key + "new_conv.txt", new_data[1])
    write_to_file(base_path + key + "new_tag.txt", new_data[2])
39daa2204b3c5436de83103da0b269b9aadad179 | 1,540 | py | Python | tests/test_movies.py | dipakgupta12/taste_dive | 37df3f67e6efdf961cca230a4b2c8cfe23a38984 | [
"MIT"
] | null | null | null | tests/test_movies.py | dipakgupta12/taste_dive | 37df3f67e6efdf961cca230a4b2c8cfe23a38984 | [
"MIT"
] | null | null | null | tests/test_movies.py | dipakgupta12/taste_dive | 37df3f67e6efdf961cca230a4b2c8cfe23a38984 | [
"MIT"
] | null | null | null | import mock
def test_index_template(app, client, db, captured_templates):
res = client.get('/')
assert res.status_code == 200
template, context = captured_templates[0]
assert template.name == "index.html"
@mock.patch("taste_dive.main.routes.get_movies", mock.MagicMock(return_value=[{"Title": "Fast & Furious 6"}]))
def test_search_template(app, client, db, captured_templates):
res = client.get('/search', data={"name": "Fast & Furious 6"})
assert res.status_code == 200
template, context = captured_templates[0]
assert template.name == "search.html"
@mock.patch("taste_dive.main.routes.get_movies", mock.MagicMock(return_value=[{"Title": "Fast & Furious 6"}]))
def test_search(app, client, db, captured_templates):
res = client.get('/search', data={"name": "Fast & Furious 6"})
assert res.status_code == 200
@mock.patch("taste_dive.main.routes.get_movie_detail", mock.MagicMock(return_value={"Title": "Fast & Furious 6"}))
def test_detail_template(app, client, db, movie, captured_templates):
res = client.get('/detail/{{movie.imdbID}}', data={"name": "Fast & Furious 6"})
assert res.status_code == 200
template, context = captured_templates[0]
assert template.name == "detail.html"
@mock.patch("taste_dive.main.routes.get_movie_detail", mock.MagicMock(return_value={"Title": "Fast & Furious 6"}))
def test_detail(app, client, movie, captured_templates):
res = client.get('/detail/{{movie.imdbID}}', data={"name": "Fast & Furious 6"})
assert res.status_code == 200
| 42.777778 | 114 | 0.701948 | 0 | 0 | 0 | 0 | 1,306 | 0.848052 | 0 | 0 | 459 | 0.298052 |
39dce94f390b2bc845f4a4548517b2bf61e50466 | 5,711 | py | Python | CiscoWebAuthManager.py | darizotas/ciscowebauth | aaac65b5e78fe3246f0d4dedaf44eea4d8d293cb | [
"BSD-3-Clause"
] | 1 | 2018-01-22T04:43:39.000Z | 2018-01-22T04:43:39.000Z | CiscoWebAuthManager.py | darizotas/ciscowebauth | aaac65b5e78fe3246f0d4dedaf44eea4d8d293cb | [
"BSD-3-Clause"
] | null | null | null | CiscoWebAuthManager.py | darizotas/ciscowebauth | aaac65b5e78fe3246f0d4dedaf44eea4d8d293cb | [
"BSD-3-Clause"
] | null | null | null | """Script that establishes a session in a wireless network managed by Cisco Web Authentication.
This script requests for re-establishing a session in a wireless network managed by Cisco Web
Authentication.
Copyright 2013 Dario B. darizotas at gmail dot com
This software is licensed under a new BSD License.
Unported License. http://opensource.org/licenses/BSD-3-Clause
"""
from wlanapi.wlanapiwrapper import *
from wlanapi.wlanconninfo import *
from webauth.CiscoWebAuth import *
import sys
import argparse
import ssl
class CiscoWebAuthManager:
    """Class responsible for loging-in/out from wireless networks managed by Cisco Web Authentication.

    Python 2 code (print statements, httplib/urllib). Certificate
    verification is deliberately disabled for the captive portal.
    """
    def __init__(self):
        """Initialises the class."""
        self.crawler = CiscoWebAuthCrawler()
    def isConnected(self, ssid):
        """Returns true whether it is currently connected to the Wlan identified by the given
        ssid.
        """
        try:
            info = WlanConnInfo()
            connected = info.isConnected(ssid)
            del info
            return connected
        except WlanConnError as err:
            # NOTE(review): if WlanConnInfo() itself raised, `info` is
            # unbound here and this `del` raises NameError — confirm.
            del info
            print err
            return False
    def _parseError(self, body):
        """Checks for an error or informative message"""
        msg = self.crawler.getMessage(body, 'err')
        if msg:
            print msg
        else:
            # Check whether for an informative message.
            msg = self.crawler.getMessage(body, 'info')
            if msg:
                print msg
            else:
                print 'I don\'t know how we arrived here. Check the Web:'
                print body
    def login(self, host, username, password):
        """Logs in to the wireless network"""
        # Portal uses a self-signed certificate, so verification is off.
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.verify_mode = ssl.CERT_NONE
        connection = httplib.HTTPSConnection(host, context=context)
        url = "/login.html"
        # Form fields expected by the Cisco Web Auth login page.
        params = urllib.urlencode({\
            'buttonClicked': 4, \
            'err_flag': 0, 'err_msg': '', 'info_flag': 0, 'info_msg': '', \
            'redirect_url': '', 'username': username, 'password': password \
            })
        headers = {\
            'Content-Type': 'application/x-www-form-urlencoded', \
            }
        print "Connecting Cisco Web Authentication..."
        try:
            connection.request("POST", url, params, headers)
            response = connection.getresponse()
        except (httplib.HTTPException, socket.error) as ex:
            print ex
            return False
        # 100 Continue.
        if response.status == 200:
            body = response.read()
            if self.crawler.isConnected(body):
                print 'Session re-established!'
            else:
                self._parseError(body)
        else:
            print response.status, response.reason
        connection.close()
        # Returns True for any completed HTTP exchange, even a failed login;
        # False only when the request itself could not be sent.
        return True
    def logout(self, host):
        """Logs out from the wireless network"""
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.verify_mode = ssl.CERT_NONE
        connection = httplib.HTTPSConnection(host, context=context)
        url = "/logout.html"
        params = urllib.urlencode({\
            # 'Logout': 'Logout', \
            'err_flag': 0, 'err_msg': '', 'userStatus': 1 \
            })
        headers = {\
            'Content-Type': 'application/x-www-form-urlencoded', \
            }
        print "Connecting Cisco Web Authentication..."
        try:
            connection.request("POST", url, params, headers)
            response = connection.getresponse()
        except (httplib.HTTPException, socket.error) as ex:
            print ex
            return False
        # 100 Continue.
        if response.status == 200:
            body = response.read()
            if self.crawler.isDisconnected(body):
                print 'Session ended!'
            else:
                self._parseError(body)
        else:
            print response.status, response.reason
        connection.close()
        return True
# Main
def login(args):
    """Wrapper function to use through argparse to login to the wireless network"""
    manager = CiscoWebAuthManager()
    # Only attempt the portal login when already associated to the SSID.
    if manager.isConnected(args.ssid):
        if not manager.login(args.host, args.user, args.pwd):
            sys.exit(1)
    else:
        print "Not associated to %s. There is nothing to do." % args.ssid
def logout(args):
    """Wrapper function to use through argparse to logout to the wireless network"""
    manager = CiscoWebAuthManager()
    # Only attempt the portal logout when still associated to the SSID.
    if manager.isConnected(args.ssid):
        if not manager.logout(args.host):
            sys.exit(1)
    else:
        print "Not associated to %s. There is nothing to do." % args.ssid
# Top-level argument parser
parser = argparse.ArgumentParser(description='Establishes a session in a wireless network managed ' \
    'by Cisco Web Authentication.')
# SSID wireless network param
parser.add_argument('ssid', help='SSID name of the wireless network')
parser.add_argument('host', help='Cisco Web Authentication hostname or IP')
subparser = parser.add_subparsers(title='sub-commands', help='Available sub-commands')
# Login sub-command
parserCmdLogin = subparser.add_parser('login', help='Login request')
parserCmdLogin.add_argument('-u', '--user', required=True, help='User name')
parserCmdLogin.add_argument('-p', '--pwd', required=True, help='Password')
parserCmdLogin.set_defaults(func=login)
# Logout sub-command
parserCmdLogout = subparser.add_parser('logout', help='Logout request')
parserCmdLogout.set_defaults(func=logout)
# Dispatch to the handler bound by set_defaults (login or logout).
args = parser.parse_args()
args.func(args)
sys.exit(0)
39dee2f2383aa49564e67055109a18b1b7a24546 | 192 | py | Python | qr_code/urls.py | mapreri/django-qr-code | 4792dcc19f04b0915dc715ba83ae22372aa78ce9 | [
"BSD-3-Clause"
] | null | null | null | qr_code/urls.py | mapreri/django-qr-code | 4792dcc19f04b0915dc715ba83ae22372aa78ce9 | [
"BSD-3-Clause"
] | null | null | null | qr_code/urls.py | mapreri/django-qr-code | 4792dcc19f04b0915dc715ba83ae22372aa78ce9 | [
"BSD-3-Clause"
] | null | null | null | from django.urls import path
from qr_code import views
app_name = 'qr_code'
urlpatterns = [
path('images/serve-qr-code-image/', views.serve_qr_code_image, name='serve_qr_code_image')
]
| 19.2 | 94 | 0.755208 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 59 | 0.307292 |
39df74f7e7ea40de0f014c2a1bd6b468baf99ae0 | 974 | py | Python | matching.py | siweiwang24/marriage | d0f041ef380562885177418944791491949d024e | [
"MIT"
] | null | null | null | matching.py | siweiwang24/marriage | d0f041ef380562885177418944791491949d024e | [
"MIT"
] | null | null | null | matching.py | siweiwang24/marriage | d0f041ef380562885177418944791491949d024e | [
"MIT"
] | null | null | null | """
Stable Marriage Problem solution using Gale-Shapley.
Copyright 2020. Siwei Wang.
"""
# pylint: disable=no-value-for-parameter
from typing import Optional
from click import command, option, Path
from read_validate import get_smp
from marriage import compute_smp
from write import print_results
@command()
@option('--filename', '-f', required=True,
        type=Path(exists=True, file_okay=True, dir_okay=False),
        help='Path to input json on which to run SMP algorithm.')
@option('--output', '-o', required=False,
        type=Path(exists=False, file_okay=True, dir_okay=False),
        help='Path to output file in which to print results.')
def main(filename: str, output: Optional[str]):
    """Execute smp algorithm on input and print results to output."""
    # Load preference tables, run Gale-Shapley, write/print engagements.
    # When --output is omitted, print_results receives None.
    men_pref, women_pref = get_smp(filename)
    men_engage, women_engage = compute_smp(men_pref, women_pref)
    print_results(men_engage, women_engage, output)
if __name__ == '__main__':
    # click supplies the parameters; the pylint pragma at the top of the
    # file silences the resulting no-value-for-parameter warning.
    main()
| 32.466667 | 69 | 0.724846 | 0 | 0 | 0 | 0 | 633 | 0.649897 | 0 | 0 | 333 | 0.341889 |
39e0cfb770931442146ef89aab0fb46b52dd6602 | 7,908 | py | Python | chimeric_blacklist.py | regnveig/juicer1.6_compact | 21cd24f4c711640584965704f4fa72e5a25b76e3 | [
"MIT"
] | null | null | null | chimeric_blacklist.py | regnveig/juicer1.6_compact | 21cd24f4c711640584965704f4fa72e5a25b76e3 | [
"MIT"
] | null | null | null | chimeric_blacklist.py | regnveig/juicer1.6_compact | 21cd24f4c711640584965704f4fa72e5a25b76e3 | [
"MIT"
] | null | null | null | import pysam
import json
import bisect
import subprocess
def LoadFragmentMap(RestrSitesMap):
    """Load a restriction-site map file into {contig_name: [cut positions]}.

    Each line is expected as: "<contig> <pos> <pos> ...", space-separated.
    """
    FragmentMap = {}
    with open(RestrSitesMap, 'rt') as MapFile:
        for Contig in MapFile:
            # FIX: use rstrip('\n') instead of Contig[:-1] — the slice
            # chopped the last digit of the final line whenever the file
            # had no trailing newline.
            Fields = Contig.rstrip('\n').split(' ')
            FragmentMap[Fields[0]] = [int(item) for item in Fields[1:]]
    return FragmentMap
def CalcDist(Item1, Item2):
    """Distance between two annotated read ends.

    Returns None when either end is missing or ambiguous (a list of
    candidates), +inf when the ends sit on different chromosomes, and the
    absolute position difference otherwise.
    """
    for end in (Item1, Item2):
        if end is None or type(end) == list:
            return None
    if Item1["Chr"] != Item2["Chr"]:
        return float("+inf")
    return abs(Item1["Pos"] - Item2["Pos"])
def SortItems(Item1, Item2):
    """Order two read-end dicts by (RefID, Pos); return ((ID, Pos), (ID, Pos))."""
    ordered = sorted((Item1, Item2), key=lambda end: (end["RefID"], end["Pos"]))
    return tuple((end["ID"], end["Pos"]) for end in ordered)
def ProcessQuery(Query, ChromSizes, MinMAPQ):
    """Classify one read block (all alignments sharing a query name).

    Query["ReadBlock"] holds (index, pysam alignment) pairs. Returns a dict
    with the original block, a "Type" label (Unmapped, MappingQualityFailed,
    NormalPaired, ChimericPaired, ChimericAmbiguous) and, for paired types,
    a sorted two-end "Pair".
    """
    # Filter unmapped
    if any([item[1].is_unmapped for item in Query["ReadBlock"]]): return { "ReadBlock": Query["ReadBlock"], "Type": "Unmapped" }
    if any([item[1].mapping_quality < MinMAPQ for item in Query["ReadBlock"]]): return { "ReadBlock": Query["ReadBlock"], "Type": "MappingQualityFailed" }
    # Create Sorter
    # Keys: read number (1/2) + alignment kind (p = primary, s = secondary/supplementary).
    TypeDict = { index: list() for index in ("1p", "1s", "2p", "2s") }
    # Annotation
    for index, item in Query["ReadBlock"]:
        Start = item.reference_start + 1
        End = item.reference_end
        CigarFirst = item.cigar[0]
        CigarLast = item.cigar[-1]
        # CIGAR ops 4/5 = soft/hard clip: extend the span over the clipped
        # bases, clamped to [1, contig length].
        SoftHard = (4, 5)
        if CigarFirst[0] in SoftHard:
            Start -= CigarFirst[1]
            if Start <= 0: Start = 1
        if CigarLast[0] in SoftHard:
            End += CigarLast[1]
            if End >= ChromSizes[item.reference_name]: End = ChromSizes[item.reference_name]
        Type = ("1" if item.is_read1 else "2") + ("s" if (item.is_secondary or item.is_supplementary) else "p")
        # 5'-most coordinate of the alignment: End for reverse-strand reads.
        TypeDict[Type].append({ "ID": int(index), "Chr": str(item.reference_name), "RefID": int(item.reference_id), "Pos": int(End) if item.is_reverse else int(Start) })
    # Create Pattern
    # Pattern = alignment counts per category (1p, 1s, 2p, 2s).
    Pattern = tuple([len(item) for index, item in TypeDict.items()])
    # Collapse: empty -> None, singleton -> the dict, multiple -> the list.
    TypeDict = { index: (None if not item else (item[0] if len(item) == 1 else item)) for index, item in TypeDict.items() }
    # Pairwise distances between read-1 and read-2 ends (None when a side is missing).
    Dist = { f"1{index1}2{index2}": CalcDist(TypeDict[f"1{index1}"], TypeDict[f"2{index2}"]) for index1, index2 in ('pp', 'ps', 'sp', 'ss')}
    # Norm Chimera 4 Ends
    if Pattern == (1, 1, 1, 1):
        # Both mates chimeric; accept when the split ends pair up within 1 kb.
        if ((Dist["1p2p"] < 1000) and (Dist["1s2s"] < 1000)) or ((Dist["1p2s"] < 1000) and (Dist["1s2p"] < 1000)):
            Sorted = SortItems(TypeDict["1p"], TypeDict["1s"])
            Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
            return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericPaired", "Pair": Pair }
        else: return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
    # Norm Chimera 3 Ends
    elif Pattern in ((1, 0, 1, 1), (1, 1, 1, 0)):
        # One mate chimeric: pair the non-chimeric primary with the farther
        # of the other mate's two ends (the distal junction side).
        if TypeDict["1s"] is None:
            if ((Dist["1p2p"] < 1000) or (Dist["1p2s"] < 1000)): Sorted = SortItems(TypeDict["1p"], TypeDict["2p"] if Dist["1p2p"] > Dist["1p2s"] else TypeDict["2s"])
            else: Sorted = None
        if TypeDict["2s"] is None:
            if ((Dist["1p2p"] < 1000) or (Dist["1s2p"] < 1000)): Sorted = SortItems(TypeDict["2p"], TypeDict["1p"] if Dist["1p2p"] > Dist["1s2p"] else TypeDict["1s"])
            else: Sorted = None
        if Sorted is None: return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
        Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
        return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericPaired", "Pair": Pair }
    # Regular Pair
    elif Pattern == (1, 0, 1, 0):
        Sorted = SortItems(TypeDict["1p"], TypeDict["2p"])
        Pair = [{ "Read": Query["ReadBlock"][Sorted[0][0]][1], "Pos": Sorted[0][1] }, { "Read": Query["ReadBlock"][Sorted[1][0]][1], "Pos": Sorted[1][1] }]
        return { "ReadBlock": Query["ReadBlock"], "Type": "NormalPaired", "Pair": Pair }
    # Collisions
    elif (Pattern[1] > 1) or (Pattern[3] > 1):
        pass # TODO Collisions
    # Other
    return { "ReadBlock": Query["ReadBlock"], "Type": "ChimericAmbiguous" }
def Main(InputFileSAM, OutputFileTXT, InterPairsTXT, ChimericAmbiguousFileSAM, UnmappedSAM, MappingQualityFailedSAM, StatsTXT, RestrictionSiteFile = None, MinMAPQ = 0):
    """Stream a name-grouped SAM file, classify each read block, emit Hi-C pairs.

    Outputs: sorted pair records to OutputFileTXT (gzip), a chr/pos
    projection to InterPairsTXT (gzip), rejected alignments to the three
    BAM paths, and summary statistics to StatsTXT (JSON).
    """
    Input = pysam.AlignmentFile(InputFileSAM, 'r', check_sq=False)
    # External pipeline: sort the pair lines, tee full records into
    # OutputFileTXT, and pipe a 4-column projection into InterPairsTXT.
    SortCommand = (f'sort -k2,2d -k6,6d -k4,4n -k8,8n -k1,1n -k5,5n -k3,3n | tee >( gzip -c > "{OutputFileTXT}" ) |' +
                   f' awk -F " " \'{{print $2 "\\t" $3 "\\t" $6 "\\t" $7}}\' | gzip -c > "{InterPairsTXT}"')
    Output = subprocess.Popen(SortCommand, shell=True, executable="/bin/bash", stdin=subprocess.PIPE)
    if RestrictionSiteFile is not None: FragmentMap = LoadFragmentMap(RestrictionSiteFile)
    TechInfo = {
        "ChimericAmbiguous": pysam.AlignmentFile(ChimericAmbiguousFileSAM, "wb", template = Input),
        "Unmapped": pysam.AlignmentFile(UnmappedSAM, "wb", template = Input),
        # FIX: this handle previously opened UnmappedSAM again, silently
        # ignoring the MappingQualityFailedSAM parameter and double-opening
        # the same path for writing.
        "MappingQualityFailed": pysam.AlignmentFile(MappingQualityFailedSAM, "wb", template = Input)
    }
    ChromSizes = { Input.references[i]: Input.lengths[i] for i in range(Input.nreferences) }
    Stats = { "SequencedReadPairs": 0, "NormalPaired": 0, "ChimericPaired": 0, "ChimericAmbiguous": 0, "MappingQualityFailed": 0, "Unmapped": 0, "Ligation": { "Motif": None, "LineCount": 0, "PresentCount": 0 } }
    Query = { "ReadName": None, "ReadBlock": [] }
    def BlockProcess():
        # Classify the accumulated block and route it to the right output.
        # FIX: skip the empty pseudo-block seen before the first record (and
        # for an empty input); it previously inflated SequencedReadPairs and
        # ChimericAmbiguous by one.
        if not Query["ReadBlock"]: return
        Stats["SequencedReadPairs"] += 1
        Query["ReadBlock"] = list(enumerate(Query["ReadBlock"]))
        Result = ProcessQuery(Query, ChromSizes, MinMAPQ)
        Stats[Result["Type"]] += 1
        if Result["Type"] in ("Unmapped", "ChimericAmbiguous", "MappingQualityFailed"):
            for index, Rec in Query["ReadBlock"]: TechInfo[Result["Type"]].write(Rec)
        if Result["Type"] in ("ChimericPaired", "NormalPaired"):
            Read1, Read2 = Result["Pair"]
            # Juicer merged_nodups-style record; fragment indices come from
            # bisecting the restriction map (0/1 placeholders without one).
            Line = ' '.join([
                '16' if Read1["Read"].is_reverse else '0',
                str(Read1["Read"].reference_name),
                str(Read1["Pos"]),
                '0' if RestrictionSiteFile is None else str(bisect.bisect(FragmentMap[Read1["Read"].reference_name], Read1["Pos"])),
                '16' if Read2["Read"].is_reverse else '0',
                str(Read2["Read"].reference_name),
                str(Read2["Pos"]),
                '1' if RestrictionSiteFile is None else str(bisect.bisect(FragmentMap[Read2["Read"].reference_name], Read2["Pos"])),
                str(Read1["Read"].mapping_quality),
                str(Read1["Read"].cigarstring),
                str(Read1["Read"].seq.__str__()),
                str(Read2["Read"].mapping_quality),
                str(Read2["Read"].cigarstring),
                str(Read2["Read"].seq.__str__()),
                str(Read1["Read"].query_name),
                str(Read2["Read"].query_name)
            ]) + '\n'
            Output.stdin.write(Line.encode('utf-8'))
    while 1:
        try:
            Record = next(Input)
            if not (Record.is_secondary or Record.is_supplementary):
                Stats["Ligation"]["LineCount"] += 1
                # TODO Add ligation counter
            if Record.query_name == Query["ReadName"]: Query["ReadBlock"].append(Record)
            else:
                # New query name: flush the previous block and start a new one.
                BlockProcess()
                Query["ReadName"] = Record.query_name
                Query["ReadBlock"].clear()
                Query["ReadBlock"].append(Record)
        except StopIteration:
            BlockProcess()
            Input.close()
            # FIX: close the output BAM handles so their buffers are flushed.
            for Handle in TechInfo.values(): Handle.close()
            Output.stdin.close()
            Output.wait()
            Stats["Alignable"] = Stats["ChimericPaired"] + Stats["NormalPaired"]
            # max(..., 1) guards the percentages against an empty input file.
            for stat in ("ChimericPaired", "ChimericAmbiguous", "NormalPaired", "Unmapped", "Alignable", "MappingQualityFailed"): Stats[stat] = { "Count": Stats[stat], "%": Stats[stat] / max(Stats["SequencedReadPairs"], 1) * 100 }
            Stats["Ligation"]["%"] = Stats["Ligation"]["PresentCount"] / max(Stats["SequencedReadPairs"], 1) * 100 # BUG WTF?
            # TODO Postprocessing? Library Complexity?
            # FIX: context manager instead of a leaked open() handle.
            with open(StatsTXT, 'wt') as StatsFile:
                json.dump(Stats, StatsFile, indent=4, ensure_ascii=False)
            break
# NOTE(review): hard-coded absolute input path and /dev/null sinks suggest a
# one-off debugging invocation; consider an argparse entry point instead.
Main(InputFileSAM = "/Data/NGS_Data/20211228_NGS_MinjaF_Pool/Results/Human_HiC/K1/splits/8_S73_L003.fastq.gz.filtered.sam", OutputFileTXT = "test_mergednodups.txt.gz", InterPairsTXT = "test_interpairs.txt.gz", MappingQualityFailedSAM = "/dev/null", ChimericAmbiguousFileSAM = "/dev/null", UnmappedSAM = "/dev/null", StatsTXT = "test.stats.txt", RestrictionSiteFile = None, MinMAPQ = 30)
| 55.300699 | 386 | 0.661861 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,284 | 0.288821 |
39e1251d560049f22f859dac5fed8e5ec4b4ca80 | 95 | py | Python | solutions/carrots.py | dx-dt/Kattis | 62856999ae2ac43dab81f87beeac5bf8979528f5 | [
"Unlicense"
] | null | null | null | solutions/carrots.py | dx-dt/Kattis | 62856999ae2ac43dab81f87beeac5bf8979528f5 | [
"Unlicense"
] | null | null | null | solutions/carrots.py | dx-dt/Kattis | 62856999ae2ac43dab81f87beeac5bf8979528f5 | [
"Unlicense"
] | null | null | null | # https://open.kattis.com/problems/carrots
import sys
print sys.stdin.read().split()[1]
| 15.833333 | 43 | 0.684211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.452632 |
39e14dad20bbe0a515df5d2bbdc11d428ec81e56 | 1,799 | py | Python | yacht/data/transforms.py | IusztinPaul/yacht | c68ab7c66bde860bb91534c29e97772ba328adb5 | [
"Apache-2.0"
] | 5 | 2021-09-03T10:16:50.000Z | 2022-02-28T07:32:43.000Z | yacht/data/transforms.py | IusztinPaul/yacht | c68ab7c66bde860bb91534c29e97772ba328adb5 | [
"Apache-2.0"
] | null | null | null | yacht/data/transforms.py | IusztinPaul/yacht | c68ab7c66bde860bb91534c29e97772ba328adb5 | [
"Apache-2.0"
] | 1 | 2022-03-05T16:06:46.000Z | 2022-03-05T16:06:46.000Z | from abc import ABC, abstractmethod
from typing import Any, List, Optional
import pandas as pd
from yacht.config import Config
class Transform(ABC):
    """Abstract base for window transforms: a callable applied to one sample."""
    @abstractmethod
    def __call__(self, sample: Any) -> Any:
        pass
class Compose(Transform):
    """Chain transforms: each one's output becomes the next one's input."""

    def __init__(self, transforms: List[Transform]):
        self.transforms = transforms

    def __call__(self, sample: Any) -> Any:
        result = sample
        for step in self.transforms:
            result = step(result)
        return result
class RelativeClosePriceScaling:
    """Scale prices by the window's final close and volume by its final volume.

    Mutates the frame in place and returns it; the 1e-7 epsilon avoids
    division by zero.
    """

    PRICE_COLUMNS = ['Close', 'Open', 'High', 'Low']

    def __call__(self, data: pd.DataFrame) -> pd.DataFrame:
        price_scale = data['Close'].iloc[-1] + 1e-7
        volume_scale = data['Volume'].iloc[-1] + 1e-7
        data[self.PRICE_COLUMNS] = data[self.PRICE_COLUMNS] / price_scale
        data['Volume'] = data['Volume'] / volume_scale
        return data
class AverageValueDiff:
    """Scale prices by the window's mean close and volume by its mean volume.

    Mutates the frame in place and returns it; the 1e-7 epsilon avoids
    division by zero. Both means are taken before any column is rewritten.
    """

    PRICE_COLUMNS = ['Close', 'Open', 'High', 'Low']

    def __call__(self, data: pd.DataFrame) -> pd.DataFrame:
        mean_close = data['Close'].mean()
        mean_volume = data['Volume'].mean()
        data[self.PRICE_COLUMNS] = data[self.PRICE_COLUMNS] / (mean_close + 1e-7)
        data['Volume'] = data['Volume'] / (mean_volume + 1e-7)
        return data
#######################################################################################################################

# Registry of transform classes by config name; consumed by build_transforms.
transforms_registry = {
    'RelativeClosePriceScaling': RelativeClosePriceScaling,
    'AverageValueDiff': AverageValueDiff
}
def build_transforms(config: Config) -> Optional[Compose]:
    """Instantiate the configured window-transform pipeline, or None if unset."""
    names = config.input.window_transforms
    if len(names) == 0:
        return None
    return Compose(transforms=[transforms_registry[name]() for name in names])
| 27.676923 | 119 | 0.625347 | 1,112 | 0.618121 | 0 | 0 | 72 | 0.040022 | 0 | 0 | 274 | 0.152307 |
39e198255bc72ec3d147506eb38e23671a7f0cb4 | 4,088 | py | Python | bot.py | gilgamezh/registration_desk | 98303a6f96be78e0c1898a523db761f6d19866fc | [
"MIT"
] | null | null | null | bot.py | gilgamezh/registration_desk | 98303a6f96be78e0c1898a523db761f6d19866fc | [
"MIT"
] | null | null | null | bot.py | gilgamezh/registration_desk | 98303a6f96be78e0c1898a523db761f6d19866fc | [
"MIT"
] | null | null | null | import csv
import logging
import os
import discord
from discord.ext import commands, tasks
from discord.utils import get
# logging config
logging.basicConfig(
    filename=".log/reg.log",
    format="%(asctime)s - %(message)s",
    level=logging.INFO,
    datefmt="%d-%b-%y %H:%M:%S",
)
# set up channel ids and enviroment variables
# REG_CHANNEL_ID is mandatory; the others fall back to disabled defaults.
reg_channel_id = int(os.environ["REG_CHANNEL_ID"])
try:
    log_channel_id = int(os.environ["LOG_CHANNEL_ID"])
except (KeyError, ValueError):
    # FIX: was a bare `except:` that also swallowed unrelated errors;
    # an unset or non-numeric LOG_CHANNEL_ID disables the log channel.
    log_channel_id = None
try:
    only_respond_reg = int(os.environ["ONLY_RESPOND_REG"])
except (KeyError, ValueError):
    only_respond_reg = False
# TODO: seperate customization in conf file
event_name = "EuroPython"
instruction = f"Welcome to {event_name}! Please use `!register <Full Name>, <Ticket Number>` to register.\nE.g. `!register James Brown, 99999`\nNOTE: please ONLY register for YOURSELF."
def welcome_msg(mention, roles):
    """Build the greeting listing the Discord roles just granted.

    Handles one role ("the X role."), two ("X and Y"), and three or more
    ("X, Y and Z").
    """
    if len(roles) == 2:
        return f"Welcome {mention}, you now have the {roles[0]} and {roles[1]} roles."
    elif len(roles) == 1:
        return f"Welcome {mention}, you now have the {roles[0]} role."
    else:
        # FIX: was `roles[1:-1].join(", ")` — arguments reversed; lists have
        # no .join, so three or more roles raised AttributeError.
        text = ", ".join(roles[1:-1])
        return f"Welcome {mention}, you now have the {roles[0]}, {text} and {roles[-1]} roles."
# Bot instance: commands use the "!" prefix; the built-in help command is
# disabled so the custom !help below can reply with the registration text.
bot = commands.Bot(
    command_prefix="!",
    description=f"Registration Desk for {event_name}",
    help_command=None,
)
def roles_given(name, ticket_no):
    """Look up the attendee CSV and return the Discord roles for one ticket.

    CSV columns (0-based): 0 = full name, 2 = speaker flag ("yes"/"no"),
    3 = ticket type ("sprint" for sprint-only tickets), 4 = ticket number.
    Returns a list of role names, or None when no row matches both the
    ticket number and the exact full name.
    """
    with open(os.environ["DATA_PATH"], newline="") as csvfile:
        datareader = csv.reader(csvfile, delimiter=",")
        for row in datareader:
            # FIX: was a bare `except: continue` around the whole branch;
            # only header/short rows that fail the conversion are skipped now.
            try:
                matches = int(row[4]) == int(ticket_no)
            except (ValueError, IndexError):
                continue
            if matches and row[0] == name:
                if row[3] == "sprint":
                    return ["sprinter"]
                if row[2] == "yes":
                    return ["speaker", "attendee"]
                return ["attendee"]
@bot.event
async def on_ready():
    """Announce presence and post the registration instructions on startup."""
    await bot.change_presence(
        status=discord.Status.online,
        activity=discord.Activity(type=discord.ActivityType.listening, name="!help"),
    )
    await bot.get_channel(reg_channel_id).send(instruction)
    print("Bot is ready")
    logging.info("Bot logged in")
@bot.command()
async def register(ctx, *, info):
    """Handle `!register <Full Name>, <Ticket Number>`.

    Looks the ticket up in the attendee CSV; on success sets the member's
    nickname to the registered name and grants the matching roles.
    NOTE(review): info is split on "," only — the ticket part may carry a
    leading space (int() tolerates it, but the name must match exactly).
    """
    if not only_respond_reg or ctx.channel.id == reg_channel_id:
        info = info.split(",")
        roles = roles_given(info[0], info[1])
        if roles is None:
            logging.info(
                f"FAIL: Cannot find request form user {ctx.author} with name={info[0]}, ticket_no={info[1]}"
            )
            await ctx.send(
                f"{ctx.author.mention} Sorry cannot find the ticket #{info[1]} with name: {info[0]}.\nPlease check and make sure you put down your full name same as the one you used in registering your ticket then try again.\nIf you want a team member to help you, please reply to this message with '@registration'"
            )
        else:
            log_msg = f"SUCCESS: Register user {ctx.author} name={info[0]}, ticket_no={info[1]} with roles={roles}"
            logging.info(log_msg)
            if log_channel_id is not None:
                await bot.get_channel(log_channel_id).send(log_msg)
            # Acknowledge with reactions, rename, then grant roles.
            await ctx.message.add_reaction("🎟️")
            await ctx.message.add_reaction("🤖")
            await ctx.author.edit(nick=info[0])
            attendee_role = get(ctx.author.guild.roles, name="attendee")
            await ctx.author.add_roles(attendee_role)
            for role in roles:
                role_id = get(ctx.author.guild.roles, name=role)
                await ctx.author.add_roles(role_id)
            await ctx.author.send(welcome_msg(ctx.author.mention, roles))
@bot.command()
async def help(ctx):
    """Reply with the registration instructions in allowed channels."""
    # Guard clause: when restricted mode is on, ignore other channels.
    restricted = only_respond_reg and ctx.channel.id != reg_channel_id
    if restricted:
        return
    await ctx.send(instruction)
bot.run(os.environ["REG_BOT_SECRET"])
| 34.066667 | 315 | 0.613748 | 0 | 0 | 0 | 0 | 1,952 | 0.476563 | 1,911 | 0.466553 | 1,395 | 0.340576 |
39e1a049e695d46df354014950cf2221cf9cdc1c | 1,551 | py | Python | src/gameServer.py | LesGameDevToolsMagique/GameEditor | 06bed29845ded5cca35e57a3dd457dc72c2a2e8e | [
"MIT"
] | null | null | null | src/gameServer.py | LesGameDevToolsMagique/GameEditor | 06bed29845ded5cca35e57a3dd457dc72c2a2e8e | [
"MIT"
] | null | null | null | src/gameServer.py | LesGameDevToolsMagique/GameEditor | 06bed29845ded5cca35e57a3dd457dc72c2a2e8e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# skeleton from http://kmkeen.com/socketserver/2009-04-03-13-45-57-003.html
import socketserver, subprocess, sys
from threading import Thread
from pprint import pprint
import json
# NOTE(review): my_unix_command (and the subprocess import above) look like
# leftovers from the original socketserver skeleton -- neither is used below.
my_unix_command = ['bc']
HOST = 'localhost'
PORT = 12321
# The command table is read once at import time; storage.json must exist in
# the current working directory or importing this module raises.
with open('storage.json') as data_file:
    JSONdata = json.load(data_file)['commands']
class JSONSearchHandler:
    """Looks up a command key in the module-level JSON command table."""

    def search(self, rule):
        """Return the response mapped to *rule*, or the string '0' if absent."""
        hits = (entry['response'] for entry in JSONdata if entry['key'] == rule)
        return next(hits, '0')
class SingleTCPHandler(socketserver.BaseRequestHandler):
    "One instance per connection. Override handle(self) to customize action."

    def handle(self):
        """Request loop: read UTF-8 commands, answer from the JSON table.

        The loop ends when the peer closes the connection (empty recv).
        """
        while True:
            data = self.request.recv(1024)
            if not data:
                break
            text = data.decode('utf-8')
            print("Client wrote: ", text)
            response = JSONSearchHandler().search(text)
            self.request.send(response.encode())
        # Bug fix: the original passed two arguments to print(), which printed
        # the raw tuple instead of interpolating the client address.
        print("%s disconnected" % self.client_address[0])
class SimpleServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """Threaded TCP server: one daemon thread per connection, reusable port."""

    daemon_threads = True
    allow_reuse_address = True

    def __init__(self, server_address, RequestHandlerClass):
        # Delegate through the MRO; ThreadingMixIn defines no __init__, so
        # this resolves to TCPServer.__init__ exactly as before.
        super().__init__(server_address, RequestHandlerClass)
if __name__ == "__main__":
server = SimpleServer((HOST, PORT), SingleTCPHandler)
try:
server.serve_forever()
except KeyboardInterrupt:
sys.exit(0)
| 31.02 | 78 | 0.648614 | 816 | 0.526112 | 0 | 0 | 0 | 0 | 0 | 0 | 277 | 0.178594 |
39e3fc7a595793dc10754a5adbe8f528668e75d2 | 360 | py | Python | src/keycloakclient/aio/openid_connect.py | phoebebright/python-keycloak-client | 8590fbcdbda8edbe993a01bbff06d9d9be679c5e | [
"MIT"
] | null | null | null | src/keycloakclient/aio/openid_connect.py | phoebebright/python-keycloak-client | 8590fbcdbda8edbe993a01bbff06d9d9be679c5e | [
"MIT"
] | null | null | null | src/keycloakclient/aio/openid_connect.py | phoebebright/python-keycloak-client | 8590fbcdbda8edbe993a01bbff06d9d9be679c5e | [
"MIT"
] | null | null | null | from keycloakclient.aio.mixins import WellKnownMixin
from keycloakclient.openid_connect import (
KeycloakOpenidConnect as SyncKeycloakOpenidConnect,
PATH_WELL_KNOWN,
)
# Public API of this module.
__all__ = (
    'KeycloakOpenidConnect',
)
class KeycloakOpenidConnect(WellKnownMixin, SyncKeycloakOpenidConnect):
    """aio counterpart of the synchronous KeycloakOpenidConnect.

    Mixes in WellKnownMixin (from keycloakclient.aio.mixins) on top of the
    synchronous implementation; only the well-known path hook is provided here.
    """

    def get_path_well_known(self):
        # Hook used by WellKnownMixin to locate the OpenID discovery document.
        return PATH_WELL_KNOWN
| 24 | 71 | 0.8 | 137 | 0.380556 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.063889 |
39e4afc96a10bdb1d7dfe165b5b83d57bfbc7c47 | 9,987 | py | Python | multi_script_editor/jedi/evaluate/precedence.py | paulwinex/pw_multiScriptEditor | e447e99f87cb07e238baf693b7e124e50efdbc51 | [
"MIT"
] | 142 | 2015-03-21T12:56:21.000Z | 2022-02-08T04:42:46.000Z | jedi/evaluate/precedence.py | blueyed/jedi | a01e4c6b375795bb8c8ee0d4e86d4c535456f5b4 | [
"MIT"
] | 18 | 2015-05-06T21:14:14.000Z | 2015-08-29T18:24:43.000Z | jedi/evaluate/precedence.py | blueyed/jedi | a01e4c6b375795bb8c8ee0d4e86d4c535456f5b4 | [
"MIT"
] | 51 | 2016-05-07T14:27:42.000Z | 2022-02-10T05:55:11.000Z | """
Handles operator precedence.
"""
from jedi._compatibility import unicode
from jedi.parser import representation as pr
from jedi import debug
from jedi.common import PushBackIterator
from jedi.evaluate.compiled import CompiledObject, create, builtin
from jedi.evaluate import analysis
class PythonGrammar(object):
    """
    Some kind of mirror of http://docs.python.org/3/reference/grammar.html.

    Each constant below is a tuple of operator strings (note the trailing
    commas on the singletons).  ``ORDER`` lists the groups from highest to
    lowest binding priority; its index is the priority value used by
    ``_check_operator``.
    """

    class MultiPart(str):
        """A two-word operator ('not in', 'is not').

        Compares equal to its first word (the str value); ``second`` holds
        the follow-up word that the parser must look ahead for.
        """

        def __new__(cls, first, second):
            self = str.__new__(cls, first)
            self.second = second
            return self

        def __str__(self):
            return str.__str__(self) + ' ' + self.second

    FACTOR = '+', '-', '~'  # unary prefix operators
    POWER = '**',
    TERM = '*', '/', '%', '//'
    ARITH_EXPR = '+', '-'
    SHIFT_EXPR = '<<', '>>'
    AND_EXPR = '&',
    XOR_EXPR = '^',
    EXPR = '|',
    COMPARISON = ('<', '>', '==', '>=', '<=', '!=', 'in',
                  MultiPart('not', 'in'), MultiPart('is', 'not'), 'is')
    NOT_TEST = 'not',
    AND_TEST = 'and',
    OR_TEST = 'or',
    #TEST = or_test ['if' or_test 'else' test] | lambdef
    TERNARY = 'if',
    SLICE = ':',
    # Highest to lowest priority; the enumerate() index in _check_operator
    # is compared against the current parse priority.
    ORDER = (POWER, TERM, ARITH_EXPR, SHIFT_EXPR, AND_EXPR, XOR_EXPR,
             EXPR, COMPARISON, AND_TEST, OR_TEST, TERNARY, SLICE)

    FACTOR_PRIORITY = 0  # highest priority
    LOWEST_PRIORITY = len(ORDER)
    NOT_TEST_PRIORITY = LOWEST_PRIORITY - 4  # priority only lower for `and`/`or`
    SLICE_PRIORITY = LOWEST_PRIORITY - 1  # priority only lower for `and`/`or`
class Precedence(object):
    """A binary node of the operator-precedence tree: left / operator / right."""

    def __init__(self, left, operator, right):
        self.left = left
        self.operator = operator
        self.right = right

    def parse_tree(self, strip_literals=False):
        """Flatten this node into nested ``(left, operator-string, right)`` tuples."""
        def unwrap(node):
            try:
                node = node.parse_tree(strip_literals)
            except AttributeError:
                pass  # leaf element without a sub-tree
            if strip_literals and isinstance(node, pr.Literal):
                node = node.value
            return node

        left = unwrap(self.left)
        op_string = self.operator.string
        right = unwrap(self.right)
        return left, op_string, right

    def __repr__(self):
        return '(%s %s %s)' % (self.left, self.operator, self.right)
class TernaryPrecedence(Precedence):
    """Precedence node for ``a if check else b``; ``check`` holds the condition tree."""

    def __init__(self, left, operator, right, check):
        super(TernaryPrecedence, self).__init__(left, operator, right)
        self.check = check
def create_precedence(expression_list):
    """Turn a flat expression list into a Precedence tree (or a single leaf)."""
    iterator = PushBackIterator(iter(expression_list))
    return _check_operator(iterator)
def _syntax_error(element, msg='SyntaxError in precedence'):
    """Log a non-fatal syntax problem found while building the tree."""
    debug.warning('%s: %s, %s' % (msg, element, element.start_pos))
def _get_number(iterator, priority=PythonGrammar.LOWEST_PRIORITY):
    """Consume one operand (with optional unary prefixes) from *iterator*.

    Returns a leaf element, a unary ``Precedence`` node (``left`` is None),
    or ``None`` when a slice operand was omitted (e.g. ``a[:2]``).
    """
    el = next(iterator)
    if isinstance(el, pr.Operator):
        if el in PythonGrammar.FACTOR:
            # Unary +/-/~ binds tightest; recurse for its operand.
            right = _get_number(iterator, PythonGrammar.FACTOR_PRIORITY)
        elif el in PythonGrammar.NOT_TEST \
                and priority >= PythonGrammar.NOT_TEST_PRIORITY:
            right = _get_number(iterator, PythonGrammar.NOT_TEST_PRIORITY)
        elif el in PythonGrammar.SLICE \
                and priority >= PythonGrammar.SLICE_PRIORITY:
            # A ':' here means the slice operand before it was omitted;
            # push the operator back for the caller to handle.
            iterator.push_back(el)
            return None
        else:
            # Unexpected operator: report it and keep scanning for an operand.
            _syntax_error(el)
            return _get_number(iterator, priority)
        return Precedence(None, el, right)  # unary node: no left operand
    elif isinstance(el, pr.tokenize.Token):
        # NOTE(review): plain tokens are skipped here -- presumably
        # whitespace/comment artifacts; confirm against the tokenizer.
        return _get_number(iterator, priority)
    else:
        return el
class MergedOperator(pr.Operator):
    """
    A way to merge the two operators `is not` and `not in`, which are two
    words instead of one.

    Maybe there's a better way (directly in the tokenizer/parser? but for now
    this is fine.)
    """

    def __init__(self, first, second):
        # Combined display string, e.g. 'is' + ' ' + 'not' -> 'is not'.
        string = first.string + ' ' + second.string
        super(MergedOperator, self).__init__(first._sub_module, string,
                                             first.parent, first.start_pos)
        self.first = first
        self.second = second
def _check_operator(iterator, priority=PythonGrammar.LOWEST_PRIORITY):
    """Parse a (sub-)expression from *iterator* into a Precedence tree.

    Classic precedence climbing: operands come from ``_get_number``; an
    operator whose priority index is >= *priority* terminates the current
    sub-expression (it is pushed back for the caller).
    """
    try:
        left = _get_number(iterator, priority)
    except StopIteration:
        return None
    for el in iterator:
        if not isinstance(el, pr.Operator):
            _syntax_error(el)
            continue
        operator = None
        # Find the priority group this operator belongs to.
        for check_prio, check in enumerate(PythonGrammar.ORDER):
            if check_prio >= priority:
                # respect priorities.
                iterator.push_back(el)
                return left
            try:
                match_index = check.index(el)
            except ValueError:
                continue
            match = check[match_index]
            if isinstance(match, PythonGrammar.MultiPart):
                # Two-word operators ('not in', 'is not') need a lookahead.
                next_tok = next(iterator)
                if next_tok == match.second:
                    el = MergedOperator(el, next_tok)
                else:
                    iterator.push_back(next_tok)
                    if el == 'not':
                        # Bare 'not' does not match 'not in'; keep searching.
                        continue
            operator = el
            break
        if operator is None:
            _syntax_error(el)
            continue
        if operator in PythonGrammar.POWER:
            check_prio += 1  # to the power of is right-associative
        elif operator in PythonGrammar.TERNARY:
            # Collect everything up to 'else' as the ternary condition.
            try:
                middle = []
                for each in iterator:
                    if each == 'else':
                        break
                    middle.append(each)
                middle = create_precedence(middle)
            except StopIteration:
                _syntax_error(operator, 'SyntaxError ternary incomplete')
        right = _check_operator(iterator, check_prio)
        if right is None and not operator in PythonGrammar.SLICE:
            _syntax_error(iterator.current, 'SyntaxError operand missing')
        else:
            if operator in PythonGrammar.TERNARY:
                left = TernaryPrecedence(left, operator, right, middle)
            else:
                left = Precedence(left, operator, right)
    return left
def _literals_to_types(evaluator, result):
    """Replace literal objects in *result* with value-free instances of their type."""
    # Changes literals ('a', 1, 1.0, etc) to its type instances (str(),
    # int(), float(), etc).
    for i, r in enumerate(result):
        if is_literal(r):
            # Literals are only valid as long as the operations are
            # correct. Otherwise add a value-free instance.
            cls = builtin.get_by_name(r.name)
            result[i] = evaluator.execute(cls)[0]
    # NOTE: set() deduplicates but leaves the result order unspecified.
    return list(set(result))
def calculate(evaluator, left_result, operator, right_result):
    """Evaluate ``left operator right`` over sets of possible values.

    Either side may be None/empty (unary operators, broken slices); the
    return value is always a list of evaluated objects.
    """
    result = []
    if left_result is None and right_result:
        # cases like `-1` or `1 + ~1`
        for right in right_result:
            result.append(_factor_calculate(evaluator, operator, right))
        return result
    else:
        if not left_result or not right_result:
            # illegal slices e.g. cause left/right_result to be None
            result = (left_result or []) + (right_result or [])
            result = _literals_to_types(evaluator, result)
        else:
            # I don't think there's a reasonable chance that a string
            # operation is still correct, once we pass something like six
            # objects.
            if len(left_result) * len(right_result) > 6:
                result = _literals_to_types(evaluator, left_result + right_result)
            else:
                # Cross product of all possible left/right values.
                for left in left_result:
                    for right in right_result:
                        result += _element_calculate(evaluator, left, operator, right)
        return result
def _factor_calculate(evaluator, operator, right):
    """Apply a unary operator; only numeric negation is actually computed."""
    if _is_number(right):
        if operator == '-':
            return create(evaluator, -right.obj)
    # Everything else (including unary + and ~) passes through unchanged.
    return right
def _is_number(obj):
    """True for compiled int/float literal objects."""
    return isinstance(obj, CompiledObject) \
        and isinstance(obj.obj, (int, float))


def _is_string(obj):
    """True for compiled str/unicode literal objects."""
    return isinstance(obj, CompiledObject) \
        and isinstance(obj.obj, (str, unicode))


def is_literal(obj):
    """True for number or string literals."""
    return _is_number(obj) or _is_string(obj)


def _is_tuple(obj):
    # Local import -- presumably to avoid a circular import with
    # jedi.evaluate.iterable.
    from jedi.evaluate import iterable
    return isinstance(obj, iterable.Array) and obj.type == pr.Array.TUPLE


def _is_list(obj):
    # Local import -- presumably to avoid a circular import with
    # jedi.evaluate.iterable.
    from jedi.evaluate import iterable
    return isinstance(obj, iterable.Array) and obj.type == pr.Array.LIST
def _element_calculate(evaluator, left, operator, right):
    """Evaluate one ``left operator right`` pair; returns a list of results."""
    from jedi.evaluate import iterable, representation as er
    l_is_num = _is_number(left)
    r_is_num = _is_number(right)
    if operator == '*':
        # for iterables, ignore * operations
        if isinstance(left, iterable.Array) or _is_string(left):
            return [left]
        elif isinstance(right, iterable.Array) or _is_string(right):
            return [right]
    elif operator == '+':
        if l_is_num and r_is_num or _is_string(left) and _is_string(right):
            return [create(evaluator, left.obj + right.obj)]
        elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
            return [iterable.MergedArray(evaluator, (left, right))]
    elif operator == '-':
        if l_is_num and r_is_num:
            return [create(evaluator, left.obj - right.obj)]
    elif operator == '%':
        # With strings and numbers the left type typically remains. Except for
        # `int() % float()`.
        return [left]

    def check(obj):
        """Checks if a Jedi object is either a float or an int."""
        return isinstance(obj, er.Instance) and obj.name in ('int', 'float')

    # Static analysis, one is a number, the other one is not.
    if operator in ('+', '-') and l_is_num != r_is_num \
            and not (check(left) or check(right)):
        # NOTE(review): the message hard-codes '+' even when operator is '-'.
        message = "TypeError: unsupported operand type(s) for +: %s and %s"
        analysis.add(evaluator, 'type-error-operation', operator,
                     message % (left, right))

    # Fall-through: keep both possibilities when the operation is unknown.
    return [left, right]
| 33.513423 | 88 | 0.601382 | 2,624 | 0.262742 | 0 | 0 | 0 | 0 | 0 | 0 | 1,538 | 0.154 |
39e817d468144ef60c9cbbd969d60eec454c7689 | 1,967 | py | Python | search.py | manimaul/mxmcc | 923458b759c8daa74dd969e968bc72b17fdffe02 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2016-08-24T21:30:45.000Z | 2016-08-24T21:30:45.000Z | search.py | manimaul/mxmcc | 923458b759c8daa74dd969e968bc72b17fdffe02 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 5 | 2021-03-18T23:25:15.000Z | 2022-03-11T23:44:20.000Z | search.py | manimaul/mxmcc | 923458b759c8daa74dd969e968bc72b17fdffe02 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
__author__ = 'Will Kamp'
__copyright__ = 'Copyright 2013, Matrix Mariner Inc.'
__license__ = 'BSD'
__email__ = 'will@mxmariner.com'
__status__ = 'Development' # 'Prototype', 'Development', or 'Production'
import os
class MapPathSearch:
    def __init__(self, directory, map_extensions=('kap', 'tif'), include_only=None):
        """Search <directory> and all subdirectories for map files.

        A file matches when its extension (case-insensitive) is in
        <map_extensions>; optionally restrict results to the file names in
        <include_only> (e.g. {'file1.kap', 'file2.tif'}).

        file_paths holds the full paths of every match.

        Note: the default was changed from a mutable list to a tuple to avoid
        the shared-mutable-default pitfall; passing a list still works.
        """
        self.file_paths = []
        extensions = {ext.upper() for ext in map_extensions}
        if include_only is not None:
            include_only = set(include_only)
        if os.path.isdir(directory):
            for root, dirs, files in os.walk(directory):
                for f in files:
                    i = f.rfind(".")
                    if i <= 0:
                        # No extension, or a dotfile like ".kap".
                        continue
                    if f[i + 1:].upper() not in extensions:
                        continue
                    if include_only is not None and f not in include_only:
                        continue
                    self.file_paths.append(os.path.join(root, f))
        else:
            print(directory, 'is not a directory.')
if __name__ == '__main__':
print("foo")
| 34.508772 | 120 | 0.56482 | 1,264 | 0.642603 | 0 | 0 | 0 | 0 | 0 | 0 | 924 | 0.469751 |
39e9b24961999fcc48a120276aefb45a23005614 | 1,585 | py | Python | ggui/style.py | arthur-hav/GGUI | b64495546541bafa168daa150a4de86569fe1242 | [
"MIT"
] | 1 | 2021-02-03T13:33:14.000Z | 2021-02-03T13:33:14.000Z | ggui/style.py | arthur-hav/GGUI | b64495546541bafa168daa150a4de86569fe1242 | [
"MIT"
] | null | null | null | ggui/style.py | arthur-hav/GGUI | b64495546541bafa168daa150a4de86569fe1242 | [
"MIT"
] | null | null | null |
class Style:
    """Visual style for a widget: colors per interaction state, border and
    fade timings.  Styles can inherit attributes from *parent_styles*."""

    def __init__(self,
                 parent_styles=None,
                 color=(0, 0, 0, 0),
                 hover_color=None,
                 click_color=None,
                 disabled_color=None,
                 border_color=None,
                 border_line_w=0,
                 fade_in_time=0.0,
                 fade_out_time=0.0,
                 transparent=None):
        # Copy attributes from parents first (reversed, so the first parent
        # listed wins); explicit keyword values below override everything.
        for parent in reversed(parent_styles or ()):
            for key, value in parent.__dict__.items():
                setattr(self, key, value)
        self.default_color = self.premultiply(color)
        self.hover_color = self.premultiply(hover_color)
        self.click_color = self.premultiply(click_color)
        self.disabled_color = self.premultiply(disabled_color)
        # When not given explicitly, infer transparency from the alpha channel.
        if transparent is None:
            transparent = self.default_color[3] < 1.0
        self.transparent = transparent
        self.fade_in_time = fade_in_time
        self.fade_out_time = fade_out_time
        self.border_color = border_color
        self.border_line_w = border_line_w

    @property
    def background(self):
        # NOTE(review): returns the hover color when set, else the border
        # color -- kept exactly as the original behaved.
        return self.hover_color or self.border_color

    def premultiply(self, color):
        """Scale the RGB channels by alpha; falsy inputs (None) pass through."""
        if not color:
            return color
        r, g, b, a = color
        return r * a, g * a, b * a, a

    def __str__(self):
        """Hex #RRGGBBAA form of the (premultiplied) default color."""
        r, g, b, a = (int(255 * channel) for channel in self.default_color)
        return '#{:02X}{:02X}{:02X}{:02X}'.format(r, g, b, a)
| 38.658537 | 98 | 0.581073 | 1,583 | 0.998738 | 0 | 0 | 88 | 0.055521 | 0 | 0 | 159 | 0.100315 |
39ebe1a3f9b6deca1adc431db80e1a994f12644b | 5,041 | py | Python | fsh_validator/cli.py | glichtner/fsh-validator | c3b16546221c8d43c24bcee426ec7882938305bd | [
"BSD-3-Clause"
] | null | null | null | fsh_validator/cli.py | glichtner/fsh-validator | c3b16546221c8d43c24bcee426ec7882938305bd | [
"BSD-3-Clause"
] | 1 | 2022-03-01T16:06:09.000Z | 2022-03-01T16:06:09.000Z | fsh_validator/cli.py | glichtner/fsh-validator | c3b16546221c8d43c24bcee426ec7882938305bd | [
"BSD-3-Clause"
] | null | null | null | """Command line interface for fsh-validator."""
import os
import sys
import argparse
from pathlib import Path
import yaml
from .fsh_validator import (
print_box,
run_sushi,
validate_all_fsh,
validate_fsh,
download_validator,
bcolors,
VALIDATOR_BASENAME,
store_log,
assert_sushi_installed,
get_fsh_base_path,
get_fhir_version_from_sushi_config,
)
from .fshpath import FshPath
def get_config(base_path: Path):
    """
    Load the .fsh-validator.yml configuration from *base_path*.

    :param base_path: Directory expected to contain the .fsh-validator.yml file.
    :return: Parsed configuration mapping; empty dict when the file is absent.
    """
    config_file = base_path / ".fsh-validator.yml"
    if not config_file.exists():
        return {}
    # Bug fix: use a context manager so the file handle is closed
    # deterministically (the original leaked the handle from open()).
    with open(config_file) as f:
        return yaml.safe_load(f)
def main():
    """
    fsh-validator command line interface main.

    Parses the CLI arguments, optionally runs SUSHI, downloads the FHIR Java
    validator if it is missing, validates the selected FSH files and exits
    with status 1 when any profile failed validation, 0 otherwise.

    :return: None (always exits via sys.exit)
    """
    parser = argparse.ArgumentParser(
        description="Validate a fsh file",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    arg_fname = parser.add_argument(
        "filename", help="fsh file names (basename only - no path)", nargs="*"
    )
    parser.add_argument(
        "--all",
        dest="all",
        action="store_true",
        help="if set, all detected profiles will be validated",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--subdir",
        dest="subdir",
        type=str,
        help="Specifies the subdirectory (relative to input/fsh/) in which to search for profiles if --all is set",
        required=False,
        default="",
    )
    parser.add_argument(
        "--validator-path",
        dest="path_validator",
        type=str,
        help="path to validator",
        required=False,
        default=None,
    )
    parser.add_argument(
        "--verbose",
        dest="verbose",
        action="store_true",
        help="Be verbose",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--no-sushi",
        dest="no_sushi",
        action="store_true",
        help="Do not run sushi before validating",
        required=False,
        default=False,
    )
    parser.add_argument(
        "--log-path",
        dest="log_path",
        type=str,
        help="log file path - if supplied, log files will be written",
        required=False,
        default=None,
    )
    args = parser.parse_args()

    # Resolve the set of input files from the positional args / --all flag.
    if not args.all and len(args.filename) == 0:
        raise argparse.ArgumentError(
            arg_fname, "filename must be set if --all is not specified"
        )
    elif args.all and len(args.filename) == 0:
        # Use current working dir as input path
        filenames = [FshPath(os.getcwd())]
    else:
        filenames = [FshPath(filename) for filename in args.filename]

    # All inputs must belong to the same FSH project.
    base_paths = {filename.fsh_base_path() for filename in filenames}
    if len(base_paths) > 1:
        raise ValueError(
            "Found multiple base paths for fsh project, expecting exactly one"
        )
    base_path = base_paths.pop()

    # Locate (and, if necessary, download) the Java validator.
    validator_path = (
        args.path_validator if args.path_validator is not None else base_path
    )
    fname_validator = Path(validator_path) / VALIDATOR_BASENAME
    if not fname_validator.exists():
        print_box("Downloading java validator")
        download_validator(fname_validator.resolve())

    if not args.no_sushi:
        print_box("Running SUSHI")
        run_sushi(base_path)

    fhir_version = get_fhir_version_from_sushi_config(base_path)

    # Optional exclusions from .fsh-validator.yml.
    config = get_config(base_path)
    exclude_code_systems = set(config.get("exclude_code_systems", []))
    exclude_resource_types = set(config.get("exclude_resource_type", []))

    if args.all:
        print_box("Validating all FSH files")
        results = validate_all_fsh(
            base_path,
            args.subdir,
            str(fname_validator),
            exclude_code_systems=exclude_code_systems,
            exclude_resource_types=exclude_resource_types,
            fhir_version=fhir_version,
            verbose=args.verbose,
        )
    else:
        print_box("Validating FSH files")
        results = validate_fsh(
            filenames,
            str(fname_validator),
            fhir_version=fhir_version,
            exclude_code_systems=exclude_code_systems,
            exclude_resource_types=exclude_resource_types,
            verbose=args.verbose,
        )

    if args.log_path is not None:
        log_path = Path(args.log_path)
        # Robustness: create missing parent directories too, and tolerate the
        # directory already existing (avoids a check-then-create race).
        log_path.mkdir(parents=True, exist_ok=True)
        store_log(results, log_path)

    if any(r.failed() for r in results):
        print_box("Errors during profile validation", col=bcolors.FAIL)
        sys.exit(1)
    else:
        print_box("All profiles successfully validated", col=bcolors.OKGREEN)
        sys.exit(0)
if __name__ == "__main__":
main()
| 26.671958 | 115 | 0.623686 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,225 | 0.243007 |
39ec0f238d1f205a702d9a13cc4aec9895df5afa | 475 | py | Python | core/util/read_input.py | SimoneABNto/Progetto_ASD_py | b415bcc3581121c5c39e044ac3fbb92420964e68 | [
"MIT"
] | null | null | null | core/util/read_input.py | SimoneABNto/Progetto_ASD_py | b415bcc3581121c5c39e044ac3fbb92420964e68 | [
"MIT"
] | null | null | null | core/util/read_input.py | SimoneABNto/Progetto_ASD_py | b415bcc3581121c5c39e044ac3fbb92420964e68 | [
"MIT"
] | null | null | null | def read_input():
    try:
        data = input().replace(" ", "")  # take the input and remove the extra spaces
        input_array = data.split(",")  # split the sub substring
        # NOTE(review): this drops the final character of the LAST field --
        # presumably the expected input line carries a trailing delimiter;
        # confirm the input format before changing it.
        input_array[-1] = input_array[-1][:-1]
        array = []
        for el in input_array:
            array.append(float(el))  # convert the element of the array to float
        return array
    except Exception as e:
        # Any parse failure (empty field, non-numeric text, EOF) lands here.
        print(e)
        print('ERROR: bad input')
        return []
| 26.388889 | 85 | 0.555789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.286316 |